def _import_tokenizer(name):
    """ Load the tokenizer.py module from project directory.
    """
    # The tokenizer implementation must live next to this factory module
    # as '<name>_tokenizer.py'. Checking for the file explicitly (instead
    # of catching ModuleNotFoundError) avoids masking genuine import
    # errors raised from inside a tokenizer module.
    module_file = Path(__file__).parent / f'{name}_tokenizer.py'

    if not module_file.is_file():
        LOG.fatal("No tokenizer named '%s' available. "
                  "Check the setting of NOMINATIM_TOKENIZER.", name)
        raise UsageError('Tokenizer not found')

    return importlib.import_module(f'nominatim.tokenizer.{name}_tokenizer')