X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/af968d49039f5cd8f0ef5839558a57867d7ef111..18554dfed767d8ef669e15014a5b0382ad8bab43:/nominatim/tokenizer/factory.py?ds=sidebyside

diff --git a/nominatim/tokenizer/factory.py b/nominatim/tokenizer/factory.py
index 1079c935..069672d4 100644
--- a/nominatim/tokenizer/factory.py
+++ b/nominatim/tokenizer/factory.py
@@ -15,6 +15,7 @@ normalizer module is installed, when the tokenizer is created.
 """
 import logging
 import importlib
+from pathlib import Path
 
 from ..errors import UsageError
 from ..db import properties
@@ -25,20 +26,24 @@ LOG = logging.getLogger()
 def _import_tokenizer(name):
     """ Load the tokenizer.py module from project directory.
     """
-    try:
-        return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
-    except ModuleNotFoundError as exp:
+    src_file = Path(__file__).parent / (name + '_tokenizer.py')
+    if not src_file.is_file():
         LOG.fatal("No tokenizer named '%s' available. "
                   "Check the setting of NOMINATIM_TOKENIZER.", name)
-        raise UsageError('Tokenizer not found') from exp
+        raise UsageError('Tokenizer not found')
+
+    return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
 
 
-def create_tokenizer(config):
+def create_tokenizer(config, init_db=True, module_name=None):
     """ Create a new tokenizer as defined by the given configuration.
 
         The tokenizer data and code is copied into the 'tokenizer' directory
        of the project directory and the tokenizer loaded from its new location.
     """
+    if module_name is None:
+        module_name = config.TOKENIZER
+
     # Create the directory for the tokenizer data
     basedir = config.project_dir / 'tokenizer'
     if not basedir.exists():
@@ -47,13 +52,14 @@ def create_tokenizer(config):
             LOG.fatal("Tokenizer directory '%s' cannot be created.", basedir)
             raise UsageError("Tokenizer setup failed.")
 
-    tokenizer_module = _import_tokenizer(config.TOKENIZER)
+    # Import and initialize the tokenizer.
+    tokenizer_module = _import_tokenizer(module_name)
 
     tokenizer = tokenizer_module.create(config.get_libpq_dsn(), basedir)
-    tokenizer.init_new_db(config)
+    tokenizer.init_new_db(config, init_db=init_db)
 
     with connect(config.get_libpq_dsn()) as conn:
-        properties.set_property(conn, 'tokenizer', config.TOKENIZER)
+        properties.set_property(conn, 'tokenizer', module_name)
 
     return tokenizer
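
Usage sketch for the changed entry point (not part of the diff). It assumes an already-loaded Nominatim Configuration object named config with a valid project directory and database DSN; the module name 'legacy' is only an illustrative value, and init_db is simply forwarded to the tokenizer's init_new_db() as shown in the hunk above.

    from nominatim.tokenizer import factory

    # Default behaviour: the tokenizer module is taken from the
    # NOMINATIM_TOKENIZER setting and the database is initialised.
    tokenizer = factory.create_tokenizer(config)

    # With the new keyword arguments: choose the tokenizer module explicitly
    # and pass init_db=False through to init_new_db().
    tokenizer = factory.create_tokenizer(config, init_db=False,
                                         module_name='legacy')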