"""
import importlib
import logging
from pathlib import Path

from ..errors import UsageError
from ..db import properties
def _import_tokenizer(name):
    """ Load the tokenizer.py module from project directory.

        The module must live in this package and follow the naming
        scheme '<name>_tokenizer.py'.

        Parameters:
            name: base name of the tokenizer module to load.

        Returns:
            The imported tokenizer module.

        Raises:
            UsageError: when no tokenizer module with the given name exists.
    """
    # LBYL check against the actual source file so we can emit a
    # targeted error message mentioning NOMINATIM_TOKENIZER instead of
    # a raw ModuleNotFoundError.
    src_file = Path(__file__).parent / (name + '_tokenizer.py')
    if not src_file.is_file():
        LOG.fatal("No tokenizer named '%s' available. "
                  "Check the setting of NOMINATIM_TOKENIZER.", name)
        raise UsageError('Tokenizer not found')

    return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
def create_tokenizer(config, init_db=True, module_name=None):
    """ Create a new tokenizer as defined by the given configuration.

        The tokenizer data and code is copied into the 'tokenizer' directory
        of the project directory and the tokenizer loaded from its new location.

        Parameters:
            config: project configuration; supplies the TOKENIZER setting,
                    the project directory and the database DSN.
            init_db: when True, let the tokenizer also initialize its
                     database tables.
            module_name: name of the tokenizer module to use; defaults to
                         the configured TOKENIZER setting.

        Returns:
            The freshly created tokenizer object.

        Raises:
            UsageError: when the tokenizer directory cannot be set up or
                        no tokenizer with the given name exists.
    """
    if module_name is None:
        module_name = config.TOKENIZER

    # Create the directory for the tokenizer data. Only fail if the
    # path exists but is unusable (e.g. it is a plain file).
    basedir = config.project_dir / 'tokenizer'
    if not basedir.exists():
        basedir.mkdir()
    elif not basedir.is_dir():
        LOG.fatal("Tokenizer directory '%s' cannot be created.", basedir)
        raise UsageError("Tokenizer setup failed.")

    # Import and initialize the tokenizer.
    tokenizer_module = _import_tokenizer(module_name)

    tokenizer = tokenizer_module.create(config.get_libpq_dsn(), basedir)
    tokenizer.init_new_db(config, init_db=init_db)

    # Persist the tokenizer choice so later runs load the same one.
    with connect(config.get_libpq_dsn()) as conn:
        properties.set_property(conn, 'tokenizer', module_name)

    return tokenizer