X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/af968d49039f5cd8f0ef5839558a57867d7ef111..25279d009a3d6aa0662a49fbde3d937775e26322:/nominatim/tokenizer/factory.py

diff --git a/nominatim/tokenizer/factory.py b/nominatim/tokenizer/factory.py
index 1079c935..d6bc5163 100644
--- a/nominatim/tokenizer/factory.py
+++ b/nominatim/tokenizer/factory.py
@@ -1,45 +1,61 @@
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2022 by the Nominatim developer community.
+# For a full list of authors see the git log.
 """
 Functions for creating a tokenizer or initialising the right one for an
 existing database.
 
 A tokenizer is something that is bound to the lifetime of a database. It
-can be choosen and configured before the intial import but then needs to
+can be chosen and configured before the initial import but then needs to
 be used consistently when querying and updating the database.
 
 This module provides the functions to create and configure a new tokenizer
-as well as instanciating the appropriate tokenizer for updating an existing
+as well as instantiating the appropriate tokenizer for updating an existing
 database.
 
 A tokenizer usually also includes PHP code for querying. The appropriate PHP
 normalizer module is installed, when the tokenizer is created.
 """
+from typing import Optional
 import logging
 import importlib
+from pathlib import Path
 
-from ..errors import UsageError
-from ..db import properties
-from ..db.connection import connect
+from nominatim.errors import UsageError
+from nominatim.db import properties
+from nominatim.db.connection import connect
+from nominatim.config import Configuration
+from nominatim.tokenizer.base import AbstractTokenizer, TokenizerModule
 
 LOG = logging.getLogger()
 
-def _import_tokenizer(name):
+def _import_tokenizer(name: str) -> TokenizerModule:
     """ Load the tokenizer.py module from project directory.
     """
-    try:
-        return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
-    except ModuleNotFoundError as exp:
+    src_file = Path(__file__).parent / (name + '_tokenizer.py')
+    if not src_file.is_file():
         LOG.fatal("No tokenizer named '%s' available. "
                   "Check the setting of NOMINATIM_TOKENIZER.", name)
-        raise UsageError('Tokenizer not found') from exp
+        raise UsageError('Tokenizer not found')
+
+    return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
 
-def create_tokenizer(config):
+
+def create_tokenizer(config: Configuration, init_db: bool = True,
+                     module_name: Optional[str] = None) -> AbstractTokenizer:
     """ Create a new tokenizer as defined by the given configuration.
 
         The tokenizer data and code is copied into the 'tokenizer' directory
         of the project directory and the tokenizer loaded from its new location.
     """
+    if module_name is None:
+        module_name = config.TOKENIZER
+
     # Create the directory for the tokenizer data
+    assert config.project_dir is not None
     basedir = config.project_dir / 'tokenizer'
     if not basedir.exists():
         basedir.mkdir()
@@ -47,27 +63,29 @@ def create_tokenizer(config):
         LOG.fatal("Tokenizer directory '%s' cannot be created.", basedir)
         raise UsageError("Tokenizer setup failed.")
 
-    tokenizer_module = _import_tokenizer(config.TOKENIZER)
+    # Import and initialize the tokenizer.
+    tokenizer_module = _import_tokenizer(module_name)
 
     tokenizer = tokenizer_module.create(config.get_libpq_dsn(), basedir)
-    tokenizer.init_new_db(config)
+    tokenizer.init_new_db(config, init_db=init_db)
 
     with connect(config.get_libpq_dsn()) as conn:
-        properties.set_property(conn, 'tokenizer', config.TOKENIZER)
+        properties.set_property(conn, 'tokenizer', module_name)
 
     return tokenizer
 
 
-def get_tokenizer_for_db(config):
+def get_tokenizer_for_db(config: Configuration) -> AbstractTokenizer:
     """ Instantiate a tokenizer for an existing database.
 
         The function looks up the appropriate tokenizer in the database
        and initialises it.
     """
+    assert config.project_dir is not None
     basedir = config.project_dir / 'tokenizer'
     if not basedir.is_dir():
-        LOG.fatal("Cannot find tokenizer data in '%s'.", basedir)
-        raise UsageError('Cannot initialize tokenizer.')
+        # Directory will be repopulated by tokenizer below.
+        basedir.mkdir()
 
     with connect(config.get_libpq_dsn()) as conn:
         name = properties.get_property(conn, 'tokenizer')
@@ -79,6 +97,6 @@ def get_tokenizer_for_db(config):
     tokenizer_module = _import_tokenizer(name)
 
     tokenizer = tokenizer_module.create(config.get_libpq_dsn(), basedir)
-    tokenizer.init_from_project()
+    tokenizer.init_from_project(config)
 
     return tokenizer
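
Usage sketch (not part of the patch above): how the two reworked entry points fit
together after this change. The calls to create_tokenizer() and
get_tokenizer_for_db() and their parameters are taken from the diff; how the
Configuration object is constructed is NOT shown in this diff and differs between
Nominatim versions, so the constructor call below is only an illustrative
placeholder.

    from pathlib import Path

    from nominatim.config import Configuration
    from nominatim.tokenizer import factory

    # Assumption: Configuration construction is not part of this diff; treat
    # this call as a placeholder for however the caller obtains its config.
    config = Configuration(Path('/srv/nominatim-project'))

    # At import time: pick the tokenizer module (NOMINATIM_TOKENIZER by
    # default, or the new explicit module_name override), set up its data
    # under <project_dir>/tokenizer and record its name in the database.
    tokenizer = factory.create_tokenizer(config, init_db=True)

    # On every later run: read the recorded name back from the database
    # properties and re-instantiate the same tokenizer, now passing the
    # config to init_from_project() as introduced by this patch.
    tokenizer = factory.get_tokenizer_for_db(config)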