X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/f3c4196c758a7720aefc26a7043bf1c6553028e2..62e2da1f937882bf55363b0f10d8625200850642:/nominatim/tokenizer/base.py?ds=sidebyside

diff --git a/nominatim/tokenizer/base.py b/nominatim/tokenizer/base.py
index 00ecae44..f81b3bc2 100644
--- a/nominatim/tokenizer/base.py
+++ b/nominatim/tokenizer/base.py
@@ -1,3 +1,9 @@
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2022 by the Nominatim developer community.
+# For a full list of authors see the git log.
 """
 Abstract class defintions for tokenizers. These base classes are here
 mainly for documentation purposes.
@@ -6,6 +12,7 @@ from abc import ABC, abstractmethod
 from typing import List, Tuple, Dict, Any
 
 from nominatim.config import Configuration
+from nominatim.indexer.place_info import PlaceInfo
 
 # pylint: disable=unnecessary-pass
 
@@ -29,7 +36,6 @@ class AbstractAnalyzer(ABC):
     def close(self) -> None:
         """ Free all resources used by the analyzer.
         """
-        pass
 
 
     @abstractmethod
@@ -49,7 +55,6 @@ class AbstractAnalyzer(ABC):
                 found for the given words. Each list entry is a tuple of
                 (original word, word token, word id).
         """
-        pass
 
 
     @abstractmethod
@@ -65,7 +70,6 @@ class AbstractAnalyzer(ABC):
             Returns:
                 The given postcode after normalization.
         """
-        pass
 
 
     @abstractmethod
@@ -73,7 +77,6 @@ class AbstractAnalyzer(ABC):
         """ Update the tokenizer's postcode tokens from the current content
             of the `location_postcode` table.
         """
-        pass
 
 
     @abstractmethod
@@ -89,7 +92,6 @@ class AbstractAnalyzer(ABC):
                                 When false, just add the given phrases to the
                                 ones that already exist.
         """
-        pass
 
 
     @abstractmethod
@@ -101,24 +103,16 @@ class AbstractAnalyzer(ABC):
                               refer to.
                 names: Dictionary of name type to name.
         """
-        pass
 
 
     @abstractmethod
-    def process_place(self, place: Dict) -> Any:
+    def process_place(self, place: PlaceInfo) -> Any:
         """ Extract tokens for the given place and compute the
             information to be handed to the PL/pgSQL processor for
             building the search index.
 
             Arguments:
-                place: Dictionary with the information about the place. Currently
-                       the following fields may be present:
-
-                       - *name* is a dictionary of names for the place together
-                         with the designation of the name.
-                       - *address* is a dictionary of address terms.
-                       - *country_feature* is set to a country code when the
-                         place describes a country.
+                place: Place information retrived from the database.
 
             Returns:
                 A JSON-serialisable structure that will be handed into
@@ -142,7 +136,7 @@ class AbstractTokenizer(ABC):
             the tokenizer remains stable over updates.
 
             Arguments:
-              config: Read-only object with configuration obtions.
+              config: Read-only object with configuration options.
 
               init_db: When set to False, then initialisation of database
                        tables should be skipped. This option is only required for
@@ -151,17 +145,18 @@ class AbstractTokenizer(ABC):
 
             TODO: can we move the init_db parameter somewhere else?
         """
-        pass
 
 
     @abstractmethod
-    def init_from_project(self) -> None:
+    def init_from_project(self, config: Configuration) -> None:
         """ Initialise the tokenizer from an existing database setup.
 
            The function should load all previously saved configuration from
           the project directory and/or the property table.
+
+            Arguments:
+              config: Read-only object with configuration options.
         """
-        pass
 
 
     @abstractmethod
@@ -172,9 +167,8 @@ class AbstractTokenizer(ABC):
            during query time.
 
             Arguments:
-              config: Read-only object with configuration obtions.
+              config: Read-only object with configuration options.
         """
-        pass
 
 
     @abstractmethod
@@ -187,24 +181,39 @@ class AbstractTokenizer(ABC):
            data structures or data itself must not be changed by this function.
 
             Arguments:
-              config: Read-only object with configuration obtions.
+              config: Read-only object with configuration options.
         """
-        pass
 
 
     @abstractmethod
-    def check_database(self) -> str:
+    def check_database(self, config: Configuration) -> str:
         """ Check that the database is set up correctly and ready for being
             queried.
 
+            Arguments:
+              config: Read-only object with configuration options.
+
             Returns:
               If an issue was found, return an error message with the
                   description of the issue as well as hints for the user on
-                  how to resolve the issue.
+                  how to resolve the issue. If everything is okay, return `None`.
+        """
 
-              Return `None`, if no issue was found.
+
+    @abstractmethod
+    def update_statistics(self) -> None:
+        """ Recompute any tokenizer statistics necessary for efficient lookup.
+            This function is meant to be called from time to time by the user
+            to improve performance. However, the tokenizer must not depend on
+            it to be called in order to work.
+        """
+
+
+    @abstractmethod
+    def update_word_tokens(self) -> None:
+        """ Do house-keeping on the tokenizers internal data structures.
+            Remove unused word tokens, resort data etc.
         """
-        pass
 
 
    @abstractmethod
@@ -221,4 +230,3 @@ class AbstractTokenizer(ABC):
             When used outside the with construct, the caller must ensure to
             call the close() function before destructing the analyzer.
         """
-        pass