+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2022 by the Nominatim developer community.
+# For a full list of authors see the git log.
"""
-Abstract class defintions for tokenizers. These base classes are here
+Abstract class definitions for tokenizers. These base classes are here
mainly for documentation purposes.
"""
from abc import ABC, abstractmethod
-from typing import List, Tuple, Dict, Any
+from typing import List, Tuple, Dict, Any, Optional, Iterable
+from pathlib import Path
from nominatim.config import Configuration
-from nominatim.indexer.place_info import PlaceInfo
-
-# pylint: disable=unnecessary-pass
+from nominatim.data.place_info import PlaceInfo
+from nominatim.typing import Protocol
class AbstractAnalyzer(ABC):
""" The analyzer provides the functions for analysing names and building
return self
- def __exit__(self, exc_type, exc_value, traceback) -> None:
+ def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
self.close()
def close(self) -> None:
""" Free all resources used by the analyzer.
"""
- pass
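
# Usage sketch (illustrative, not part of the patch): AbstractAnalyzer
# implements the context-manager protocol above, so close() is invoked
# automatically on exit. 'tokenizer' stands in for a concrete tokenizer
# instance; name_analyzer() is its analyzer factory.
with tokenizer.name_analyzer() as analyzer:
    ...  # run name and postcode queries against the analyzer
# __exit__ has called close() at this point
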
@abstractmethod
found for the given words. Each list entry is a tuple of
(original word, word token, word id).
"""
- pass
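
# Illustrative shape of the lookup result described above; the words,
# tokens and ids are invented for the example:
#   [('hauptstr', 'HAUPTSTR', 3421), ('berlin', 'BERLIN', 17)]
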
@abstractmethod
Returns:
The given postcode after normalization.
"""
- pass
@abstractmethod
""" Update the tokenizer's postcode tokens from the current content
of the `location_postcode` table.
"""
- pass
@abstractmethod
- def update_special_phrases(self, phrases: List[Tuple[str, str, str, str]],
+ def update_special_phrases(self,
+ phrases: Iterable[Tuple[str, str, str, str]],
should_replace: bool) -> None:
""" Update the tokenizer's special phrase tokens from the given
list of special phrases.
When false, just add the given phrases to the
ones that already exist.
"""
- pass
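
# Example call (a sketch): the four tuple fields are assumed to be
# (phrase label, key, value, operator) as produced by Nominatim's
# special-phrase import; 'analyzer' stands in for a concrete instance.
analyzer.update_special_phrases(
    [('restaurant', 'amenity', 'restaurant', '-'),
     ('hotels', 'tourism', 'hotel', 'in')],
    should_replace=True)  # drop any previously imported phrases
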
@abstractmethod
- def add_country_names(self, country_code: str, names: Dict[str, str]):
+ def add_country_names(self, country_code: str, names: Dict[str, str]) -> None:
""" Add the given names to the tokenizer's list of country tokens.
Arguments:
refer to.
names: Dictionary of name type to name.
"""
- pass
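
# Example call (a sketch): the name types are assumed to follow the
# OSM-style name keys that Nominatim uses elsewhere.
analyzer.add_country_names('de', {'name': 'Deutschland',
                                  'name:en': 'Germany'})
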
@abstractmethod
the search index.
Arguments:
- place: Place information retrived from the database.
+ place: Place information retrieved from the database.
Returns:
A JSON-serialisable structure that will be handed into
init_db: When set to False, then initialisation of database
tables should be skipped. This option is only required for
- migration purposes and can be savely ignored by custom
+ migration purposes and can be safely ignored by custom
tokenizers.
TODO: can we move the init_db parameter somewhere else?
"""
- pass
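
# Sketch of honoring the init_db flag in a custom tokenizer; the method
# name init_new_db, its full signature and both helpers are assumptions
# based on the docstring above:
def init_new_db(self, config: Configuration, init_db: bool = True) -> None:
    self._setup_config(config)    # hypothetical helper
    if init_db:
        self._create_tables()     # hypothetical helper, skipped on migration
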
@abstractmethod
Arguments:
config: Read-only object with configuration options.
"""
- pass
@abstractmethod
Arguments:
config: Read-only object with configuration options.
"""
- pass
@abstractmethod
Arguments:
config: Read-only object with configuration options.
"""
- pass
@abstractmethod
- def check_database(self, config: Configuration) -> str:
+ def check_database(self, config: Configuration) -> Optional[str]:
""" Check that the database is set up correctly and ready for being
queried.
description of the issue as well as hints for the user on
how to resolve the issue. If everything is okay, return `None`.
"""
- pass
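
# Sketch of a conforming check_database() implementation in a custom
# tokenizer; the helper _word_table_exists() is hypothetical:
def check_database(self, config: Configuration) -> Optional[str]:
    if not self._word_table_exists():
        return ("Table 'word' is missing.\n"
                "Hint: rerun the tokenizer setup of your installation.")
    return None  # everything is okay
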
+
+
+ @abstractmethod
+ def update_statistics(self) -> None:
+ """ Recompute any tokenizer statistics necessary for efficient lookup.
+ This function is meant to be called from time to time by the user
+ to improve performance. However, the tokenizer must not rely on
+ it being called in order to work correctly.
+ """
+
+
+ @abstractmethod
+ def update_word_tokens(self) -> None:
+ """ Do house-keeping on the tokenizers internal data structures.
+ Remove unused word tokens, resort data etc.
+ """
@abstractmethod
- When used outside the with construct, the caller must ensure to
- call the close() function before destructing the analyzer.
+ When used outside the with construct, the caller must make sure to
+ call the close() function before the analyzer is destroyed.
"""
- pass
+
+
+class TokenizerModule(Protocol):
+ """ Interface that must be exported by modules that implement their
+ own tokenizer.
+ """
+
+ def create(self, dsn: str, data_dir: Path) -> AbstractTokenizer:
+ """ Factory for new tokenizers.
+ """