# Copyright (C) 2022 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
-Abstract class defintions for tokenizers. These base classes are here
+Abstract class definitions for tokenizers. These base classes are here
mainly for documentation purposes.
"""
from abc import ABC, abstractmethod
-from typing import List, Tuple, Dict, Any, Optional
+from typing import List, Tuple, Dict, Any, Optional, Iterable
from pathlib import Path
-from typing_extensions import Protocol
-
from nominatim.config import Configuration
+from nominatim.db.connection import Connection
from nominatim.data.place_info import PlaceInfo
+from nominatim.typing import Protocol
class AbstractAnalyzer(ABC):
""" The analyzer provides the functions for analysing names and building
@abstractmethod
- def update_special_phrases(self, phrases: List[Tuple[str, str, str, str]],
+ def update_special_phrases(self,
+ phrases: Iterable[Tuple[str, str, str, str]],
should_replace: bool) -> None:
""" Update the tokenizer's special phrase tokens from the given
list of special phrases.
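# --- Illustrative usage sketch, not part of this change ----------------------
# Shows the expected shape of the phrase tuples handed to
# update_special_phrases(). The (label, class, type, operator) ordering is an
# assumption based on how Nominatim's special-phrase importer drives this call;
# it is not taken from this diff.
def add_example_phrases(analyzer: AbstractAnalyzer) -> None:
    """ Replace the currently stored special phrases with two example entries.
    """
    analyzer.update_special_phrases(
        [("Zip Lines", "aerialway", "zip_line", "-"),
         ("Zip Lines in", "aerialway", "zip_line", "in")],
        should_replace=True)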
the search index.
Arguments:
- place: Place information retrived from the database.
+ place: Place information retrieved from the database.
Returns:
A JSON-serialisable structure that will be handed into
init_db: When set to False, then initialisation of database
tables should be skipped. This option is only required for
- migration purposes and can be savely ignored by custom
+ migration purposes and can be safely ignored by custom
tokenizers.
TODO: can we move the init_db parameter somewhere else?
"""
+ @abstractmethod
+ def most_frequent_words(self, conn: Connection, num: int) -> List[str]:
+ """ Return a list of the `num` most frequent full words
+ in the database.
+ """
+
+
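# --- Illustrative sketch, not part of this change -----------------------------
# One possible implementation of the new most_frequent_words() hook, assuming a
# tokenizer whose `word` table stores full words under type 'W' with a JSONB
# `info` column that carries a `count` field. Both that table layout and the
# AbstractTokenizer base class (defined in this module but not shown in the
# excerpt) are assumptions made for the sake of the example.
class ExampleTokenizer(AbstractTokenizer):
    # ... other abstract methods omitted ...

    def most_frequent_words(self, conn: Connection, num: int) -> List[str]:
        with conn.cursor() as cur:
            cur.execute("""SELECT word FROM word
                            WHERE type = 'W'
                            ORDER BY (info->>'count')::int DESC NULLS LAST
                            LIMIT %s""", (num, ))
            return [row[0] for row in cur]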
class TokenizerModule(Protocol):
""" Interface that must be exported by modules that implement their
own tokenizer.
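# --- Illustrative sketch, not part of this change -----------------------------
# What a module implementing the TokenizerModule protocol typically exports.
# The create() factory and its (dsn, data_dir) parameters are assumptions based
# on Nominatim's built-in tokenizers, not taken from this diff; MyTokenizer is
# hypothetical and would still have to implement all abstract methods of
# AbstractTokenizer before it can actually be instantiated.
#
# mytokenizer.py (hypothetical separate module)
from pathlib import Path

from nominatim.tokenizer.base import AbstractTokenizer


class MyTokenizer(AbstractTokenizer):
    """ Hypothetical custom tokenizer (abstract methods omitted). """


def create(dsn: str, data_dir: Path) -> AbstractTokenizer:
    """ Factory for new tokenizer instances. """
    return MyTokenizer()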