]> git.openstreetmap.org Git - nominatim.git/blobdiff - test/python/dummy_tokenizer.py
Merge pull request #3492 from lonvia/drop-waste-disposal
[nominatim.git] / test / python / dummy_tokenizer.py
index ceea4a7ededdaa1e829e6ebf902ef42455e33e47..4739e3f0af831303f4860f601b802537e781d898 100644 (file)
@@ -1,6 +1,14 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
"""
Tokenizer for testing.
"""
+from nominatim_db.data.place_info import PlaceInfo
+from nominatim_db.config import Configuration
 
 def create(dsn, data_dir):
     """ Create a new instance of the tokenizer provided by this module.
 
 def create(dsn, data_dir):
     """ Create a new instance of the tokenizer provided by this module.
@@ -13,20 +21,27 @@ class DummyTokenizer:
         self.dsn = dsn
         self.data_dir = data_dir
         self.init_state = None
         self.dsn = dsn
         self.data_dir = data_dir
         self.init_state = None
+        self.analyser_cache = {}
 
 
 
 
-    def init_new_db(self, config):
-        assert self.init_state == None
+    def init_new_db(self, *args, **kwargs):
+        assert self.init_state is None
         self.init_state = "new"
 
 
         self.init_state = "new"
 
 
-    def init_from_project(self):
-        assert self.init_state == None
+    def init_from_project(self, config):
+        assert isinstance(config, Configuration)
+        assert self.init_state is None
         self.init_state = "loaded"
 
 
         self.init_state = "loaded"
 
 
+    @staticmethod
+    def finalize_import(_):
+        pass
+
+
     def name_analyzer(self):
     def name_analyzer(self):
-        return DummyNameAnalyzer()
+        return DummyNameAnalyzer(self.analyser_cache)
 
 
 class DummyNameAnalyzer:
 
 
 class DummyNameAnalyzer:
@@ -38,18 +53,29 @@ class DummyNameAnalyzer:
         self.close()
 
 
         self.close()
 
 
+    def __init__(self, cache):
+        self.analyser_cache = cache
+        cache['countries'] = []
+
+
     def close(self):
     def close(self):
-        """ Free all resources used by the analyzer.
-        """
         pass
 
         pass
 
-    def add_postcodes_from_db(self):
+    @staticmethod
+    def normalize_postcode(postcode):
+        return postcode
+
+    @staticmethod
+    def update_postcodes_from_db():
         pass
 
         pass
 
-    def process_place(self, place):
-        """ Determine tokenizer information about the given place.
+    def update_special_phrases(self, phrases, should_replace):
+        self.analyser_cache['special_phrases'] = phrases
+
+    def add_country_names(self, code, names):
+        self.analyser_cache['countries'].append((code, names))
 
 
-            Returns a JSON-serialisable structure that will be handed into
-            the database via the token_info field.
-        """
+    @staticmethod
+    def process_place(place):
+        assert isinstance(place, PlaceInfo)
         return {}
         return {}