Merge pull request #2310 from RhinoDevel/master
nominatim/tokenizer/legacy_tokenizer.py
index d6755835cae40fb496c287b44f15166917040bf5..2f060b84aa98761b653f15a41f68566dc31cb2d8 100644
--- a/nominatim/tokenizer/legacy_tokenizer.py
+++ b/nominatim/tokenizer/legacy_tokenizer.py
@@ -119,6 +119,15 @@ class LegacyTokenizer:
             self.normalization = properties.get_property(conn, DBCFG_NORMALIZATION)
 
 
+    def finalize_import(self, config):
+        """ Do any required postprocessing to make the tokenizer data ready
+            for use.
+        """
+        with connect(self.dsn) as conn:
+            sqlp = SQLPreprocessor(conn, config)
+            sqlp.run_sql_file(conn, 'tokenizer/legacy_tokenizer_indices.sql')
+
+
     def update_sql_functions(self, config):
         """ Reimport the SQL functions for this tokenizer.
         """
@@ -132,6 +141,33 @@ class LegacyTokenizer:
                               modulepath=modulepath)
 
 
+    def check_database(self):
+        """ Check that the tokenizer is set up correctly.
+        """
+        hint = """\
+             The PostgreSQL extension nominatim.so was not correctly loaded.
+
+             Error: {error}
+
+             Hints:
+             * Check the output of the CMake/make installation step
+             * Does nominatim.so exist?
+             * Does nominatim.so exist on the database server?
+             * Can nominatim.so be accessed by the database user?
+             """
+        with connect(self.dsn) as conn:
+            with conn.cursor() as cur:
+                try:
+                    out = cur.scalar("SELECT make_standard_name('a')")
+                except psycopg2.Error as err:
+                    return hint.format(error=str(err))
+
+        if out != 'a':
+            return hint.format(error='Unexpected result for make_standard_name()')
+
+        return None
+
+
     def migrate_database(self, config):
         """ Initialise the project directory of an existing database for
             use with this tokenizer.
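
check_database() follows a simple contract: it returns None when the make_standard_name() probe round-trips its input, and the formatted hint string otherwise. A hedged sketch of how a health-check command might consume that return value (the wrapper below is illustrative, not the actual nominatim check-database code):

    # Sketch only: consuming the None-or-hint return value of
    # check_database(). This wrapper is illustrative.
    def run_tokenizer_check(tokenizer):
        msg = tokenizer.check_database()
        if msg is None:
            print('Tokenizer: OK')
            return 0
        print('Tokenizer: Failed')
        print(msg)
        return 1
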