git.openstreetmap.org Git - nominatim.git/blobdiff - nominatim/tools/migration.py
Merge remote-tracking branch 'upstream/master'
index 756a1d3a903326a8540dcbee9ee3b545bb11d843..d7faca31f1f431b61383825ad105bee4163a6a07 100644 (file)
@@ -3,11 +3,12 @@ Functions for database migration to newer software versions.
 """
 import logging
 
 """
 import logging
 
-from ..db import properties
-from ..db.connection import connect
-from ..version import NOMINATIM_VERSION
-from . import refresh, database_import
-from ..errors import UsageError
+from nominatim.db import properties
+from nominatim.db.connection import connect
+from nominatim.version import NOMINATIM_VERSION
+from nominatim.tools import refresh
+from nominatim.tokenizer import factory as tokenizer_factory
+from nominatim.errors import UsageError
 
 LOG = logging.getLogger()
 
@@ -43,11 +44,14 @@ def migrate(config, paths):
                             '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(version))
                 kwargs = dict(conn=conn, config=config, paths=paths)
                 func(**kwargs)
+                conn.commit()
                 has_run_migration = True
 
         if has_run_migration:
             LOG.warning('Updating SQL functions.')
-            refresh.create_functions(conn, config, paths.sqllib_dir)
+            refresh.create_functions(conn, config)
+            tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
+            tokenizer.update_sql_functions(config)
 
         properties.set_property(conn, 'database_version',
                                 '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))
@@ -108,17 +112,6 @@ def import_status_timestamp_change(conn, **_):
                        TYPE timestamp with time zone;""")
 
 
-@_migration(3, 5, 0, 99)
-def install_database_module_in_project_directory(conn, config, paths, **_):
-    """ Install database module in project directory.
-
-        The database module needs to be present in the project directory
-        since those were introduced.
-    """
-    database_import.install_module(paths.module_dir, paths.project_dir,
-                                   config.DATABASE_MODULE_PATH, conn=conn)
-
-
 @_migration(3, 5, 0, 99)
 def add_nominatim_property_table(conn, config, **_):
     """ Add nominatim_property table.
@@ -130,3 +123,72 @@ def add_nominatim_property_table(conn, config, **_):
                                value TEXT);
                            GRANT SELECT ON TABLE nominatim_properties TO "{}";
                         """.format(config.DATABASE_WEBUSER))
+
+@_migration(3, 6, 0, 0)
+def change_housenumber_transliteration(conn, **_):
+    """ Transliterate housenumbers.
+
+        The database schema switched from saving raw housenumbers in
+        placex.housenumber to saving transliterated ones.
+
+        Note: the function create_housenumber_id() has been dropped in later
+              versions.
+    """
+    with conn.cursor() as cur:
+        cur.execute("""CREATE OR REPLACE FUNCTION create_housenumber_id(housenumber TEXT)
+                       RETURNS TEXT AS $$
+                       DECLARE
+                         normtext TEXT;
+                       BEGIN
+                         SELECT array_to_string(array_agg(trans), ';')
+                           INTO normtext
+                           FROM (SELECT lookup_word as trans,
+                                        getorcreate_housenumber_id(lookup_word)
+                                 FROM (SELECT make_standard_name(h) as lookup_word
+                                       FROM regexp_split_to_table(housenumber, '[,;]') h) x) y;
+                         return normtext;
+                       END;
+                       $$ LANGUAGE plpgsql STABLE STRICT;""")
+        cur.execute("DELETE FROM word WHERE class = 'place' and type = 'house'")
+        cur.execute("""UPDATE placex
+                       SET housenumber = create_housenumber_id(housenumber)
+                       WHERE housenumber is not null""")
+
+
+@_migration(3, 7, 0, 0)
+def switch_placenode_geometry_index(conn, **_):
+    """ Replace idx_placex_geometry_reverse_placeNode index.
+
+        Make the index slightly more permissive, so that it can also be used
+        when matching up boundaries and place nodes. This makes the
+        idx_placex_adminname index unnecessary.
+    """
+    with conn.cursor() as cur:
+        cur.execute(""" CREATE INDEX IF NOT EXISTS idx_placex_geometry_placenode ON placex
+                        USING GIST (geometry)
+                        WHERE osm_type = 'N' and rank_search < 26
+                              and class = 'place' and type != 'postcode'
+                              and linked_place_id is null""")
+        cur.execute(""" DROP INDEX IF EXISTS idx_placex_adminname """)
+
+
+@_migration(3, 7, 0, 1)
+def install_legacy_tokenizer(conn, config, **_):
+    """ Setup legacy tokenizer.
+
+        If no other tokenizer has been configured yet, then create the
+        configuration for the backwards-compatible legacy tokenizer.
+    """
+    if properties.get_property(conn, 'tokenizer') is None:
+        with conn.cursor() as cur:
+            for table in ('placex', 'location_property_osmline'):
+                has_column = cur.scalar("""SELECT count(*) FROM information_schema.columns
+                                           WHERE table_name = %s
+                                           and column_name = 'token_info'""",
+                                        (table, ))
+                if has_column == 0:
+                    cur.execute('ALTER TABLE {} ADD COLUMN token_info JSONB'.format(table))
+        tokenizer = tokenizer_factory.create_tokenizer(config, init_db=False,
+                                                       module_name='legacy')
+
+        tokenizer.migrate_database(config)
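The decorator and dispatch machinery referenced throughout this diff (the `@_migration(...)` markers and the `func(**kwargs)` call in `migrate()`) is not part of the changed hunks. The following is a minimal sketch of how that registration presumably works, reconstructed only from the calls visible above; the `_MIGRATION_FUNCTIONS` name, the decorator body and the version handling are assumptions, not code from this commit.

# Hedged sketch, not part of this diff: version-tagged registration of
# migration functions, as suggested by the @_migration(...) usage above.
_MIGRATION_FUNCTIONS = []

def _migration(major, minor, patch=0, dbpatch=0):
    """ Register the decorated function as the migration to run for
        databases older than version (major, minor, patch, dbpatch).
    """
    def decorator(func):
        # Keep the version tuple together with the function so that
        # migrate() can walk the list in order and call
        # func(conn=..., config=..., paths=...) for every entry that is
        # newer than the stored 'database_version' property.
        _MIGRATION_FUNCTIONS.append(((major, minor, patch, dbpatch), func))
        return func

    return decorator

On an existing installation the migrations are usually triggered through the command-line tool (for the 3.7 series something along the lines of `nominatim admin --migrate`; check the documentation of your version for the exact invocation). After the last applicable function has run, `migrate()` recreates the SQL functions, lets the configured tokenizer update its own SQL functions and writes the new `database_version` property, as the hunk above shows.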