make word recount a tokenizer-specific function
[nominatim.git] / nominatim / tools / migration.py
index 80539702c25332bdb04d7f0cf31524a7846fb32a..d7faca31f1f431b61383825ad105bee4163a6a07 100644 (file)
@@ -44,6 +44,7 @@ def migrate(config, paths):
                             '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(version))
                 kwargs = dict(conn=conn, config=config, paths=paths)
                 func(**kwargs)
+                conn.commit()
                 has_run_migration = True
 
         if has_run_migration:
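
The added conn.commit() makes every migration persist as soon as its function returns, so a failure further down the chain cannot roll back steps that already succeeded. A minimal sketch of that per-step commit pattern, assuming a psycopg2-style connection and an ordered list of (version, func) pairs; the names below are illustrative and not the actual migrate() signature:

```python
# Illustrative sketch only: run every migration newer than the database
# version and commit after each one, so completed steps are kept even if a
# later migration raises.
def run_pending_migrations(conn, migrations, db_version):
    has_run_migration = False
    for version, func in migrations:
        if version <= db_version:
            continue
        func(conn=conn)
        conn.commit()   # persist this migration before attempting the next one
        has_run_migration = True
    return has_run_migration
```
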
@@ -129,6 +130,9 @@ def change_housenumber_transliteration(conn, **_):
 
         The database schema switched from saving raw housenumbers in
         placex.housenumber to saving transliterated ones.
+
+        Note: the function create_housenumber_id() has been dropped in later
+              versions.
     """
     with conn.cursor() as cur:
         cur.execute("""CREATE OR REPLACE FUNCTION create_housenumber_id(housenumber TEXT)
@@ -138,7 +142,8 @@ def change_housenumber_transliteration(conn, **_):
                        BEGIN
                          SELECT array_to_string(array_agg(trans), ';')
                            INTO normtext
-                           FROM (SELECT lookup_word as trans, getorcreate_housenumber_id(lookup_word)
+                           FROM (SELECT lookup_word as trans,
+                                        getorcreate_housenumber_id(lookup_word)
                                  FROM (SELECT make_standard_name(h) as lookup_word
                                        FROM regexp_split_to_table(housenumber, '[,;]') h) x) y;
                          return normtext;
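
The hunk above only rewraps the getorcreate_housenumber_id() call; the surrounding migration uses the create_housenumber_id() helper to rewrite the housenumbers already stored in placex. As a rough sketch of how such a rewrite could be applied with plain psycopg2 (the exact UPDATE statement is an assumption based on the docstring, not copied from the file):

```python
# Hypothetical follow-up step: push every stored housenumber through the
# PL/pgSQL helper defined above.  Table and column names follow the
# docstring; the real migration may differ.
import psycopg2

def transliterate_housenumbers(dsn):
    conn = psycopg2.connect(dsn)
    try:
        with conn.cursor() as cur:
            cur.execute("""UPDATE placex
                           SET housenumber = create_housenumber_id(housenumber)
                           WHERE housenumber IS NOT NULL""")
        conn.commit()
    finally:
        conn.close()
```
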
@@ -175,6 +180,14 @@ def install_legacy_tokenizer(conn, config, **_):
         configuration for the backwards-compatible legacy tokenizer
     """
     if properties.get_property(conn, 'tokenizer') is None:
+        with conn.cursor() as cur:
+            for table in ('placex', 'location_property_osmline'):
+                has_column = cur.scalar("""SELECT count(*) FROM information_schema.columns
+                                           WHERE table_name = %s
+                                           and column_name = 'token_info'""",
+                                        (table, ))
+                if has_column == 0:
+                    cur.execute('ALTER TABLE {} ADD COLUMN token_info JSONB'.format(table))
         tokenizer = tokenizer_factory.create_tokenizer(config, init_db=False,
                                                        module_name='legacy')
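
install_legacy_tokenizer() now ensures the token_info column exists on placex and location_property_osmline before configuring the legacy tokenizer. The check relies on Nominatim's cursor wrapper (cur.scalar()); a standalone equivalent using plain psycopg2 might look like the sketch below, where the function name and usage lines are illustrative rather than part of the Nominatim API:

```python
# Standalone sketch of the same check-then-add pattern with plain psycopg2.
# ensure_token_info_column() is an illustrative name, not a Nominatim helper.
import psycopg2

def ensure_token_info_column(conn, table):
    with conn.cursor() as cur:
        cur.execute("""SELECT count(*) FROM information_schema.columns
                       WHERE table_name = %s AND column_name = 'token_info'""",
                    (table, ))
        if cur.fetchone()[0] == 0:
            cur.execute('ALTER TABLE {} ADD COLUMN token_info JSONB'.format(table))

# usage:
# for table in ('placex', 'location_property_osmline'):
#     ensure_token_info_column(conn, table)
# conn.commit()
```
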