# SPDX-License-Identifier: GPL-2.0-only
#
# This file is part of Nominatim. (https://nominatim.org)
# Copyright (C) 2022 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for database migration to newer software versions.
"""
import logging

from nominatim.db import properties
from nominatim.db.connection import connect
from nominatim.version import NOMINATIM_VERSION
from nominatim.tools import refresh
from nominatim.tokenizer import factory as tokenizer_factory
from nominatim.errors import UsageError

LOG = logging.getLogger()

# Ordered list of migration steps, filled by the _migration decorator.
# Each entry is ((major, minor, patch, dbpatch), migration_function).
_MIGRATION_FUNCTIONS = []
def migrate(config, paths):
    """ Check for the current database version and execute migrations,
        if necessary.

        Parameters:
            config: project configuration (provides the libpq DSN and
                    tokenizer settings).
            paths: object with project directory paths, forwarded to the
                   individual migration functions.

        Returns 0 on success. Raises UsageError when the database is too
        old to be migrated automatically.
    """
    with connect(config.get_libpq_dsn()) as conn:
        # The property table only exists from 3.6 onwards; without it the
        # version must be guessed from the schema.
        if conn.table_exists('nominatim_properties'):
            db_version_str = properties.get_property(conn, 'database_version')
        else:
            db_version_str = None

        if db_version_str is not None:
            # Version string has the form "<major>.<minor>.<patch>-<dbpatch>".
            parts = db_version_str.split('.')
            db_version = tuple(int(x) for x in parts[:2] + parts[2].split('-'))

            if db_version == NOMINATIM_VERSION:
                LOG.warning("Database already at latest version (%s)", db_version_str)
                return 0

            LOG.info("Detected database version: %s", db_version_str)
        else:
            db_version = _guess_version(conn)

        has_run_migration = False
        for version, func in _MIGRATION_FUNCTIONS:
            # Run every migration defined for a version at or after the
            # current database version.
            if db_version <= version:
                title = func.__doc__.split('\n', 1)[0]
                LOG.warning("Running: %s (%s)", title,
                            '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(version))
                kwargs = dict(conn=conn, config=config, paths=paths)
                func(**kwargs)
                conn.commit()
                has_run_migration = True

        if has_run_migration:
            # Migrations may change SQL functions, so reinstall them in full.
            LOG.warning('Updating SQL functions.')
            refresh.create_functions(conn, config)
            tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
            tokenizer.update_sql_functions(config)

        properties.set_property(conn, 'database_version',
                                '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        conn.commit()

    return 0
def _guess_version(conn):
    """ Guess a database version when there is no property table yet.
        Only migrations for 3.6 and later are supported, so bail out
        when the version seems older.

        Returns the assumed version tuple (major, minor, patch, dbpatch).
        Raises UsageError for pre-3.6 databases.
    """
    with conn.cursor() as cur:
        # In version 3.6, the country_name table was updated. Check for that.
        cnt = cur.scalar("""SELECT count(*) FROM
                            (SELECT svals(name) FROM country_name
                             WHERE country_code = 'gb')x;
                         """)
        if cnt < 100:
            LOG.fatal('It looks like your database was imported with a version '
                      'prior to 3.6.0. Automatic migration not possible.')
            raise UsageError('Migration not possible.')

    # Assume the oldest still-migratable version.
    return (3, 5, 0, 99)
def _migration(major, minor, patch=0, dbpatch=0):
    """ Decorator for a single migration step. The parameters describe the
        version after which the migration is applicable, i.e before changing
        from the given version to the next, the migration is required.

        All migrations are run in the order in which they are defined in this
        file. Do not run global SQL scripts for migrations as you cannot be sure
        that these scripts do the same in later versions.

        Functions will always be reimported in full at the end of the migration
        process, so the migration functions may leave a temporary state behind
        there.
    """
    def decorator(func):
        # Register the step together with its version key; order of
        # definition in this file determines execution order.
        _MIGRATION_FUNCTIONS.append(((major, minor, patch, dbpatch), func))
        return func

    return decorator
@_migration(3, 5, 0, 99)
def import_status_timestamp_change(conn, **_):
    """ Add timezone to timestamp in status table.

        The import_status table has been changed to include timezone information
        with the time stamp.
    """
    with conn.cursor() as cur:
        cur.execute("""ALTER TABLE import_status ALTER COLUMN lastimportdate
                       TYPE timestamp with time zone;""")
@_migration(3, 5, 0, 99)
def add_nominatim_property_table(conn, config, **_):
    """ Add nominatim_property table.
    """
    if not conn.table_exists('nominatim_properties'):
        with conn.cursor() as cur:
            # The web user needs read access so the frontend can check
            # database properties.
            cur.execute("""CREATE TABLE nominatim_properties (
                               property TEXT,
                               value TEXT);
                           GRANT SELECT ON TABLE nominatim_properties TO "{}";
                        """.format(config.DATABASE_WEBUSER))
@_migration(3, 6, 0, 0)
def change_housenumber_transliteration(conn, **_):
    """ Transliterate housenumbers.

        The database schema switched from saving raw housenumbers in
        placex.housenumber to saving transliterated ones.

        Note: the function create_housenumber_id() has been dropped in later
        versions.
    """
    with conn.cursor() as cur:
        # Helper that normalises each part of a multi-value housenumber
        # and joins the results with ';' again.
        cur.execute("""CREATE OR REPLACE FUNCTION create_housenumber_id(housenumber TEXT)
                       RETURNS TEXT AS $$
                       DECLARE
                         normtext TEXT;
                       BEGIN
                         SELECT array_to_string(array_agg(trans), ';')
                           INTO normtext
                           FROM (SELECT lookup_word as trans,
                                        getorcreate_housenumber_id(lookup_word)
                                   FROM (SELECT make_standard_name(h) as lookup_word
                                           FROM regexp_split_to_table(housenumber, '[,;]') h) x) y;
                         return normtext;
                       END;
                       $$ LANGUAGE plpgsql STABLE STRICT;""")
        # Drop the old raw housenumber tokens before regenerating.
        cur.execute("DELETE FROM word WHERE class = 'place' and type = 'house'")
        cur.execute("""UPDATE placex
                       SET housenumber = create_housenumber_id(housenumber)
                       WHERE housenumber is not null""")
@_migration(3, 7, 0, 0)
def switch_placenode_geometry_index(conn, **_):
    """ Replace idx_placex_geometry_reverse_placeNode index.

        Make the index slightly more permissive, so that it can also be used
        when matching up boundaries and place nodes. It makes the index
        idx_placex_adminname index unnecessary.
    """
    with conn.cursor() as cur:
        cur.execute(""" CREATE INDEX IF NOT EXISTS idx_placex_geometry_placenode ON placex
                        USING GIST (geometry)
                        WHERE osm_type = 'N' and rank_search < 26
                              and class = 'place' and type != 'postcode'
                              and linked_place_id is null""")
        # Superseded by the more permissive index above.
        cur.execute(""" DROP INDEX IF EXISTS idx_placex_adminname """)
@_migration(3, 7, 0, 1)
def install_legacy_tokenizer(conn, config, **_):
    """ Setup legacy tokenizer.

        If no other tokenizer has been configured yet, then create the
        configuration for the backwards-compatible legacy tokenizer
    """
    if properties.get_property(conn, 'tokenizer') is None:
        with conn.cursor() as cur:
            # Older databases are missing the token_info column that the
            # tokenizer interface expects; add it where necessary.
            for table in ('placex', 'location_property_osmline'):
                has_column = cur.scalar("""SELECT count(*) FROM information_schema.columns
                                           WHERE table_name = %s
                                           and column_name = 'token_info'""",
                                        (table, ))
                if has_column == 0:
                    cur.execute('ALTER TABLE {} ADD COLUMN token_info JSONB'.format(table))
        tokenizer = tokenizer_factory.create_tokenizer(config, init_db=False,
                                                       module_name='legacy')

        tokenizer.migrate_database(config)
@_migration(4, 0, 99, 0)
def create_tiger_housenumber_index(conn, **_):
    """ Create idx_location_property_tiger_parent_place_id with included
        house number.

        The inclusion is needed for efficient lookup of housenumbers in
        full address searches.
    """
    # INCLUDE columns on indexes are only available from PostgreSQL 11.
    if conn.server_version_tuple() >= (11, 0, 0):
        with conn.cursor() as cur:
            cur.execute(""" CREATE INDEX IF NOT EXISTS
                                idx_location_property_tiger_housenumber_migrated
                            ON location_property_tiger
                            USING btree(parent_place_id)
                            INCLUDE (startnumber, endnumber) """)
@_migration(4, 0, 99, 1)
def create_interpolation_index_on_place(conn, **_):
    """ Create idx_place_interpolations for lookup of interpolation lines
        on updates.
    """
    with conn.cursor() as cur:
        cur.execute("""CREATE INDEX IF NOT EXISTS idx_place_interpolations
                       ON place USING gist(geometry)
                       WHERE osm_type = 'W' and address ? 'interpolation'""")
@_migration(4, 0, 99, 2)
def add_step_column_for_interpolation(conn, **_):
    """ Add a new column 'step' to the interpolations table.

        Also converts the data into the stricter format which requires that
        startnumbers comply with the odd/even requirements.
    """
    # Column already present means the migration has run before.
    if conn.table_has_column('location_property_osmline', 'step'):
        return

    with conn.cursor() as cur:
        # Mark invalid all interpolations with no intermediate numbers.
        cur.execute("""UPDATE location_property_osmline SET startnumber = null
                       WHERE endnumber - startnumber <= 1 """)
        # Align the start numbers where odd/even does not match.
        cur.execute("""UPDATE location_property_osmline
                       SET startnumber = startnumber + 1,
                           linegeo = ST_LineSubString(linegeo,
                                                      1.0 / (endnumber - startnumber)::float,
                                                      1)
                       WHERE (interpolationtype = 'odd' and startnumber % 2 = 0)
                              or (interpolationtype = 'even' and startnumber % 2 = 1)
                    """)
        # Mark invalid odd/even interpolations with no intermediate numbers.
        cur.execute("""UPDATE location_property_osmline SET startnumber = null
                       WHERE interpolationtype in ('odd', 'even')
                             and endnumber - startnumber = 2""")
        # Finally add the new column and populate it.
        cur.execute("ALTER TABLE location_property_osmline ADD COLUMN step SMALLINT")
        cur.execute("""UPDATE location_property_osmline
                         SET step = CASE WHEN interpolationtype = 'all'
                                         THEN 1 ELSE 2 END""")
@_migration(4, 0, 99, 3)
def add_step_column_for_tiger(conn, **_):
    """ Add a new column 'step' to the tiger data table.
    """
    # Column already present means the migration has run before.
    if conn.table_has_column('location_property_tiger', 'step'):
        return

    with conn.cursor() as cur:
        cur.execute("ALTER TABLE location_property_tiger ADD COLUMN step SMALLINT")
        cur.execute("""UPDATE location_property_tiger
                         SET step = CASE WHEN interpolationtype = 'all'
                                         THEN 1 ELSE 2 END""")
@_migration(4, 0, 99, 4)
def add_derived_name_column_for_country_names(conn, **_):
    """ Add a new column 'derived_name' which in the future takes the
        country names as imported from OSM data.
    """
    if not conn.table_has_column('country_name', 'derived_name'):
        with conn.cursor() as cur:
            cur.execute("ALTER TABLE country_name ADD COLUMN derived_name public.HSTORE")
@_migration(4, 0, 99, 5)
def mark_internal_country_names(conn, config, **_):
    """ Names from the country table should be marked as internal to prevent
        them from being deleted. Only necessary for ICU tokenizer.
    """
    import psycopg2.extras # pylint: disable=import-outside-toplevel

    tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
    with tokenizer.name_analyzer() as analyzer:
        with conn.cursor() as cur:
            # Needed so the hstore 'name' column comes back as a dict.
            psycopg2.extras.register_hstore(cur)
            cur.execute("SELECT country_code, name FROM country_name")

            for country_code, names in cur:
                # 'name' may be NULL for countries without OSM names.
                if names is None:
                    names = {}
                names['countrycode'] = country_code
                analyzer.add_country_names(country_code, names)