# SPDX-License-Identifier: GPL-2.0-only
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2022 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Functions for database migration to newer software versions.
"""
from typing import List, Tuple, Callable, Any
import logging

from psycopg2 import sql as pysql

from nominatim.config import Configuration
from nominatim.db import properties
from nominatim.db.connection import connect, Connection
from nominatim.version import NominatimVersion, NOMINATIM_VERSION, parse_version
from nominatim.tools import refresh
from nominatim.tokenizer import factory as tokenizer_factory
from nominatim.errors import UsageError
# Module-wide logger; uses the root logger like the rest of the package.
LOG = logging.getLogger()

# Registry of migration steps, filled by the @_migration decorator below.
# Each entry is (version after which the step must run, migration function).
# Order matters: steps are executed in definition order.
_MIGRATION_FUNCTIONS : List[Tuple[NominatimVersion, Callable[..., None]]] = []
def migrate(config: Configuration, paths: Any) -> int:
    """ Check for the current database version and execute migrations,
        if necessary.

        Returns 0 on success (including when the database is already
        up to date).
    """
    with connect(config.get_libpq_dsn()) as conn:
        if conn.table_exists('nominatim_properties'):
            db_version_str = properties.get_property(conn, 'database_version')
        else:
            # Very old databases have no property table at all.
            db_version_str = None

        if db_version_str is not None:
            db_version = parse_version(db_version_str)

            if db_version == NOMINATIM_VERSION:
                LOG.warning("Database already at latest version (%s)", db_version_str)
                return 0

            LOG.info("Detected database version: %s", db_version_str)
        else:
            db_version = _guess_version(conn)

        has_run_migration = False
        for version, func in _MIGRATION_FUNCTIONS:
            if db_version <= version:
                # First line of the docstring serves as the step title.
                title = func.__doc__ or ''
                LOG.warning("Running: %s (%s)", title.split('\n', 1)[0], version)
                kwargs = dict(conn=conn, config=config, paths=paths)
                func(**kwargs)
                # Commit after each step so a later failure does not roll
                # back migrations that already succeeded.
                conn.commit()
                has_run_migration = True

        if has_run_migration:
            LOG.warning('Updating SQL functions.')
            refresh.create_functions(conn, config)
            tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
            tokenizer.update_sql_functions(config)

        properties.set_property(conn, 'database_version', str(NOMINATIM_VERSION))

        conn.commit()

    return 0
def _guess_version(conn: Connection) -> NominatimVersion:
    """ Guess a database version when there is no property table yet.
        Only migrations for 3.6 and later are supported, so bail out
        when the version seems older.

        Raises UsageError when the database predates 3.6.0.
    """
    with conn.cursor() as cur:
        # In version 3.6, the country_name table was updated. Check for that.
        cnt = cur.scalar("""SELECT count(*) FROM
                            (SELECT svals(name) FROM country_name
                             WHERE country_code = 'gb')x;
                         """)
        if cnt < 100:
            LOG.fatal('It looks like your database was imported with a version '
                      'prior to 3.6.0. Automatic migration not possible.')
            raise UsageError('Migration not possible.')

    # Report the pre-3.6 sentinel version so all 3.6+ migrations run.
    return NominatimVersion(3, 5, 0, 99)
def _migration(major: int, minor: int, patch: int = 0,
               dbpatch: int = 0) -> Callable[[Callable[..., None]], Callable[..., None]]:
    """ Decorator for a single migration step. The parameters describe the
        version after which the migration is applicable, i.e before changing
        from the given version to the next, the migration is required.

        All migrations are run in the order in which they are defined in this
        file. Do not run global SQL scripts for migrations as you cannot be sure
        that these scripts do the same in later versions.

        Functions will always be reimported in full at the end of the migration
        process, so the migration functions may leave a temporary state behind
        there.
    """
    def decorator(func: Callable[..., None]) -> Callable[..., None]:
        version = NominatimVersion(major, minor, patch, dbpatch)
        _MIGRATION_FUNCTIONS.append((version, func))
        return func

    return decorator
@_migration(3, 5, 0, 99)
def import_status_timestamp_change(conn: Connection, **_: Any) -> None:
    """ Add timezone to timestamp in status table.

        The import_status table has been changed to include timezone information
        for the timestamp.
    """
    with conn.cursor() as cur:
        cur.execute("""ALTER TABLE import_status ALTER COLUMN lastimportdate
                       TYPE timestamp with time zone;""")
@_migration(3, 5, 0, 99)
def add_nominatim_property_table(conn: Connection, config: Configuration, **_: Any) -> None:
    """ Add nominatim_property table.

        Creates the key/value property table and grants the web user
        read access to it.
    """
    if not conn.table_exists('nominatim_properties'):
        with conn.cursor() as cur:
            # Identifier is quoted via psycopg2.sql to avoid SQL injection
            # through the configured web user name.
            cur.execute(pysql.SQL("""CREATE TABLE nominatim_properties (
                                        property TEXT,
                                        value TEXT);
                                     GRANT SELECT ON TABLE nominatim_properties TO {};
                                  """).format(pysql.Identifier(config.DATABASE_WEBUSER)))
@_migration(3, 6, 0, 0)
def change_housenumber_transliteration(conn: Connection, **_: Any) -> None:
    """ Transliterate housenumbers.

        The database schema switched from saving raw housenumbers in
        placex.housenumber to saving transliterated ones.

        Note: the function create_housenumber_id() has been dropped in later
        versions.
    """
    with conn.cursor() as cur:
        cur.execute("""CREATE OR REPLACE FUNCTION create_housenumber_id(housenumber TEXT)
                       RETURNS TEXT AS $$
                       DECLARE
                         normtext TEXT;
                       BEGIN
                         SELECT array_to_string(array_agg(trans), ';')
                           INTO normtext
                           FROM (SELECT lookup_word as trans,
                                        getorcreate_housenumber_id(lookup_word)
                                 FROM (SELECT make_standard_name(h) as lookup_word
                                       FROM regexp_split_to_table(housenumber, '[,;]') h) x) y;
                         return normtext;
                       END;
                       $$ LANGUAGE plpgsql STABLE STRICT;""")
        cur.execute("DELETE FROM word WHERE class = 'place' and type = 'house'")
        cur.execute("""UPDATE placex
                       SET housenumber = create_housenumber_id(housenumber)
                       WHERE housenumber is not null""")
@_migration(3, 7, 0, 0)
def switch_placenode_geometry_index(conn: Connection, **_: Any) -> None:
    """ Replace idx_placex_geometry_reverse_placeNode index.

        Make the index slightly more permissive, so that it can also be used
        when matching up boundaries and place nodes. It makes the index
        idx_placex_adminname index unnecessary.
    """
    with conn.cursor() as cur:
        cur.execute(""" CREATE INDEX IF NOT EXISTS idx_placex_geometry_placenode ON placex
                        USING GIST (geometry)
                        WHERE osm_type = 'N' and rank_search < 26
                              and class = 'place' and type != 'postcode'
                              and linked_place_id is null""")
        cur.execute(""" DROP INDEX IF EXISTS idx_placex_adminname """)
@_migration(3, 7, 0, 1)
def install_legacy_tokenizer(conn: Connection, config: Configuration, **_: Any) -> None:
    """ Setup legacy tokenizer.

        If no other tokenizer has been configured yet, then create the
        configuration for the backwards-compatible legacy tokenizer
    """
    if properties.get_property(conn, 'tokenizer') is None:
        with conn.cursor() as cur:
            for table in ('placex', 'location_property_osmline'):
                has_column = cur.scalar("""SELECT count(*) FROM information_schema.columns
                                           WHERE table_name = %s
                                           and column_name = 'token_info'""",
                                        (table, ))
                if has_column == 0:
                    cur.execute(pysql.SQL('ALTER TABLE {} ADD COLUMN token_info JSONB')
                                .format(pysql.Identifier(table)))
        tokenizer = tokenizer_factory.create_tokenizer(config, init_db=False,
                                                       module_name='legacy')

        tokenizer.migrate_database(config) # type: ignore[attr-defined]
@_migration(4, 0, 99, 0)
def create_tiger_housenumber_index(conn: Connection, **_: Any) -> None:
    """ Create idx_location_property_tiger_parent_place_id with included
        house number.

        The inclusion is needed for efficient lookup of housenumbers in
        full address searches.
    """
    # INCLUDE columns for covering indexes require PostgreSQL 11+.
    if conn.server_version_tuple() >= (11, 0, 0):
        with conn.cursor() as cur:
            cur.execute(""" CREATE INDEX IF NOT EXISTS
                                idx_location_property_tiger_housenumber_migrated
                            ON location_property_tiger
                            USING btree(parent_place_id)
                            INCLUDE (startnumber, endnumber) """)
@_migration(4, 0, 99, 1)
def create_interpolation_index_on_place(conn: Connection, **_: Any) -> None:
    """ Create idx_place_interpolations for lookup of interpolation lines
        on updates.
    """
    with conn.cursor() as cur:
        cur.execute("""CREATE INDEX IF NOT EXISTS idx_place_interpolations
                       ON place USING gist(geometry)
                       WHERE osm_type = 'W' and address ? 'interpolation'""")
@_migration(4, 0, 99, 2)
def add_step_column_for_interpolation(conn: Connection, **_: Any) -> None:
    """ Add a new column 'step' to the interpolations table.

        Also converts the data into the stricter format which requires that
        startnumbers comply with the odd/even requirements.
    """
    # Idempotency guard: nothing to do when the column already exists.
    if conn.table_has_column('location_property_osmline', 'step'):
        return

    with conn.cursor() as cur:
        # Mark invalid all interpolations with no intermediate numbers.
        cur.execute("""UPDATE location_property_osmline SET startnumber = null
                       WHERE endnumber - startnumber <= 1 """)
        # Align the start numbers where odd/even does not match.
        cur.execute("""UPDATE location_property_osmline
                       SET startnumber = startnumber + 1,
                           linegeo = ST_LineSubString(linegeo,
                                                      1.0 / (endnumber - startnumber)::float,
                                                      1)
                       WHERE (interpolationtype = 'odd' and startnumber % 2 = 0)
                              or (interpolationtype = 'even' and startnumber % 2 = 1)
                    """)
        # Mark invalid odd/even interpolations with no intermediate numbers.
        cur.execute("""UPDATE location_property_osmline SET startnumber = null
                       WHERE interpolationtype in ('odd', 'even')
                             and endnumber - startnumber = 2""")
        # Finally add the new column and populate it.
        cur.execute("ALTER TABLE location_property_osmline ADD COLUMN step SMALLINT")
        cur.execute("""UPDATE location_property_osmline
                         SET step = CASE WHEN interpolationtype = 'all'
                                         THEN 1 ELSE 2 END""")
@_migration(4, 0, 99, 3)
def add_step_column_for_tiger(conn: Connection, **_: Any) -> None:
    """ Add a new column 'step' to the tiger data table.
    """
    # Idempotency guard: nothing to do when the column already exists.
    if conn.table_has_column('location_property_tiger', 'step'):
        return

    with conn.cursor() as cur:
        cur.execute("ALTER TABLE location_property_tiger ADD COLUMN step SMALLINT")
        cur.execute("""UPDATE location_property_tiger
                         SET step = CASE WHEN interpolationtype = 'all'
                                         THEN 1 ELSE 2 END""")
@_migration(4, 0, 99, 4)
def add_derived_name_column_for_country_names(conn: Connection, **_: Any) -> None:
    """ Add a new column 'derived_name' which in the future takes the
        country names as imported from OSM data.
    """
    if not conn.table_has_column('country_name', 'derived_name'):
        with conn.cursor() as cur:
            cur.execute("ALTER TABLE country_name ADD COLUMN derived_name public.HSTORE")
@_migration(4, 0, 99, 5)
def mark_internal_country_names(conn: Connection, config: Configuration, **_: Any) -> None:
    """ Names from the country table should be marked as internal to prevent
        them from being deleted. Only necessary for ICU tokenizer.
    """
    import psycopg2.extras # pylint: disable=import-outside-toplevel

    tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
    with tokenizer.name_analyzer() as analyzer:
        with conn.cursor() as cur:
            # Make psycopg2 return hstore values as Python dicts.
            psycopg2.extras.register_hstore(cur)
            cur.execute("SELECT country_code, name FROM country_name")

            for country_code, names in cur:
                if names is None:
                    names = {}
                names['countrycode'] = country_code
                analyzer.add_country_names(country_code, names)
@_migration(4, 1, 99, 0)
def add_place_deletion_todo_table(conn: Connection, **_: Any) -> None:
    """ Add helper table for deleting data on updates.

        The table is only necessary when updates are possible, i.e.
        the database is not in freeze mode.
    """
    if conn.table_exists('place'):
        with conn.cursor() as cur:
            cur.execute("""CREATE TABLE IF NOT EXISTS place_to_be_deleted (
                             osm_type CHAR(1),
                             osm_id BIGINT,
                             class TEXT,
                             type TEXT,
                             deferred BOOLEAN)""")
@_migration(4, 1, 99, 1)
def split_pending_index(conn: Connection, **_: Any) -> None:
    """ Reorganise indexes for pending updates.
    """
    if conn.table_exists('place'):
        with conn.cursor() as cur:
            cur.execute("""CREATE INDEX IF NOT EXISTS idx_placex_rank_address_sector
                           ON placex USING BTREE (rank_address, geometry_sector)
                           WHERE indexed_status > 0""")
            cur.execute("""CREATE INDEX IF NOT EXISTS idx_placex_rank_boundaries_sector
                           ON placex USING BTREE (rank_search, geometry_sector)
                           WHERE class = 'boundary' and type = 'administrative'
                                 and indexed_status > 0""")
            # The old combined pending index is superseded by the two above.
            cur.execute("DROP INDEX IF EXISTS idx_placex_pendingsector")
@_migration(4, 2, 99, 0)
def enable_forward_dependencies(conn: Connection, **_: Any) -> None:
    """ Create indexes for updates with forward dependency tracking (long-running).
    """
    if conn.table_exists('planet_osm_ways'):
        with conn.cursor() as cur:
            # Only create the indexes when no nodes index exists yet.
            cur.execute("""SELECT * FROM pg_indexes
                           WHERE tablename = 'planet_osm_ways'
                                 and indexdef LIKE '%nodes%'""")
            if cur.rowcount == 0:
                cur.execute("""CREATE OR REPLACE FUNCTION public.planet_osm_index_bucket(bigint[])
                               RETURNS bigint[]
                               LANGUAGE sql IMMUTABLE
                                AS $function$
                                  SELECT ARRAY(SELECT DISTINCT unnest($1) >> 5)
                                $function$""")
                cur.execute("""CREATE INDEX planet_osm_ways_nodes_bucket_idx
                               ON planet_osm_ways
                               USING gin (planet_osm_index_bucket(nodes))
                               WITH (fastupdate=off)""")
                cur.execute("""CREATE INDEX planet_osm_rels_parts_idx
                               ON planet_osm_rels USING gin (parts)
                               WITH (fastupdate=off)""")
                cur.execute("ANALYZE planet_osm_ways")