# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2025 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Collector for BDD import acceptance tests.

These tests check the Nominatim import chain after the osm2pgsql import.
"""
import re
import asyncio

import psycopg
from psycopg import sql as pysql
from psycopg.rows import tuple_row

import pytest
from pytest_bdd import scenarios, when, then, given
from pytest_bdd.parsers import re as step_parse

from utils.place_inserter import PlaceColumn
from utils.checks import check_table_content

from nominatim_db.config import Configuration
from nominatim_db import cli
from nominatim_db.tools.database_import import load_data, create_table_triggers
from nominatim_db.tools.postcodes import update_postcodes
from nominatim_db.tokenizer import factory as tokenizer_factory


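# Helper: rewrite a datatable column that refers to other places. The column
# header is renamed and each OSM-style reference (e.g. 'N45' or 'W3:residential')
# is replaced with the place_id assigned during import; '-' stands for NULL.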
def _rewrite_placeid_field(field, new_field, datatable, place_ids):
    try:
        oidx = datatable[0].index(field)
        datatable[0][oidx] = new_field
        for line in datatable[1:]:
            line[oidx] = None if line[oidx] == '-' else place_ids[line[oidx]]
    except ValueError:
        pass  # the column is optional; nothing to rewrite when it is missing


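# Build a lookup from OSM references to place_ids, indexed both with and
# without the main class (e.g. 'N45' and 'N45:amenity').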
def _collect_place_ids(conn):
    pids = {}
    with conn.cursor() as cur:
        for row in cur.execute('SELECT place_id, osm_type, osm_id, class FROM placex'):
            pids[f"{row[1]}{row[2]}"] = row[0]
            pids[f"{row[1]}{row[2]}:{row[3]}"] = row[0]

    return pids


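# Environment configuration pointing Nominatim at the test database.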
@pytest.fixture
def test_config_env(pytestconfig):
    dbname = pytestconfig.getini('nominatim_test_db')

    config = Configuration(None).get_os_env()
    config['NOMINATIM_DATABASE_DSN'] = f"pgsql:dbname={dbname}"
    config['NOMINATIM_LANGUAGES'] = 'en,de,fr,ja'
    config['NOMINATIM_USE_US_TIGER_DATA'] = 'yes'
    if pytestconfig.option.NOMINATIM_TOKENIZER is not None:
        config['NOMINATIM_TOKENIZER'] = pytestconfig.option.NOMINATIM_TOKENIZER

    return config


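# 'refresh --functions' reinstalls the PL/pgSQL functions so that the update
# triggers used by the steps below are in place.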
@pytest.fixture
def update_config(def_config):
    """ Prepare the database for being updatable and return the config.
    """
    cli.nominatim(['refresh', '--functions'], def_config.environ)

    return def_config


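# Step datatables arrive as a list of rows; the steps below treat row 0 as the
# column headers and insert the remaining rows into the osm2pgsql output tables
# that a real import would fill.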
@given(step_parse('the (?P<named>named )?places'), target_fixture=None)
def import_places(db_conn, named, datatable, node_grid):
    """ Insert the given rows into the place table.
        When 'named' is given, then a random name will be generated for all
        objects.
    """
    with db_conn.cursor() as cur:
        for row in datatable[1:]:
            PlaceColumn(node_grid).add_row(datatable[0], row, named is not None).db_insert(cur)


@given('the ways', target_fixture=None)
def import_ways(db_conn, datatable):
    """ Import raw ways into the osm2pgsql way middle table.
    """
    with db_conn.cursor() as cur:
        id_idx = datatable[0].index('id')
        node_idx = datatable[0].index('nodes')
        for line in datatable[1:]:
            tags = psycopg.types.json.Json(
                {k[5:]: v for k, v in zip(datatable[0], line)
                 if k.startswith("tags+")})
            nodes = [int(x) for x in line[node_idx].split(',')]

            cur.execute("INSERT INTO planet_osm_ways (id, nodes, tags) VALUES (%s, %s, %s)",
                        (line[id_idx], nodes, tags))


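# Relation members are written as a comma-separated list of OSM references with
# an optional role, e.g. 'N1,W2:outer'.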
@given('the relations', target_fixture=None)
def import_rels(db_conn, datatable):
    """ Import raw relations into the osm2pgsql relation middle table.
    """
    with db_conn.cursor() as cur:
        id_idx = datatable[0].index('id')
        memb_idx = datatable[0].index('members')
        for line in datatable[1:]:
            tags = psycopg.types.json.Json(
                {k[5:]: v for k, v in zip(datatable[0], line)
                 if k.startswith("tags+")})

            members = []
            for member in line[memb_idx].split(','):
                m = re.fullmatch(r'\s*([RWN])(\d+)(?::(\S+))?\s*', member)
                if not m:
                    raise ValueError(f'Illegal member {member}.')
                members.append({'ref': int(m[2]), 'role': m[3] or '', 'type': m[1]})

            cur.execute('INSERT INTO planet_osm_rels (id, tags, members) VALUES (%s, %s, %s)',
                        (int(line[id_idx]), tags, psycopg.types.json.Json(members)))


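# 'target_fixture' makes the returned mapping available to the Then steps as
# the 'place_ids' fixture.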
@when('importing', target_fixture='place_ids')
def do_import(db_conn, def_config):
    """ Run a reduced version of the Nominatim import.
    """
    create_table_triggers(db_conn, def_config)
    asyncio.run(load_data(def_config.get_libpq_dsn(), 1))
    tokenizer = tokenizer_factory.get_tokenizer_for_db(def_config)
    update_postcodes(def_config.get_libpq_dsn(), None, tokenizer)
    cli.nominatim(['index', '-q'], def_config.environ)

    return _collect_place_ids(db_conn)


@when('updating places', target_fixture='place_ids')
def do_update(db_conn, update_config, node_grid, datatable):
    """ Update the place table with the given data. Also runs all triggers
        related to updates and reindexes the new data.
    """
    with db_conn.cursor() as cur:
        for row in datatable[1:]:
            PlaceColumn(node_grid).add_row(datatable[0], row, False).db_insert(cur)
        cur.execute('SELECT flush_deleted_places()')
    db_conn.commit()  # ensure the indexer run below sees the changes

    cli.nominatim(['index', '-q'], update_config.environ)

    return _collect_place_ids(db_conn)


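# 'refresh --postcodes' rebuilds the location_postcode table from the imported
# address data.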
@when('updating postcodes')
def do_postcode_update(update_config):
    """ Recompute the postcode centroids.
    """
    cli.nominatim(['refresh', '--postcodes'], update_config.environ)


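# Deleting rows from place feeds the place_to_be_deleted table via triggers;
# flush_deleted_places() then queues the affected placex entries for the indexer.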
@when(step_parse(r'marking for delete (?P<otype>[NRW])(?P<oid>\d+)'),
      converters={'oid': int})
def do_delete_place(db_conn, update_config, node_grid, otype, oid):
    """ Remove the given place from the database.
    """
    with db_conn.cursor() as cur:
        cur.execute('TRUNCATE place_to_be_deleted')
        cur.execute('DELETE FROM place WHERE osm_type = %s and osm_id = %s',
                    (otype, oid))
        cur.execute('SELECT flush_deleted_places()')
    db_conn.commit()  # ensure the indexer run below sees the changes

    cli.nominatim(['index', '-q'], update_config.environ)


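# Compare a database table against the expected rows. With 'exactly' the table
# must not contain any rows beyond the listed ones. Columns referring to other
# places are translated to place_ids first.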
@then(step_parse(r'(?P<table>\w+) contains(?P<exact> exactly)?'))
def then_check_table_content(db_conn, place_ids, datatable, node_grid, table, exact):
    _rewrite_placeid_field('object', 'place_id', datatable, place_ids)
    _rewrite_placeid_field('parent_place_id', 'parent_place_id', datatable, place_ids)
    _rewrite_placeid_field('linked_place_id', 'linked_place_id', datatable, place_ids)
    if table == 'place_addressline':
        _rewrite_placeid_field('address', 'address_place_id', datatable, place_ids)

    for i, title in enumerate(datatable[0]):
        if title.startswith('addr+'):
            datatable[0][i] = f"address+{title[5:]}"

    check_table_content(db_conn, table, datatable, grid=node_grid, exact=bool(exact))


@then(step_parse(r'(?P<table>placex?) has no entry for (?P<oid>[NRW]\d+(?::\S+)?)'))
def then_check_place_missing_lines(db_conn, place_ids, table, oid):
    assert oid in place_ids

    sql = pysql.SQL("""SELECT count(*) FROM {}
                       WHERE place_id = %s""").format(pysql.Identifier(table))

    with db_conn.cursor(row_factory=tuple_row) as cur:
        assert cur.execute(sql, [place_ids[oid]]).fetchone()[0] == 0


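# The expected interpolation rows use the columns 'start', 'end' and 'geometry';
# they are mapped onto startnumber, endnumber and the linegeo geometry of
# location_property_osmline before comparing.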
@then(step_parse(r'W(?P<oid>\d+) expands to interpolation'),
      converters={'oid': int})
def then_check_interpolation_table(db_conn, node_grid, place_ids, oid, datatable):
    with db_conn.cursor() as cur:
        cur.execute('SELECT count(*) FROM location_property_osmline WHERE osm_id = %s',
                    (oid, ))
        assert cur.fetchone()[0] == len(datatable) - 1

    start_idx = datatable[0].index('start') if 'start' in datatable[0] else None
    end_idx = datatable[0].index('end') if 'end' in datatable[0] else None
    geom_idx = datatable[0].index('geometry') if 'geometry' in datatable[0] else None
    converted = [['osm_id']]
    for val, col in zip((start_idx, end_idx, geom_idx),
                        ('startnumber', 'endnumber', 'linegeo!wkt')):
        if val is not None:
            converted[0].append(col)

    for line in datatable[1:]:
        convline = [oid]
        for val in (start_idx, end_idx):
            if val is not None:
                convline.append(line[val])
        if geom_idx is not None:
            convline.append(line[geom_idx])
        converted.append(convline)

    _rewrite_placeid_field('parent_place_id', 'parent_place_id', converted, place_ids)

    check_table_content(db_conn, 'location_property_osmline', converted, grid=node_grid)


@then(step_parse(r'W(?P<oid>\d+) expands to no interpolation'),
      converters={'oid': int})
def then_check_interpolation_table_negative(db_conn, oid):
    with db_conn.cursor() as cur:
        cur.execute("""SELECT count(*) FROM location_property_osmline
                       WHERE osm_id = %s and startnumber is not null""",
                    (oid, ))
        assert cur.fetchone()[0] == 0


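# Register all feature files under features/db with the steps above. A typical
# scenario (illustrative only, not taken from the test suite) looks like:
#
#   Scenario: simple import
#       Given the places
#           | osm | class | type  |
#           | N1  | place | house |
#       When importing
#       Then placex contains
#           | object | class | type  |
#           | N1     | place | house |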
scenarios('features/db')