X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/118858a55e5ec522d870842532d26ff0276c85ba..3a8c5df82d95b2b7346718e972b0d49b82514336:/test/bdd/steps/steps_db_ops.py

diff --git a/test/bdd/steps/steps_db_ops.py b/test/bdd/steps/steps_db_ops.py
index d1f27235..441198fd 100644
--- a/test/bdd/steps/steps_db_ops.py
+++ b/test/bdd/steps/steps_db_ops.py
@@ -1,3 +1,9 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2024 by the Nominatim developer community.
+# For a full list of authors see the git log.
 import logging
 from itertools import chain
 
@@ -6,19 +12,25 @@ import psycopg2.extras
 from place_inserter import PlaceColumn
 from table_compare import NominatimID, DBRow
 
-from nominatim.indexer import indexer
-from nominatim.tokenizer import factory as tokenizer_factory
+from nominatim_db.indexer import indexer
+from nominatim_db.tokenizer import factory as tokenizer_factory
 
 def check_database_integrity(context):
     """ Check some generic constraints on the tables.
     """
-    # place_addressline should not have duplicate (place_id, address_place_id)
-    cur = context.db.cursor()
-    cur.execute("""SELECT count(*) FROM
-                    (SELECT place_id, address_place_id, count(*) as c
-                     FROM place_addressline GROUP BY place_id, address_place_id) x
-                   WHERE c > 1""")
-    assert cur.fetchone()[0] == 0, "Duplicates found in place_addressline"
+    with context.db.cursor() as cur:
+        # place_addressline should not have duplicate (place_id, address_place_id)
+        cur.execute("""SELECT count(*) FROM
+                        (SELECT place_id, address_place_id, count(*) as c
+                         FROM place_addressline GROUP BY place_id, address_place_id) x
+                       WHERE c > 1""")
+        assert cur.fetchone()[0] == 0, "Duplicates found in place_addressline"
+
+        # word table must not have empty word_tokens
+        if context.nominatim.tokenizer != 'legacy':
+            cur.execute("SELECT count(*) FROM word WHERE word_token = ''")
+            assert cur.fetchone()[0] == 0, "Empty word tokens found in word table"
+
 
 ################################ GIVEN ##################################
 
@@ -40,33 +52,52 @@ def add_data_to_planet_relations(context):
         for tests on data that looks up members.
     """
     with context.db.cursor() as cur:
-        for r in context.table:
-            last_node = 0
-            last_way = 0
-            parts = []
-            if r['members']:
-                members = []
-                for m in r['members'].split(','):
-                    mid = NominatimID(m)
-                    if mid.typ == 'N':
-                        parts.insert(last_node, int(mid.oid))
-                        last_node += 1
-                        last_way += 1
-                    elif mid.typ == 'W':
-                        parts.insert(last_way, int(mid.oid))
-                        last_way += 1
-                    else:
-                        parts.append(int(mid.oid))
-
-                    members.extend((mid.typ.lower() + mid.oid, mid.cls or ''))
-            else:
-                members = None
-
-            tags = chain.from_iterable([(h[5:], r[h]) for h in r.headings if h.startswith("tags+")])
-
-            cur.execute("""INSERT INTO planet_osm_rels (id, way_off, rel_off, parts, members, tags)
-                           VALUES (%s, %s, %s, %s, %s, %s)""",
-                        (r['id'], last_node, last_way, parts, members, list(tags)))
+        cur.execute("SELECT value FROM osm2pgsql_properties WHERE property = 'db_format'")
+        row = cur.fetchone()
+        if row is None or row[0] == '1':
+            for r in context.table:
+                last_node = 0
+                last_way = 0
+                parts = []
+                if r['members']:
+                    members = []
+                    for m in r['members'].split(','):
+                        mid = NominatimID(m)
+                        if mid.typ == 'N':
+                            parts.insert(last_node, int(mid.oid))
+                            last_node += 1
+                            last_way += 1
+                        elif mid.typ == 'W':
+                            parts.insert(last_way, int(mid.oid))
+                            last_way += 1
+                        else:
+                            parts.append(int(mid.oid))
+
+                        members.extend((mid.typ.lower() + mid.oid, mid.cls or ''))
+                else:
+                    members = None
+
+                tags = chain.from_iterable([(h[5:], r[h]) for h in r.headings if h.startswith("tags+")])
+
+                cur.execute("""INSERT INTO planet_osm_rels (id, way_off, rel_off, parts, members, tags)
+                               VALUES (%s, %s, %s, %s, %s, %s)""",
+                            (r['id'], last_node, last_way, parts, members, list(tags)))
+        else:
+            for r in context.table:
+                if r['members']:
+                    members = []
+                    for m in r['members'].split(','):
+                        mid = NominatimID(m)
+                        members.append({'ref': mid.oid, 'role': mid.cls or '', 'type': mid.typ})
+                else:
+                    members = []
+
+                tags = {h[5:]: r[h] for h in r.headings if h.startswith("tags+")}
+
+                cur.execute("""INSERT INTO planet_osm_rels (id, tags, members)
+                               VALUES (%s, %s, %s)""",
+                            (r['id'], psycopg2.extras.Json(tags),
+                             psycopg2.extras.Json(members)))
 
 @given("the ways")
 def add_data_to_planet_ways(context):
@@ -74,12 +105,19 @@ def add_data_to_planet_ways(context):
         tests on that that looks up node ids in this table.
     """
     with context.db.cursor() as cur:
+        cur.execute("SELECT value FROM osm2pgsql_properties WHERE property = 'db_format'")
+        row = cur.fetchone()
+        json_tags = row is not None and row[0] != '1'
         for r in context.table:
-            tags = chain.from_iterable([(h[5:], r[h]) for h in r.headings if h.startswith("tags+")])
+            if json_tags:
+                tags = psycopg2.extras.Json({h[5:]: r[h] for h in r.headings if h.startswith("tags+")})
+            else:
+                tags = list(chain.from_iterable([(h[5:], r[h])
+                                                 for h in r.headings if h.startswith("tags+")]))
             nodes = [ int(x.strip()) for x in r['nodes'].split(',') ]
 
             cur.execute("INSERT INTO planet_osm_ways (id, nodes, tags) VALUES (%s, %s, %s)",
-                        (r['id'], nodes, list(tags)))
+                        (r['id'], nodes, tags))
 
 ################################ WHEN ##################################
 
@@ -87,32 +125,17 @@ def add_data_to_planet_ways(context):
 def import_and_index_data_from_place_table(context):
     """ Import data previously set up in the place table.
     """
-    nctx = context.nominatim
-
-    tokenizer = tokenizer_factory.create_tokenizer(nctx.get_test_config())
-    context.nominatim.copy_from_place(context.db)
-
-    # XXX use tool function as soon as it is ported
-    with context.db.cursor() as cur:
-        with (context.nominatim.src_dir / 'lib-sql' / 'postcode_tables.sql').open('r') as fd:
-            cur.execute(fd.read())
-        cur.execute("""
-            INSERT INTO location_postcode
-             (place_id, indexed_status, country_code, postcode, geometry)
-            SELECT nextval('seq_place'), 1, country_code,
-                   upper(trim (both ' ' from address->'postcode')) as pc,
-                   ST_Centroid(ST_Collect(ST_Centroid(geometry)))
-              FROM placex
-             WHERE address ? 'postcode' AND address->'postcode' NOT SIMILAR TO '%(,|;)%'
-                   AND geometry IS NOT null
-             GROUP BY country_code, pc""")
-
-    # Call directly as the refresh function does not include postcodes.
-    indexer.LOG.setLevel(logging.ERROR)
-    indexer.Indexer(context.nominatim.get_libpq_dsn(), tokenizer, 1).index_full(analyse=False)
+    context.nominatim.run_nominatim('import', '--continue', 'load-data',
+                                    '--index-noanalyse', '-q',
+                                    '--offline')
 
     check_database_integrity(context)
 
+    # Remove the output of the input, when all was right. Otherwise it will be
+    # output when there are errors that had nothing to do with the import
+    # itself.
+    context.log_capture.buffer.clear()
+
 @when("updating places")
 def update_place_table(context):
     """ Update the place table with the given data. Also runs all triggers
@@ -121,11 +144,20 @@ def update_place_table(context):
     context.nominatim.run_nominatim('refresh', '--functions')
     with context.db.cursor() as cur:
         for row in context.table:
-            PlaceColumn(context).add_row(row, False).db_insert(cur)
+            col = PlaceColumn(context).add_row(row, False)
+            col.db_delete(cur)
+            col.db_insert(cur)
+        cur.execute('SELECT flush_deleted_places()')
 
     context.nominatim.reindex_placex(context.db)
     check_database_integrity(context)
 
+    # Remove the output of the input, when all was right. Otherwise it will be
+    # output when there are errors that had nothing to do with the import
+    # itself.
+    context.log_capture.buffer.clear()
+
+
 @when("updating postcodes")
 def update_postcodes(context):
     """ Rerun the calculation of postcodes.
@@ -140,18 +172,25 @@ def delete_places(context, oids):
     """
     context.nominatim.run_nominatim('refresh', '--functions')
     with context.db.cursor() as cur:
+        cur.execute('TRUNCATE place_to_be_deleted')
         for oid in oids.split(','):
             NominatimID(oid).query_osm_id(cur, 'DELETE FROM place WHERE {}')
+        cur.execute('SELECT flush_deleted_places()')
 
     context.nominatim.reindex_placex(context.db)
 
+    # Remove the output of the input, when all was right. Otherwise it will be
+    # output when there are errors that had nothing to do with the import
+    # itself.
+    context.log_capture.buffer.clear()
+
 ################################ THEN ##################################
 
 @then("(?P