LazyFmt("Bad place id in column %s. Expected: %s, got: %s.",
column, expected, PlaceObjName(result, context.db)))
+def check_database_integrity(context):
+    """ Check some generic constraints on the tables.
+    """
+    # place_addressline should not have duplicate (place_id, address_place_id)
+    cur = context.db.cursor()
+    cur.execute("""SELECT count(*) FROM
+                    (SELECT place_id, address_place_id, count(*) as c
+                     FROM place_addressline GROUP BY place_id, address_place_id) x
+                   WHERE c > 1""")
+    eq_(0, cur.fetchone()[0], "Duplicates found in place_addressline")
+
+
 class NominatimID:
     """ Splits a unique identifier for places into its components.
         As place_ids cannot be used for testing, we use a unique
                     and ST_GeometryType(geometry) = 'ST_LineString'""")
     context.db.commit()
     context.nominatim.run_setup_script('calculate-postcodes', 'index', 'index-noanalyse')
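+    # after the freshly imported data has been indexed, the generic
+    # table constraints must still hold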
+    check_database_integrity(context)
@when("updating places")
def update_place_table(context):
if cur.rowcount == 0:
break
+ check_database_integrity(context)
+
@when("marking for delete (?P<oids>.*)")
def delete_places(context, oids):
context.nominatim.run_setup_script(
context.db.commit()
-@then("search_name contains")
-def check_search_name_contents(context):
+@then("search_name contains(?P<exclude> not)?")
+def check_search_name_contents(context, exclude):
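+    # <exclude> captures the optional " not" suffix of the step name; when it
+    # is set, the step asserts that the listed terms are absent from search_name.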
     cur = context.db.cursor(cursor_factory=psycopg2.extras.DictCursor)
     for row in context.table:
                                       FROM word, (SELECT unnest(%s) as term) t
                                       WHERE word_token = make_standard_name(t.term)""",
                                    (terms,))
-                    ok_(subcur.rowcount >= len(terms),
-                        "No word entry found for " + row[h])
+                    if not exclude:
+                        ok_(subcur.rowcount >= len(terms),
+                            "No word entry found for " + row[h])
                     for wid in subcur:
-                        assert_in(wid[0], res[h],
-                                  "Missing term for %s/%s: %s" % (pid, h, wid[1]))
+                        if exclude:
+                            assert_not_in(wid[0], res[h],
+                                          "Found term for %s/%s: %s" % (pid, h, wid[1]))
+                        else:
+                            assert_in(wid[0], res[h],
+                                      "Missing term for %s/%s: %s" % (pid, h, wid[1]))
                 else:
                     assert_db_column(res, h, row[h], context)