git.openstreetmap.org Git - nominatim.git/blobdiff - nominatim/tools/check_database.py
remove tests that differ between lua and gazetteer versions
index e5cefe4f9c2ef6ddb944d262d5df89f64a14fa98..437775db6a874561d5451d3a8683fd085a3ba17c 100644 (file)
@@ -114,9 +114,10 @@ def _get_indexes(conn: Connection) -> List[str]:
             indexes.extend(('idx_placex_housenumber',
                             'idx_osmline_parent_osm_id_with_hnr'))
     if conn.table_exists('place'):
-        indexes.extend(('idx_placex_pendingsector',
-                        'idx_location_area_country_place_id',
-                        'idx_place_osm_unique'))
+        indexes.extend(('idx_location_area_country_place_id',
+                        'idx_place_osm_unique',
+                        'idx_placex_rank_address_sector',
+                        'idx_placex_rank_boundaries_sector'))
 
     return indexes
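
Note: the list built by _get_indexes() names the indexes the database check expects to find. A minimal sketch of how that list could be verified against PostgreSQL's pg_indexes catalogue follows; it is not part of the patch, and the helper name find_missing_indexes() and the 'public' schema are assumptions.

def find_missing_indexes(conn: Connection) -> List[str]:
    """ Hypothetical helper: report expected indexes that are absent. """
    with conn.cursor() as cur:
        # pg_indexes lists one row per index; the schema name is assumed here.
        cur.execute("SELECT indexname FROM pg_indexes WHERE schemaname = 'public'")
        existing = {row[0] for row in cur}
    return [name for name in _get_indexes(conn) if name not in existing]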
 
@@ -163,7 +164,7 @@ def check_placex_table(conn: Connection, config: Configuration) -> CheckResult:
     return CheckState.FATAL, dict(config=config)
 
 
-@_check(hint="""placex table has no data. Did the import finish sucessfully?""")
+@_check(hint="""placex table has no data. Did the import finish successfully?""")
 def check_placex_size(conn: Connection, _: Configuration) -> CheckResult:
     """ Checking for placex content
     """
@@ -181,7 +182,7 @@ def check_tokenizer(_: Connection, config: Configuration) -> CheckResult:
         tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
     except UsageError:
         return CheckState.FAIL, dict(msg="""\
-            Cannot load tokenizer. Did the import finish sucessfully?""")
+            Cannot load tokenizer. Did the import finish successfully?""")
 
     result = tokenizer.check_database(config)
 
@@ -199,7 +200,7 @@ def check_tokenizer(_: Connection, config: Configuration) -> CheckResult:
 def check_existance_wikipedia(conn: Connection, _: Configuration) -> CheckResult:
     """ Checking for wikipedia/wikidata data
     """
-    if not conn.table_exists('search_name'):
+    if not conn.table_exists('search_name') or not conn.table_exists('place'):
         return CheckState.NOT_APPLICABLE
 
     with conn.cursor() as cur:
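
Note: conn.table_exists() decides whether this check applies at all. For illustration only, a sketch of what such a test can boil down to in PostgreSQL follows; Nominatim's actual Connection.table_exists() implementation may differ.

def table_exists(conn: Connection, table: str) -> bool:
    """ Hypothetical sketch of a table-existence test; the real
        Connection.table_exists() may be implemented differently. """
    with conn.cursor() as cur:
        # to_regclass() resolves a relation name and yields NULL if it is unknown.
        cur.execute('SELECT to_regclass(%s) IS NOT NULL', (table, ))
        return bool(cur.fetchone()[0])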
@@ -268,7 +269,7 @@ def check_database_index_valid(conn: Connection, _: Configuration) -> CheckResult:
                         WHERE pg_index.indisvalid = false
                         AND pg_index.indexrelid = pg_class.oid""")
 
-        broken = list(cur)
+        broken = [c[0] for c in cur]
 
     if broken:
         return CheckState.FAIL, dict(indexes='\n  '.join(broken))
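
Note: iterating a cursor yields one tuple per row, so the previous list(cur) produced single-element tuples, while the later '\n  '.join(broken) needs plain strings; the comprehension picks out the first column (the index name). A standalone sketch of the same pattern with plain psycopg2 follows; the connection parameters are assumptions and this is not part of the patch.

import psycopg2

with psycopg2.connect(dbname='nominatim') as conn:
    with conn.cursor() as cur:
        # Same catalogue query as above: indexes whose build did not complete.
        cur.execute("""SELECT relname FROM pg_class, pg_index
                       WHERE pg_index.indisvalid = false
                         AND pg_index.indexrelid = pg_class.oid""")
        # Each row is a tuple; keep only the first column so the names
        # can be joined into a readable message.
        broken = [row[0] for row in cur]

if broken:
    print('Invalid indexes:\n  ' + '\n  '.join(broken))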