Merge pull request #3492 from lonvia/drop-waste-disposal
diff --git a/test/python/tokenizer/test_icu.py b/test/python/tokenizer/test_icu.py
index 0d51bfc4e33006afef1830fc7170332e4318b942..a2bf676699ec9a326f59957740740899da8340dc 100644
@@ -14,8 +14,8 @@ import pytest
 
 from nominatim_db.tokenizer import icu_tokenizer
 import nominatim_db.tokenizer.icu_rule_loader
-from nominatim_core.db import properties
-from nominatim_core.db.sql_preprocessor import SQLPreprocessor
+from nominatim_db.db import properties
+from nominatim_db.db.sql_preprocessor import SQLPreprocessor
 from nominatim_db.data.place_info import PlaceInfo
 
 from mock_icu_word_table import MockIcuWordTable
@@ -199,16 +199,16 @@ def test_update_sql_functions(db_prop, temp_db_cursor,
     assert test_content == set((('1133', ), ))
 
 
-def test_finalize_import(tokenizer_factory, temp_db_conn,
-                         temp_db_cursor, test_config, sql_preprocessor_cfg):
+def test_finalize_import(tokenizer_factory, temp_db_cursor,
+                         test_config, sql_preprocessor_cfg):
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
 
-    assert not temp_db_conn.index_exists('idx_word_word_id')
+    assert not temp_db_cursor.index_exists('word', 'idx_word_word_id')
 
     tok.finalize_import(test_config)
 
-    assert temp_db_conn.index_exists('idx_word_word_id')
+    assert temp_db_cursor.index_exists('word', 'idx_word_word_id')
 
 
 def test_check_database(test_config, tokenizer_factory,
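
Note: the second hunk swaps the connection-level index_exists(name) check for a
cursor-level index_exists(table, name) check. As a rough illustration of what a
cursor-based helper with that signature can look like against PostgreSQL's
pg_indexes catalog, a minimal sketch follows; only the (table, index) call
signature is taken from the tests above, the body is an assumption for
illustration and not Nominatim's actual implementation.

    # Sketch only: NOT Nominatim's implementation, just a plausible
    # cursor-based check matching the index_exists('word', 'idx_word_word_id')
    # call sites in the diff above. Expects a psycopg-style DB-API cursor.
    def index_exists(cur, table: str, index: str) -> bool:
        """Return True if the named index exists on the given table."""
        cur.execute("""SELECT 1 FROM pg_indexes
                        WHERE tablename = %s AND indexname = %s""",
                    (table, index))
        return cur.fetchone() is not None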