X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/8f3845660f18bdbf2dd42dd2c6db6c7fa5160f3e..57598a048e9124b905572ed8dc4fa9465b5d38a6:/test/python/tokenizer/test_icu.py

diff --git a/test/python/tokenizer/test_icu.py b/test/python/tokenizer/test_icu.py
index 2a4865db..a2bf6766 100644
--- a/test/python/tokenizer/test_icu.py
+++ b/test/python/tokenizer/test_icu.py
@@ -1,8 +1,8 @@
-# SPDX-License-Identifier: GPL-2.0-only
+# SPDX-License-Identifier: GPL-3.0-or-later
 #
 # This file is part of Nominatim. (https://nominatim.org)
 #
-# Copyright (C) 2022 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
 # For a full list of authors see the git log.
 """
 Tests for ICU tokenizer.
@@ -12,11 +12,11 @@ import itertools
 
 import pytest
 
-from nominatim.tokenizer import icu_tokenizer
-import nominatim.tokenizer.icu_rule_loader
-from nominatim.db import properties
-from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.data.place_info import PlaceInfo
+from nominatim_db.tokenizer import icu_tokenizer
+import nominatim_db.tokenizer.icu_rule_loader
+from nominatim_db.db import properties
+from nominatim_db.db.sql_preprocessor import SQLPreprocessor
+from nominatim_db.data.place_info import PlaceInfo
 
 from mock_icu_word_table import MockIcuWordTable
 
@@ -83,7 +83,7 @@ def analyzer(tokenizer_factory, test_config, monkeypatch,
             cfgstr['token-analysis'].append({'id': '@postcode',
                                              'analyzer': 'postcodes'})
         (test_config.project_dir / 'icu_tokenizer.yaml').write_text(yaml.dump(cfgstr))
-        tok.loader = nominatim.tokenizer.icu_rule_loader.ICURuleLoader(test_config)
+        tok.loader = nominatim_db.tokenizer.icu_rule_loader.ICURuleLoader(test_config)
 
         return tok.name_analyzer()
 
@@ -157,7 +157,7 @@ def test_init_new(tokenizer_factory, test_config, db_prop):
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
 
-    assert db_prop(nominatim.tokenizer.icu_rule_loader.DBCFG_IMPORT_NORM_RULES) \
+    assert db_prop(nominatim_db.tokenizer.icu_rule_loader.DBCFG_IMPORT_NORM_RULES) \
             .startswith(':: lower ();')
 
 
@@ -199,16 +199,16 @@ def test_update_sql_functions(db_prop, temp_db_cursor,
     assert test_content == set((('1133', ), ))
 
 
-def test_finalize_import(tokenizer_factory, temp_db_conn,
-                         temp_db_cursor, test_config, sql_preprocessor_cfg):
+def test_finalize_import(tokenizer_factory, temp_db_cursor,
+                         test_config, sql_preprocessor_cfg):
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
 
-    assert not temp_db_conn.index_exists('idx_word_word_id')
+    assert not temp_db_cursor.index_exists('word', 'idx_word_word_id')
 
     tok.finalize_import(test_config)
 
-    assert temp_db_conn.index_exists('idx_word_word_id')
+    assert temp_db_cursor.index_exists('word', 'idx_word_word_id')
 
 
 def test_check_database(test_config, tokenizer_factory,
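
Note on the index assertions above: the updated tests call index_exists(table, index) on the test cursor instead of index_exists(index) on the connection. The cursor fixture itself is not part of this diff; the snippet below is only a hedged sketch of how such a helper could be written with psycopg (v3) by querying the pg_indexes catalog view. The class name TestingCursor and the database name are illustrative assumptions, not code taken from the Nominatim test suite.

# Illustrative only: a possible index_exists(table, index) helper on a
# psycopg cursor, mirroring the signature used in the updated assertions.
import psycopg

class TestingCursor(psycopg.Cursor):  # hypothetical name, not the real fixture
    def index_exists(self, table: str, index: str) -> bool:
        # pg_indexes lists every index together with the table it belongs to.
        self.execute("""SELECT count(*) FROM pg_indexes
                        WHERE tablename = %s AND indexname = %s""",
                     (table, index))
        return self.fetchone()[0] > 0

# Usage sketch (assumes a local PostgreSQL database named 'test_nominatim'):
with psycopg.connect(dbname='test_nominatim') as conn:
    conn.cursor_factory = TestingCursor
    with conn.cursor() as cur:
        print(cur.index_exists('word', 'idx_word_word_id'))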