git.openstreetmap.org Git - nominatim.git/blobdiff - test/python/test_tokenizer_factory.py
Merge pull request #2341 from lonvia/cleanup-python-tests
[nominatim.git] / test / python / test_tokenizer_factory.py
index 63c6915b1f81b371295c161718e115d32e31a433..9dc0b7cb5cec8f575f7e4664425d717ca22d53de 100644 (file)
@@ -1,74 +1,63 @@
 """
 Tests for creating new tokenizers.
 """
-import importlib
 import pytest
 
 from nominatim.db import properties
 from nominatim.tokenizer import factory
 from nominatim.errors import UsageError
-import dummy_tokenizer
+from dummy_tokenizer import DummyTokenizer
 
 @pytest.fixture
-def test_config(def_config, tmp_path):
+def test_config(def_config, tmp_path, property_table, tokenizer_mock):
     def_config.project_dir = tmp_path
     return def_config
 
 
-@pytest.fixture
-def tokenizer_import(monkeypatch):
-    monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
-
-    def _import_dummy(module, *args, **kwargs):
-        return dummy_tokenizer
-
-    monkeypatch.setattr(importlib, "import_module", _import_dummy)
-
-
-def test_setup_dummy_tokenizer(temp_db_conn, test_config,
-                               tokenizer_import, property_table):
+def test_setup_dummy_tokenizer(temp_db_conn, test_config):
     tokenizer = factory.create_tokenizer(test_config)
 
-    assert isinstance(tokenizer, dummy_tokenizer.DummyTokenizer)
+    assert isinstance(tokenizer, DummyTokenizer)
     assert tokenizer.init_state == "new"
     assert (test_config.project_dir / 'tokenizer').is_dir()
 
     assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
 
 
-def test_setup_tokenizer_dir_exists(test_config, tokenizer_import, property_table):
+def test_setup_tokenizer_dir_exists(test_config):
     (test_config.project_dir / 'tokenizer').mkdir()
 
     tokenizer = factory.create_tokenizer(test_config)
 
-    assert isinstance(tokenizer, dummy_tokenizer.DummyTokenizer)
+    assert isinstance(tokenizer, DummyTokenizer)
     assert tokenizer.init_state == "new"
 
 
-def test_setup_tokenizer_dir_failure(test_config, tokenizer_import, property_table):
+def test_setup_tokenizer_dir_failure(test_config):
     (test_config.project_dir / 'tokenizer').write_text("foo")
 
     with pytest.raises(UsageError):
         factory.create_tokenizer(test_config)
 
 
-def test_setup_bad_tokenizer_name(test_config, monkeypatch):
+def test_setup_bad_tokenizer_name(def_config, tmp_path, monkeypatch):
+    def_config.project_dir = tmp_path
     monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
 
     with pytest.raises(UsageError):
-        factory.create_tokenizer(test_config)
+        factory.create_tokenizer(def_config)
 
-def test_load_tokenizer(temp_db_conn, test_config,
-                        tokenizer_import, property_table):
+
+def test_load_tokenizer(test_config):
     factory.create_tokenizer(test_config)
 
     tokenizer = factory.get_tokenizer_for_db(test_config)
 
-    assert isinstance(tokenizer, dummy_tokenizer.DummyTokenizer)
+    assert isinstance(tokenizer, DummyTokenizer)
     assert tokenizer.init_state == "loaded"
 
 
-def test_load_no_tokenizer_dir(test_config, tokenizer_import, property_table):
+def test_load_no_tokenizer_dir(test_config):
     factory.create_tokenizer(test_config)
 
     test_config.project_dir = test_config.project_dir / 'foo'
@@ -77,11 +66,10 @@ def test_load_no_tokenizer_dir(test_config, tokenizer_import, property_table):
         factory.get_tokenizer_for_db(test_config)
 
 
-def test_load_missing_propoerty(temp_db_cursor, test_config, tokenizer_import, property_table):
+def test_load_missing_property(temp_db_cursor, test_config):
     factory.create_tokenizer(test_config)
 
     temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
 
     with pytest.raises(UsageError):
         factory.get_tokenizer_for_db(test_config)
-