Merge remote-tracking branch 'upstream/master'

diff --git a/test/python/test_tokenizer_factory.py b/test/python/test_tokenizer_factory.py
index 69517e9401739c1e23ae8cdc3d892cf84f9ca2a2..9dc0b7cb5cec8f575f7e4664425d717ca22d53de 100644
--- a/test/python/test_tokenizer_factory.py
+++ b/test/python/test_tokenizer_factory.py
@@ -1,7 +1,6 @@
 """
 Tests for creating new tokenizers.
 """
 """
 Tests for creating new tokenizers.
 """
-import importlib
 import pytest
 
 from nominatim.db import properties
@@ -10,13 +9,12 @@ from nominatim.errors import UsageError
 from dummy_tokenizer import DummyTokenizer
 
 @pytest.fixture
-def test_config(def_config, tmp_path):
+def test_config(def_config, tmp_path, property_table, tokenizer_mock):
     def_config.project_dir = tmp_path
     return def_config
 
 
-def test_setup_dummy_tokenizer(temp_db_conn, test_config,
-                               tokenizer_mock, property_table):
+def test_setup_dummy_tokenizer(temp_db_conn, test_config):
     tokenizer = factory.create_tokenizer(test_config)
 
     assert isinstance(tokenizer, DummyTokenizer)
@@ -26,7 +24,7 @@ def test_setup_dummy_tokenizer(temp_db_conn, test_config,
     assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
 
 
-def test_setup_tokenizer_dir_exists(test_config, tokenizer_mock, property_table):
+def test_setup_tokenizer_dir_exists(test_config):
     (test_config.project_dir / 'tokenizer').mkdir()
 
     tokenizer = factory.create_tokenizer(test_config)
@@ -35,21 +33,22 @@ def test_setup_tokenizer_dir_exists(test_config, tokenizer_mock, property_table)
     assert tokenizer.init_state == "new"
 
 
-def test_setup_tokenizer_dir_failure(test_config, tokenizer_mock, property_table):
+def test_setup_tokenizer_dir_failure(test_config):
     (test_config.project_dir / 'tokenizer').write_text("foo")
 
     with pytest.raises(UsageError):
         factory.create_tokenizer(test_config)
 
 
-def test_setup_bad_tokenizer_name(test_config, monkeypatch):
+def test_setup_bad_tokenizer_name(def_config, tmp_path, monkeypatch):
+    def_config.project_dir = tmp_path
     monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
 
     with pytest.raises(UsageError):
-        factory.create_tokenizer(test_config)
+        factory.create_tokenizer(def_config)
 
 
-def test_load_tokenizer(temp_db_conn, test_config,
-                        tokenizer_mock, property_table):
+
+def test_load_tokenizer(test_config):
     factory.create_tokenizer(test_config)
 
     tokenizer = factory.get_tokenizer_for_db(test_config)
@@ -58,7 +57,7 @@ def test_load_tokenizer(temp_db_conn, test_config,
     assert tokenizer.init_state == "loaded"
 
 
-def test_load_no_tokenizer_dir(test_config, tokenizer_mock, property_table):
+def test_load_no_tokenizer_dir(test_config):
     factory.create_tokenizer(test_config)
 
     test_config.project_dir = test_config.project_dir / 'foo'
@@ -67,11 +66,10 @@ def test_load_no_tokenizer_dir(test_config, tokenizer_mock, property_table):
         factory.get_tokenizer_for_db(test_config)
 
 
-def test_load_missing_propoerty(temp_db_cursor, test_config, tokenizer_mock, property_table):
+def test_load_missing_propoerty(temp_db_cursor, test_config):
     factory.create_tokenizer(test_config)
 
     temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
 
     with pytest.raises(UsageError):
         factory.get_tokenizer_for_db(test_config)
-
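
Taken together, the patch folds the property_table and tokenizer_mock fixtures into the shared test_config fixture, so the individual tests only request test_config (plus a database fixture where required); test_setup_bad_tokenizer_name switches to def_config with its own project_dir, presumably to avoid pulling in the tokenizer mock. As a rough illustration, the fixture and the first test read as follows once the patch is applied. This is a sketch reassembled from the plus and context lines above, not a verbatim copy of the file; in particular, the import of factory from nominatim.tokenizer is assumed from the factory.create_tokenizer() calls and does not appear in any hunk.

# Sketch only: reassembled from the diff above.
import pytest

from nominatim.db import properties
from nominatim.tokenizer import factory   # assumed import path, not shown in the hunks
from nominatim.errors import UsageError

from dummy_tokenizer import DummyTokenizer


@pytest.fixture
def test_config(def_config, tmp_path, property_table, tokenizer_mock):
    # property_table and tokenizer_mock are requested once here,
    # so the tests below no longer need to list them individually.
    def_config.project_dir = tmp_path
    return def_config


def test_setup_dummy_tokenizer(temp_db_conn, test_config):
    tokenizer = factory.create_tokenizer(test_config)

    assert isinstance(tokenizer, DummyTokenizer)
    # ... remaining assertions unchanged; see the context lines of the hunks above.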