always run function update on migrations
diff --git a/test/python/tokenizer/test_factory.py b/test/python/tokenizer/test_factory.py
index 9dc0b7cb5cec8f575f7e4664425d717ca22d53de..166e6ba6388f424dbbd2347751398294eec45a96 100644
--- a/test/python/tokenizer/test_factory.py
+++ b/test/python/tokenizer/test_factory.py
@@ -1,3 +1,9 @@
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2022 by the Nominatim developer community.
+# For a full list of authors see the git log.
 """
 Tests for creating new tokenizers.
 """
@@ -8,68 +14,68 @@ from nominatim.tokenizer import factory
 from nominatim.errors import UsageError
 from dummy_tokenizer import DummyTokenizer
 
-@pytest.fixture
-def test_config(def_config, tmp_path, property_table, tokenizer_mock):
-    def_config.project_dir = tmp_path
-    return def_config
 
+def test_setup_bad_tokenizer_name(project_env, monkeypatch):
+    monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
 
-def test_setup_dummy_tokenizer(temp_db_conn, test_config):
-    tokenizer = factory.create_tokenizer(test_config)
+    with pytest.raises(UsageError):
+        factory.create_tokenizer(project_env)
 
-    assert isinstance(tokenizer, DummyTokenizer)
-    assert tokenizer.init_state == "new"
-    assert (test_config.project_dir / 'tokenizer').is_dir()
 
-    assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
+class TestFactory:
+    @pytest.fixture(autouse=True)
+    def init_env(self, project_env, property_table, tokenizer_mock):
+        self.config = project_env
 
 
-def test_setup_tokenizer_dir_exists(test_config):
-    (test_config.project_dir / 'tokenizer').mkdir()
+    def test_setup_dummy_tokenizer(self, temp_db_conn):
+        tokenizer = factory.create_tokenizer(self.config)
 
-    tokenizer = factory.create_tokenizer(test_config)
+        assert isinstance(tokenizer, DummyTokenizer)
+        assert tokenizer.init_state == "new"
+        assert (self.config.project_dir / 'tokenizer').is_dir()
 
-    assert isinstance(tokenizer, DummyTokenizer)
-    assert tokenizer.init_state == "new"
+        assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
 
 
-def test_setup_tokenizer_dir_failure(test_config):
-    (test_config.project_dir / 'tokenizer').write_text("foo")
+    def test_setup_tokenizer_dir_exists(self):
+        (self.config.project_dir / 'tokenizer').mkdir()
 
-    with pytest.raises(UsageError):
-        factory.create_tokenizer(test_config)
+        tokenizer = factory.create_tokenizer(self.config)
 
+        assert isinstance(tokenizer, DummyTokenizer)
+        assert tokenizer.init_state == "new"
 
-def test_setup_bad_tokenizer_name(def_config, tmp_path, monkeypatch):
-    def_config.project_dir = tmp_path
-    monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
 
-    with pytest.raises(UsageError):
-        factory.create_tokenizer(def_config)
+    def test_setup_tokenizer_dir_failure(self):
+        (self.config.project_dir / 'tokenizer').write_text("foo")
 
+        with pytest.raises(UsageError):
+            factory.create_tokenizer(self.config)
 
-def test_load_tokenizer(test_config):
-    factory.create_tokenizer(test_config)
 
-    tokenizer = factory.get_tokenizer_for_db(test_config)
+    def test_load_tokenizer(self):
+        factory.create_tokenizer(self.config)
 
-    assert isinstance(tokenizer, DummyTokenizer)
-    assert tokenizer.init_state == "loaded"
+        tokenizer = factory.get_tokenizer_for_db(self.config)
 
+        assert isinstance(tokenizer, DummyTokenizer)
+        assert tokenizer.init_state == "loaded"
 
-def test_load_no_tokenizer_dir(test_config):
-    factory.create_tokenizer(test_config)
 
-    test_config.project_dir = test_config.project_dir / 'foo'
+    def test_load_repopulate_tokenizer_dir(self):
+        factory.create_tokenizer(self.config)
 
-    with pytest.raises(UsageError):
-        factory.get_tokenizer_for_db(test_config)
+        self.config.project_dir = self.config.project_dir
 
+        factory.get_tokenizer_for_db(self.config)
+        assert (self.config.project_dir / 'tokenizer').exists()
 
-def test_load_missing_propoerty(temp_db_cursor, test_config):
-    factory.create_tokenizer(test_config)
 
-    temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
+    def test_load_missing_property(self, temp_db_cursor):
+        factory.create_tokenizer(self.config)
 
-    with pytest.raises(UsageError):
-        factory.get_tokenizer_for_db(test_config)
+        temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
+
+        with pytest.raises(UsageError):
+            factory.get_tokenizer_for_db(self.config)
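
The rewritten tests take a shared project_env fixture instead of the module-local test_config fixture deleted above. A minimal sketch of what such a fixture could look like, assuming it lives in the test suite's conftest and simply points the default configuration at a throw-away project directory; the real definition may differ:

import pytest

@pytest.fixture
def project_env(def_config, tmp_path):
    # give each test its own project directory under pytest's tmp_path
    project_dir = tmp_path / 'project'
    project_dir.mkdir()
    def_config.project_dir = project_dir
    return def_config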
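
The assertions on init_state ("new" after create_tokenizer(), "loaded" after get_tokenizer_for_db()) and on the 'tokenizer' property rely on the DummyTokenizer stub imported at the top of the file together with the tokenizer_mock fixture. A rough sketch of the stub these tests assume; the constructor signature and the method names other than init_state are guesses, not the actual dummy_tokenizer module:

class DummyTokenizer:
    def __init__(self, dsn, data_dir):
        self.dsn = dsn
        self.data_dir = data_dir
        self.init_state = None          # inspected by the tests above

    def init_new_db(self, config):
        # what factory.create_tokenizer() is expected to trigger on a fresh setup
        self.init_state = "new"

    def init_from_project(self, config):
        # what factory.get_tokenizer_for_db() is expected to trigger later
        self.init_state = "loaded"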
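
test_load_no_tokenizer_dir, which expected a UsageError when the tokenizer directory was missing, has become test_load_repopulate_tokenizer_dir, which expects get_tokenizer_for_db() to recreate the directory; this presumably matches the commit's goal of always running the update step on migrations. The assignment self.config.project_dir = self.config.project_dir is a no-op left over from the old test (which appended '/foo'), so the directory created by create_tokenizer() is never actually removed. A hedged sketch of a test body that would exercise the repopulation path, assuming get_tokenizer_for_db() really is meant to rebuild a missing directory; it is a drop-in for the method inside TestFactory, not the committed code:

import shutil

def test_load_repopulate_tokenizer_dir(self):
    factory.create_tokenizer(self.config)

    # remove the directory so get_tokenizer_for_db() has to recreate it
    shutil.rmtree(self.config.project_dir / 'tokenizer')

    factory.get_tokenizer_for_db(self.config)
    assert (self.config.project_dir / 'tokenizer').exists()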