git.openstreetmap.org Git - nominatim.git/commitdiff
tests: add fixture for making test project directory
author: Sarah Hoffmann <lonvia@denofr.de>
Tue, 30 Nov 2021 17:01:46 +0000 (18:01 +0100)
committer: Sarah Hoffmann <lonvia@denofr.de>
Tue, 30 Nov 2021 17:01:46 +0000 (18:01 +0100)
test/python/conftest.py
test/python/indexer/test_indexing.py
test/python/tokenizer/test_factory.py
test/python/tokenizer/test_icu.py
test/python/tokenizer/test_icu_rule_loader.py
test/python/tokenizer/test_legacy.py
test/python/tools/test_refresh_address_levels.py
test/python/tools/test_refresh_create_functions.py
test/python/tools/test_refresh_setup_website.py

index 4cea9fb37ef23ab33c11861ccfeab4ba51f05ba4..ce96bf0d22c172fb515096885f18dabb7aea0d9b 100644 (file)
@@ -5,10 +5,9 @@ from pathlib import Path
 import psycopg2
 import pytest
 
 import psycopg2
 import pytest
 
-SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
-
 # always test against the source
 # always test against the source
-sys.path.insert(0, str(SRC_DIR.resolve()))
+SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
+sys.path.insert(0, str(SRC_DIR))
 
 from nominatim.config import Configuration
 from nominatim.db import connection
 
 from nominatim.config import Configuration
 from nominatim.db import connection
@@ -20,6 +19,11 @@ import mocks
 from cursor import CursorForTesting
 
 
 from cursor import CursorForTesting
 
 
+@pytest.fixture
+def src_dir():
+    return SRC_DIR
+
+
 @pytest.fixture
 def temp_db(monkeypatch):
     """ Create an empty database for the test. The database name is also
 @pytest.fixture
 def temp_db(monkeypatch):
     """ Create an empty database for the test. The database name is also
@@ -97,18 +101,25 @@ def table_factory(temp_db_cursor):
 
 
 @pytest.fixture
 
 
 @pytest.fixture
-def def_config():
-    cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
+def def_config(src_dir):
+    cfg = Configuration(None, src_dir / 'settings')
     cfg.set_libdirs(module='.', osm2pgsql='.',
     cfg.set_libdirs(module='.', osm2pgsql='.',
-                    php=SRC_DIR / 'lib-php',
-                    sql=SRC_DIR / 'lib-sql',
-                    data=SRC_DIR / 'data')
+                    php=src_dir / 'lib-php',
+                    sql=src_dir / 'lib-sql',
+                    data=src_dir / 'data')
     return cfg
 
 
 @pytest.fixture
     return cfg
 
 
 @pytest.fixture
-def src_dir():
-    return SRC_DIR.resolve()
+def project_env(src_dir, tmp_path):
+    projdir = tmp_path / 'project'
+    projdir.mkdir()
+    cfg = Configuration(projdir, src_dir / 'settings')
+    cfg.set_libdirs(module='.', osm2pgsql='.',
+                    php=src_dir / 'lib-php',
+                    sql=src_dir / 'lib-sql',
+                    data=src_dir / 'data')
+    return cfg
 
 
 @pytest.fixture
 
 
 @pytest.fixture
index 4c9d940d09b1c2a0a0cddbe78383c34c9251af53..9adf969e4e427823fb2427c8933505c17f7b21e2 100644 (file)
@@ -145,9 +145,8 @@ def test_db(temp_db_conn):
 
 
 @pytest.fixture
 
 
 @pytest.fixture
-def test_tokenizer(tokenizer_mock, def_config, tmp_path):
-    def_config.project_dir = tmp_path
-    return factory.create_tokenizer(def_config)
+def test_tokenizer(tokenizer_mock, project_env):
+    return factory.create_tokenizer(project_env)
 
 
 @pytest.mark.parametrize("threads", [1, 15])
 
 
 @pytest.mark.parametrize("threads", [1, 15])
index 9dc0b7cb5cec8f575f7e4664425d717ca22d53de..87d9b5830fcb2151fe6141a473250dbf2656e7d9 100644 (file)
@@ -8,68 +8,68 @@ from nominatim.tokenizer import factory
 from nominatim.errors import UsageError
 from dummy_tokenizer import DummyTokenizer
 
 from nominatim.errors import UsageError
 from dummy_tokenizer import DummyTokenizer
 
-@pytest.fixture
-def test_config(def_config, tmp_path, property_table, tokenizer_mock):
-    def_config.project_dir = tmp_path
-    return def_config
 
 
+def test_setup_bad_tokenizer_name(project_env, monkeypatch):
+    monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
 
 
-def test_setup_dummy_tokenizer(temp_db_conn, test_config):
-    tokenizer = factory.create_tokenizer(test_config)
+    with pytest.raises(UsageError):
+        factory.create_tokenizer(project_env)
 
 
-    assert isinstance(tokenizer, DummyTokenizer)
-    assert tokenizer.init_state == "new"
-    assert (test_config.project_dir / 'tokenizer').is_dir()
 
 
-    assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
+class TestFactory:
+    @pytest.fixture(autouse=True)
+    def init_env(self, project_env, property_table, tokenizer_mock):
+        self.config = project_env
 
 
 
 
-def test_setup_tokenizer_dir_exists(test_config):
-    (test_config.project_dir / 'tokenizer').mkdir()
+    def test_setup_dummy_tokenizer(self, temp_db_conn):
+        tokenizer = factory.create_tokenizer(self.config)
 
 
-    tokenizer = factory.create_tokenizer(test_config)
+        assert isinstance(tokenizer, DummyTokenizer)
+        assert tokenizer.init_state == "new"
+        assert (self.config.project_dir / 'tokenizer').is_dir()
 
 
-    assert isinstance(tokenizer, DummyTokenizer)
-    assert tokenizer.init_state == "new"
+        assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
 
 
 
 
-def test_setup_tokenizer_dir_failure(test_config):
-    (test_config.project_dir / 'tokenizer').write_text("foo")
+    def test_setup_tokenizer_dir_exists(self):
+        (self.config.project_dir / 'tokenizer').mkdir()
 
 
-    with pytest.raises(UsageError):
-        factory.create_tokenizer(test_config)
+        tokenizer = factory.create_tokenizer(self.config)
 
 
+        assert isinstance(tokenizer, DummyTokenizer)
+        assert tokenizer.init_state == "new"
 
 
-def test_setup_bad_tokenizer_name(def_config, tmp_path, monkeypatch):
-    def_config.project_dir = tmp_path
-    monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
 
 
-    with pytest.raises(UsageError):
-        factory.create_tokenizer(def_config)
+    def test_setup_tokenizer_dir_failure(self):
+        (self.config.project_dir / 'tokenizer').write_text("foo")
 
 
+        with pytest.raises(UsageError):
+            factory.create_tokenizer(self.config)
 
 
-def test_load_tokenizer(test_config):
-    factory.create_tokenizer(test_config)
 
 
-    tokenizer = factory.get_tokenizer_for_db(test_config)
+    def test_load_tokenizer(self):
+        factory.create_tokenizer(self.config)
 
 
-    assert isinstance(tokenizer, DummyTokenizer)
-    assert tokenizer.init_state == "loaded"
+        tokenizer = factory.get_tokenizer_for_db(self.config)
 
 
+        assert isinstance(tokenizer, DummyTokenizer)
+        assert tokenizer.init_state == "loaded"
 
 
-def test_load_no_tokenizer_dir(test_config):
-    factory.create_tokenizer(test_config)
 
 
-    test_config.project_dir = test_config.project_dir / 'foo'
+    def test_load_no_tokenizer_dir(self):
+        factory.create_tokenizer(self.config)
 
 
-    with pytest.raises(UsageError):
-        factory.get_tokenizer_for_db(test_config)
+        self.config.project_dir = self.config.project_dir / 'foo'
 
 
+        with pytest.raises(UsageError):
+            factory.get_tokenizer_for_db(self.config)
 
 
-def test_load_missing_propoerty(temp_db_cursor, test_config):
-    factory.create_tokenizer(test_config)
 
 
-    temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
+    def test_load_missing_property(self, temp_db_cursor):
+        factory.create_tokenizer(self.config)
 
 
-    with pytest.raises(UsageError):
-        factory.get_tokenizer_for_db(test_config)
+        temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
+
+        with pytest.raises(UsageError):
+            factory.get_tokenizer_for_db(self.config)
index 5dbe292e38bcd73c869275ed225794adfaa2e76e..165fc67f7d785ec23965df9678fa3211ee7426fa 100644 (file)
@@ -20,20 +20,17 @@ def word_table(temp_db_conn):
 
 
 @pytest.fixture
 
 
 @pytest.fixture
-def test_config(def_config, tmp_path):
-    def_config.project_dir = tmp_path / 'project'
-    def_config.project_dir.mkdir()
-
+def test_config(project_env, tmp_path):
     sqldir = tmp_path / 'sql'
     sqldir.mkdir()
     (sqldir / 'tokenizer').mkdir()
     (sqldir / 'tokenizer' / 'icu_tokenizer.sql').write_text("SELECT 'a'")
     sqldir = tmp_path / 'sql'
     sqldir.mkdir()
     (sqldir / 'tokenizer').mkdir()
     (sqldir / 'tokenizer' / 'icu_tokenizer.sql').write_text("SELECT 'a'")
-    shutil.copy(str(def_config.lib_dir.sql / 'tokenizer' / 'icu_tokenizer_tables.sql'),
+    shutil.copy(str(project_env.lib_dir.sql / 'tokenizer' / 'icu_tokenizer_tables.sql'),
                 str(sqldir / 'tokenizer' / 'icu_tokenizer_tables.sql'))
 
                 str(sqldir / 'tokenizer' / 'icu_tokenizer_tables.sql'))
 
-    def_config.lib_dir.sql = sqldir
+    project_env.lib_dir.sql = sqldir
 
 
-    return def_config
+    return project_env
 
 
 @pytest.fixture
 
 
 @pytest.fixture
index e22ccd4b054f1a73d3e9bc99b25ba3715356ca38..9e5a334bf48b99fe1f39a5d8d1721688376540af 100644 (file)
@@ -11,18 +11,20 @@ from nominatim.errors import UsageError
 
 from icu import Transliterator
 
 
 from icu import Transliterator
 
-@pytest.fixture
-def test_config(def_config, tmp_path):
-    project_dir = tmp_path / 'project_dir'
-    project_dir.mkdir()
-    def_config.project_dir = project_dir
+CONFIG_SECTIONS = ('normalization', 'transliteration', 'token-analysis')
+
+class TestIcuRuleLoader:
+
+    @pytest.fixture(autouse=True)
+    def init_env(self, project_env):
+        self.project_env = project_env
 
 
-    return def_config
 
 
+    def write_config(self, content):
+        (self.project_env.project_dir / 'icu_tokenizer.yaml').write_text(dedent(content))
 
 
-@pytest.fixture
-def cfgrules(test_config):
-    def _create_config(*variants, **kwargs):
+
+    def config_rules(self, *variants):
         content = dedent("""\
         normalization:
             - ":: NFD ()"
         content = dedent("""\
         normalization:
             - ":: NFD ()"
@@ -33,122 +35,116 @@ def cfgrules(test_config):
         transliteration:
             - "::  Latin ()"
             - "[[:Punctuation:][:Space:]]+ > ' '"
         transliteration:
             - "::  Latin ()"
             - "[[:Punctuation:][:Space:]]+ > ' '"
-        """)
-        content += "token-analysis:\n  - analyzer: generic\n    variants:\n     - words:\n"
-        content += '\n'.join(("         - " + s for s in variants)) + '\n'
-        for k, v in kwargs:
-            content += "    {}: {}\n".format(k, v)
-        (test_config.project_dir / 'icu_tokenizer.yaml').write_text(content)
-
-        return test_config
-
-    return _create_config
-
-
-def test_empty_rule_set(test_config):
-    (test_config.project_dir / 'icu_tokenizer.yaml').write_text(dedent("""\
-        normalization:
-        transliteration:
         token-analysis:
         token-analysis:
-          - analyzer: generic
-            variants:
-        """))
+            - analyzer: generic
+              variants:
+                  - words:
+        """)
+        content += '\n'.join(("             - " + s for s in variants)) + '\n'
+        self.write_config(content)
 
 
-    rules = ICURuleLoader(test_config)
-    assert rules.get_search_rules() == ''
-    assert rules.get_normalization_rules() == ''
-    assert rules.get_transliteration_rules() == ''
 
 
-CONFIG_SECTIONS = ('normalization', 'transliteration', 'token-analysis')
+    def get_replacements(self, *variants):
+        self.config_rules(*variants)
+        loader = ICURuleLoader(self.project_env)
+        rules = loader.analysis[None].config['replacements']
 
 
-@pytest.mark.parametrize("section", CONFIG_SECTIONS)
-def test_missing_section(section, test_config):
-    rule_cfg = { s: [] for s in CONFIG_SECTIONS if s != section}
-    (test_config.project_dir / 'icu_tokenizer.yaml').write_text(yaml.dump(rule_cfg))
+        return sorted((k, sorted(v)) for k,v in rules)
 
 
-    with pytest.raises(UsageError):
-        ICURuleLoader(test_config)
 
 
+    def test_empty_rule_set(self):
+        self.write_config("""\
+            normalization:
+            transliteration:
+            token-analysis:
+              - analyzer: generic
+                variants:
+            """)
 
 
-def test_get_search_rules(cfgrules):
-    loader = ICURuleLoader(cfgrules())
+        rules = ICURuleLoader(self.project_env)
+        assert rules.get_search_rules() == ''
+        assert rules.get_normalization_rules() == ''
+        assert rules.get_transliteration_rules() == ''
 
 
-    rules = loader.get_search_rules()
-    trans = Transliterator.createFromRules("test", rules)
 
 
-    assert trans.transliterate(" Baum straße ") == " baum straße "
-    assert trans.transliterate(" Baumstraße ") == " baumstraße "
-    assert trans.transliterate(" Baumstrasse ") == " baumstrasse "
-    assert trans.transliterate(" Baumstr ") == " baumstr "
-    assert trans.transliterate(" Baumwegstr ") == " baumwegstr "
-    assert trans.transliterate(" Αθήνα ") == " athēna "
-    assert trans.transliterate(" проспект ") == " prospekt "
+    @pytest.mark.parametrize("section", CONFIG_SECTIONS)
+    def test_missing_section(self, section):
+        rule_cfg = { s: [] for s in CONFIG_SECTIONS if s != section}
+        self.write_config(yaml.dump(rule_cfg))
 
 
+        with pytest.raises(UsageError):
+            ICURuleLoader(self.project_env)
 
 
-def test_get_normalization_rules(cfgrules):
-    loader = ICURuleLoader(cfgrules())
-    rules = loader.get_normalization_rules()
-    trans = Transliterator.createFromRules("test", rules)
 
 
-    assert trans.transliterate(" проспект-Prospekt ") == " проспект prospekt "
+    def test_get_search_rules(self):
+        self.config_rules()
+        loader = ICURuleLoader(self.project_env)
 
 
+        rules = loader.get_search_rules()
+        trans = Transliterator.createFromRules("test", rules)
 
 
-def test_get_transliteration_rules(cfgrules):
-    loader = ICURuleLoader(cfgrules())
-    rules = loader.get_transliteration_rules()
-    trans = Transliterator.createFromRules("test", rules)
+        assert trans.transliterate(" Baum straße ") == " baum straße "
+        assert trans.transliterate(" Baumstraße ") == " baumstraße "
+        assert trans.transliterate(" Baumstrasse ") == " baumstrasse "
+        assert trans.transliterate(" Baumstr ") == " baumstr "
+        assert trans.transliterate(" Baumwegstr ") == " baumwegstr "
+        assert trans.transliterate(" Αθήνα ") == " athēna "
+        assert trans.transliterate(" проспект ") == " prospekt "
 
 
-    assert trans.transliterate(" проспект-Prospekt ") == " prospekt Prospekt "
 
 
+    def test_get_normalization_rules(self):
+        self.config_rules()
+        loader = ICURuleLoader(self.project_env)
+        rules = loader.get_normalization_rules()
+        trans = Transliterator.createFromRules("test", rules)
 
 
-def test_transliteration_rules_from_file(test_config):
-    cfgpath = test_config.project_dir / ('icu_tokenizer.yaml')
-    cfgpath.write_text(dedent("""\
-        normalization:
-        transliteration:
-            - "'ax' > 'b'"
-            - !include transliteration.yaml
-        token-analysis:
-            - analyzer: generic
-              variants:
-        """))
-    transpath = test_config.project_dir / ('transliteration.yaml')
-    transpath.write_text('- "x > y"')
+        assert trans.transliterate(" проспект-Prospekt ") == " проспект prospekt "
 
 
-    loader = ICURuleLoader(test_config)
-    rules = loader.get_transliteration_rules()
-    trans = Transliterator.createFromRules("test", rules)
 
 
-    assert trans.transliterate(" axxt ") == " byt "
+    def test_get_transliteration_rules(self):
+        self.config_rules()
+        loader = ICURuleLoader(self.project_env)
+        rules = loader.get_transliteration_rules()
+        trans = Transliterator.createFromRules("test", rules)
 
 
+        assert trans.transliterate(" проспект-Prospekt ") == " prospekt Prospekt "
 
 
-def test_search_rules(cfgrules):
-    config = cfgrules('~street => s,st', 'master => mstr')
-    proc = ICURuleLoader(config).make_token_analysis()
 
 
-    assert proc.search.transliterate('Master Street').strip() == 'master street'
-    assert proc.search.transliterate('Earnes St').strip() == 'earnes st'
-    assert proc.search.transliterate('Nostreet').strip() == 'nostreet'
+    def test_transliteration_rules_from_file(self):
+        self.write_config("""\
+            normalization:
+            transliteration:
+                - "'ax' > 'b'"
+                - !include transliteration.yaml
+            token-analysis:
+                - analyzer: generic
+                  variants:
+            """)
+        transpath = self.project_env.project_dir / ('transliteration.yaml')
+        transpath.write_text('- "x > y"')
 
 
+        loader = ICURuleLoader(self.project_env)
+        rules = loader.get_transliteration_rules()
+        trans = Transliterator.createFromRules("test", rules)
 
 
-class TestGetReplacements:
+        assert trans.transliterate(" axxt ") == " byt "
 
 
-    @pytest.fixture(autouse=True)
-    def setup_cfg(self, cfgrules):
-        self.cfgrules = cfgrules
 
 
-    def get_replacements(self, *variants):
-        loader = ICURuleLoader(self.cfgrules(*variants))
-        rules = loader.analysis[None].config['replacements']
+    def test_search_rules(self):
+        self.config_rules('~street => s,st', 'master => mstr')
+        proc = ICURuleLoader(self.project_env).make_token_analysis()
 
 
-        return sorted((k, sorted(v)) for k,v in rules)
+        assert proc.search.transliterate('Master Street').strip() == 'master street'
+        assert proc.search.transliterate('Earnes St').strip() == 'earnes st'
+        assert proc.search.transliterate('Nostreet').strip() == 'nostreet'
 
 
     @pytest.mark.parametrize("variant", ['foo > bar', 'foo -> bar -> bar',
                                          '~foo~ -> bar', 'fo~ o -> bar'])
     def test_invalid_variant_description(self, variant):
 
 
     @pytest.mark.parametrize("variant", ['foo > bar', 'foo -> bar -> bar',
                                          '~foo~ -> bar', 'fo~ o -> bar'])
     def test_invalid_variant_description(self, variant):
+        self.config_rules(variant)
         with pytest.raises(UsageError):
         with pytest.raises(UsageError):
-            ICURuleLoader(self.cfgrules(variant))
+            ICURuleLoader(self.project_env)
 
     def test_add_full(self):
         repl = self.get_replacements("foo -> bar")
 
     def test_add_full(self):
         repl = self.get_replacements("foo -> bar")
index 53d45c1c93a0f44f375ab11b675902497789b73e..f90fc49402a6d235dd74e1081a7c98ace0587e29 100644 (file)
@@ -11,28 +11,25 @@ from nominatim.db import properties
 from nominatim.errors import UsageError
 
 @pytest.fixture
 from nominatim.errors import UsageError
 
 @pytest.fixture
-def test_config(def_config, tmp_path):
-    def_config.project_dir = tmp_path / 'project'
-    def_config.project_dir.mkdir()
-
+def test_config(project_env, tmp_path):
     module_dir = tmp_path / 'module_src'
     module_dir.mkdir()
     (module_dir / 'nominatim.so').write_text('TEST nomiantim.so')
 
     module_dir = tmp_path / 'module_src'
     module_dir.mkdir()
     (module_dir / 'nominatim.so').write_text('TEST nomiantim.so')
 
-    def_config.lib_dir.module = module_dir
+    project_env.lib_dir.module = module_dir
 
     sqldir = tmp_path / 'sql'
     sqldir.mkdir()
     (sqldir / 'tokenizer').mkdir()
     (sqldir / 'tokenizer' / 'legacy_tokenizer.sql').write_text("SELECT 'a'")
     (sqldir / 'words.sql').write_text("SELECT 'a'")
 
     sqldir = tmp_path / 'sql'
     sqldir.mkdir()
     (sqldir / 'tokenizer').mkdir()
     (sqldir / 'tokenizer' / 'legacy_tokenizer.sql').write_text("SELECT 'a'")
     (sqldir / 'words.sql').write_text("SELECT 'a'")
-    shutil.copy(str(def_config.lib_dir.sql / 'tokenizer' / 'legacy_tokenizer_tables.sql'),
+    shutil.copy(str(project_env.lib_dir.sql / 'tokenizer' / 'legacy_tokenizer_tables.sql'),
                 str(sqldir / 'tokenizer' / 'legacy_tokenizer_tables.sql'))
 
                 str(sqldir / 'tokenizer' / 'legacy_tokenizer_tables.sql'))
 
-    def_config.lib_dir.sql = sqldir
-    def_config.lib_dir.data = sqldir
+    project_env.lib_dir.sql = sqldir
+    project_env.lib_dir.data = sqldir
 
 
-    return def_config
+    return project_env
 
 
 @pytest.fixture
 
 
 @pytest.fixture
index 2c4ee24defb46def96ca69b29043fb19ecfa90d8..40d13641632c749a709c2973a0260823bac0e22b 100644 (file)
@@ -13,24 +13,21 @@ def test_load_ranks_def_config(temp_db_conn, temp_db_cursor, def_config):
 
     assert temp_db_cursor.table_rows('address_levels') > 0
 
 
     assert temp_db_cursor.table_rows('address_levels') > 0
 
-def test_load_ranks_from_project_dir(def_config, temp_db_conn, temp_db_cursor,
-                                     tmp_path):
-    test_file = tmp_path / 'address-levels.json'
+def test_load_ranks_from_project_dir(project_env, temp_db_conn, temp_db_cursor):
+    test_file = project_env.project_dir / 'address-levels.json'
     test_file.write_text('[{"tags":{"place":{"sea":2}}}]')
     test_file.write_text('[{"tags":{"place":{"sea":2}}}]')
-    def_config.project_dir = tmp_path
 
 
-    load_address_levels_from_config(temp_db_conn, def_config)
+    load_address_levels_from_config(temp_db_conn, project_env)
 
     assert temp_db_cursor.table_rows('address_levels') == 1
 
 
 
     assert temp_db_cursor.table_rows('address_levels') == 1
 
 
-def test_load_ranks_from_broken_file(def_config, temp_db_conn, tmp_path):
-    test_file = tmp_path / 'address-levels.json'
+def test_load_ranks_from_broken_file(project_env, temp_db_conn):
+    test_file = project_env.project_dir / 'address-levels.json'
     test_file.write_text('[{"tags":"place":{"sea":2}}}]')
     test_file.write_text('[{"tags":"place":{"sea":2}}}]')
-    def_config.project_dir = tmp_path
 
     with pytest.raises(json.decoder.JSONDecodeError):
 
     with pytest.raises(json.decoder.JSONDecodeError):
-        load_address_levels_from_config(temp_db_conn, def_config)
+        load_address_levels_from_config(temp_db_conn, project_env)
 
 
 def test_load_ranks_country(temp_db_conn, temp_db_cursor):
 
 
 def test_load_ranks_country(temp_db_conn, temp_db_cursor):
index 00b863ab1e621289c3a38be1dbd4d65a97496e6c..9fddcd212960590aff5ee86840b90f1f323e1f94 100644 (file)
@@ -5,47 +5,47 @@ import pytest
 
 from nominatim.tools.refresh import create_functions
 
 
 from nominatim.tools.refresh import create_functions
 
-@pytest.fixture
-def sql_tmp_path(tmp_path, def_config):
-    def_config.lib_dir.sql = tmp_path
-    return tmp_path
-
-@pytest.fixture
-def conn(sql_preprocessor, temp_db_conn):
-    return temp_db_conn
-
-
-def test_create_functions(temp_db_cursor, conn, def_config, sql_tmp_path):
-    sqlfile = sql_tmp_path / 'functions.sql'
-    sqlfile.write_text("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
-                          AS $$
-                          BEGIN
-                            RETURN 43;
-                          END;
-                          $$ LANGUAGE plpgsql IMMUTABLE;
-                       """)
-
-    create_functions(conn, def_config)
-
-    assert temp_db_cursor.scalar('SELECT test()') == 43
-
-
-@pytest.mark.parametrize("dbg,ret", ((True, 43), (False, 22)))
-def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path,
-                                        dbg, ret):
-    sqlfile = sql_tmp_path / 'functions.sql'
-    sqlfile.write_text("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
-                          AS $$
-                          BEGIN
-                            {% if debug %}
-                            RETURN 43;
-                            {% else %}
-                            RETURN 22;
-                            {% endif %}
-                          END;
-                          $$ LANGUAGE plpgsql IMMUTABLE;
-                       """)
-
-    create_functions(conn, def_config, enable_debug=dbg)
-
-    assert temp_db_cursor.scalar('SELECT test()') == ret
+class TestCreateFunctions:
+    @pytest.fixture(autouse=True)
+    def init_env(self, sql_preprocessor, temp_db_conn, def_config, tmp_path):
+        self.conn = temp_db_conn
+        self.config = def_config
+        def_config.lib_dir.sql = tmp_path
+
+
+    def write_functions(self, content):
+        sqlfile = self.config.lib_dir.sql / 'functions.sql'
+        sqlfile.write_text(content)
+
+
+    def test_create_functions(self, temp_db_cursor):
+        self.write_functions("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
+                              AS $$
+                              BEGIN
+                                RETURN 43;
+                              END;
+                              $$ LANGUAGE plpgsql IMMUTABLE;
+                           """)
+
+        create_functions(self.conn, self.config)
+
+        assert temp_db_cursor.scalar('SELECT test()') == 43
+
+
+    @pytest.mark.parametrize("dbg,ret", ((True, 43), (False, 22)))
+    def test_create_functions_with_template(self, temp_db_cursor, dbg, ret):
+        self.write_functions("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
+                              AS $$
+                              BEGIN
+                                {% if debug %}
+                                RETURN 43;
+                                {% else %}
+                                RETURN 22;
+                                {% endif %}
+                              END;
+                              $$ LANGUAGE plpgsql IMMUTABLE;
+                           """)
+
+        create_functions(self.conn, self.config, enable_debug=dbg)
+
+        assert temp_db_cursor.scalar('SELECT test()') == ret
index 8946bd1feae1790d3e29242e74ef28d07f3f616e..7102d43ba77b9faac505b1f146c868612fa2aebe 100644 (file)
@@ -22,12 +22,11 @@ def test_script(tmp_path):
 
 
 @pytest.fixture
 
 
 @pytest.fixture
-def run_website_script(tmp_path, def_config, temp_db_conn):
-    def_config.lib_dir.php = tmp_path / 'php'
-    def_config.project_dir = tmp_path
+def run_website_script(tmp_path, project_env, temp_db_conn):
+    project_env.lib_dir.php = tmp_path / 'php'
 
     def _runner():
 
     def _runner():
-        refresh.setup_website(tmp_path, def_config, temp_db_conn)
+        refresh.setup_website(tmp_path, project_env, temp_db_conn)
 
         proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
                                tmp_path / 'search.php'], check=False)
 
         proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
                                tmp_path / 'search.php'], check=False)