X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/af968d49039f5cd8f0ef5839558a57867d7ef111..a33f2c0f5ba43df533b6e137f9151fe67feb20a1:/test/python/conftest.py

diff --git a/test/python/conftest.py b/test/python/conftest.py
index d0fdc569..493620c4 100644
--- a/test/python/conftest.py
+++ b/test/python/conftest.py
@@ -1,3 +1,4 @@
+import importlib
 import itertools
 import sys
 from pathlib import Path
@@ -15,6 +16,9 @@ sys.path.insert(0, str(SRC_DIR.resolve()))
 from nominatim.config import Configuration
 from nominatim.db import connection
 from nominatim.db.sql_preprocessor import SQLPreprocessor
+from nominatim.db import properties
+
+import dummy_tokenizer
 
 class _TestingCursor(psycopg2.extras.DictCursor):
     """ Extension to the DictCursor class that provides execution
@@ -117,9 +121,8 @@ def table_factory(temp_db_cursor):
     def mk_table(name, definition='id INT', content=None):
         temp_db_cursor.execute('CREATE TABLE {} ({})'.format(name, definition))
         if content is not None:
-            if not isinstance(content, str):
-                content = '),('.join([str(x) for x in content])
-            temp_db_cursor.execute("INSERT INTO {} VALUES ({})".format(name, content))
+            psycopg2.extras.execute_values(
+                temp_db_cursor, "INSERT INTO {} VALUES %s".format(name), content)
 
     return mk_table
 
@@ -286,10 +289,29 @@ def osm2pgsql_options(temp_db):
 
 @pytest.fixture
 def sql_preprocessor(temp_db_conn, tmp_path, monkeypatch, table_factory):
-    monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', '.')
-    table_factory('country_name', 'partition INT', (0, 1, 2))
+    table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
     cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
     cfg.set_libdirs(module='.', osm2pgsql='.', php=SRC_DIR / 'lib-php',
                     sql=tmp_path, data=SRC_DIR / 'data')
 
     return SQLPreprocessor(temp_db_conn, cfg)
+
+
+@pytest.fixture
+def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
+    """ Sets up the configuration so that the test dummy tokenizer will be
+        loaded when the tokenizer factory is used. Also returns a factory
+        with which a new dummy tokenizer may be created.
+    """
+    monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
+
+    def _import_dummy(module, *args, **kwargs):
+        return dummy_tokenizer
+
+    monkeypatch.setattr(importlib, "import_module", _import_dummy)
+    properties.set_property(temp_db_conn, 'tokenizer', 'dummy')
+
+    def _create_tokenizer():
+        return dummy_tokenizer.DummyTokenizer(None, None)
+
+    return _create_tokenizer
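
Usage sketch (not part of the patch above): the two behavioural changes in this
diff are that mk_table() now passes content straight to
psycopg2.extras.execute_values(), so callers must supply an iterable of row
tuples, and that the new tokenizer_mock fixture redirects tokenizer loading to
the test dummy tokenizer and returns a factory for creating one directly.
Assuming the fixtures work as shown in the diff, a test module could use them
roughly as follows; the test names are hypothetical.

def test_country_name_rows(table_factory, temp_db_cursor):
    # content must now be a sequence of row tuples, one tuple per row,
    # because it is handed directly to psycopg2.extras.execute_values().
    table_factory('country_name', 'partition INT', ((0,), (1,), (2,)))

    temp_db_cursor.execute('SELECT count(*) FROM country_name')
    assert temp_db_cursor.fetchone()[0] == 3


def test_with_mocked_tokenizer(tokenizer_mock):
    # tokenizer_mock patches importlib.import_module so that any tokenizer
    # factory call resolves to dummy_tokenizer; it also returns a factory
    # that builds a DummyTokenizer instance directly.
    tokenizer = tokenizer_mock()
    assert tokenizer is not None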