+import importlib
import itertools
import sys
from pathlib import Path

import psycopg2
import psycopg2.extras
import pytest

SRC_DIR = Path(__file__) / '..' / '..' / '..'

from nominatim.config import Configuration
from nominatim.db import connection
from nominatim.db.sql_preprocessor import SQLPreprocessor
+from nominatim.db import properties
+
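+# Stub tokenizer implementation used by the tokenizer_mock fixture below
+# (assumed to live alongside this conftest so a plain import works).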
+import dummy_tokenizer


class _TestingCursor(psycopg2.extras.DictCursor):
    """ Extension to the DictCursor class that provides execution
        short-cuts to simplify writing assertions.
    """


@pytest.fixture
def table_factory(temp_db_cursor):
    def mk_table(name, definition='id INT', content=None):
        temp_db_cursor.execute('CREATE TABLE {} ({})'.format(name, definition))
        if content is not None:
-            if not isinstance(content, str):
-                content = '),('.join([str(x) for x in content])
-            temp_db_cursor.execute("INSERT INTO {} VALUES ({})".format(name, content))
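+            # execute_values fills the %s placeholder from an iterable of row
+            # tuples, batching them into multi-row INSERTs, so content must now
+            # be passed as rows (e.g. ((0, ), (1, ), (2, ))) instead of a
+            # pre-formatted value string.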
+            psycopg2.extras.execute_values(
+                temp_db_cursor, "INSERT INTO {} VALUES %s".format(name), content)

    return mk_table


@pytest.fixture
def sql_preprocessor(temp_db_conn, tmp_path, monkeypatch, table_factory):
-    table_factory('country_name', 'partition INT', (0, 1, 2))
+    table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
    cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
    cfg.set_libdirs(module='.', osm2pgsql='.', php=SRC_DIR / 'lib-php',
                    sql=tmp_path, data=SRC_DIR / 'data')
    return SQLPreprocessor(temp_db_conn, cfg)
+
+
+@pytest.fixture
+def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
+    """ Sets up the configuration so that the test dummy tokenizer will be
+        loaded when the tokenizer factory is used. Also returns a factory
+        with which a new dummy tokenizer may be created.
+    """
+    monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
+
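+    # Make importlib.import_module() return the dummy_tokenizer module for the
+    # duration of the test, so the dynamic tokenizer import resolves to the stub.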
+    def _import_dummy(module, *args, **kwargs):
+        return dummy_tokenizer
+
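+    # property_table is requested only for its side effect of creating the
+    # properties table; record 'dummy' there as the active tokenizer.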
+    monkeypatch.setattr(importlib, "import_module", _import_dummy)
+    properties.set_property(temp_db_conn, 'tokenizer', 'dummy')
+
+    def _create_tokenizer():
+        return dummy_tokenizer.DummyTokenizer(None, None)
+
+    return _create_tokenizer
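+
+# Illustrative usage sketch (the test name below is a placeholder):
+#
+#     def test_tokenizer_is_dummy(tokenizer_mock):
+#         tokenizer = tokenizer_mock()
+#         assert isinstance(tokenizer, dummy_tokenizer.DummyTokenizer)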