+@pytest.fixture
+def osmline_table(temp_db_with_extensions, temp_db_conn):
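+    """ Create an empty version of the location_property_osmline table
+        (the table for address interpolation lines).
+    """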
+    with temp_db_conn.cursor() as cur:
+        cur.execute("""CREATE TABLE location_property_osmline (
+                           place_id BIGINT,
+                           osm_id BIGINT,
+                           parent_place_id BIGINT,
+                           geometry_sector INTEGER,
+                           indexed_date TIMESTAMP,
+                           startnumber INTEGER,
+                           endnumber INTEGER,
+                           partition SMALLINT,
+                           indexed_status SMALLINT,
+                           linegeo GEOMETRY,
+                           interpolationtype TEXT,
+                           address HSTORE,
+                           postcode TEXT,
+                           country_code VARCHAR(2))""")
+    temp_db_conn.commit()
+
+
+@pytest.fixture
+def word_table(temp_db, temp_db_conn):
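+    """ Create an empty version of the word (search token) table. """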
+    with temp_db_conn.cursor() as cur:
+        cur.execute("""CREATE TABLE word (
+                           word_id INTEGER,
+                           word_token text,
+                           word text,
+                           class text,
+                           type text,
+                           country_code varchar(2),
+                           search_name_count INTEGER,
+                           operator TEXT)""")
+    temp_db_conn.commit()
+
+
+@pytest.fixture
+def osm2pgsql_options(temp_db):
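+    """ Return a basic set of osm2pgsql options against the test database.
+        'echo' stands in for the osm2pgsql binary, so any invocation
+        becomes a harmless no-op.
+    """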
+    return dict(osm2pgsql='echo',
+                osm2pgsql_cache=10,
+                osm2pgsql_style='style.file',
+                threads=1,
+                dsn='dbname=' + temp_db,
+                flatnode_file='',
+                tablespaces=dict(slim_data='', slim_index='',
+                                 main_data='', main_index=''))
+
+
+@pytest.fixture
+def sql_preprocessor(temp_db_conn, tmp_path, monkeypatch, table_factory):
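+    """ Return a SQLPreprocessor for the test database, using a minimal
+        configuration that points the SQL directory at tmp_path.
+    """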
+    table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
+    cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
+    cfg.set_libdirs(module='.', osm2pgsql='.', php=SRC_DIR / 'lib-php',
+                    sql=tmp_path, data=SRC_DIR / 'data')
+
+    return SQLPreprocessor(temp_db_conn, cfg)
+
+
+@pytest.fixture
+def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
+ """ Sets up the configuration so that the test dummy tokenizer will be
+ loaded when the tokenizer factory is used. Also returns a factory
+ with which a new dummy tokenizer may be created.
+ """
+ monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
+
+ def _import_dummy(module, *args, **kwargs):
+ return dummy_tokenizer
+
+ monkeypatch.setattr(importlib, "import_module", _import_dummy)
+ properties.set_property(temp_db_conn, 'tokenizer', 'dummy')
+
+ def _create_tokenizer():
+ return dummy_tokenizer.DummyTokenizer(None, None)
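+
+
+# Illustrative sketch, not part of the original change: how a test could
+# consume the factory returned by tokenizer_mock. The test name is
+# hypothetical; dummy_tokenizer refers to the same module used by the
+# fixture above.
+def test_tokenizer_mock_returns_factory(tokenizer_mock):
+    tokenizer = tokenizer_mock()
+    assert isinstance(tokenizer, dummy_tokenizer.DummyTokenizer)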