ignored-classes=NominatimArgs,closing
disable=too-few-public-methods,duplicate-code
-good-names=i,x,y,fd
+good-names=i,x,y,fd,db
.$_SERVER['HTTP_HOST'].$_SERVER['DOCUMENT_URI'].'/?'
.http_build_query($aMoreParams);
} else {
- $sMoreURL = '/search.php'.http_build_query($aMoreParams);
+ $sMoreURL = '/search.php?'.http_build_query($aMoreParams);
}
if (CONST_Debug) exit;
-import importlib
import itertools
import sys
from pathlib import Path
import psycopg2
-import psycopg2.extras
import pytest
-import tempfile
SRC_DIR = Path(__file__) / '..' / '..' / '..'
from nominatim.config import Configuration
from nominatim.db import connection
from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.db import properties
import nominatim.tokenizer.factory
+import nominatim.cli
import dummy_tokenizer
import mocks
-
-class _TestingCursor(psycopg2.extras.DictCursor):
- """ Extension to the DictCursor class that provides execution
- short-cuts that simplify writing assertions.
- """
-
- def scalar(self, sql, params=None):
- """ Execute a query with a single return value and return this value.
- Raises an assertion when not exactly one row is returned.
- """
- self.execute(sql, params)
- assert self.rowcount == 1
- return self.fetchone()[0]
-
- def row_set(self, sql, params=None):
- """ Execute a query and return the result as a set of tuples.
- """
- self.execute(sql, params)
-
- return set((tuple(row) for row in self))
-
- def table_exists(self, table):
- """ Check that a table with the given name exists in the database.
- """
- num = self.scalar("""SELECT count(*) FROM pg_tables
- WHERE tablename = %s""", (table, ))
- return num == 1
-
- def table_rows(self, table):
- """ Return the number of rows in the given table.
- """
- return self.scalar('SELECT count(*) FROM ' + table)
+from cursor import CursorForTesting
@pytest.fixture
conn.close()
- monkeypatch.setenv('NOMINATIM_DATABASE_DSN' , 'dbname=' + name)
+ monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'dbname=' + name)
yield name
"""
conn = psycopg2.connect('dbname=' + temp_db)
conn.set_isolation_level(0)
- with conn.cursor(cursor_factory=_TestingCursor) as cur:
+ with conn.cursor(cursor_factory=CursorForTesting) as cur:
yield cur
conn.close()
@pytest.fixture
def table_factory(temp_db_cursor):
+ """ A fixture that creates new SQL tables, potentially filled with
+ content.
+ """
def mk_table(name, definition='id INT', content=None):
temp_db_cursor.execute('CREATE TABLE {} ({})'.format(name, definition))
if content is not None:
- psycopg2.extras.execute_values(
- temp_db_cursor, "INSERT INTO {} VALUES %s".format(name), content)
+ temp_db_cursor.execute_values("INSERT INTO {} VALUES %s".format(name), content)
return mk_table
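A minimal sketch (hypothetical test, not part of this change) of how the table_factory fixture above can be combined with the testing cursor:

def test_table_factory_example(table_factory, temp_db_cursor):
    # create a two-column table pre-filled with two rows
    table_factory('example_tbl', 'id INT, name TEXT', ((1, 'a'), (2, 'b')))
    assert temp_db_cursor.table_rows('example_tbl') == 2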
data=SRC_DIR / 'data')
return cfg
+
@pytest.fixture
def src_dir():
return SRC_DIR.resolve()
+
@pytest.fixture
-def tmp_phplib_dir():
- with tempfile.TemporaryDirectory() as phpdir:
- (Path(phpdir) / 'admin').mkdir()
+def cli_call():
+ def _call_nominatim(*args):
+ return nominatim.cli.nominatim(module_dir='MODULE NOT AVAILABLE',
+ osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
+ phplib_dir=str(SRC_DIR / 'lib-php'),
+ data_dir=str(SRC_DIR / 'data'),
+ phpcgi_path='/usr/bin/php-cgi',
+ sqllib_dir=str(SRC_DIR / 'lib-sql'),
+ config_dir=str(SRC_DIR / 'settings'),
+ cli_args=args)
- yield Path(phpdir)
+ return _call_nominatim
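A rough usage sketch for the cli_call fixture above (hypothetical test; the real checks live in the TestCli class further down): running the tool without arguments should print the usage text and return 1.

def test_cli_call_example(cli_call, capsys):
    assert cli_call() == 1
    assert capsys.readouterr().out.startswith('usage:')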
@pytest.fixture
-def property_table(table_factory):
+def property_table(table_factory, temp_db_conn):
table_factory('nominatim_properties', 'property TEXT, value TEXT')
+ return mocks.MockPropertyTable(temp_db_conn)
+
+
@pytest.fixture
-def status_table(temp_db_conn):
+def status_table(table_factory):
""" Create an empty version of the status table and
the status logging table.
"""
- with temp_db_conn.cursor() as cur:
- cur.execute("""CREATE TABLE import_status (
- lastimportdate timestamp with time zone NOT NULL,
- sequence_id integer,
- indexed boolean
- )""")
- cur.execute("""CREATE TABLE import_osmosis_log (
- batchend timestamp,
- batchseq integer,
- batchsize bigint,
- starttime timestamp,
- endtime timestamp,
- event text
- )""")
- temp_db_conn.commit()
+ table_factory('import_status',
+ """lastimportdate timestamp with time zone NOT NULL,
+ sequence_id integer,
+ indexed boolean""")
+ table_factory('import_osmosis_log',
+ """batchend timestamp,
+ batchseq integer,
+ batchsize bigint,
+ starttime timestamp,
+ endtime timestamp,
+ event text""")
@pytest.fixture
-def place_table(temp_db_with_extensions, temp_db_conn):
+def place_table(temp_db_with_extensions, table_factory):
""" Create an empty version of the place table.
"""
- with temp_db_conn.cursor() as cur:
- cur.execute("""CREATE TABLE place (
- osm_id int8 NOT NULL,
- osm_type char(1) NOT NULL,
- class text NOT NULL,
- type text NOT NULL,
- name hstore,
- admin_level smallint,
- address hstore,
- extratags hstore,
- geometry Geometry(Geometry,4326) NOT NULL)""")
- temp_db_conn.commit()
+ table_factory('place',
+ """osm_id int8 NOT NULL,
+ osm_type char(1) NOT NULL,
+ class text NOT NULL,
+ type text NOT NULL,
+ name hstore,
+ admin_level smallint,
+ address hstore,
+ extratags hstore,
+ geometry Geometry(Geometry,4326) NOT NULL""")
@pytest.fixture
@pytest.fixture
-def osmline_table(temp_db_with_extensions, temp_db_conn):
- with temp_db_conn.cursor() as cur:
- cur.execute("""CREATE TABLE location_property_osmline (
- place_id BIGINT,
- osm_id BIGINT,
- parent_place_id BIGINT,
- geometry_sector INTEGER,
- indexed_date TIMESTAMP,
- startnumber INTEGER,
- endnumber INTEGER,
- partition SMALLINT,
- indexed_status SMALLINT,
- linegeo GEOMETRY,
- interpolationtype TEXT,
- address HSTORE,
- postcode TEXT,
- country_code VARCHAR(2))""")
- temp_db_conn.commit()
+def osmline_table(temp_db_with_extensions, table_factory):
+ table_factory('location_property_osmline',
+ """place_id BIGINT,
+ osm_id BIGINT,
+ parent_place_id BIGINT,
+ geometry_sector INTEGER,
+ indexed_date TIMESTAMP,
+ startnumber INTEGER,
+ endnumber INTEGER,
+ partition SMALLINT,
+ indexed_status SMALLINT,
+ linegeo GEOMETRY,
+ interpolationtype TEXT,
+ address HSTORE,
+ postcode TEXT,
+ country_code VARCHAR(2)""")
@pytest.fixture
main_data='', main_index=''))
@pytest.fixture
-def sql_preprocessor(temp_db_conn, tmp_path, monkeypatch, table_factory):
+def sql_preprocessor(temp_db_conn, tmp_path, table_factory):
table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
cfg.set_libdirs(module='.', osm2pgsql='.', php=SRC_DIR / 'lib-php',
@pytest.fixture
-def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
+def tokenizer_mock(monkeypatch, property_table):
""" Sets up the configuration so that the test dummy tokenizer will be
loaded when the tokenizer factory is used. Also returns a factory
with which a new dummy tokenizer may be created.
"""
monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
- def _import_dummy(module, *args, **kwargs):
+ def _import_dummy(*args, **kwargs):
return dummy_tokenizer
monkeypatch.setattr(nominatim.tokenizer.factory, "_import_tokenizer", _import_dummy)
- properties.set_property(temp_db_conn, 'tokenizer', 'dummy')
+ property_table.set('tokenizer', 'dummy')
def _create_tokenizer():
return dummy_tokenizer.DummyTokenizer(None, None)
--- /dev/null
+"""
+Specialised psycopg2 cursor with shortcut functions useful for testing.
+"""
+import psycopg2.extras
+
+class CursorForTesting(psycopg2.extras.DictCursor):
+ """ Extension to the DictCursor class that provides execution
+ short-cuts that simplify writing assertions.
+ """
+
+ def scalar(self, sql, params=None):
+ """ Execute a query with a single return value and return this value.
+ Fails with an assertion when not exactly one row is returned.
+ """
+ self.execute(sql, params)
+ assert self.rowcount == 1
+ return self.fetchone()[0]
+
+
+ def row_set(self, sql, params=None):
+ """ Execute a query and return the result as a set of tuples.
+ Fails when the SQL command returns duplicate rows.
+ """
+ self.execute(sql, params)
+
+ result = set((tuple(row) for row in self))
+ assert len(result) == self.rowcount
+
+ return result
+
+
+ def table_exists(self, table):
+ """ Check that a table with the given name exists in the database.
+ """
+ num = self.scalar("""SELECT count(*) FROM pg_tables
+ WHERE tablename = %s""", (table, ))
+ return num == 1
+
+
+ def table_rows(self, table, where=None):
+ """ Return the number of rows in the given table.
+ """
+ if where is None:
+ return self.scalar('SELECT count(*) FROM ' + table)
+
+ return self.scalar('SELECT count(*) FROM {} WHERE {}'.format(table, where))
+
+
+ def execute_values(self, *args, **kwargs):
+ """ Execute the execute_values() function on the cursor.
+ """
+ psycopg2.extras.execute_values(self, *args, **kwargs)
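A short sketch of the cursor short-cuts in use (hypothetical test; the fixture names are the ones from conftest.py above):

def test_cursor_shortcuts_example(table_factory, temp_db_cursor):
    table_factory('sample', 'id INT', ((1, ), (2, )))
    assert temp_db_cursor.table_exists('sample')
    assert temp_db_cursor.scalar('SELECT count(*) FROM sample') == 2
    assert temp_db_cursor.row_set('SELECT id FROM sample') == {(1, ), (2, )}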
def init_new_db(self, *args, **kwargs):
- assert self.init_state == None
+ assert self.init_state is None
self.init_state = "new"
def init_from_project(self):
- assert self.init_state == None
+ assert self.init_state is None
self.init_state = "loaded"
- def finalize_import(self, _):
+ @staticmethod
+ def finalize_import(_):
pass
def close(self):
pass
- def normalize_postcode(self, postcode):
+ @staticmethod
+ def normalize_postcode(postcode):
return postcode
- def update_postcodes_from_db(self):
+ @staticmethod
+ def update_postcodes_from_db():
pass
def update_special_phrases(self, phrases, should_replace):
def add_country_names(self, code, names):
self.analyser_cache['countries'].append((code, names))
- def process_place(self, place):
+ @staticmethod
+ def process_place(place):
return {}
import psycopg2.extras
+from nominatim.db import properties
+
class MockParamCapture:
""" Mock that records the parameters with which a function was called
as well as the number of calls.
def __init__(self, retval=0):
self.called = 0
self.return_value = retval
+ self.last_args = None
+ self.last_kwargs = None
def __call__(self, *args, **kwargs):
self.called += 1
conn.commit()
- def add_special(self, word_token, word, cls, typ, op):
+ def add_special(self, word_token, word, cls, typ, oper):
with self.conn.cursor() as cur:
cur.execute("""INSERT INTO word (word_token, word, class, type, operator)
VALUES (%s, %s, %s, %s, %s)
- """, (word_token, word, cls, typ, op))
+ """, (word_token, word, cls, typ, oper))
self.conn.commit()
admin_level, address, extratags, 'SRID=4326;' + geom,
country))
self.conn.commit()
+
+
+class MockPropertyTable:
+ """ A property table for testing.
+ """
+ def __init__(self, conn):
+ self.conn = conn
+
+
+ def set(self, name, value):
+ """ Set a property in the table to the given value.
+ """
+ properties.set_property(self.conn, name, value)
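An illustrative (hypothetical) test showing MockPropertyTable as it is wired up through the property_table fixture in conftest.py:

def test_mock_property_table_example(property_table, temp_db_cursor):
    property_table.set('example_prop', '42')
    rows = temp_db_cursor.row_set('SELECT property, value FROM nominatim_properties')
    assert rows == {('example_prop', '42')}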
correct functionality. They use a lot of monkeypatching to avoid executing
the actual functions.
"""
-from pathlib import Path
-
import pytest
import nominatim.db.properties
from mocks import MockParamCapture
-SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
-
-def call_nominatim(*args):
- return nominatim.cli.nominatim(module_dir='build/module',
- osm2pgsql_path='build/osm2pgsql/osm2pgsql',
- phplib_dir=str(SRC_DIR / 'lib-php'),
- data_dir=str(SRC_DIR / 'data'),
- phpcgi_path='/usr/bin/php-cgi',
- sqllib_dir=str(SRC_DIR / 'lib-sql'),
- config_dir=str(SRC_DIR / 'settings'),
- cli_args=args)
-
-
@pytest.fixture
def mock_run_legacy(monkeypatch):
mock = MockParamCapture()
return get_mock
-@pytest.fixture
-def tokenizer_mock(monkeypatch):
- class DummyTokenizer:
- def __init__(self, *args, **kwargs):
- self.update_sql_functions_called = False
- self.finalize_import_called = False
- def update_sql_functions(self, *args):
- self.update_sql_functions_called = True
+class TestCli:
+
+ @pytest.fixture(autouse=True)
+ def setup_cli_call(self, cli_call):
+ self.call_nominatim = cli_call
+
+
+ def test_cli_help(self, capsys):
+ """ Running nominatim tool without arguments prints help.
+ """
+ assert self.call_nominatim() == 1
+
+ captured = capsys.readouterr()
+ assert captured.out.startswith('usage:')
+
+
+ @pytest.mark.parametrize("command,script", [
+ (('add-data', '--file', 'foo.osm'), 'update'),
+ (('export',), 'export')
+ ])
+ def test_legacy_commands_simple(self, mock_run_legacy, command, script):
+ assert self.call_nominatim(*command) == 0
+
+ assert mock_run_legacy.called == 1
+ assert mock_run_legacy.last_args[0] == script + '.php'
+
+
+ @pytest.mark.parametrize("params", [('--warm', ),
+ ('--warm', '--reverse-only'),
+ ('--warm', '--search-only')])
+ def test_admin_command_legacy(self, mock_func_factory, params):
+ mock_run_legacy = mock_func_factory(nominatim.clicmd.admin, 'run_legacy_script')
+
+ assert self.call_nominatim('admin', *params) == 0
- def finalize_import(self, *args):
- self.finalize_import_called = True
+ assert mock_run_legacy.called == 1
- tok = DummyTokenizer()
- monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db' ,
- lambda *args: tok)
- monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer' ,
- lambda *args: tok)
- return tok
+ def test_admin_command_check_database(self, mock_func_factory):
+ mock = mock_func_factory(nominatim.tools.check_database, 'check_database')
+ assert self.call_nominatim('admin', '--check-database') == 0
+ assert mock.called == 1
-def test_cli_help(capsys):
- """ Running nominatim tool without arguments prints help.
- """
- assert 1 == call_nominatim()
- captured = capsys.readouterr()
- assert captured.out.startswith('usage:')
+ @pytest.mark.parametrize("name,oid", [('file', 'foo.osm'), ('diff', 'foo.osc'),
+ ('node', 12), ('way', 8), ('relation', 32)])
+ def test_add_data_command(self, mock_run_legacy, name, oid):
+ assert self.call_nominatim('add-data', '--' + name, str(oid)) == 0
+ assert mock_run_legacy.called == 1
+ assert mock_run_legacy.last_args == ('update.php', '--import-' + name, oid)
-@pytest.mark.parametrize("command,script", [
- (('add-data', '--file', 'foo.osm'), 'update'),
- (('export',), 'export')
- ])
-def test_legacy_commands_simple(mock_run_legacy, command, script):
- assert 0 == call_nominatim(*command)
- assert mock_run_legacy.called == 1
- assert mock_run_legacy.last_args[0] == script + '.php'
+ def test_serve_command(self, mock_func_factory):
+ func = mock_func_factory(nominatim.cli, 'run_php_server')
+ self.call_nominatim('serve')
-def test_import_missing_file(temp_db):
- assert 1 == call_nominatim('import', '--osm-file', 'sfsafegweweggdgw.reh.erh')
+ assert func.called == 1
-def test_import_bad_file(temp_db):
- assert 1 == call_nominatim('import', '--osm-file', '.')
+ @pytest.mark.parametrize("params", [('search', '--query', 'new'),
+ ('reverse', '--lat', '0', '--lon', '0'),
+ ('lookup', '--id', 'N1'),
+ ('details', '--node', '1'),
+ ('details', '--way', '1'),
+ ('details', '--relation', '1'),
+ ('details', '--place_id', '10001'),
+ ('status',)])
+ def test_api_commands_simple(self, mock_func_factory, params):
+ mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script')
+ assert self.call_nominatim(*params) == 0
-def test_import_full(temp_db, mock_func_factory, tokenizer_mock):
- mocks = [
- mock_func_factory(nominatim.tools.database_import, 'setup_database_skeleton'),
- mock_func_factory(nominatim.tools.database_import, 'import_osm_data'),
- mock_func_factory(nominatim.tools.refresh, 'import_wikipedia_articles'),
- mock_func_factory(nominatim.tools.database_import, 'truncate_data_tables'),
- mock_func_factory(nominatim.tools.database_import, 'load_data'),
- mock_func_factory(nominatim.tools.database_import, 'create_tables'),
- mock_func_factory(nominatim.tools.database_import, 'create_table_triggers'),
- mock_func_factory(nominatim.tools.database_import, 'create_partition_tables'),
- mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
- mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
- mock_func_factory(nominatim.tools.refresh, 'load_address_levels_from_file'),
- mock_func_factory(nominatim.tools.postcodes, 'update_postcodes'),
- mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
- mock_func_factory(nominatim.tools.refresh, 'setup_website'),
- mock_func_factory(nominatim.db.properties, 'set_property')
- ]
+ assert mock_run_api.called == 1
+ assert mock_run_api.last_args[0] == params[0]
- cf_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
- assert 0 == call_nominatim('import', '--osm-file', __file__)
- assert tokenizer_mock.finalize_import_called
- assert cf_mock.called > 1
+class TestCliWithDb:
- for mock in mocks:
- assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
+ @pytest.fixture(autouse=True)
+ def setup_cli_call(self, cli_call, temp_db):
+ self.call_nominatim = cli_call
-def test_import_continue_load_data(temp_db, mock_func_factory, tokenizer_mock):
- mocks = [
- mock_func_factory(nominatim.tools.database_import, 'truncate_data_tables'),
- mock_func_factory(nominatim.tools.database_import, 'load_data'),
- mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
- mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
- mock_func_factory(nominatim.tools.postcodes, 'update_postcodes'),
- mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
- mock_func_factory(nominatim.tools.refresh, 'setup_website'),
- mock_func_factory(nominatim.db.properties, 'set_property')
- ]
+ @pytest.fixture(autouse=True)
+ def setup_tokenizer_mock(self, monkeypatch):
+ class DummyTokenizer:
+ def __init__(self, *args, **kwargs):
+ self.update_sql_functions_called = False
+ self.finalize_import_called = False
- assert 0 == call_nominatim('import', '--continue', 'load-data')
- assert tokenizer_mock.finalize_import_called
+ def update_sql_functions(self, *args):
+ self.update_sql_functions_called = True
- for mock in mocks:
- assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
+ def finalize_import(self, *args):
+ self.finalize_import_called = True
+ tok = DummyTokenizer()
+ monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db',
+ lambda *args: tok)
+ monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer',
+ lambda *args: tok)
-def test_import_continue_indexing(temp_db, mock_func_factory, placex_table,
- temp_db_conn, tokenizer_mock):
- mocks = [
- mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
- mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
- mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
- mock_func_factory(nominatim.tools.refresh, 'setup_website'),
- mock_func_factory(nominatim.db.properties, 'set_property')
- ]
+ self.tokenizer_mock = tok
- assert 0 == call_nominatim('import', '--continue', 'indexing')
- for mock in mocks:
- assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
+ def test_import_missing_file(self):
+ assert self.call_nominatim('import', '--osm-file', 'sfsafegwedgw.reh.erh') == 1
- assert temp_db_conn.index_exists('idx_placex_pendingsector')
- # Calling it again still works for the index
- assert 0 == call_nominatim('import', '--continue', 'indexing')
- assert temp_db_conn.index_exists('idx_placex_pendingsector')
+ def test_import_bad_file(self):
+ assert self.call_nominatim('import', '--osm-file', '.') == 1
-def test_import_continue_postprocess(temp_db, mock_func_factory, tokenizer_mock):
- mocks = [
- mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
- mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
- mock_func_factory(nominatim.tools.refresh, 'setup_website'),
- mock_func_factory(nominatim.db.properties, 'set_property')
- ]
+ def test_import_full(self, mock_func_factory):
+ mocks = [
+ mock_func_factory(nominatim.tools.database_import, 'setup_database_skeleton'),
+ mock_func_factory(nominatim.tools.database_import, 'import_osm_data'),
+ mock_func_factory(nominatim.tools.refresh, 'import_wikipedia_articles'),
+ mock_func_factory(nominatim.tools.database_import, 'truncate_data_tables'),
+ mock_func_factory(nominatim.tools.database_import, 'load_data'),
+ mock_func_factory(nominatim.tools.database_import, 'create_tables'),
+ mock_func_factory(nominatim.tools.database_import, 'create_table_triggers'),
+ mock_func_factory(nominatim.tools.database_import, 'create_partition_tables'),
+ mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
+ mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
+ mock_func_factory(nominatim.tools.refresh, 'load_address_levels_from_file'),
+ mock_func_factory(nominatim.tools.postcodes, 'update_postcodes'),
+ mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
+ mock_func_factory(nominatim.tools.refresh, 'setup_website'),
+ mock_func_factory(nominatim.db.properties, 'set_property')
+ ]
- assert 0 == call_nominatim('import', '--continue', 'db-postprocess')
+ cf_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
- assert tokenizer_mock.finalize_import_called
+ assert self.call_nominatim('import', '--osm-file', __file__) == 0
+ assert self.tokenizer_mock.finalize_import_called
- for mock in mocks:
- assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
+ assert cf_mock.called > 1
+ for mock in mocks:
+ assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
-def test_freeze_command(mock_func_factory, temp_db):
- mock_drop = mock_func_factory(nominatim.tools.freeze, 'drop_update_tables')
- mock_flatnode = mock_func_factory(nominatim.tools.freeze, 'drop_flatnode_file')
- assert 0 == call_nominatim('freeze')
+ def test_import_continue_load_data(self, mock_func_factory):
+ mocks = [
+ mock_func_factory(nominatim.tools.database_import, 'truncate_data_tables'),
+ mock_func_factory(nominatim.tools.database_import, 'load_data'),
+ mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
+ mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
+ mock_func_factory(nominatim.tools.postcodes, 'update_postcodes'),
+ mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
+ mock_func_factory(nominatim.tools.refresh, 'setup_website'),
+ mock_func_factory(nominatim.db.properties, 'set_property')
+ ]
- assert mock_drop.called == 1
- assert mock_flatnode.called == 1
+ assert self.call_nominatim('import', '--continue', 'load-data') == 0
+ assert self.tokenizer_mock.finalize_import_called
+ for mock in mocks:
+ assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
-@pytest.mark.parametrize("params", [('--warm', ),
- ('--warm', '--reverse-only'),
- ('--warm', '--search-only')])
-def test_admin_command_legacy(mock_func_factory, params):
- mock_run_legacy = mock_func_factory(nominatim.clicmd.admin, 'run_legacy_script')
- assert 0 == call_nominatim('admin', *params)
+ def test_import_continue_indexing(self, mock_func_factory, placex_table,
+ temp_db_conn):
+ mocks = [
+ mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
+ mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
+ mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_full'),
+ mock_func_factory(nominatim.tools.refresh, 'setup_website'),
+ mock_func_factory(nominatim.db.properties, 'set_property')
+ ]
- assert mock_run_legacy.called == 1
+ assert self.call_nominatim('import', '--continue', 'indexing') == 0
+ for mock in mocks:
+ assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
-@pytest.mark.parametrize("func, params", [('analyse_indexing', ('--analyse-indexing', ))])
-def test_admin_command_tool(temp_db, mock_func_factory, func, params):
- mock = mock_func_factory(nominatim.tools.admin, func)
+ assert temp_db_conn.index_exists('idx_placex_pendingsector')
- assert 0 == call_nominatim('admin', *params)
- assert mock.called == 1
+ # Calling it again still works for the index
+ assert self.call_nominatim('import', '--continue', 'indexing') == 0
+ assert temp_db_conn.index_exists('idx_placex_pendingsector')
-def test_admin_command_check_database(mock_func_factory):
- mock = mock_func_factory(nominatim.tools.check_database, 'check_database')
+ def test_import_continue_postprocess(self, mock_func_factory):
+ mocks = [
+ mock_func_factory(nominatim.tools.database_import, 'create_search_indices'),
+ mock_func_factory(nominatim.tools.database_import, 'create_country_names'),
+ mock_func_factory(nominatim.tools.refresh, 'setup_website'),
+ mock_func_factory(nominatim.db.properties, 'set_property')
+ ]
- assert 0 == call_nominatim('admin', '--check-database')
- assert mock.called == 1
+ assert self.call_nominatim('import', '--continue', 'db-postprocess') == 0
+ assert self.tokenizer_mock.finalize_import_called
-@pytest.mark.parametrize("name,oid", [('file', 'foo.osm'), ('diff', 'foo.osc'),
- ('node', 12), ('way', 8), ('relation', 32)])
-def test_add_data_command(mock_run_legacy, name, oid):
- assert 0 == call_nominatim('add-data', '--' + name, str(oid))
+ for mock in mocks:
+ assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
- assert mock_run_legacy.called == 1
- assert mock_run_legacy.last_args == ('update.php', '--import-' + name, oid)
+ def test_freeze_command(self, mock_func_factory):
+ mock_drop = mock_func_factory(nominatim.tools.freeze, 'drop_update_tables')
+ mock_flatnode = mock_func_factory(nominatim.tools.freeze, 'drop_flatnode_file')
-@pytest.mark.parametrize("params,do_bnds,do_ranks", [
- ([], 1, 1),
- (['--boundaries-only'], 1, 0),
- (['--no-boundaries'], 0, 1),
- (['--boundaries-only', '--no-boundaries'], 0, 0)])
-def test_index_command(mock_func_factory, temp_db_cursor, tokenizer_mock,
- params, do_bnds, do_ranks):
- temp_db_cursor.execute("CREATE TABLE import_status (indexed bool)")
- bnd_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_boundaries')
- rank_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_by_rank')
+ assert self.call_nominatim('freeze') == 0
- assert 0 == call_nominatim('index', *params)
+ assert mock_drop.called == 1
+ assert mock_flatnode.called == 1
- assert bnd_mock.called == do_bnds
- assert rank_mock.called == do_ranks
-@pytest.mark.parametrize("no_replace", [(True), (False)])
-def test_special_phrases_wiki_command(temp_db, mock_func_factory, tokenizer_mock, no_replace):
- func = mock_func_factory(nominatim.clicmd.special_phrases.SPImporter, 'import_phrases')
- if no_replace:
- call_nominatim('special-phrases', '--import-from-wiki', '--no-replace')
- else:
- call_nominatim('special-phrases', '--import-from-wiki')
+ @pytest.mark.parametrize("func, params", [('analyse_indexing', ('--analyse-indexing', ))])
+ def test_admin_command_tool(self, mock_func_factory, func, params):
+ mock = mock_func_factory(nominatim.tools.admin, func)
- assert func.called == 1
+ assert self.call_nominatim('admin', *params) == 0
+ assert mock.called == 1
-@pytest.mark.parametrize("no_replace", [(True), (False)])
-def test_special_phrases_csv_command(temp_db, mock_func_factory, tokenizer_mock, no_replace):
- func = mock_func_factory(nominatim.clicmd.special_phrases.SPImporter, 'import_phrases')
- testdata = SRC_DIR / 'test' / 'testdb'
- csv_path = str((testdata / 'full_en_phrases_test.csv').resolve())
- if no_replace:
- call_nominatim('special-phrases', '--import-from-csv', csv_path, '--no-replace')
- else:
- call_nominatim('special-phrases', '--import-from-csv', csv_path)
+ @pytest.mark.parametrize("params,do_bnds,do_ranks", [
+ ([], 1, 1),
+ (['--boundaries-only'], 1, 0),
+ (['--no-boundaries'], 0, 1),
+ (['--boundaries-only', '--no-boundaries'], 0, 0)])
+ def test_index_command(self, mock_func_factory, table_factory,
+ params, do_bnds, do_ranks):
+ table_factory('import_status', 'indexed bool')
+ bnd_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_boundaries')
+ rank_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_by_rank')
- assert func.called == 1
+ assert self.call_nominatim('index', *params) == 0
-@pytest.mark.parametrize("command,func", [
- ('word-counts', 'recompute_word_counts'),
- ('address-levels', 'load_address_levels_from_file'),
- ('wiki-data', 'import_wikipedia_articles'),
- ('importance', 'recompute_importance'),
- ('website', 'setup_website'),
- ])
-def test_refresh_command(mock_func_factory, temp_db, command, func, tokenizer_mock):
- func_mock = mock_func_factory(nominatim.tools.refresh, func)
+ assert bnd_mock.called == do_bnds
+ assert rank_mock.called == do_ranks
- assert 0 == call_nominatim('refresh', '--' + command)
- assert func_mock.called == 1
+ @pytest.mark.parametrize("no_replace", [(True), (False)])
+ def test_special_phrases_wiki_command(self, mock_func_factory, no_replace):
+ func = mock_func_factory(nominatim.clicmd.special_phrases.SPImporter, 'import_phrases')
+ if no_replace:
+ self.call_nominatim('special-phrases', '--import-from-wiki', '--no-replace')
+ else:
+ self.call_nominatim('special-phrases', '--import-from-wiki')
-def test_refresh_postcodes(mock_func_factory, temp_db, tokenizer_mock):
- func_mock = mock_func_factory(nominatim.tools.postcodes, 'update_postcodes')
- idx_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_postcodes')
+ assert func.called == 1
- assert 0 == call_nominatim('refresh', '--postcodes')
- assert func_mock.called == 1
+ @pytest.mark.parametrize("no_replace", [(True), (False)])
+ def test_special_phrases_csv_command(self, src_dir, mock_func_factory, no_replace):
+ func = mock_func_factory(nominatim.clicmd.special_phrases.SPImporter, 'import_phrases')
+ testdata = src_dir / 'test' / 'testdb'
+ csv_path = str((testdata / 'full_en_phrases_test.csv').resolve())
-def test_refresh_create_functions(mock_func_factory, temp_db, tokenizer_mock):
- func_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
+ if no_replace:
+ self.call_nominatim('special-phrases', '--import-from-csv', csv_path, '--no-replace')
+ else:
+ self.call_nominatim('special-phrases', '--import-from-csv', csv_path)
- assert 0 == call_nominatim('refresh', '--functions')
- assert func_mock.called == 1
- assert tokenizer_mock.update_sql_functions_called
+ assert func.called == 1
+ @pytest.mark.parametrize("command,func", [
+ ('word-counts', 'recompute_word_counts'),
+ ('address-levels', 'load_address_levels_from_file'),
+ ('wiki-data', 'import_wikipedia_articles'),
+ ('importance', 'recompute_importance'),
+ ('website', 'setup_website'),
+ ])
+ def test_refresh_command(self, mock_func_factory, command, func):
+ func_mock = mock_func_factory(nominatim.tools.refresh, func)
-def test_refresh_importance_computed_after_wiki_import(monkeypatch, temp_db, tokenizer_mock):
- calls = []
- monkeypatch.setattr(nominatim.tools.refresh, 'import_wikipedia_articles',
- lambda *args, **kwargs: calls.append('import') or 0)
- monkeypatch.setattr(nominatim.tools.refresh, 'recompute_importance',
- lambda *args, **kwargs: calls.append('update'))
+ assert self.call_nominatim('refresh', '--' + command) == 0
+ assert func_mock.called == 1
- assert 0 == call_nominatim('refresh', '--importance', '--wiki-data')
- assert calls == ['import', 'update']
+ def test_refresh_postcodes(self, mock_func_factory):
+ func_mock = mock_func_factory(nominatim.tools.postcodes, 'update_postcodes')
+ idx_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_postcodes')
+ assert self.call_nominatim('refresh', '--postcodes') == 0
+ assert func_mock.called == 1
+ assert idx_mock.called == 1
-def test_serve_command(mock_func_factory):
- func = mock_func_factory(nominatim.cli, 'run_php_server')
+ def test_refresh_create_functions(self, mock_func_factory):
+ func_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions')
- call_nominatim('serve')
+ assert self.call_nominatim('refresh', '--functions') == 0
+ assert func_mock.called == 1
+ assert self.tokenizer_mock.update_sql_functions_called
- assert func.called == 1
-@pytest.mark.parametrize("params", [
- ('search', '--query', 'new'),
- ('reverse', '--lat', '0', '--lon', '0'),
- ('lookup', '--id', 'N1'),
- ('details', '--node', '1'),
- ('details', '--way', '1'),
- ('details', '--relation', '1'),
- ('details', '--place_id', '10001'),
- ('status',)
- ])
-def test_api_commands_simple(mock_func_factory, params):
- mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script')
+ def test_refresh_importance_computed_after_wiki_import(self, monkeypatch):
+ calls = []
+ monkeypatch.setattr(nominatim.tools.refresh, 'import_wikipedia_articles',
+ lambda *args, **kwargs: calls.append('import') or 0)
+ monkeypatch.setattr(nominatim.tools.refresh, 'recompute_importance',
+ lambda *args, **kwargs: calls.append('update'))
- assert 0 == call_nominatim(*params)
+ assert self.call_nominatim('refresh', '--importance', '--wiki-data') == 0
- assert mock_run_api.called == 1
- assert mock_run_api.last_args[0] == params[0]
+ assert calls == ['import', 'update']
"""
import datetime as dt
import time
-from pathlib import Path
import pytest
from mocks import MockParamCapture
-SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
-
-def call_nominatim(*args):
- return nominatim.cli.nominatim(module_dir='build/module',
- osm2pgsql_path='build/osm2pgsql/osm2pgsql',
- phplib_dir=str(SRC_DIR / 'lib-php'),
- data_dir=str(SRC_DIR / 'data'),
- phpcgi_path='/usr/bin/php-cgi',
- sqllib_dir=str(SRC_DIR / 'lib-sql'),
- config_dir=str(SRC_DIR / 'settings'),
- cli_args=['replication'] + list(args))
-
@pytest.fixture
def tokenizer_mock(monkeypatch):
class DummyTokenizer:
self.finalize_import_called = True
tok = DummyTokenizer()
- monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db' ,
+ monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db',
lambda *args: tok)
- monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer' ,
+ monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer',
lambda *args: tok)
return tok
-@pytest.fixture
-def index_mock(monkeypatch, tokenizer_mock):
- mock = MockParamCapture()
- monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_boundaries', mock)
- monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_by_rank', mock)
-
- return mock
-
-
@pytest.fixture
def mock_func_factory(monkeypatch):
def get_mock(module, func):
@pytest.fixture
def init_status(temp_db_conn, status_table):
status.set_status(temp_db_conn, date=dt.datetime.now(dt.timezone.utc), seq=1)
- return 1
+
+
+@pytest.fixture
+def index_mock(monkeypatch, tokenizer_mock, init_status):
+ mock = MockParamCapture()
+ monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_boundaries', mock)
+ monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_by_rank', mock)
+
+ return mock
@pytest.fixture
def update_mock(mock_func_factory, init_status, tokenizer_mock):
return mock_func_factory(nominatim.tools.replication, 'update')
-@pytest.mark.parametrize("params,func", [
- (('--init', '--no-update-functions'), 'init_replication'),
- (('--check-for-updates',), 'check_for_updates')
- ])
-def test_replication_command(mock_func_factory, temp_db, params, func):
- func_mock = mock_func_factory(nominatim.tools.replication, func)
- assert 0 == call_nominatim(*params)
- assert func_mock.called == 1
+class TestCliReplication:
+
+ @pytest.fixture(autouse=True)
+ def setup_cli_call(self, cli_call, temp_db):
+ self.call_nominatim = lambda *args: cli_call('replication', *args)
+
+ @pytest.mark.parametrize("params,func", [
+ (('--init', '--no-update-functions'), 'init_replication'),
+ (('--check-for-updates',), 'check_for_updates')
+ ])
+ def test_replication_command(self, mock_func_factory, params, func):
+ func_mock = mock_func_factory(nominatim.tools.replication, func)
+
+ assert self.call_nominatim(*params) == 0
+ assert func_mock.called == 1
-def test_replication_update_bad_interval(monkeypatch, temp_db):
- monkeypatch.setenv('NOMINATIM_REPLICATION_UPDATE_INTERVAL', 'xx')
+ def test_replication_update_bad_interval(self, monkeypatch):
+ monkeypatch.setenv('NOMINATIM_REPLICATION_UPDATE_INTERVAL', 'xx')
- assert call_nominatim() == 1
+ assert self.call_nominatim() == 1
-def test_replication_update_bad_interval_for_geofabrik(monkeypatch, temp_db):
- monkeypatch.setenv('NOMINATIM_REPLICATION_URL',
- 'https://download.geofabrik.de/europe/ireland-and-northern-ireland-updates')
+ def test_replication_update_bad_interval_for_geofabrik(self, monkeypatch):
+ monkeypatch.setenv('NOMINATIM_REPLICATION_URL',
+ 'https://download.geofabrik.de/europe/italy-updates')
- assert call_nominatim() == 1
+ assert self.call_nominatim() == 1
-def test_replication_update_once_no_index(update_mock):
- assert 0 == call_nominatim('--once', '--no-index')
+ def test_replication_update_once_no_index(self, update_mock):
+ assert self.call_nominatim('--once', '--no-index') == 0
- assert str(update_mock.last_args[1]['osm2pgsql']) == 'build/osm2pgsql/osm2pgsql'
+ assert str(update_mock.last_args[1]['osm2pgsql']) == 'OSM2PGSQL NOT AVAILABLE'
-def test_replication_update_custom_osm2pgsql(monkeypatch, update_mock):
- monkeypatch.setenv('NOMINATIM_OSM2PGSQL_BINARY', '/secret/osm2pgsql')
- assert 0 == call_nominatim('--once', '--no-index')
+ def test_replication_update_custom_osm2pgsql(self, monkeypatch, update_mock):
+ monkeypatch.setenv('NOMINATIM_OSM2PGSQL_BINARY', '/secret/osm2pgsql')
+ assert self.call_nominatim('--once', '--no-index') == 0
- assert str(update_mock.last_args[1]['osm2pgsql']) == '/secret/osm2pgsql'
+ assert str(update_mock.last_args[1]['osm2pgsql']) == '/secret/osm2pgsql'
-def test_replication_update_custom_threads(update_mock):
- assert 0 == call_nominatim('--once', '--no-index', '--threads', '4')
+ def test_replication_update_custom_threads(self, update_mock):
+ assert self.call_nominatim('--once', '--no-index', '--threads', '4') == 0
- assert update_mock.last_args[1]['threads'] == 4
+ assert update_mock.last_args[1]['threads'] == 4
-def test_replication_update_continuous(monkeypatch, init_status, index_mock):
- states = [nominatim.tools.replication.UpdateState.UP_TO_DATE,
- nominatim.tools.replication.UpdateState.UP_TO_DATE]
- monkeypatch.setattr(nominatim.tools.replication, 'update',
- lambda *args, **kwargs: states.pop())
+ def test_replication_update_continuous(self, monkeypatch, index_mock):
+ states = [nominatim.tools.replication.UpdateState.UP_TO_DATE,
+ nominatim.tools.replication.UpdateState.UP_TO_DATE]
+ monkeypatch.setattr(nominatim.tools.replication, 'update',
+ lambda *args, **kwargs: states.pop())
- with pytest.raises(IndexError):
- call_nominatim()
+ with pytest.raises(IndexError):
+ self.call_nominatim()
- assert index_mock.called == 4
+ assert index_mock.called == 4
-def test_replication_update_continuous_no_change(monkeypatch, init_status, index_mock):
- states = [nominatim.tools.replication.UpdateState.NO_CHANGES,
- nominatim.tools.replication.UpdateState.UP_TO_DATE]
- monkeypatch.setattr(nominatim.tools.replication, 'update',
- lambda *args, **kwargs: states.pop())
+ def test_replication_update_continuous_no_change(self, monkeypatch, index_mock):
+ states = [nominatim.tools.replication.UpdateState.NO_CHANGES,
+ nominatim.tools.replication.UpdateState.UP_TO_DATE]
+ monkeypatch.setattr(nominatim.tools.replication, 'update',
+ lambda *args, **kwargs: states.pop())
- sleep_mock = MockParamCapture()
- monkeypatch.setattr(time, 'sleep', sleep_mock)
+ sleep_mock = MockParamCapture()
+ monkeypatch.setattr(time, 'sleep', sleep_mock)
- with pytest.raises(IndexError):
- call_nominatim()
+ with pytest.raises(IndexError):
+ self.call_nominatim()
- assert index_mock.called == 2
- assert sleep_mock.called == 1
- assert sleep_mock.last_args[0] == 60
+ assert index_mock.called == 2
+ assert sleep_mock.called == 1
+ assert sleep_mock.last_args[0] == 60
"""
Test for loading dotenv configuration.
"""
-from pathlib import Path
-
import pytest
from nominatim.config import Configuration
from nominatim.errors import UsageError
-DEFCFG_DIR = Path(__file__) / '..' / '..' / '..' / 'settings'
+@pytest.fixture
+def make_config(src_dir):
+ """ Create a configuration object from the given project directory.
+ """
+ def _mk_config(project_dir=None):
+ return Configuration(project_dir, src_dir / 'settings')
+
+ return _mk_config
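A hedged usage sketch for the make_config factory above (hypothetical test; tmp_path is pytest's built-in temporary directory fixture):

def test_make_config_example(make_config, tmp_path):
    (tmp_path / '.env').write_text('NOMINATIM_DATABASE_WEBUSER=apache\n')
    config = make_config(tmp_path)
    assert config.DATABASE_WEBUSER == 'apache'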
-def test_no_project_dir():
- config = Configuration(None, DEFCFG_DIR)
+
+def test_no_project_dir(make_config):
+ config = make_config()
assert config.DATABASE_WEBUSER == 'www-data'
@pytest.mark.parametrize("val", ('apache', '"apache"'))
-def test_prefer_project_setting_over_default(val, tmp_path):
+def test_prefer_project_setting_over_default(make_config, val, tmp_path):
envfile = tmp_path / '.env'
envfile.write_text('NOMINATIM_DATABASE_WEBUSER={}\n'.format(val))
- config = Configuration(Path(tmp_path), DEFCFG_DIR)
+ config = make_config(tmp_path)
assert config.DATABASE_WEBUSER == 'apache'
-def test_prefer_os_environ_over_project_setting(monkeypatch, tmp_path):
+def test_prefer_os_environ_over_project_setting(make_config, monkeypatch, tmp_path):
envfile = tmp_path / '.env'
envfile.write_text('NOMINATIM_DATABASE_WEBUSER=apache\n')
monkeypatch.setenv('NOMINATIM_DATABASE_WEBUSER', 'nobody')
- config = Configuration(Path(tmp_path), DEFCFG_DIR)
+ config = make_config(tmp_path)
assert config.DATABASE_WEBUSER == 'nobody'
-def test_get_os_env_add_defaults(monkeypatch):
- config = Configuration(None, DEFCFG_DIR)
+def test_get_os_env_add_defaults(make_config, monkeypatch):
+ config = make_config()
monkeypatch.delenv('NOMINATIM_DATABASE_WEBUSER', raising=False)
assert config.get_os_env()['NOMINATIM_DATABASE_WEBUSER'] == 'www-data'
-def test_get_os_env_prefer_os_environ(monkeypatch):
- config = Configuration(None, DEFCFG_DIR)
+def test_get_os_env_prefer_os_environ(make_config, monkeypatch):
+ config = make_config()
monkeypatch.setenv('NOMINATIM_DATABASE_WEBUSER', 'nobody')
assert config.get_os_env()['NOMINATIM_DATABASE_WEBUSER'] == 'nobody'
-def test_get_libpq_dsn_convert_default():
- config = Configuration(None, DEFCFG_DIR)
+def test_get_libpq_dsn_convert_default(make_config):
+ config = make_config()
assert config.get_libpq_dsn() == 'dbname=nominatim'
-def test_get_libpq_dsn_convert_php(monkeypatch):
- config = Configuration(None, DEFCFG_DIR)
+def test_get_libpq_dsn_convert_php(make_config, monkeypatch):
+ config = make_config()
monkeypatch.setenv('NOMINATIM_DATABASE_DSN',
'pgsql:dbname=gis;password=foo;host=localhost')
@pytest.mark.parametrize("val,expect", [('foo bar', "'foo bar'"),
("xy'z", "xy\\'z"),
])
-def test_get_libpq_dsn_convert_php_special_chars(monkeypatch, val, expect):
- config = Configuration(None, DEFCFG_DIR)
+def test_get_libpq_dsn_convert_php_special_chars(make_config, monkeypatch, val, expect):
+ config = make_config()
monkeypatch.setenv('NOMINATIM_DATABASE_DSN',
'pgsql:dbname=gis;password={}'.format(val))
assert config.get_libpq_dsn() == "dbname=gis password={}".format(expect)
-def test_get_libpq_dsn_convert_libpq(monkeypatch):
- config = Configuration(None, DEFCFG_DIR)
+def test_get_libpq_dsn_convert_libpq(make_config, monkeypatch):
+ config = make_config()
- monkeypatch.setenv('NOMINATIM_DATABASE_DSN',
+ monkeypatch.setenv('NOMINATIM_DATABASE_DSN',
'host=localhost dbname=gis password=foo')
assert config.get_libpq_dsn() == 'host=localhost dbname=gis password=foo'
@pytest.mark.parametrize("value,result",
[(x, True) for x in ('1', 'true', 'True', 'yes', 'YES')] +
[(x, False) for x in ('0', 'false', 'no', 'NO', 'x')])
-def test_get_bool(monkeypatch, value, result):
- config = Configuration(None, DEFCFG_DIR)
+def test_get_bool(make_config, monkeypatch, value, result):
+ config = make_config()
monkeypatch.setenv('NOMINATIM_FOOBAR', value)
assert config.get_bool('FOOBAR') == result
-def test_get_bool_empty():
- config = Configuration(None, DEFCFG_DIR)
+def test_get_bool_empty(make_config):
+ config = make_config()
assert config.DATABASE_MODULE_PATH == ''
- assert config.get_bool('DATABASE_MODULE_PATH') == False
+ assert not config.get_bool('DATABASE_MODULE_PATH')
@pytest.mark.parametrize("value,result", [('0', 0), ('1', 1),
('85762513444', 85762513444)])
-def test_get_int_success(monkeypatch, value, result):
- config = Configuration(None, DEFCFG_DIR)
+def test_get_int_success(make_config, monkeypatch, value, result):
+ config = make_config()
monkeypatch.setenv('NOMINATIM_FOOBAR', value)
@pytest.mark.parametrize("value", ['1b', 'fg', '0x23'])
-def test_get_int_bad_values(monkeypatch, value):
- config = Configuration(None, DEFCFG_DIR)
+def test_get_int_bad_values(make_config, monkeypatch, value):
+ config = make_config()
monkeypatch.setenv('NOMINATIM_FOOBAR', value)
config.get_int('FOOBAR')
-def test_get_int_empty():
- config = Configuration(None, DEFCFG_DIR)
+def test_get_int_empty(make_config):
+ config = make_config()
assert config.DATABASE_MODULE_PATH == ''
config.get_int('DATABASE_MODULE_PATH')
-def test_get_import_style_intern(monkeypatch):
- config = Configuration(None, DEFCFG_DIR)
+def test_get_import_style_intern(make_config, src_dir, monkeypatch):
+ config = make_config()
monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', 'street')
- expected = DEFCFG_DIR / 'import-street.style'
+ expected = src_dir / 'settings' / 'import-street.style'
assert config.get_import_style_file() == expected
@pytest.mark.parametrize("value", ['custom', '/foo/bar.stye'])
-def test_get_import_style_intern(monkeypatch, value):
- config = Configuration(None, DEFCFG_DIR)
+def test_get_import_style_extern(make_config, monkeypatch, value):
+ config = make_config()
monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', value)
import pytest
import psycopg2
-from psycopg2.extras import wait_select
from nominatim.db.async_connection import DBConnection, DeadlockHandler
@pytest.fixture
def conn(temp_db):
- with closing(DBConnection('dbname=' + temp_db)) as c:
- yield c
+ with closing(DBConnection('dbname=' + temp_db)) as connection:
+ yield connection
@pytest.fixture
future.result()
assert len(deadlock_check) == 1
-
-
from nominatim.db.connection import connect, get_pg_env
@pytest.fixture
-def db(temp_db):
- with connect('dbname=' + temp_db) as conn:
+def db(dsn):
+ with connect(dsn) as conn:
yield conn
def test_connection_table_exists(db, table_factory):
- assert db.table_exists('foobar') == False
+ assert not db.table_exists('foobar')
table_factory('foobar')
- assert db.table_exists('foobar') == True
+ assert db.table_exists('foobar')
-def test_connection_index_exists(db, temp_db_cursor):
- assert db.index_exists('some_index') == False
+def test_connection_index_exists(db, table_factory, temp_db_cursor):
+ assert not db.index_exists('some_index')
- temp_db_cursor.execute('CREATE TABLE foobar (id INT)')
+ table_factory('foobar')
temp_db_cursor.execute('CREATE INDEX some_index ON foobar(id)')
- assert db.index_exists('some_index') == True
- assert db.index_exists('some_index', table='foobar') == True
- assert db.index_exists('some_index', table='bar') == False
+ assert db.index_exists('some_index')
+ assert db.index_exists('some_index', table='foobar')
+ assert not db.index_exists('some_index', table='bar')
def test_drop_table_existing(db, table_factory):
assert ver[0] > 8
-def test_connection_postgis_version_tuple(db, temp_db_cursor):
- temp_db_cursor.execute('CREATE EXTENSION postgis')
-
+def test_connection_postgis_version_tuple(db, temp_db_with_extensions):
ver = db.postgis_version_tuple()
assert isinstance(ver, tuple)
from nominatim.db import properties
@pytest.fixture
-def prop_table(table_factory):
- table_factory('nominatim_properties', 'property TEXT, value TEXT')
+def property_factory(property_table, temp_db_cursor):
+ """ A function fixture that adds a property into the property table.
+ """
+ def _add_property(name, value):
+ temp_db_cursor.execute("INSERT INTO nominatim_properties VALUES(%s, %s)",
+ (name, value))
+ return _add_property
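A small illustration (hypothetical test) of the property_factory fixture above:

def test_property_factory_example(property_factory, temp_db_cursor):
    property_factory('sample', 'value')
    assert temp_db_cursor.table_rows('nominatim_properties') == 1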
-def test_get_property_existing(prop_table, temp_db_conn, temp_db_cursor):
- temp_db_cursor.execute("INSERT INTO nominatim_properties VALUES('foo', 'bar')")
+
+def test_get_property_existing(property_factory, temp_db_conn):
+ property_factory('foo', 'bar')
assert properties.get_property(temp_db_conn, 'foo') == 'bar'
-def test_get_property_unknown(prop_table, temp_db_conn, temp_db_cursor):
- temp_db_cursor.execute("INSERT INTO nominatim_properties VALUES('other', 'bar')")
+def test_get_property_unknown(property_factory, temp_db_conn):
+ property_factory('other', 'bar')
assert properties.get_property(temp_db_conn, 'foo') is None
@pytest.mark.parametrize("prefill", (True, False))
-def test_set_property_new(prop_table, temp_db_conn, temp_db_cursor, prefill):
+def test_set_property_new(property_factory, temp_db_conn, temp_db_cursor, prefill):
if prefill:
- temp_db_cursor.execute("INSERT INTO nominatim_properties VALUES('something', 'bar')")
+ property_factory('something', 'bar')
properties.set_property(temp_db_conn, 'something', 'else')
"""
Tests for SQL preprocessing.
"""
-from pathlib import Path
-
import pytest
@pytest.fixture
import nominatim.db.status
from nominatim.errors import UsageError
-def test_compute_database_date_place_empty(status_table, place_table, temp_db_conn):
- with pytest.raises(UsageError):
- nominatim.db.status.compute_database_date(temp_db_conn)
-
OSM_NODE_DATA = """\
<osm version="0.6" generator="OpenStreetMap server" copyright="OpenStreetMap and contributors" attribution="http://www.openstreetmap.org/copyright" license="http://opendatacommons.org/licenses/odbl/1-0/">
<node id="45673" visible="true" version="1" changeset="2047" timestamp="2006-01-27T22:09:10Z" user="Foo" uid="111" lat="48.7586670" lon="8.1343060">
.replace(tzinfo=dt.timezone.utc)
-def test_compute_database_date_valid(monkeypatch, status_table, place_row, temp_db_conn):
+@pytest.fixture(autouse=True)
+def setup_status_table(status_table):
+ pass
+
+
+def test_compute_database_date_place_empty(place_table, temp_db_conn):
+ with pytest.raises(UsageError):
+ nominatim.db.status.compute_database_date(temp_db_conn)
+
+
+def test_compute_database_date_valid(monkeypatch, place_row, temp_db_conn):
place_row(osm_type='N', osm_id=45673)
requested_url = []
assert date == iso_date('2006-01-27T22:09:10')
-def test_compute_database_broken_api(monkeypatch, status_table, place_row, temp_db_conn):
+def test_compute_database_broken_api(monkeypatch, place_row, temp_db_conn):
place_row(osm_type='N', osm_id=45673)
requested_url = []
monkeypatch.setattr(nominatim.db.status, "get_url", mock_url)
with pytest.raises(UsageError):
- date = nominatim.db.status.compute_database_date(temp_db_conn)
+ nominatim.db.status.compute_database_date(temp_db_conn)
-def test_set_status_empty_table(status_table, temp_db_conn, temp_db_cursor):
+def test_set_status_empty_table(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date)
- temp_db_cursor.execute("SELECT * FROM import_status")
+ assert temp_db_cursor.row_set("SELECT * FROM import_status") == \
+ {(date, None, True)}
- assert temp_db_cursor.rowcount == 1
- assert temp_db_cursor.fetchone() == [date, None, True]
-
-def test_set_status_filled_table(status_table, temp_db_conn, temp_db_cursor):
+def test_set_status_filled_table(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date)
- assert 1 == temp_db_cursor.scalar("SELECT count(*) FROM import_status")
+ assert temp_db_cursor.table_rows('import_status') == 1
date = dt.datetime.fromordinal(1000100).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date, seq=456, indexed=False)
- temp_db_cursor.execute("SELECT * FROM import_status")
-
- assert temp_db_cursor.rowcount == 1
- assert temp_db_cursor.fetchone() == [date, 456, False]
+ assert temp_db_cursor.row_set("SELECT * FROM import_status") == \
+ {(date, 456, False)}
-def test_set_status_missing_date(status_table, temp_db_conn, temp_db_cursor):
+def test_set_status_missing_date(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date)
- assert 1 == temp_db_cursor.scalar("SELECT count(*) FROM import_status")
+ assert temp_db_cursor.table_rows('import_status') == 1
nominatim.db.status.set_status(temp_db_conn, date=None, seq=456, indexed=False)
- temp_db_cursor.execute("SELECT * FROM import_status")
+ assert temp_db_cursor.row_set("SELECT * FROM import_status") == \
+ {(date, 456, False)}
- assert temp_db_cursor.rowcount == 1
- assert temp_db_cursor.fetchone() == [date, 456, False]
-
-def test_get_status_empty_table(status_table, temp_db_conn):
+def test_get_status_empty_table(temp_db_conn):
assert nominatim.db.status.get_status(temp_db_conn) == (None, None, None)
-def test_get_status_success(status_table, temp_db_conn):
+def test_get_status_success(temp_db_conn):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date, seq=667, indexed=False)
@pytest.mark.parametrize("old_state", [True, False])
@pytest.mark.parametrize("new_state", [True, False])
-def test_set_indexed(status_table, temp_db_conn, temp_db_cursor, old_state, new_state):
+def test_set_indexed(temp_db_conn, temp_db_cursor, old_state, new_state):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
nominatim.db.status.set_status(temp_db_conn, date=date, indexed=old_state)
nominatim.db.status.set_indexed(temp_db_conn, new_state)
assert temp_db_cursor.scalar("SELECT indexed FROM import_status") == new_state
-def test_set_indexed_empty_status(status_table, temp_db_conn, temp_db_cursor):
+def test_set_indexed_empty_status(temp_db_conn, temp_db_cursor):
nominatim.db.status.set_indexed(temp_db_conn, True)
- assert temp_db_cursor.scalar("SELECT count(*) FROM import_status") == 0
+ assert temp_db_cursor.table_rows("import_status") == 0
-def text_log_status(status_table, temp_db_conn):
+def test_log_status(temp_db_conn, temp_db_cursor):
date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc)
start = dt.datetime.now() - dt.timedelta(hours=1)
+
nominatim.db.status.set_status(temp_db_conn, date=date, seq=56)
nominatim.db.status.log_status(temp_db_conn, start, 'index')
- assert temp_db_cursor.scalar("SELECT count(*) FROM import_osmosis_log") == 1
- assert temp_db_cursor.scalar("SELECT seq FROM import_osmosis_log") == 56
- assert temp_db_cursor.scalar("SELECT date FROM import_osmosis_log") == date
+ temp_db_conn.commit()
+
+ assert temp_db_cursor.table_rows("import_osmosis_log") == 1
+ assert temp_db_cursor.scalar("SELECT batchseq FROM import_osmosis_log") == 56
+ assert temp_db_cursor.scalar("SELECT event FROM import_osmosis_log") == 'index'
"""
Tests for DB utility functions in db.utils
"""
-import psycopg2
import pytest
import nominatim.db.utils as db_utils
db_utils.execute_file(dsn, tmpfile)
- temp_db_cursor.execute('SELECT * FROM test')
-
- assert temp_db_cursor.rowcount == 1
- assert temp_db_cursor.fetchone()[0] == 56
+ assert temp_db_cursor.row_set('SELECT * FROM test') == {(56, )}
def test_execute_file_bad_file(dsn, tmp_path):
with pytest.raises(FileNotFoundError):
db_utils.execute_file(dsn, tmpfile, pre_code='CREATE TABLE test (id INT)')
- temp_db_cursor.execute('SELECT * FROM test')
-
- assert temp_db_cursor.rowcount == 1
- assert temp_db_cursor.fetchone()[0] == 4
+ assert temp_db_cursor.row_set('SELECT * FROM test') == {(4, )}
def test_execute_file_with_post_code(dsn, tmp_path, temp_db_cursor):
db_utils.execute_file(dsn, tmpfile, post_code='INSERT INTO test VALUES(23)')
- temp_db_cursor.execute('SELECT * FROM test')
-
- assert temp_db_cursor.rowcount == 1
- assert temp_db_cursor.fetchone()[0] == 23
+ assert temp_db_cursor.row_set('SELECT * FROM test') == {(23, )}
Tests for running the indexing.
"""
import itertools
-import psycopg2
import pytest
from nominatim.indexer import indexer
END;
$$ LANGUAGE plpgsql STABLE;
""")
- cur.execute("""CREATE OR REPLACE FUNCTION get_interpolation_address(in_address HSTORE, wayid BIGINT)
+ cur.execute("""CREATE OR REPLACE FUNCTION
+ get_interpolation_address(in_address HSTORE, wayid BIGINT)
RETURNS HSTORE AS $$
BEGIN
RETURN in_address;
return self.scalar('SELECT count(*) from placex where indexed_status > 0')
def osmline_unindexed(self):
- return self.scalar('SELECT count(*) from location_property_osmline where indexed_status > 0')
+ return self.scalar("""SELECT count(*) from location_property_osmline
+ WHERE indexed_status > 0""")
@pytest.fixture
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
- assert 31 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 31
+ assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
idx.index_by_rank(0, 30)
- assert 0 == test_db.placex_unindexed()
- assert 0 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 0
+ assert test_db.osmline_unindexed() == 0
- assert 0 == test_db.scalar("""SELECT count(*) from placex
- WHERE indexed_status = 0 and indexed_date is null""")
+ assert test_db.scalar("""SELECT count(*) from placex
+ WHERE indexed_status = 0 and indexed_date is null""") == 0
# ranks come in order of rank address
- assert 0 == test_db.scalar("""
+ assert test_db.scalar("""
SELECT count(*) FROM placex p WHERE rank_address > 0
AND indexed_date >= (SELECT min(indexed_date) FROM placex o
- WHERE p.rank_address < o.rank_address)""")
+ WHERE p.rank_address < o.rank_address)""") == 0
# placex rank < 30 objects come before interpolations
- assert 0 == test_db.scalar(
+ assert test_db.scalar(
"""SELECT count(*) FROM placex WHERE rank_address < 30
- AND indexed_date > (SELECT min(indexed_date) FROM location_property_osmline)""")
+ AND indexed_date >
+ (SELECT min(indexed_date) FROM location_property_osmline)""") == 0
# placex rank = 30 objects come after interpolations
- assert 0 == test_db.scalar(
+ assert test_db.scalar(
"""SELECT count(*) FROM placex WHERE rank_address = 30
- AND indexed_date < (SELECT max(indexed_date) FROM location_property_osmline)""")
+ AND indexed_date <
+ (SELECT max(indexed_date) FROM location_property_osmline)""") == 0
# rank 0 comes after rank 29 and before rank 30
- assert 0 == test_db.scalar(
+ assert test_db.scalar(
"""SELECT count(*) FROM placex WHERE rank_address < 30
- AND indexed_date > (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""")
- assert 0 == test_db.scalar(
+ AND indexed_date >
+ (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""") == 0
+ assert test_db.scalar(
"""SELECT count(*) FROM placex WHERE rank_address = 30
- AND indexed_date < (SELECT max(indexed_date) FROM placex WHERE rank_address = 0)""")
+ AND indexed_date <
+ (SELECT max(indexed_date) FROM placex WHERE rank_address = 0)""") == 0
@pytest.mark.parametrize("threads", [1, 15])
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
- assert 31 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 31
+ assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest',
test_tokenizer, threads)
idx.index_by_rank(4, 15)
- assert 19 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 19
+ assert test_db.osmline_unindexed() == 1
- assert 0 == test_db.scalar("""
+ assert test_db.scalar("""
SELECT count(*) FROM placex
- WHERE indexed_status = 0 AND not rank_address between 4 and 15""")
+ WHERE indexed_status = 0 AND not rank_address between 4 and 15""") == 0
@pytest.mark.parametrize("threads", [1, 15])
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
- assert 31 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 31
+ assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
idx.index_by_rank(28, 30)
- assert 27 == test_db.placex_unindexed()
- assert 0 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 27
+ assert test_db.osmline_unindexed() == 0
- assert 0 == test_db.scalar("""
+ assert test_db.scalar("""
SELECT count(*) FROM placex
- WHERE indexed_status = 0 AND rank_address between 1 and 27""")
+ WHERE indexed_status = 0 AND rank_address between 1 and 27""") == 0
@pytest.mark.parametrize("threads", [1, 15])
def test_index_boundaries(test_db, threads, test_tokenizer):
test_db.add_place(rank_address=rank, rank_search=rank)
test_db.add_osmline()
- assert 37 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 37
+ assert test_db.osmline_unindexed() == 1
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
idx.index_boundaries(0, 30)
- assert 31 == test_db.placex_unindexed()
- assert 1 == test_db.osmline_unindexed()
+ assert test_db.placex_unindexed() == 31
+ assert test_db.osmline_unindexed() == 1
- assert 0 == test_db.scalar("""
+ assert test_db.scalar("""
SELECT count(*) FROM placex
- WHERE indexed_status = 0 AND class != 'boundary'""")
+ WHERE indexed_status = 0 AND class != 'boundary'""") == 0
@pytest.mark.parametrize("threads", [1, 15])
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
idx.index_postcodes()
- assert 0 == test_db.scalar("""SELECT count(*) FROM location_postcode
- WHERE indexed_status != 0""")
+ assert test_db.scalar("""SELECT count(*) FROM location_postcode
+ WHERE indexed_status != 0""") == 0
@pytest.mark.parametrize("analyse", [True, False])
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, 4)
idx.index_full(analyse=analyse)
- assert 0 == test_db.placex_unindexed()
- assert 0 == test_db.osmline_unindexed()
- assert 0 == test_db.scalar("""SELECT count(*) FROM location_postcode
- WHERE indexed_status != 0""")
+ assert test_db.placex_unindexed() == 0
+ assert test_db.osmline_unindexed() == 0
+ assert test_db.scalar("""SELECT count(*) FROM location_postcode
+ WHERE indexed_status != 0""") == 0
@pytest.mark.parametrize("threads", [1, 15])
idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
idx.index_by_rank(28, 30)
- assert 0 == test_db.placex_unindexed()
+ assert test_db.placex_unindexed() == 0
"""
Tests for creating new tokenizers.
"""
-import importlib
import pytest
from nominatim.db import properties
from dummy_tokenizer import DummyTokenizer
@pytest.fixture
-def test_config(def_config, tmp_path):
+def test_config(def_config, tmp_path, property_table, tokenizer_mock):
def_config.project_dir = tmp_path
return def_config
-def test_setup_dummy_tokenizer(temp_db_conn, test_config,
- tokenizer_mock, property_table):
+def test_setup_dummy_tokenizer(temp_db_conn, test_config):
tokenizer = factory.create_tokenizer(test_config)
assert isinstance(tokenizer, DummyTokenizer)
assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
-def test_setup_tokenizer_dir_exists(test_config, tokenizer_mock, property_table):
+def test_setup_tokenizer_dir_exists(test_config):
(test_config.project_dir / 'tokenizer').mkdir()
tokenizer = factory.create_tokenizer(test_config)
assert tokenizer.init_state == "new"
-def test_setup_tokenizer_dir_failure(test_config, tokenizer_mock, property_table):
+def test_setup_tokenizer_dir_failure(test_config):
(test_config.project_dir / 'tokenizer').write_text("foo")
with pytest.raises(UsageError):
factory.create_tokenizer(test_config)
-def test_setup_bad_tokenizer_name(test_config, monkeypatch):
+def test_setup_bad_tokenizer_name(def_config, tmp_path, monkeypatch):
+ def_config.project_dir = tmp_path
monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
with pytest.raises(UsageError):
- factory.create_tokenizer(test_config)
+ factory.create_tokenizer(def_config)
-def test_load_tokenizer(temp_db_conn, test_config,
- tokenizer_mock, property_table):
+
+def test_load_tokenizer(test_config):
factory.create_tokenizer(test_config)
tokenizer = factory.get_tokenizer_for_db(test_config)
assert tokenizer.init_state == "loaded"
-def test_load_no_tokenizer_dir(test_config, tokenizer_mock, property_table):
+def test_load_no_tokenizer_dir(test_config):
factory.create_tokenizer(test_config)
test_config.project_dir = test_config.project_dir / 'foo'
factory.get_tokenizer_for_db(test_config)
-def test_load_missing_propoerty(temp_db_cursor, test_config, tokenizer_mock, property_table):
+def test_load_missing_property(temp_db_cursor, test_config):
factory.create_tokenizer(test_config)
temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
with pytest.raises(UsageError):
factory.get_tokenizer_for_db(test_config)
-
@pytest.fixture
def tokenizer_setup(tokenizer_factory, test_config, monkeypatch, sql_preprocessor):
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
tok = tokenizer_factory()
tok.init_new_db(test_config)
RETURNS INTEGER AS $$ SELECT 342; $$ LANGUAGE SQL;
""")
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();')
tok = tokenizer_factory()
tok.init_new_db(test_config)
$$ LANGUAGE SQL""")
-@pytest.fixture
-def create_housenumbers(temp_db_cursor):
- temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_housenumbers(
- housenumbers TEXT[],
- OUT tokens TEXT, OUT normtext TEXT)
- AS $$
- SELECT housenumbers::TEXT, array_to_string(housenumbers, ';')
- $$ LANGUAGE SQL""")
-
-
@pytest.fixture
def make_keywords(temp_db_cursor, temp_db_with_extensions):
temp_db_cursor.execute(
def test_init_new(tokenizer_factory, test_config, monkeypatch,
temp_db_conn, sql_preprocessor):
monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', 'xxvv')
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
tok = tokenizer_factory()
tok.init_new_db(test_config)
assert outfile.stat().st_mode == 33261
-def test_init_module_load_failed(tokenizer_factory, test_config,
- monkeypatch, temp_db_conn):
+def test_init_module_load_failed(tokenizer_factory, test_config):
tok = tokenizer_factory()
with pytest.raises(UsageError):
(module_dir/ 'nominatim.so').write_text('CUSTOM nomiantim.so')
monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', str(module_dir))
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
tok = tokenizer_factory()
tok.init_new_db(test_config)
tokenizer_factory, test_config, table_factory,
monkeypatch, temp_db_cursor):
monkeypatch.setenv('NOMINATIM_MAX_WORD_FREQUENCY', '1133')
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
tok = tokenizer_factory()
tok.init_new_db(test_config)
monkeypatch.undo()
def test_migrate_database(tokenizer_factory, test_config, temp_db_conn, monkeypatch):
- monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+ monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None)
tok = tokenizer_factory()
tok.migrate_database(test_config)
(' strasse', 'strasse', 'highway', 'primary', 'in')))
-def test_update_special_phrase_delete_all(analyzer, word_table, temp_db_cursor,
- make_standard_name):
+def test_update_special_phrase_delete_all(analyzer, word_table, make_standard_name):
word_table.add_special(' foo', 'foo', 'amenity', 'prison', 'in')
word_table.add_special(' bar', 'bar', 'highway', 'road', None)
assert word_table.count_special() == 0
-def test_update_special_phrases_no_replace(analyzer, word_table, temp_db_cursor,
- make_standard_name):
- temp_db_cursor.execute("""INSERT INTO word (word_token, word, class, type, operator)
- VALUES (' foo', 'foo', 'amenity', 'prison', 'in'),
- (' bar', 'bar', 'highway', 'road', null)""")
+def test_update_special_phrases_no_replace(analyzer, word_table, make_standard_name):
+ word_table.add_special(' foo', 'foo', 'amenity', 'prison', 'in')
+ word_table.add_special(' bar', 'bar', 'highway', 'road', None)
- assert 2 == temp_db_cursor.scalar("SELECT count(*) FROM word WHERE class != 'place'""")
+ assert word_table.count_special() == 2
analyzer.update_special_phrases([], False)
- assert 2 == temp_db_cursor.scalar("SELECT count(*) FROM word WHERE class != 'place'""")
+ assert word_table.count_special() == 2
def test_update_special_phrase_modify(analyzer, word_table, make_standard_name):
assert word_table.count_special() == 2
analyzer.update_special_phrases([
- ('prison', 'amenity', 'prison', 'in'),
- ('bar', 'highway', 'road', '-'),
- ('garden', 'leisure', 'garden', 'near')
+ ('prison', 'amenity', 'prison', 'in'),
+ ('bar', 'highway', 'road', '-'),
+ ('garden', 'leisure', 'garden', 'near')
], True)
assert word_table.get_special() \
def test_process_place_names(analyzer, make_keywords):
-
info = analyzer.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})
assert info['names'] == '{1,2,3}'
-@pytest.mark.parametrize('pc', ['12345', 'AB 123', '34-345'])
-def test_process_place_postcode(analyzer, create_postcode_id, word_table, pc):
- info = analyzer.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345'])
+def test_process_place_postcode(analyzer, create_postcode_id, word_table, pcode):
+ analyzer.process_place({'address': {'postcode' : pcode}})
- assert word_table.get_postcodes() == {pc, }
+ assert word_table.get_postcodes() == {pcode, }
-@pytest.mark.parametrize('pc', ['12:23', 'ab;cd;f', '123;836'])
-def test_process_place_bad_postcode(analyzer, create_postcode_id, word_table, pc):
- info = analyzer.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12:23', 'ab;cd;f', '123;836'])
+def test_process_place_bad_postcode(analyzer, create_postcode_id, word_table, pcode):
+ analyzer.process_place({'address': {'postcode' : pcode}})
assert not word_table.get_postcodes()
-@pytest.mark.parametrize('hnr', ['123a', '1', '101'])
-def test_process_place_housenumbers_simple(analyzer, create_housenumbers, hnr):
- info = analyzer.process_place({'address': {'housenumber' : hnr}})
+class TestHousenumberName:
+
+ @staticmethod
+ @pytest.fixture(autouse=True)
+ def setup_create_housenumbers(temp_db_cursor):
+ temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_housenumbers(
+ housenumbers TEXT[],
+ OUT tokens TEXT, OUT normtext TEXT)
+ AS $$
+ SELECT housenumbers::TEXT, array_to_string(housenumbers, ';')
+ $$ LANGUAGE SQL""")
+
+
+ @staticmethod
+ @pytest.mark.parametrize('hnr', ['123a', '1', '101'])
+ def test_process_place_housenumbers_simple(analyzer, hnr):
+ info = analyzer.process_place({'address': {'housenumber' : hnr}})
- assert info['hnr'] == hnr
- assert info['hnr_tokens'].startswith("{")
+ assert info['hnr'] == hnr
+ assert info['hnr_tokens'].startswith("{")
-def test_process_place_housenumbers_lists(analyzer, create_housenumbers):
- info = analyzer.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
+ @staticmethod
+ def test_process_place_housenumbers_lists(analyzer):
+ info = analyzer.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
- assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
+ assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
-def test_process_place_housenumbers_duplicates(analyzer, create_housenumbers):
- info = analyzer.process_place({'address': {'housenumber' : '134',
- 'conscriptionnumber' : '134',
- 'streetnumber' : '99a'}})
+ @staticmethod
+ def test_process_place_housenumbers_duplicates(analyzer):
+ info = analyzer.process_place({'address': {'housenumber' : '134',
+ 'conscriptionnumber' : '134',
+ 'streetnumber' : '99a'}})
- assert set(info['hnr'].split(';')) == set(('134', '99a'))
+ assert set(info['hnr'].split(';')) == set(('134', '99a'))
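Grouping the housenumber tests into a class with an autouse fixture keeps the SQL stub creation in one place without every test having to request the fixture explicitly. A minimal generic sketch of that pytest pattern (all names here are hypothetical, not part of this change):

import pytest

class TestWithSharedSetup:
    """ Sketch: an autouse fixture defined inside a test class runs before
        every test method of that class, and only of that class.
    """

    @staticmethod
    @pytest.fixture(autouse=True)
    def setup_marker(tmp_path):
        # Runs automatically before each test method below.
        (tmp_path / 'marker.txt').write_text('ready')

    @staticmethod
    def test_marker_exists(tmp_path):
        # tmp_path is function-scoped, so this is the same directory
        # the autouse fixture just wrote to.
        assert (tmp_path / 'marker.txt').read_text() == 'ready'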
return _get_db_property
@pytest.fixture
-def tokenizer_setup(tokenizer_factory, test_config, monkeypatch, sql_preprocessor):
+def tokenizer_setup(tokenizer_factory, test_config):
tok = tokenizer_factory()
tok.init_new_db(test_config)
@pytest.fixture
-def analyzer(tokenizer_factory, test_config, monkeypatch, sql_preprocessor,
- word_table, temp_db_with_extensions, tmp_path):
+def analyzer(tokenizer_factory, test_config, monkeypatch,
+ temp_db_with_extensions, tmp_path):
sql = tmp_path / 'sql' / 'tokenizer' / 'legacy_icu_tokenizer.sql'
sql.write_text("SELECT 'a';")
@pytest.fixture
def getorcreate_term_id(temp_db_cursor):
temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_term_id(lookup_term TEXT)
- RETURNS INTEGER AS $$ SELECT nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
+ RETURNS INTEGER AS $$
+ SELECT nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
@pytest.fixture
def getorcreate_hnr_id(temp_db_cursor):
temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_hnr_id(lookup_term TEXT)
- RETURNS INTEGER AS $$ SELECT -nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
+ RETURNS INTEGER AS $$
+ SELECT -nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""")
-def test_init_new(tokenizer_factory, test_config, monkeypatch, db_prop,
- sql_preprocessor, place_table, word_table):
+def test_init_new(tokenizer_factory, test_config, monkeypatch, db_prop):
monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();')
tok = tokenizer_factory()
assert tok.abbreviations is not None
-def test_update_sql_functions(temp_db_conn, db_prop, temp_db_cursor,
+def test_update_sql_functions(db_prop, temp_db_cursor,
tokenizer_factory, test_config, table_factory,
- monkeypatch,
- sql_preprocessor, place_table, word_table):
+ monkeypatch):
monkeypatch.setenv('NOMINATIM_MAX_WORD_FREQUENCY', '1133')
tok = tokenizer_factory()
tok.init_new_db(test_config)
def test_make_standard_word(analyzer):
- with analyzer(abbr=(('STREET', 'ST'), ('tiny', 't'))) as a:
- assert a.make_standard_word('tiny street') == 'TINY ST'
+ with analyzer(abbr=(('STREET', 'ST'), ('tiny', 't'))) as anl:
+ assert anl.make_standard_word('tiny street') == 'TINY ST'
- with analyzer(abbr=(('STRASSE', 'STR'), ('STR', 'ST'))) as a:
- assert a.make_standard_word('Hauptstrasse') == 'HAUPTST'
+ with analyzer(abbr=(('STRASSE', 'STR'), ('STR', 'ST'))) as anl:
+ assert anl.make_standard_word('Hauptstrasse') == 'HAUPTST'
def test_make_standard_hnr(analyzer):
- with analyzer(abbr=(('IV', '4'),)) as a:
- assert a._make_standard_hnr('345') == '345'
- assert a._make_standard_hnr('iv') == 'IV'
+ with analyzer(abbr=(('IV', '4'),)) as anl:
+ assert anl._make_standard_hnr('345') == '345'
+ assert anl._make_standard_hnr('iv') == 'IV'
def test_update_postcodes_from_db_empty(analyzer, table_factory, word_table):
table_factory('location_postcode', 'postcode TEXT',
content=(('1234',), ('12 34',), ('AB23',), ('1234',)))
- with analyzer() as a:
- a.update_postcodes_from_db()
+ with analyzer() as anl:
+ anl.update_postcodes_from_db()
assert word_table.count() == 3
assert word_table.get_postcodes() == {'1234', '12 34', 'AB23'}
word_table.add_postcode(' 1234', '1234')
word_table.add_postcode(' 5678', '5678')
- with analyzer() as a:
- a.update_postcodes_from_db()
+ with analyzer() as anl:
+ anl.update_postcodes_from_db()
assert word_table.count() == 3
assert word_table.get_postcodes() == {'1234', '45BC', 'XX45'}
-def test_update_special_phrase_empty_table(analyzer, word_table, temp_db_cursor):
- with analyzer() as a:
- a.update_special_phrases([
+def test_update_special_phrase_empty_table(analyzer, word_table):
+ with analyzer() as anl:
+ anl.update_special_phrases([
("König bei", "amenity", "royal", "near"),
("Könige", "amenity", "royal", "-"),
("street", "highway", "primary", "in")
], True)
- assert temp_db_cursor.row_set("""SELECT word_token, word, class, type, operator
- FROM word WHERE class != 'place'""") \
- == set(((' KÖNIG BEI', 'könig bei', 'amenity', 'royal', 'near'),
- (' KÖNIGE', 'könige', 'amenity', 'royal', None),
- (' ST', 'street', 'highway', 'primary', 'in')))
+ assert word_table.get_special() \
+ == {(' KÖNIG BEI', 'könig bei', 'amenity', 'royal', 'near'),
+ (' KÖNIGE', 'könige', 'amenity', 'royal', None),
+ (' ST', 'street', 'highway', 'primary', 'in')}
-def test_update_special_phrase_delete_all(analyzer, word_table, temp_db_cursor):
- temp_db_cursor.execute("""INSERT INTO word (word_token, word, class, type, operator)
- VALUES (' FOO', 'foo', 'amenity', 'prison', 'in'),
- (' BAR', 'bar', 'highway', 'road', null)""")
+def test_update_special_phrase_delete_all(analyzer, word_table):
+ word_table.add_special(' FOO', 'foo', 'amenity', 'prison', 'in')
+ word_table.add_special(' BAR', 'bar', 'highway', 'road', None)
- assert 2 == temp_db_cursor.scalar("SELECT count(*) FROM word WHERE class != 'place'""")
+ assert word_table.count_special() == 2
- with analyzer() as a:
- a.update_special_phrases([], True)
+ with analyzer() as anl:
+ anl.update_special_phrases([], True)
- assert 0 == temp_db_cursor.scalar("SELECT count(*) FROM word WHERE class != 'place'""")
+ assert word_table.count_special() == 0
-def test_update_special_phrases_no_replace(analyzer, word_table, temp_db_cursor,):
- temp_db_cursor.execute("""INSERT INTO word (word_token, word, class, type, operator)
- VALUES (' FOO', 'foo', 'amenity', 'prison', 'in'),
- (' BAR', 'bar', 'highway', 'road', null)""")
+def test_update_special_phrases_no_replace(analyzer, word_table):
+ word_table.add_special(' FOO', 'foo', 'amenity', 'prison', 'in')
+ word_table.add_special(' BAR', 'bar', 'highway', 'road', None)
- assert 2 == temp_db_cursor.scalar("SELECT count(*) FROM word WHERE class != 'place'""")
+ assert word_table.count_special() == 2
- with analyzer() as a:
- a.update_special_phrases([], False)
+ with analyzer() as anl:
+ anl.update_special_phrases([], False)
- assert 2 == temp_db_cursor.scalar("SELECT count(*) FROM word WHERE class != 'place'""")
+ assert word_table.count_special() == 2
-def test_update_special_phrase_modify(analyzer, word_table, temp_db_cursor):
- temp_db_cursor.execute("""INSERT INTO word (word_token, word, class, type, operator)
- VALUES (' FOO', 'foo', 'amenity', 'prison', 'in'),
- (' BAR', 'bar', 'highway', 'road', null)""")
+def test_update_special_phrase_modify(analyzer, word_table):
+ word_table.add_special(' FOO', 'foo', 'amenity', 'prison', 'in')
+ word_table.add_special(' BAR', 'bar', 'highway', 'road', None)
- assert 2 == temp_db_cursor.scalar("SELECT count(*) FROM word WHERE class != 'place'""")
+ assert word_table.count_special() == 2
- with analyzer() as a:
- a.update_special_phrases([
- ('prison', 'amenity', 'prison', 'in'),
- ('bar', 'highway', 'road', '-'),
- ('garden', 'leisure', 'garden', 'near')
+ with analyzer() as anl:
+ anl.update_special_phrases([
+ ('prison', 'amenity', 'prison', 'in'),
+ ('bar', 'highway', 'road', '-'),
+ ('garden', 'leisure', 'garden', 'near')
], True)
- assert temp_db_cursor.row_set("""SELECT word_token, word, class, type, operator
- FROM word WHERE class != 'place'""") \
- == set(((' PRISON', 'prison', 'amenity', 'prison', 'in'),
- (' BAR', 'bar', 'highway', 'road', None),
- (' GARDEN', 'garden', 'leisure', 'garden', 'near')))
+ assert word_table.get_special() \
+ == {(' PRISON', 'prison', 'amenity', 'prison', 'in'),
+ (' BAR', 'bar', 'highway', 'road', None),
+ (' GARDEN', 'garden', 'leisure', 'garden', 'near')}
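These tests now go through word_table helpers instead of raw SQL. The word_table fixture comes from the shared mocks module, which is not part of this excerpt; judging from the raw SQL the helpers replace above, their shape is assumed to be roughly this (illustrative only, the real mocks.py may differ):

class WordTableSketch:
    """ Illustrative wrapper around the 'word' table, mirroring the removed SQL. """

    def __init__(self, conn):
        self.conn = conn

    def add_special(self, word_token, word, cls, typ, oper):
        with self.conn.cursor() as cur:
            cur.execute("""INSERT INTO word (word_token, word, class, type, operator)
                           VALUES (%s, %s, %s, %s, %s)""",
                        (word_token, word, cls, typ, oper))
        self.conn.commit()   # assumption: helpers commit so other cursors see the rows

    def count_special(self):
        with self.conn.cursor() as cur:
            cur.execute("SELECT count(*) FROM word WHERE class != 'place'")
            return cur.fetchone()[0]

    def get_special(self):
        with self.conn.cursor() as cur:
            cur.execute("""SELECT word_token, word, class, type, operator
                           FROM word WHERE class != 'place'""")
            return set(tuple(row) for row in cur)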
def test_process_place_names(analyzer, getorcreate_term_id):
- with analyzer() as a:
- info = a.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})
+ with analyzer() as anl:
+ info = anl.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})
assert info['names'] == '{1,2,3,4,5,6}'
-@pytest.mark.parametrize('pc', ['12345', 'AB 123', '34-345'])
-def test_process_place_postcode(analyzer, word_table, pc):
- with analyzer() as a:
- info = a.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345'])
+def test_process_place_postcode(analyzer, word_table, pcode):
+ with analyzer() as anl:
+ anl.process_place({'address': {'postcode' : pcode}})
- assert word_table.get_postcodes() == {pc, }
+ assert word_table.get_postcodes() == {pcode, }
-@pytest.mark.parametrize('pc', ['12:23', 'ab;cd;f', '123;836'])
-def test_process_place_bad_postcode(analyzer, word_table, pc):
- with analyzer() as a:
- info = a.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12:23', 'ab;cd;f', '123;836'])
+def test_process_place_bad_postcode(analyzer, word_table, pcode):
+ with analyzer() as anl:
+ anl.process_place({'address': {'postcode' : pcode}})
assert not word_table.get_postcodes()
@pytest.mark.parametrize('hnr', ['123a', '1', '101'])
def test_process_place_housenumbers_simple(analyzer, hnr, getorcreate_hnr_id):
- with analyzer() as a:
- info = a.process_place({'address': {'housenumber' : hnr}})
+ with analyzer() as anl:
+ info = anl.process_place({'address': {'housenumber' : hnr}})
assert info['hnr'] == hnr.upper()
assert info['hnr_tokens'] == "{-1}"
def test_process_place_housenumbers_lists(analyzer, getorcreate_hnr_id):
- with analyzer() as a:
- info = a.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
+ with analyzer() as anl:
+ info = anl.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
assert info['hnr_tokens'] == "{-1,-2,-3}"
def test_process_place_housenumbers_duplicates(analyzer, getorcreate_hnr_id):
- with analyzer() as a:
- info = a.process_place({'address': {'housenumber' : '134',
- 'conscriptionnumber' : '134',
- 'streetnumber' : '99a'}})
+ with analyzer() as anl:
+ info = anl.process_place({'address': {'housenumber' : '134',
+ 'conscriptionnumber' : '134',
+ 'streetnumber' : '99a'}})
assert set(info['hnr'].split(';')) == set(('134', '99A'))
assert info['hnr_tokens'] == "{-1,-2}"
"""
import pytest
-from nominatim.db.connection import connect
from nominatim.errors import UsageError
from nominatim.tools import admin
-@pytest.fixture
-def db(temp_db, placex_table):
- with connect('dbname=' + temp_db) as conn:
- yield conn
+@pytest.fixture(autouse=True)
+def create_placex_table(placex_table):
+ """ All tests in this module require the placex table to be set up.
+ """
-def test_analyse_indexing_no_objects(db):
+
+def test_analyse_indexing_no_objects(temp_db_conn):
with pytest.raises(UsageError):
- admin.analyse_indexing(db)
+ admin.analyse_indexing(temp_db_conn)
@pytest.mark.parametrize("oid", ['1234', 'N123a', 'X123'])
-def test_analyse_indexing_bad_osmid(db, oid):
+def test_analyse_indexing_bad_osmid(temp_db_conn, oid):
with pytest.raises(UsageError):
- admin.analyse_indexing(db, osm_id=oid)
+ admin.analyse_indexing(temp_db_conn, osm_id=oid)
-def test_analyse_indexing_unknown_osmid(db):
+def test_analyse_indexing_unknown_osmid(temp_db_conn):
with pytest.raises(UsageError):
- admin.analyse_indexing(db, osm_id='W12345674')
+ admin.analyse_indexing(temp_db_conn, osm_id='W12345674')
-def test_analyse_indexing_with_place_id(db, temp_db_cursor):
+def test_analyse_indexing_with_place_id(temp_db_conn, temp_db_cursor):
temp_db_cursor.execute("INSERT INTO placex (place_id) VALUES(12345)")
- admin.analyse_indexing(db, place_id=12345)
+ admin.analyse_indexing(temp_db_conn, place_id=12345)
-def test_analyse_indexing_with_osm_id(db, temp_db_cursor):
+def test_analyse_indexing_with_osm_id(temp_db_conn, temp_db_cursor):
temp_db_cursor.execute("""INSERT INTO placex (place_id, osm_type, osm_id)
VALUES(9988, 'N', 10000)""")
- admin.analyse_indexing(db, osm_id='N10000')
+ admin.analyse_indexing(temp_db_conn, osm_id='N10000')
def test_check_database_unknown_db(def_config, monkeypatch):
monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'pgsql:dbname=fjgkhughwgh2423gsags')
- assert 1 == chkdb.check_database(def_config)
+ assert chkdb.check_database(def_config) == 1
def test_check_database_fatal_test(def_config, temp_db):
- assert 1 == chkdb.check_database(def_config)
+ assert chkdb.check_database(def_config) == 1
def test_check_conection_good(temp_db_conn, def_config):
assert chkdb.check_connection(badconn, def_config) == chkdb.CheckState.FATAL
-def test_check_placex_table_good(temp_db_cursor, temp_db_conn, def_config):
- temp_db_cursor.execute('CREATE TABLE placex (place_id int)')
+def test_check_placex_table_good(table_factory, temp_db_conn, def_config):
+ table_factory('placex')
assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.OK
assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.FATAL
-def test_check_placex_table_size_good(temp_db_cursor, temp_db_conn, def_config):
- temp_db_cursor.execute('CREATE TABLE placex (place_id int)')
- temp_db_cursor.execute('INSERT INTO placex VALUES (1), (2)')
+def test_check_placex_table_size_good(table_factory, temp_db_conn, def_config):
+ table_factory('placex', content=((1, ), (2, )))
assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.OK
-def test_check_placex_table_size_bad(temp_db_cursor, temp_db_conn, def_config):
- temp_db_cursor.execute('CREATE TABLE placex (place_id int)')
+def test_check_placex_table_size_bad(table_factory, temp_db_conn, def_config):
+ table_factory('placex')
assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.FATAL
@pytest.mark.parametrize("check_result,state", [(None, chkdb.CheckState.OK),
("Something wrong", chkdb.CheckState.FAIL)])
-def test_check_tokenizer(tokenizer_mock, temp_db_conn, def_config, monkeypatch,
+def test_check_tokenizer(temp_db_conn, def_config, monkeypatch,
check_result, state):
class _TestTokenizer:
- def check_database(self):
+ @staticmethod
+ def check_database():
return check_result
monkeypatch.setattr(chkdb.tokenizer_factory, 'get_tokenizer_for_db',
- lambda *a, **k: _TestTokenizer())
+ lambda *a, **k: _TestTokenizer())
assert chkdb.check_tokenizer(temp_db_conn, def_config) == state
-def test_check_indexing_good(temp_db_cursor, temp_db_conn, def_config):
- temp_db_cursor.execute('CREATE TABLE placex (place_id int, indexed_status smallint)')
- temp_db_cursor.execute('INSERT INTO placex VALUES (1, 0), (2, 0)')
+def test_check_indexing_good(table_factory, temp_db_conn, def_config):
+ table_factory('placex', 'place_id int, indexed_status smallint',
+ content=((1, 0), (2, 0)))
assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.OK
-def test_check_indexing_bad(temp_db_cursor, temp_db_conn, def_config):
- temp_db_cursor.execute('CREATE TABLE placex (place_id int, indexed_status smallint)')
- temp_db_cursor.execute('INSERT INTO placex VALUES (1, 0), (2, 2)')
+def test_check_indexing_bad(table_factory, temp_db_conn, def_config):
+ table_factory('placex', 'place_id int, indexed_status smallint',
+ content=((1, 0), (2, 2)))
assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.FAIL
def test_check_tiger_table_disabled(temp_db_conn, def_config, monkeypatch):
- monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA' , 'no')
+ monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'no')
assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.NOT_APPLICABLE
def test_check_tiger_table_enabled(temp_db_cursor, temp_db_conn, def_config, monkeypatch):
- monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA' , 'yes')
+ monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'yes')
assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.FAIL
temp_db_cursor.execute('CREATE TABLE location_property_tiger (place_id int)')
temp_db_cursor.execute('INSERT INTO location_property_tiger VALUES (1), (2)')
assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.OK
-
"""
Tests for functions to import a new database.
"""
+from pathlib import Path
+
import pytest
import psycopg2
-import sys
-from pathlib import Path
from nominatim.tools import database_import
from nominatim.errors import UsageError
try:
with conn.cursor() as cur:
cur.execute("SELECT distinct partition FROM country_name")
- partitions = set([r[0] for r in list(cur)])
+            partitions = {r[0] for r in cur}
if no_partitions:
- assert partitions == set([0])
+            assert partitions == {0}
else:
assert len(partitions) > 10
finally:
database_import.create_db('dbname=' + nonexistant_db, rouser='sdfwkjkjgdugu2;jgsafkljas;')
-def test_setup_extensions(temp_db_conn, temp_db_cursor):
+def test_setup_extensions(temp_db_conn, table_factory):
database_import.setup_extensions(temp_db_conn)
- temp_db_cursor.execute('CREATE TABLE t (h HSTORE, geom GEOMETRY(Geometry, 4326))')
+ # Use table creation to check that hstore and geometry types are available.
+ table_factory('t', 'h HSTORE, geom GEOMETRY(Geometry, 4326)')
def test_setup_extensions_old_postgis(temp_db_conn, monkeypatch):
database_import.setup_extensions(temp_db_conn)
-def test_import_base_data(src_dir, temp_db, temp_db_cursor):
- temp_db_cursor.execute('CREATE EXTENSION hstore')
- temp_db_cursor.execute('CREATE EXTENSION postgis')
- database_import.import_base_data('dbname=' + temp_db, src_dir / 'data')
+def test_import_base_data(dsn, src_dir, temp_db_with_extensions, temp_db_cursor):
+ database_import.import_base_data(dsn, src_dir / 'data')
- assert temp_db_cursor.scalar('SELECT count(*) FROM country_name') > 0
+ assert temp_db_cursor.table_rows('country_name') > 0
-def test_import_base_data_ignore_partitions(src_dir, temp_db, temp_db_cursor):
- temp_db_cursor.execute('CREATE EXTENSION hstore')
- temp_db_cursor.execute('CREATE EXTENSION postgis')
- database_import.import_base_data('dbname=' + temp_db, src_dir / 'data',
- ignore_partitions=True)
+def test_import_base_data_ignore_partitions(dsn, src_dir, temp_db_with_extensions,
+ temp_db_cursor):
+ database_import.import_base_data(dsn, src_dir / 'data', ignore_partitions=True)
- assert temp_db_cursor.scalar('SELECT count(*) FROM country_name') > 0
- assert temp_db_cursor.scalar('SELECT count(*) FROM country_name WHERE partition != 0') == 0
+ assert temp_db_cursor.table_rows('country_name') > 0
+ assert temp_db_cursor.table_rows('country_name', where='partition != 0') == 0
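The where= keyword used here implies that the shared cursor helper (CursorForTesting, defined in the test suite's cursor module, which is not part of this excerpt) accepts an optional filter when counting rows. A purely illustrative stand-alone helper with the same calling convention might look like this:

def count_rows(cur, table, where=None):
    """ Sketch only: count the rows of a table, optionally applying a WHERE
        clause. The real helper lives in the shared cursor module and may be
        implemented differently.
    """
    sql = 'SELECT count(*) FROM ' + table
    if where is not None:
        sql += ' WHERE ' + where
    cur.execute(sql)
    return cur.fetchone()[0]

# e.g. count_rows(temp_db_cursor, 'country_name', where='partition != 0') == 0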
-def test_import_osm_data_simple(temp_db_cursor,osm2pgsql_options):
- temp_db_cursor.execute('CREATE TABLE place (id INT)')
- temp_db_cursor.execute('INSERT INTO place values (1)')
+def test_import_osm_data_simple(table_factory, osm2pgsql_options):
+ table_factory('place', content=((1, ), ))
database_import.import_osm_data('file.pdf', osm2pgsql_options)
-def test_import_osm_data_simple_no_data(temp_db_cursor,osm2pgsql_options):
- temp_db_cursor.execute('CREATE TABLE place (id INT)')
+def test_import_osm_data_simple_no_data(table_factory, osm2pgsql_options):
+ table_factory('place')
with pytest.raises(UsageError, match='No data.*'):
database_import.import_osm_data('file.pdf', osm2pgsql_options)
-def test_import_osm_data_drop(temp_db_conn, temp_db_cursor, tmp_path, osm2pgsql_options):
- temp_db_cursor.execute('CREATE TABLE place (id INT)')
- temp_db_cursor.execute('CREATE TABLE planet_osm_nodes (id INT)')
- temp_db_cursor.execute('INSERT INTO place values (1)')
+def test_import_osm_data_drop(table_factory, temp_db_conn, tmp_path, osm2pgsql_options):
+ table_factory('place', content=((1, ), ))
+ table_factory('planet_osm_nodes')
flatfile = tmp_path / 'flatfile'
flatfile.write_text('touch')
assert not temp_db_conn.table_exists('planet_osm_nodes')
-def test_import_osm_data_default_cache(temp_db_cursor,osm2pgsql_options):
- temp_db_cursor.execute('CREATE TABLE place (id INT)')
- temp_db_cursor.execute('INSERT INTO place values (1)')
+def test_import_osm_data_default_cache(table_factory, osm2pgsql_options):
+ table_factory('place', content=((1, ), ))
osm2pgsql_options['osm2pgsql_cache'] = 0
@pytest.mark.parametrize("threads", (1, 5))
-def test_load_data(dsn, src_dir, place_row, placex_table, osmline_table,
+def test_load_data(dsn, place_row, placex_table, osmline_table,
word_table, temp_db_cursor, threads):
for func in ('precompute_words', 'getorcreate_housenumber_id', 'make_standard_name'):
temp_db_cursor.execute("""CREATE FUNCTION {} (src TEXT)
"""
from pathlib import Path
import subprocess
-import tempfile
import pytest
import nominatim.tools.exec_utils as exec_utils
-@pytest.fixture
-def nominatim_env(tmp_phplib_dir, def_config):
- class _NominatimEnv:
- config = def_config
- phplib_dir = tmp_phplib_dir
- data_dir = Path('data')
- project_dir = Path('.')
- sqllib_dir = Path('lib-sql')
- config_dir = Path('settings')
- module_dir = 'module'
- osm2pgsql_path = 'osm2pgsql'
-
- return _NominatimEnv
-
-@pytest.fixture
-def test_script(nominatim_env):
- def _create_file(code):
- with (nominatim_env.phplib_dir / 'admin' / 't.php').open(mode='w') as fd:
- fd.write('<?php\n')
- fd.write(code + '\n')
+class TestRunLegacyScript:
- return 't.php'
+ @pytest.fixture(autouse=True)
+ def setup_nominatim_env(self, tmp_path, def_config):
+ tmp_phplib_dir = tmp_path / 'phplib'
+ tmp_phplib_dir.mkdir()
+ (tmp_phplib_dir / 'admin').mkdir()
+
+ class _NominatimEnv:
+ config = def_config
+ phplib_dir = tmp_phplib_dir
+ data_dir = Path('data')
+ project_dir = Path('.')
+ sqllib_dir = Path('lib-sql')
+ config_dir = Path('settings')
+ module_dir = 'module'
+ osm2pgsql_path = 'osm2pgsql'
- return _create_file
+ self.testenv = _NominatimEnv
-@pytest.fixture(params=[0, 1, 15, 255])
-def return_code(request):
- return request.param
-### run_legacy_script
+ def mk_script(self, code):
+ codefile = self.testenv.phplib_dir / 'admin' / 't.php'
+ codefile.write_text('<?php\n' + code + '\n')
-def test_run_legacy_return_exit_code(nominatim_env, test_script, return_code):
- fname = test_script('exit({});'.format(return_code))
- assert return_code == exec_utils.run_legacy_script(fname,
- nominatim_env=nominatim_env)
+ return 't.php'
-def test_run_legacy_return_throw_on_fail(nominatim_env, test_script):
- fname = test_script('exit(11);')
- with pytest.raises(subprocess.CalledProcessError):
- exec_utils.run_legacy_script(fname, nominatim_env=nominatim_env,
- throw_on_fail=True)
+ @pytest.mark.parametrize("return_code", (0, 1, 15, 255))
+ def test_run_legacy_return_exit_code(self, return_code):
+ fname = self.mk_script('exit({});'.format(return_code))
+ assert return_code == \
+ exec_utils.run_legacy_script(fname, nominatim_env=self.testenv)
-def test_run_legacy_return_dont_throw_on_success(nominatim_env, test_script):
- fname = test_script('exit(0);')
- assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=nominatim_env,
- throw_on_fail=True)
+ def test_run_legacy_return_throw_on_fail(self):
+ fname = self.mk_script('exit(11);')
+ with pytest.raises(subprocess.CalledProcessError):
+ exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
+ throw_on_fail=True)
-def test_run_legacy_use_given_module_path(nominatim_env, test_script):
- fname = test_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == '' ? 0 : 23);")
- assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=nominatim_env)
+ def test_run_legacy_return_dont_throw_on_success(self):
+ fname = self.mk_script('exit(0);')
+ assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
+ throw_on_fail=True) == 0
+ def test_run_legacy_use_given_module_path(self):
+ fname = self.mk_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == '' ? 0 : 23);")
-def test_run_legacy_do_not_overwrite_module_path(nominatim_env, test_script, monkeypatch):
- monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', 'other')
- fname = test_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == 'other' ? 0 : 1);")
+ assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
- assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=nominatim_env)
-### run_api_script
+ def test_run_legacy_do_not_overwrite_module_path(self, monkeypatch):
+ monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', 'other')
+ fname = self.mk_script(
+ "exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == 'other' ? 0 : 1);")
-@pytest.fixture
-def tmp_project_dir():
- with tempfile.TemporaryDirectory() as tempd:
- project_dir = Path(tempd)
- webdir = project_dir / 'website'
- webdir.mkdir()
+ assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
- with (webdir / 'test.php').open(mode='w') as fd:
- fd.write("<?php\necho 'OK\n';")
- yield project_dir
+class TestRunApiScript:
+
+ @staticmethod
+ @pytest.fixture(autouse=True)
+ def setup_project_dir(tmp_path):
+ webdir = tmp_path / 'website'
+ webdir.mkdir()
+ (webdir / 'test.php').write_text("<?php\necho 'OK\n';")
+
-def test_run_api(tmp_project_dir):
- assert 0 == exec_utils.run_api_script('test', tmp_project_dir)
+ @staticmethod
+ def test_run_api(tmp_path):
+ assert exec_utils.run_api_script('test', tmp_path) == 0
-def test_run_api_execution_error(tmp_project_dir):
- assert 0 != exec_utils.run_api_script('badname', tmp_project_dir)
+ @staticmethod
+ def test_run_api_execution_error(tmp_path):
+ assert exec_utils.run_api_script('badname', tmp_path) != 0
-def test_run_api_with_extra_env(tmp_project_dir):
- extra_env = dict(SCRIPT_FILENAME=str(tmp_project_dir / 'website' / 'test.php'))
- assert 0 == exec_utils.run_api_script('badname', tmp_project_dir,
- extra_env=extra_env)
+ @staticmethod
+ def test_run_api_with_extra_env(tmp_path):
+ extra_env = dict(SCRIPT_FILENAME=str(tmp_path / 'website' / 'test.php'))
+ assert exec_utils.run_api_script('badname', tmp_path, extra_env=extra_env) == 0
### run_osm2pgsql
"""
Tests for freeze functions (removing unused database parts).
"""
-import pytest
-
from nominatim.tools import freeze
NOMINATIM_RUNTIME_TABLES = [
'wikipedia_article', 'wikipedia_redirect'
]
-def test_drop_tables(temp_db_conn, temp_db_cursor):
+def test_drop_tables(temp_db_conn, temp_db_cursor, table_factory):
for table in NOMINATIM_RUNTIME_TABLES + NOMINATIM_DROP_TABLES:
- temp_db_cursor.execute('CREATE TABLE {} (id int)'.format(table))
+ table_factory(table)
freeze.drop_update_tables(temp_db_conn)
Tests for the special phrase import methods
of the SPImporter class.
"""
-from nominatim.errors import UsageError
-from pathlib import Path
-import tempfile
from shutil import copyfile
import pytest
from nominatim.tools.special_phrases.sp_importer import SPImporter
from nominatim.tools.special_phrases.sp_wiki_loader import SPWikiLoader
-from nominatim.tools.special_phrases.sp_csv_loader import SPCsvLoader
from nominatim.tools.special_phrases.special_phrase import SpecialPhrase
+from nominatim.errors import UsageError
+
+from cursor import CursorForTesting
+
+@pytest.fixture
+def testfile_dir(src_dir):
+ return src_dir / 'test' / 'testfiles'
+
+
+@pytest.fixture
+def sp_importer(temp_db_conn, def_config, temp_phplib_dir_with_migration):
+ """
+ Return an instance of SPImporter.
+ """
+ loader = SPWikiLoader(def_config, ['en'])
+ return SPImporter(def_config, temp_phplib_dir_with_migration, temp_db_conn, loader)
+
+
+@pytest.fixture
+def temp_phplib_dir_with_migration(src_dir, tmp_path):
+ """
+    Return a temporary php directory containing a migration subdirectory
+    with the PhraseSettingsToJson.php script inside.
+ """
+ migration_file = (src_dir / 'lib-php' / 'migration' / 'PhraseSettingsToJson.php').resolve()
-TEST_BASE_DIR = Path(__file__) / '..' / '..'
+ phpdir = tmp_path / 'tempphp'
+ phpdir.mkdir()
-def test_fetch_existing_place_classtype_tables(sp_importer, temp_db_cursor):
+ (phpdir / 'migration').mkdir()
+ migration_dest_path = (phpdir / 'migration' / 'PhraseSettingsToJson.php').resolve()
+ copyfile(str(migration_file), str(migration_dest_path))
+
+ return phpdir
+
+
+@pytest.fixture
+def xml_wiki_content(src_dir):
+ """
+    Return the content of the static XML test file.
+ """
+ xml_test_content = src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt'
+ return xml_test_content.read_text()
+
+
+@pytest.fixture
+def default_phrases(table_factory):
+ table_factory('place_classtype_testclasstypetable_to_delete')
+ table_factory('place_classtype_testclasstypetable_to_keep')
+
+
+def test_fetch_existing_place_classtype_tables(sp_importer, table_factory):
"""
Check for the fetch_existing_place_classtype_tables() method.
It should return the table just created.
"""
- temp_db_cursor.execute('CREATE TABLE place_classtype_testclasstypetable()')
+ table_factory('place_classtype_testclasstypetable')
sp_importer._fetch_existing_place_classtype_tables()
contained_table = sp_importer.table_phrases_to_delete.pop()
assert isinstance(black_list, dict) and isinstance(white_list, dict)
-def test_convert_php_settings(sp_importer):
+def test_convert_php_settings(sp_importer, testfile_dir, tmp_path):
"""
Test that _convert_php_settings_if_needed() converts the given
php file to a json file.
"""
- php_file = (TEST_BASE_DIR / 'testfiles' / 'phrase_settings.php').resolve()
+ php_file = (testfile_dir / 'phrase_settings.php').resolve()
- with tempfile.TemporaryDirectory() as temp_dir:
- temp_settings = (Path(temp_dir) / 'phrase_settings.php').resolve()
- copyfile(php_file, temp_settings)
- sp_importer._convert_php_settings_if_needed(temp_settings)
+ temp_settings = (tmp_path / 'phrase_settings.php').resolve()
+ copyfile(php_file, temp_settings)
+ sp_importer._convert_php_settings_if_needed(temp_settings)
- assert (Path(temp_dir) / 'phrase_settings.json').is_file()
+ assert (tmp_path / 'phrase_settings.json').is_file()
def test_convert_settings_wrong_file(sp_importer):
"""
with pytest.raises(UsageError, match='random_file is not a valid file.'):
sp_importer._convert_php_settings_if_needed('random_file')
-def test_convert_settings_json_already_exist(sp_importer):
+def test_convert_settings_json_already_exist(sp_importer, testfile_dir):
"""
Test that if '_convert_php_settings_if_needed' is given a php file path
and the corresponding json file already exists, the json file is returned.
"""
- php_file = (TEST_BASE_DIR / 'testfiles' / 'phrase_settings.php').resolve()
- json_file = (TEST_BASE_DIR / 'testfiles' / 'phrase_settings.json').resolve()
+ php_file = (testfile_dir / 'phrase_settings.php').resolve()
+ json_file = (testfile_dir / 'phrase_settings.json').resolve()
returned = sp_importer._convert_php_settings_if_needed(php_file)
assert returned == json_file
-def test_convert_settings_giving_json(sp_importer):
+def test_convert_settings_giving_json(sp_importer, testfile_dir):
"""
Test that if '_convert_php_settings_if_needed' is given a json file path,
the same path is returned directly.
"""
- json_file = (TEST_BASE_DIR / 'testfiles' / 'phrase_settings.json').resolve()
+ json_file = (testfile_dir / 'phrase_settings.json').resolve()
returned = sp_importer._convert_php_settings_if_needed(json_file)
assert returned == json_file
-def test_create_place_classtype_indexes(temp_db_conn, sp_importer):
+def test_create_place_classtype_indexes(temp_db_with_extensions, temp_db_conn,
+ table_factory, sp_importer):
"""
Test that _create_place_classtype_indexes() creates the
place_id index and centroid index on the right place_class_type table.
phrase_type = 'type'
table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type)
- with temp_db_conn.cursor() as temp_db_cursor:
- temp_db_cursor.execute("CREATE EXTENSION postgis;")
- temp_db_cursor.execute('CREATE TABLE {}(place_id BIGINT, centroid GEOMETRY)'.format(table_name))
+ table_factory(table_name, 'place_id BIGINT, centroid GEOMETRY')
sp_importer._create_place_classtype_indexes('', phrase_class, phrase_type)
assert check_table_exist(temp_db_conn, phrase_class, phrase_type)
-def test_grant_access_to_web_user(temp_db_conn, def_config, sp_importer):
+def test_grant_access_to_web_user(temp_db_conn, table_factory, def_config, sp_importer):
"""
Test that _grant_access_to_webuser() gives the
right access to the web user.
phrase_type = 'type'
table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type)
- with temp_db_conn.cursor() as temp_db_cursor:
- temp_db_cursor.execute('CREATE TABLE {}()'.format(table_name))
+ table_factory(table_name)
sp_importer._grant_access_to_webuser(phrase_class, phrase_type)
assert check_grant_access(temp_db_conn, def_config.DATABASE_WEBUSER, pair[0], pair[1])
def test_remove_non_existent_tables_from_db(sp_importer, default_phrases,
- temp_db_conn):
+ temp_db_conn):
"""
Check the _remove_non_existent_tables_from_db() method.
place_classtype tables contained in table_phrases_to_delete should
be deleted.
"""
- with temp_db_conn.cursor() as temp_db_cursor:
- sp_importer.table_phrases_to_delete = {
- 'place_classtype_testclasstypetable_to_delete'
- }
+ sp_importer.table_phrases_to_delete = {
+ 'place_classtype_testclasstypetable_to_delete'
+ }
+
+ query_tables = """
+ SELECT table_name
+ FROM information_schema.tables
+ WHERE table_schema='public'
+ AND table_name like 'place_classtype_%';
+ """
- query_tables = """
- SELECT table_name
- FROM information_schema.tables
- WHERE table_schema='public'
- AND table_name like 'place_classtype_%';
- """
+ sp_importer._remove_non_existent_tables_from_db()
- sp_importer._remove_non_existent_tables_from_db()
+ # Changes are not committed yet. Use temp_db_conn for checking results.
+ with temp_db_conn.cursor(cursor_factory=CursorForTesting) as cur:
+ assert cur.row_set(query_tables) \
+ == {('place_classtype_testclasstypetable_to_keep', )}
- temp_db_cursor.execute(query_tables)
- tables_result = temp_db_cursor.fetchall()
- assert (len(tables_result) == 1 and
- tables_result[0][0] == 'place_classtype_testclasstypetable_to_keep'
- )
@pytest.mark.parametrize("should_replace", [(True), (False)])
def test_import_phrases(monkeypatch, temp_db_conn, def_config, sp_importer,
- placex_table, tokenizer_mock, should_replace):
+ placex_table, table_factory, tokenizer_mock,
+ xml_wiki_content, should_replace):
"""
Check that the main import_phrases() method is well executed.
It should create the place_classtype table, the place_id and centroid indexes,
"""
#Add some data to the database before execution in order to test
#what is deleted and what is preserved.
- with temp_db_conn.cursor() as temp_db_cursor:
- temp_db_cursor.execute("""
- CREATE TABLE place_classtype_amenity_animal_shelter();
- CREATE TABLE place_classtype_wrongclass_wrongtype();""")
-
+ table_factory('place_classtype_amenity_animal_shelter')
+ table_factory('place_classtype_wrongclass_wrongtype')
+
monkeypatch.setattr('nominatim.tools.special_phrases.sp_wiki_loader.SPWikiLoader._get_wiki_content',
- mock_get_wiki_content)
+ lambda self, lang: xml_wiki_content)
tokenizer = tokenizer_mock()
sp_importer.import_phrases(tokenizer, should_replace)
if should_replace:
assert not check_table_exist(temp_db_conn, 'wrong_class', 'wrong_type')
- #Format (query, should_return_something_bool) use to easily execute all asserts
- queries_tests = set()
-
- #Used to check that correct place_classtype table already in the datase before is still there.
- query_existing_table = """
- SELECT table_name
- FROM information_schema.tables
- WHERE table_schema='public'
- AND table_name = 'place_classtype_amenity_animal_shelter';
- """
- queries_tests.add((query_existing_table, True))
-
- #Used to check that wrong place_classtype table was deleted from the database.
- query_wrong_table = """
- SELECT table_name
- FROM information_schema.tables
- WHERE table_schema='public'
- AND table_name = 'place_classtype_wrongclass_wrongtype';
- """
+ assert temp_db_conn.table_exists('place_classtype_amenity_animal_shelter')
if should_replace:
- queries_tests.add((query_wrong_table, False))
-
- with temp_db_conn.cursor() as temp_db_cursor:
- for query in queries_tests:
- temp_db_cursor.execute(query[0])
- if (query[1] == True):
- assert temp_db_cursor.fetchone()
- else:
- assert not temp_db_cursor.fetchone()
-
-def mock_get_wiki_content(self, lang):
- """
- Mock the _get_wiki_content() method to return
- static xml test file content.
- """
- return get_test_xml_wiki_content()
-
-def get_test_xml_wiki_content():
- """
- return the content of the static xml test file.
- """
- xml_test_content_path = (TEST_BASE_DIR / 'testdata' / 'special_phrases_test_content.txt').resolve()
- with open(xml_test_content_path) as xml_content_reader:
- return xml_content_reader.read()
+ assert not temp_db_conn.table_exists('place_classtype_wrongclass_wrongtype')
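Note how _get_wiki_content is now monkeypatched on the class with a lambda that still takes self, since the attribute is replaced before the method is bound to an instance. A tiny self-contained illustration of that pytest pattern (names are hypothetical):

import pytest

class Loader:
    def fetch(self, lang):
        raise RuntimeError('would hit the network')

def test_fetch_is_patched(monkeypatch):
    # Patching on the class means the replacement is called like a method,
    # so it must accept 'self' as its first argument.
    monkeypatch.setattr(Loader, 'fetch', lambda self, lang: '<static/>')
    assert Loader().fetch('en') == '<static/>'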
def check_table_exist(temp_db_conn, phrase_class, phrase_type):
"""
Verify that the place_classtype table exists for the given
phrase_class and phrase_type.
"""
- table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type)
+ return temp_db_conn.table_exists('place_classtype_{}_{}'.format(phrase_class, phrase_type))
- with temp_db_conn.cursor() as temp_db_cursor:
- temp_db_cursor.execute("""
- SELECT *
- FROM information_schema.tables
- WHERE table_type='BASE TABLE'
- AND table_name='{}'""".format(table_name))
- return temp_db_cursor.fetchone()
def check_grant_access(temp_db_conn, user, phrase_class, phrase_type):
"""
and
temp_db_conn.index_exists(index_prefix + 'place_id')
)
-
-@pytest.fixture
-def sp_importer(temp_db_conn, def_config, temp_phplib_dir_with_migration):
- """
- Return an instance of SPImporter.
- """
- loader = SPWikiLoader(def_config, ['en'])
- return SPImporter(def_config, temp_phplib_dir_with_migration, temp_db_conn, loader)
-
-@pytest.fixture
-def temp_phplib_dir_with_migration():
- """
- Return temporary phpdir with migration subdirectory and
- PhraseSettingsToJson.php script inside.
- """
- migration_file = (TEST_BASE_DIR / '..' / 'lib-php' / 'migration'
- / 'PhraseSettingsToJson.php').resolve()
- with tempfile.TemporaryDirectory() as phpdir:
- (Path(phpdir) / 'migration').mkdir()
- migration_dest_path = (Path(phpdir) / 'migration' / 'PhraseSettingsToJson.php').resolve()
- copyfile(migration_file, migration_dest_path)
-
- yield Path(phpdir)
-
-@pytest.fixture
-def default_phrases(temp_db_cursor):
- temp_db_cursor.execute("""
- CREATE TABLE place_classtype_testclasstypetable_to_delete();
- CREATE TABLE place_classtype_testclasstypetable_to_keep();""")
return MockPostcodeTable(temp_db_conn)
-def test_import_postcodes_empty(dsn, postcode_table, tmp_path, tokenizer):
+def test_postcodes_empty(dsn, postcode_table, tmp_path, tokenizer):
postcodes.update_postcodes(dsn, tmp_path, tokenizer)
assert not postcode_table.row_set
-def test_import_postcodes_add_new(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_add_new(dsn, placex_table, postcode_table, tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='9486'))
postcode_table.add('yy', '9486', 99, 34)
assert postcode_table.row_set == {('xx', '9486', 10, 12), }
-def test_import_postcodes_replace_coordinates(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_replace_coordinates(dsn, placex_table, postcode_table,
+ tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
postcode_table.add('xx', 'AB 4511', 99, 34)
assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)}
-def test_import_postcodes_replace_coordinates_close(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_replace_coordinates_close(dsn, placex_table, postcode_table,
+ tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
postcode_table.add('xx', 'AB 4511', 10, 11.99999)
assert postcode_table.row_set == {('xx', 'AB 4511', 10, 11.99999)}
-def test_import_postcodes_remove(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_remove(dsn, placex_table, postcode_table, tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
postcode_table.add('xx', 'badname', 10, 12)
assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)}
-def test_import_postcodes_ignore_empty_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_ignore_empty_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
placex_table.add(country=None, geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
assert not postcode_table.row_set
-def test_import_postcodes_remove_all(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_remove_all(dsn, postcode_table, tmp_path, tokenizer):
postcode_table.add('ch', '5613', 10, 12)
postcodes.update_postcodes(dsn, tmp_path, tokenizer)
assert not postcode_table.row_set
-def test_import_postcodes_multi_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
+def test_postcodes_multi_country(dsn, placex_table, postcode_table, tmp_path, tokenizer):
placex_table.add(country='de', geom='POINT(10 12)',
address=dict(postcode='54451'))
placex_table.add(country='cc', geom='POINT(100 56)',
@pytest.mark.parametrize("gzipped", [True, False])
-def test_import_postcodes_extern(dsn, placex_table, postcode_table, tmp_path,
- tokenizer, gzipped):
+def test_postcodes_extern(dsn, placex_table, postcode_table, tmp_path,
+ tokenizer, gzipped):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
('xx', 'CD 4511', -10, -5)}
-def test_import_postcodes_extern_bad_column(dsn, placex_table, postcode_table,
- tmp_path, tokenizer):
+def test_postcodes_extern_bad_column(dsn, placex_table, postcode_table,
+ tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)}
-def test_import_postcodes_extern_bad_number(dsn, placex_table, postcode_table,
- tmp_path, tokenizer):
+def test_postcodes_extern_bad_number(dsn, placex_table, postcode_table,
+ tmp_path, tokenizer):
placex_table.add(country='xx', geom='POINT(10 12)',
address=dict(postcode='AB 4511'))
from nominatim.tools import refresh
-TEST_DIR = (Path(__file__) / '..' / '..').resolve()
-
def test_refresh_import_wikipedia_not_existing(dsn):
- assert 1 == refresh.import_wikipedia_articles(dsn, Path('.'))
+ assert refresh.import_wikipedia_articles(dsn, Path('.')) == 1
@pytest.mark.parametrize("replace", (True, False))
-def test_refresh_import_wikipedia(dsn, table_factory, temp_db_cursor, replace):
+def test_refresh_import_wikipedia(dsn, src_dir, table_factory, temp_db_cursor, replace):
if replace:
table_factory('wikipedia_article')
table_factory('wikipedia_redirect')
# use the small wikipedia file for the API testdb
- assert 0 == refresh.import_wikipedia_articles(dsn, TEST_DIR / 'testdb')
+ assert refresh.import_wikipedia_articles(dsn, src_dir / 'test' / 'testdb') == 0
- assert temp_db_cursor.scalar('SELECT count(*) FROM wikipedia_article') > 0
- assert temp_db_cursor.scalar('SELECT count(*) FROM wikipedia_redirect') > 0
+ assert temp_db_cursor.table_rows('wikipedia_article') > 0
+ assert temp_db_cursor.table_rows('wikipedia_redirect') > 0
def test_load_ranks_def_config(temp_db_conn, temp_db_cursor, def_config):
load_address_levels_from_file(temp_db_conn, Path(def_config.ADDRESS_LEVEL_CONFIG))
- assert temp_db_cursor.scalar('SELECT count(*) FROM address_levels') > 0
+ assert temp_db_cursor.table_rows('address_levels') > 0
def test_load_ranks_from_file(temp_db_conn, temp_db_cursor, tmp_path):
test_file = tmp_path / 'test_levels.json'
load_address_levels_from_file(temp_db_conn, test_file)
- assert temp_db_cursor.scalar('SELECT count(*) FROM address_levels') > 0
+ assert temp_db_cursor.table_rows('address_levels') > 0
def test_load_ranks_from_broken_file(temp_db_conn, tmp_path):
[{"tags": {"place": {"village": 14}}},
{"countries": ['de'],
"tags": {"place": {"village": 15}}},
- {"countries": ['uk', 'us' ],
+ {"countries": ['uk', 'us'],
"tags": {"place": {"village": 16}}}
])
def test_load_ranks_multiple_keys(temp_db_conn, temp_db_cursor):
load_address_levels(temp_db_conn, 'levels',
- [{"tags":
- {"place": {"city": 14},
- "boundary": {"administrative2" : 4}}
+ [{"tags": {"place": {"city": 14},
+ "boundary": {"administrative2" : 4}}
}])
assert temp_db_cursor.row_set('SELECT * FROM levels') == \
def test_load_ranks_address(temp_db_conn, temp_db_cursor):
load_address_levels(temp_db_conn, 'levels',
- [{"tags":
- {"place": {"city": 14,
- "town" : [14, 13]}}
+ [{"tags": {"place": {"city": 14,
+ "town" : [14, 13]}}
}])
assert temp_db_cursor.row_set('SELECT * FROM levels') == \
@pytest.mark.parametrize("dbg,ret", ((True, 43), (False, 22)))
-def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path, dbg, ret):
+def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path,
+ dbg, ret):
sqlfile = sql_tmp_path / 'functions.sql'
sqlfile.write_text("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
AS $$
"""
Tests for setting up the website scripts.
"""
-from pathlib import Path
import subprocess
import pytest
from nominatim.tools import refresh
@pytest.fixture
-def envdir(tmpdir):
- (tmpdir / 'php').mkdir()
- (tmpdir / 'php' / 'website').mkdir()
- return tmpdir
+def test_script(tmp_path):
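+    """ A fixture returning a function that writes the given PHP code
+        into the website source directory as reverse-only-search.php.
+    """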
+ (tmp_path / 'php').mkdir()
+ website_dir = (tmp_path / 'php' / 'website')
+ website_dir.mkdir()
-@pytest.fixture
-def test_script(envdir):
def _create_file(code):
- outfile = envdir / 'php' / 'website' / 'reverse-only-search.php'
+ outfile = website_dir / 'reverse-only-search.php'
outfile.write_text('<?php\n{}\n'.format(code), 'utf-8')
return _create_file
-def run_website_script(envdir, config, conn):
- config.lib_dir.php = envdir / 'php'
- config.project_dir = envdir
- refresh.setup_website(envdir, config, conn)
+@pytest.fixture
+def run_website_script(tmp_path, def_config, temp_db_conn):
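+    """ A fixture returning a function that runs setup_website() and then
+        executes the generated search.php script, returning its exit code.
+    """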
+ def_config.lib_dir.php = tmp_path / 'php'
+ def_config.project_dir = tmp_path
+
+ def _runner():
+ refresh.setup_website(tmp_path, def_config, temp_db_conn)
+
+        proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
+                               tmp_path / 'search.php'], check=False)
- proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
- envdir / 'search.php'], check=False)
+ return proc.returncode
- return proc.returncode
+ return _runner
@pytest.mark.parametrize("setting,retval", (('yes', 10), ('no', 20)))
-def test_setup_website_check_bool(def_config, monkeypatch, envdir, test_script,
- setting, retval, temp_db_conn):
+def test_setup_website_check_bool(monkeypatch, test_script, run_website_script,
+ setting, retval):
monkeypatch.setenv('NOMINATIM_CORS_NOACCESSCONTROL', setting)
test_script('exit(CONST_NoAccessControl ? 10 : 20);')
- assert run_website_script(envdir, def_config, temp_db_conn) == retval
+ assert run_website_script() == retval
@pytest.mark.parametrize("setting", (0, 10, 99067))
-def test_setup_website_check_int(def_config, monkeypatch, envdir, test_script, setting,
- temp_db_conn):
+def test_setup_website_check_int(monkeypatch, test_script, run_website_script, setting):
monkeypatch.setenv('NOMINATIM_LOOKUP_MAX_COUNT', str(setting))
test_script('exit(CONST_Places_Max_ID_count == {} ? 10 : 20);'.format(setting))
- assert run_website_script(envdir, def_config, temp_db_conn) == 10
+ assert run_website_script() == 10
-def test_setup_website_check_empty_str(def_config, monkeypatch, envdir, test_script,
- temp_db_conn):
+def test_setup_website_check_empty_str(monkeypatch, test_script, run_website_script):
monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', '')
test_script('exit(CONST_Default_Language === false ? 10 : 20);')
- assert run_website_script(envdir, def_config, temp_db_conn) == 10
+ assert run_website_script() == 10
-def test_setup_website_check_str(def_config, monkeypatch, envdir, test_script,
- temp_db_conn):
+def test_setup_website_check_str(monkeypatch, test_script, run_website_script):
monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', 'ffde 2')
test_script('exit(CONST_Default_Language === "ffde 2" ? 10 : 20);')
- assert run_website_script(envdir, def_config, temp_db_conn) == 10
-
-
+ assert run_website_script() == 10
</osm>
"""
+@pytest.fixture(autouse=True)
+def setup_status_table(status_table):
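+    """ Make sure that the import status table is set up for every test
+        in this module.
+    """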
+ pass
+
### init replication
-def test_init_replication_bad_base_url(monkeypatch, status_table, place_row, temp_db_conn, temp_db_cursor):
+def test_init_replication_bad_base_url(monkeypatch, place_row, temp_db_conn):
place_row(osm_type='N', osm_id=100)
- monkeypatch.setattr(nominatim.db.status, "get_url", lambda u : OSM_NODE_DATA)
+ monkeypatch.setattr(nominatim.db.status, "get_url", lambda u: OSM_NODE_DATA)
with pytest.raises(UsageError, match="Failed to reach replication service"):
nominatim.tools.replication.init_replication(temp_db_conn, 'https://test.io')
-def test_init_replication_success(monkeypatch, status_table, place_row, temp_db_conn, temp_db_cursor):
+def test_init_replication_success(monkeypatch, place_row, temp_db_conn, temp_db_cursor):
place_row(osm_type='N', osm_id=100)
- monkeypatch.setattr(nominatim.db.status, "get_url", lambda u : OSM_NODE_DATA)
+ monkeypatch.setattr(nominatim.db.status, "get_url", lambda u: OSM_NODE_DATA)
monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
"timestamp_to_sequence",
lambda self, date: 234)
nominatim.tools.replication.init_replication(temp_db_conn, 'https://test.io')
- temp_db_cursor.execute("SELECT * FROM import_status")
-
expected_date = dt.datetime.strptime('2006-01-27T19:09:10', status.ISODATE_FORMAT)\
.replace(tzinfo=dt.timezone.utc)
- assert temp_db_cursor.rowcount == 1
- assert temp_db_cursor.fetchone() == [expected_date, 234, True]
+
+ assert temp_db_cursor.row_set("SELECT * FROM import_status") \
+ == {(expected_date, 234, True)}
### checking for updates
-def test_check_for_updates_empty_status_table(status_table, temp_db_conn):
+def test_check_for_updates_empty_status_table(temp_db_conn):
assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254
-def test_check_for_updates_seq_not_set(status_table, temp_db_conn):
+def test_check_for_updates_seq_not_set(temp_db_conn):
status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc))
assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254
-def test_check_for_updates_no_state(monkeypatch, status_table, temp_db_conn):
+def test_check_for_updates_no_state(monkeypatch, temp_db_conn):
status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=345)
monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
@pytest.mark.parametrize("server_sequence,result", [(344, 2), (345, 2), (346, 0)])
-def test_check_for_updates_no_new_data(monkeypatch, status_table, temp_db_conn,
+def test_check_for_updates_no_new_data(monkeypatch, temp_db_conn,
server_sequence, result):
date = dt.datetime.now(dt.timezone.utc)
status.set_status(temp_db_conn, date, seq=345)
@pytest.fixture
def update_options(tmpdir):
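+    """ Provide a base set of options for the replication update() calls.
+    """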
return dict(base_url='https://test.io',
- indexed_only=False,
- update_interval=3600,
- import_file=tmpdir / 'foo.osm',
- max_diff_size=1)
+ indexed_only=False,
+ update_interval=3600,
+ import_file=tmpdir / 'foo.osm',
+ max_diff_size=1)
-def test_update_empty_status_table(status_table, temp_db_conn):
+def test_update_empty_status_table(temp_db_conn):
with pytest.raises(UsageError):
nominatim.tools.replication.update(temp_db_conn, {})
-def test_update_already_indexed(status_table, temp_db_conn):
+def test_update_already_indexed(temp_db_conn):
status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=34, indexed=False)
assert nominatim.tools.replication.update(temp_db_conn, dict(indexed_only=True)) \
== nominatim.tools.replication.UpdateState.MORE_PENDING
-def test_update_no_data_no_sleep(monkeypatch, status_table, temp_db_conn, update_options):
+def test_update_no_data_no_sleep(monkeypatch, temp_db_conn, update_options):
date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=1)
status.set_status(temp_db_conn, date, seq=34)
lambda *args, **kwargs: None)
sleeptime = []
- monkeypatch.setattr(time, 'sleep', lambda s: sleeptime.append(s))
+ monkeypatch.setattr(time, 'sleep', sleeptime.append)
assert nominatim.tools.replication.update(temp_db_conn, update_options) \
== nominatim.tools.replication.UpdateState.NO_CHANGES
assert not sleeptime
-def test_update_no_data_sleep(monkeypatch, status_table, temp_db_conn, update_options):
+def test_update_no_data_sleep(monkeypatch, temp_db_conn, update_options):
date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(minutes=30)
status.set_status(temp_db_conn, date, seq=34)
lambda *args, **kwargs: None)
sleeptime = []
- monkeypatch.setattr(time, 'sleep', lambda s: sleeptime.append(s))
+ monkeypatch.setattr(time, 'sleep', sleeptime.append)
assert nominatim.tools.replication.update(temp_db_conn, update_options) \
== nominatim.tools.replication.UpdateState.NO_CHANGES
"""
Tests for methods of the SPCsvLoader class.
"""
-from nominatim.errors import UsageError
import pytest
-from pathlib import Path
-from nominatim.tools.special_phrases.sp_csv_loader import SPCsvLoader
-TEST_BASE_DIR = Path(__file__) / '..' / '..'
+from nominatim.errors import UsageError
+from nominatim.tools.special_phrases.sp_csv_loader import SPCsvLoader
def test_parse_csv(sp_csv_loader):
"""
the right phrases of the sp_csv_test.csv special phrases.
"""
return len(phrases) > 1 \
- and any(p.p_label == 'Billboard' and p.p_class == 'advertising' and p.p_type == 'billboard'
+ and any(p.p_label == 'Billboard'
+ and p.p_class == 'advertising'
+ and p.p_type == 'billboard'
and p.p_operator == '-' for p in phrases) \
- and any(p.p_label == 'Zip Lines' and p.p_class == 'aerialway' and p.p_type == 'zip_line'
+ and any(p.p_label == 'Zip Lines'
+ and p.p_class == 'aerialway'
+ and p.p_type == 'zip_line'
and p.p_operator == '-' for p in phrases)
@pytest.fixture
-def sp_csv_loader():
+def sp_csv_loader(src_dir):
"""
Return an instance of SPCsvLoader.
"""
- csv_path = (TEST_BASE_DIR / 'testdata' / 'sp_csv_test.csv').resolve()
+ csv_path = (src_dir / 'test' / 'testdata' / 'sp_csv_test.csv').resolve()
loader = SPCsvLoader(csv_path)
return loader
Tests for methods of the SPWikiLoader class.
"""
import pytest
-from pathlib import Path
from nominatim.tools.special_phrases.sp_wiki_loader import SPWikiLoader
-TEST_BASE_DIR = Path(__file__) / '..' / '..'
+@pytest.fixture
+def xml_wiki_content(src_dir):
+ """
+    Return the content of the static XML test file.
+ """
+ xml_test_content = src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt'
+ return xml_test_content.read_text()
-def test_parse_xml(sp_wiki_loader):
+
+@pytest.fixture
+def sp_wiki_loader(monkeypatch, def_config, xml_wiki_content):
+ """
+ Return an instance of SPWikiLoader.
+ """
+ loader = SPWikiLoader(def_config, ['en'])
+ monkeypatch.setattr('nominatim.tools.special_phrases.sp_wiki_loader.SPWikiLoader._get_wiki_content',
+ lambda self, lang: xml_wiki_content)
+ return loader
+
+
+def test_parse_xml(sp_wiki_loader, xml_wiki_content):
"""
Test method parse_xml()
Should return the right SpecialPhrase objects.
"""
- xml = get_test_xml_wiki_content()
- phrases = sp_wiki_loader.parse_xml(xml)
+ phrases = sp_wiki_loader.parse_xml(xml_wiki_content)
assert check_phrases_content(phrases)
and p.p_operator == '-' for p in phrases) \
and any(p.p_label == 'Zip Line' and p.p_class == 'aerialway' and p.p_type == 'zip_line'
and p.p_operator == '-' for p in phrases)
-
-@pytest.fixture
-def sp_wiki_loader(monkeypatch, def_config):
- """
- Return an instance of SPWikiLoader.
- """
- loader = SPWikiLoader(def_config, ['en'])
- monkeypatch.setattr('nominatim.tools.special_phrases.sp_wiki_loader.SPWikiLoader._get_wiki_content',
- mock_get_wiki_content)
- return loader
-
-def mock_get_wiki_content(self, lang):
- """
- Mock the _get_wiki_content() method to return
- static xml test file content.
- """
- return get_test_xml_wiki_content()
-
-def get_test_xml_wiki_content():
- """
- return the content of the static xml test file.
- """
- xml_test_content_path = (TEST_BASE_DIR / 'testdata' / 'special_phrases_test_content.txt').resolve()
- with open(xml_test_content_path) as xml_content_reader:
- return xml_content_reader.read()
"""
Test for tiger data function
"""
-from pathlib import Path
+import tarfile
from textwrap import dedent
import pytest
-import tarfile
-from nominatim.tools import tiger_data, database_import
+from nominatim.tools import tiger_data
from nominatim.errors import UsageError
class MockTigerTable:
stop INTEGER, interpol TEXT,
token_info JSONB, postcode TEXT)
RETURNS INTEGER AS $$
- INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode) RETURNING 1
+ INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode)
+ RETURNING 1
$$ LANGUAGE SQL;""")
(def_config.lib_dir.sql / 'tiger_import_finish.sql').write_text(
"""DROP FUNCTION tiger_line_import (linegeo GEOMETRY, in_startnumber INTEGER,
tar.add(str(src_dir / 'test' / 'testdb' / 'tiger' / '01001.csv'))
tar.close()
- tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, 1,
+ tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, threads,
tokenizer_mock())
assert tiger_table.count() == 6213
def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock,
- tmp_path, src_dir):
+ tmp_path):
tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
tar.add(__file__)
tar.close()
tokenizer_mock())
assert tiger_table.count() == 0
-