From 430c316e45c5fcbaf7f8023958a13ad1f31f44b2 Mon Sep 17 00:00:00 2001
From: Sarah Hoffmann
Date: Wed, 19 May 2021 23:07:39 +0200
Subject: [PATCH] test: fix linting errors

---
 .pylintrc                                  |   2 +-
 test/python/conftest.py                    |  30 ++---
 test/python/dummy_tokenizer.py             |  16 ++-
 test/python/mocks.py                       |  21 +++-
 test/python/test_cli.py                    |  68 +++++------
 test/python/test_cli_replication.py        |  38 +++---
 test/python/test_config.py                 |  12 +-
 test/python/test_db_async_connection.py    |   7 +-
 test/python/test_db_connection.py          |  16 +--
 test/python/test_db_sql_preprocessor.py    |   2 -
 test/python/test_db_status.py              |  43 ++++---
 test/python/test_db_utils.py               |   1 -
 test/python/test_indexing.py               |  93 +++++++-------
 test/python/test_tokenizer_factory.py      |  24 ++--
 test/python/test_tokenizer_legacy.py       |  94 ++++++++-------
 test/python/test_tokenizer_legacy_icu.py   | 113 +++++++++---------
 test/python/test_tools_admin.py            |   1 -
 test/python/test_tools_check_database.py   |  16 +--
 test/python/test_tools_database_import.py  |  10 +-
 test/python/test_tools_exec_utils.py       |  30 +++--
 test/python/test_tools_freeze.py           |   2 -
 .../test_tools_import_special_phrases.py   |   5 +-
 test/python/test_tools_postcodes.py        |  30 ++---
 test/python/test_tools_refresh.py          |   4 +-
 .../test_tools_refresh_address_levels.py   |  12 +-
 .../test_tools_refresh_create_functions.py |   3 +-
 .../test_tools_refresh_setup_website.py    |  55 ++++-----
 test/python/test_tools_replication.py      |  40 ++++---
 test/python/test_tools_sp_csv_loader.py    |   8 +-
 test/python/test_tools_sp_wiki_loader.py   |   6 +-
 test/python/test_tools_tiger_data.py       |  13 +-
 31 files changed, 418 insertions(+), 397 deletions(-)

diff --git a/.pylintrc b/.pylintrc
index 022243ad..28ce1ff4 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -12,4 +12,4 @@ ignored-modules=icu
 ignored-classes=NominatimArgs,closing
 disable=too-few-public-methods,duplicate-code
-good-names=i,x,y,fd
+good-names=i,x,y,fd,db
diff --git a/test/python/conftest.py b/test/python/conftest.py
index 97f4c487..9a43a67e 100644
--- a/test/python/conftest.py
+++ b/test/python/conftest.py
@@ -13,7 +13,6 @@ sys.path.insert(0, str(SRC_DIR.resolve()))

 from nominatim.config import Configuration
 from nominatim.db import connection
 from nominatim.db.sql_preprocessor import SQLPreprocessor
-from nominatim.db import properties
 import nominatim.tokenizer.factory
 import nominatim.cli
@@ -116,23 +115,24 @@ def src_dir():

 @pytest.fixture
 def cli_call():
     def _call_nominatim(*args):
-        return nominatim.cli.nominatim(
-            module_dir='MODULE NOT AVAILABLE',
-            osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
-            phplib_dir=str(SRC_DIR / 'lib-php'),
-            data_dir=str(SRC_DIR / 'data'),
-            phpcgi_path='/usr/bin/php-cgi',
-            sqllib_dir=str(SRC_DIR / 'lib-sql'),
-            config_dir=str(SRC_DIR / 'settings'),
-            cli_args=args)
+        return nominatim.cli.nominatim(module_dir='MODULE NOT AVAILABLE',
+                                       osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
+                                       phplib_dir=str(SRC_DIR / 'lib-php'),
+                                       data_dir=str(SRC_DIR / 'data'),
+                                       phpcgi_path='/usr/bin/php-cgi',
+                                       sqllib_dir=str(SRC_DIR / 'lib-sql'),
+                                       config_dir=str(SRC_DIR / 'settings'),
+                                       cli_args=args)

     return _call_nominatim


 @pytest.fixture
-def property_table(table_factory):
+def property_table(table_factory, temp_db_conn):
     table_factory('nominatim_properties', 'property TEXT, value TEXT')

+    return mocks.MockPropertyTable(temp_db_conn)
+
 @pytest.fixture
 def status_table(table_factory):
@@ -226,7 +226,7 @@ def osm2pgsql_options(temp_db):
                            main_data='', main_index=''))

 @pytest.fixture
-def sql_preprocessor(temp_db_conn, tmp_path, monkeypatch, table_factory):
+def sql_preprocessor(temp_db_conn, tmp_path, table_factory):
     table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
     cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
     cfg.set_libdirs(module='.', osm2pgsql='.', php=SRC_DIR / 'lib-php',
@@ -236,18 +236,18 @@
-def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
+def tokenizer_mock(monkeypatch, property_table):
     """ Sets up the configuration so that the test dummy tokenizer will be
         loaded when the tokenizer factory is used. Also returns a factory
         with which a new dummy tokenizer may be created.
     """
     monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')

-    def _import_dummy(module, *args, **kwargs):
+    def _import_dummy(*args, **kwargs):
         return dummy_tokenizer

     monkeypatch.setattr(nominatim.tokenizer.factory, "_import_tokenizer", _import_dummy)
-    properties.set_property(temp_db_conn, 'tokenizer', 'dummy')
+    property_table.set('tokenizer', 'dummy')

     def _create_tokenizer():
         return dummy_tokenizer.DummyTokenizer(None, None)
diff --git a/test/python/dummy_tokenizer.py b/test/python/dummy_tokenizer.py
index 18e322ca..69202bc3 100644
--- a/test/python/dummy_tokenizer.py
+++ b/test/python/dummy_tokenizer.py
@@ -17,16 +17,17 @@ class DummyTokenizer:

     def init_new_db(self, *args, **kwargs):
-        assert self.init_state == None
+        assert self.init_state is None
         self.init_state = "new"


     def init_from_project(self):
-        assert self.init_state == None
+        assert self.init_state is None
         self.init_state = "loaded"


-    def finalize_import(self, _):
+    @staticmethod
+    def finalize_import(_):
         pass
@@ -51,10 +52,12 @@ class DummyNameAnalyzer:
     def close(self):
         pass

-    def normalize_postcode(self, postcode):
+    @staticmethod
+    def normalize_postcode(postcode):
         return postcode

-    def update_postcodes_from_db(self):
+    @staticmethod
+    def update_postcodes_from_db():
         pass

     def update_special_phrases(self, phrases, should_replace):
@@ -63,5 +66,6 @@ class DummyNameAnalyzer:
     def add_country_names(self, code, names):
         self.analyser_cache['countries'].append((code, names))

-    def process_place(self, place):
+    @staticmethod
+    def process_place(place):
         return {}
diff --git a/test/python/mocks.py b/test/python/mocks.py
index 53d84aac..d86f0196 100644
--- a/test/python/mocks.py
+++ b/test/python/mocks.py
@@ -5,6 +5,8 @@ import itertools

 import psycopg2.extras

+from nominatim.db import properties
+
 class MockParamCapture:
     """ Mock that records the parameters with which a function was called
         as well as the number of calls.
@@ -12,6 +14,8 @@ class MockParamCapture:
     def __init__(self, retval=0):
         self.called = 0
         self.return_value = retval
+        self.last_args = None
+        self.last_kwargs = None

     def __call__(self, *args, **kwargs):
         self.called += 1
@@ -37,11 +41,11 @@ class MockWordTable:
             conn.commit()


-    def add_special(self, word_token, word, cls, typ, op):
+    def add_special(self, word_token, word, cls, typ, oper):
         with self.conn.cursor() as cur:
             cur.execute("""INSERT INTO word (word_token, word, class, type, operator)
                               VALUES (%s, %s, %s, %s, %s)
-                        """, (word_token, word, cls, typ, op))
+                        """, (word_token, word, cls, typ, oper))
         self.conn.commit()
@@ -125,3 +129,16 @@ class MockPlacexTable:
                          admin_level, address, extratags,
                          'SRID=4326;' + geom, country))
         self.conn.commit()
+
+
+class MockPropertyTable:
+    """ A property table for testing.
+    """
+    def __init__(self, conn):
+        self.conn = conn
+
+
+    def set(self, name, value):
+        """ Set a property in the table to the given value.
+ """ + properties.set_property(self.conn, name, value) diff --git a/test/python/test_cli.py b/test/python/test_cli.py index c3625c4a..1d775b1f 100644 --- a/test/python/test_cli.py +++ b/test/python/test_cli.py @@ -5,8 +5,6 @@ These tests just check that the various command line parameters route to the correct functionionality. They use a lot of monkeypatching to avoid executing the actual functions. """ -from pathlib import Path - import pytest import nominatim.db.properties @@ -55,7 +53,7 @@ class TestCli: def test_cli_help(self, capsys): """ Running nominatim tool without arguments prints help. """ - assert 1 == self.call_nominatim() + assert self.call_nominatim() == 1 captured = capsys.readouterr() assert captured.out.startswith('usage:') @@ -66,7 +64,7 @@ class TestCli: (('export',), 'export') ]) def test_legacy_commands_simple(self, mock_run_legacy, command, script): - assert 0 == self.call_nominatim(*command) + assert self.call_nominatim(*command) == 0 assert mock_run_legacy.called == 1 assert mock_run_legacy.last_args[0] == script + '.php' @@ -78,7 +76,7 @@ class TestCli: def test_admin_command_legacy(self, mock_func_factory, params): mock_run_legacy = mock_func_factory(nominatim.clicmd.admin, 'run_legacy_script') - assert 0 == self.call_nominatim('admin', *params) + assert self.call_nominatim('admin', *params) == 0 assert mock_run_legacy.called == 1 @@ -86,14 +84,14 @@ class TestCli: def test_admin_command_check_database(self, mock_func_factory): mock = mock_func_factory(nominatim.tools.check_database, 'check_database') - assert 0 == self.call_nominatim('admin', '--check-database') + assert self.call_nominatim('admin', '--check-database') == 0 assert mock.called == 1 @pytest.mark.parametrize("name,oid", [('file', 'foo.osm'), ('diff', 'foo.osc'), ('node', 12), ('way', 8), ('relation', 32)]) def test_add_data_command(self, mock_run_legacy, name, oid): - assert 0 == self.call_nominatim('add-data', '--' + name, str(oid)) + assert self.call_nominatim('add-data', '--' + name, str(oid)) == 0 assert mock_run_legacy.called == 1 assert mock_run_legacy.last_args == ('update.php', '--import-' + name, oid) @@ -107,20 +105,18 @@ class TestCli: assert func.called == 1 - @pytest.mark.parametrize("params", [ - ('search', '--query', 'new'), - ('reverse', '--lat', '0', '--lon', '0'), - ('lookup', '--id', 'N1'), - ('details', '--node', '1'), - ('details', '--way', '1'), - ('details', '--relation', '1'), - ('details', '--place_id', '10001'), - ('status',) - ]) + @pytest.mark.parametrize("params", [('search', '--query', 'new'), + ('reverse', '--lat', '0', '--lon', '0'), + ('lookup', '--id', 'N1'), + ('details', '--node', '1'), + ('details', '--way', '1'), + ('details', '--relation', '1'), + ('details', '--place_id', '10001'), + ('status',)]) def test_api_commands_simple(self, mock_func_factory, params): mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script') - assert 0 == self.call_nominatim(*params) + assert self.call_nominatim(*params) == 0 assert mock_run_api.called == 1 assert mock_run_api.last_args[0] == params[0] @@ -148,20 +144,20 @@ class TestCliWithDb: self.finalize_import_called = True tok = DummyTokenizer() - monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db' , + monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db', lambda *args: tok) - monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer' , + monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer', lambda *args: tok) self.tokenizer_mock = tok def 
test_import_missing_file(self): - assert 1 == self.call_nominatim('import', '--osm-file', 'sfsafegweweggdgw.reh.erh') + assert self.call_nominatim('import', '--osm-file', 'sfsafegwedgw.reh.erh') == 1 def test_import_bad_file(self): - assert 1 == self.call_nominatim('import', '--osm-file', '.') + assert self.call_nominatim('import', '--osm-file', '.') == 1 def test_import_full(self, mock_func_factory): @@ -185,7 +181,7 @@ class TestCliWithDb: cf_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions') - assert 0 == self.call_nominatim('import', '--osm-file', __file__) + assert self.call_nominatim('import', '--osm-file', __file__) == 0 assert self.tokenizer_mock.finalize_import_called assert cf_mock.called > 1 @@ -206,7 +202,7 @@ class TestCliWithDb: mock_func_factory(nominatim.db.properties, 'set_property') ] - assert 0 == self.call_nominatim('import', '--continue', 'load-data') + assert self.call_nominatim('import', '--continue', 'load-data') == 0 assert self.tokenizer_mock.finalize_import_called for mock in mocks: @@ -223,7 +219,7 @@ class TestCliWithDb: mock_func_factory(nominatim.db.properties, 'set_property') ] - assert 0 == self.call_nominatim('import', '--continue', 'indexing') + assert self.call_nominatim('import', '--continue', 'indexing') == 0 for mock in mocks: assert mock.called == 1, "Mock '{}' not called".format(mock.func_name) @@ -231,7 +227,7 @@ class TestCliWithDb: assert temp_db_conn.index_exists('idx_placex_pendingsector') # Calling it again still works for the index - assert 0 == self.call_nominatim('import', '--continue', 'indexing') + assert self.call_nominatim('import', '--continue', 'indexing') == 0 assert temp_db_conn.index_exists('idx_placex_pendingsector') @@ -243,7 +239,7 @@ class TestCliWithDb: mock_func_factory(nominatim.db.properties, 'set_property') ] - assert 0 == self.call_nominatim('import', '--continue', 'db-postprocess') + assert self.call_nominatim('import', '--continue', 'db-postprocess') == 0 assert self.tokenizer_mock.finalize_import_called @@ -255,7 +251,7 @@ class TestCliWithDb: mock_drop = mock_func_factory(nominatim.tools.freeze, 'drop_update_tables') mock_flatnode = mock_func_factory(nominatim.tools.freeze, 'drop_flatnode_file') - assert 0 == self.call_nominatim('freeze') + assert self.call_nominatim('freeze') == 0 assert mock_drop.called == 1 assert mock_flatnode.called == 1 @@ -266,7 +262,7 @@ class TestCliWithDb: def test_admin_command_tool(self, mock_func_factory, func, params): mock = mock_func_factory(nominatim.tools.admin, func) - assert 0 == self.call_nominatim('admin', *params) + assert self.call_nominatim('admin', *params) == 0 assert mock.called == 1 @@ -281,7 +277,7 @@ class TestCliWithDb: bnd_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_boundaries') rank_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_by_rank') - assert 0 == self.call_nominatim('index', *params) + assert self.call_nominatim('index', *params) == 0 assert bnd_mock.called == do_bnds assert rank_mock.called == do_ranks @@ -320,7 +316,7 @@ class TestCliWithDb: def test_refresh_command(self, mock_func_factory, command, func): func_mock = mock_func_factory(nominatim.tools.refresh, func) - assert 0 == self.call_nominatim('refresh', '--' + command) + assert self.call_nominatim('refresh', '--' + command) == 0 assert func_mock.called == 1 @@ -328,13 +324,14 @@ class TestCliWithDb: func_mock = mock_func_factory(nominatim.tools.postcodes, 'update_postcodes') idx_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 
'index_postcodes') - assert 0 == self.call_nominatim('refresh', '--postcodes') + assert self.call_nominatim('refresh', '--postcodes') == 0 assert func_mock.called == 1 + assert idx_mock.called == 1 def test_refresh_create_functions(self, mock_func_factory): func_mock = mock_func_factory(nominatim.tools.refresh, 'create_functions') - assert 0 == self.call_nominatim('refresh', '--functions') + assert self.call_nominatim('refresh', '--functions') == 0 assert func_mock.called == 1 assert self.tokenizer_mock.update_sql_functions_called @@ -346,9 +343,6 @@ class TestCliWithDb: monkeypatch.setattr(nominatim.tools.refresh, 'recompute_importance', lambda *args, **kwargs: calls.append('update')) - assert 0 == self.call_nominatim('refresh', '--importance', '--wiki-data') + assert self.call_nominatim('refresh', '--importance', '--wiki-data') == 0 assert calls == ['import', 'update'] - - - diff --git a/test/python/test_cli_replication.py b/test/python/test_cli_replication.py index ebb89443..dcaeaf25 100644 --- a/test/python/test_cli_replication.py +++ b/test/python/test_cli_replication.py @@ -3,7 +3,6 @@ Tests for replication command of command-line interface wrapper. """ import datetime as dt import time -from pathlib import Path import pytest @@ -28,23 +27,14 @@ def tokenizer_mock(monkeypatch): self.finalize_import_called = True tok = DummyTokenizer() - monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db' , + monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db', lambda *args: tok) - monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer' , + monkeypatch.setattr(nominatim.tokenizer.factory, 'create_tokenizer', lambda *args: tok) return tok -@pytest.fixture -def index_mock(monkeypatch, tokenizer_mock): - mock = MockParamCapture() - monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_boundaries', mock) - monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_by_rank', mock) - - return mock - - @pytest.fixture def mock_func_factory(monkeypatch): def get_mock(module, func): @@ -58,7 +48,15 @@ def mock_func_factory(monkeypatch): @pytest.fixture def init_status(temp_db_conn, status_table): status.set_status(temp_db_conn, date=dt.datetime.now(dt.timezone.utc), seq=1) - return 1 + + +@pytest.fixture +def index_mock(monkeypatch, tokenizer_mock, init_status): + mock = MockParamCapture() + monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_boundaries', mock) + monkeypatch.setattr(nominatim.indexer.indexer.Indexer, 'index_by_rank', mock) + + return mock @pytest.fixture @@ -79,7 +77,7 @@ class TestCliReplication: def test_replication_command(self, mock_func_factory, params, func): func_mock = mock_func_factory(nominatim.tools.replication, func) - assert 0 == self.call_nominatim(*params) + assert self.call_nominatim(*params) == 0 assert func_mock.called == 1 @@ -91,31 +89,31 @@ class TestCliReplication: def test_replication_update_bad_interval_for_geofabrik(self, monkeypatch): monkeypatch.setenv('NOMINATIM_REPLICATION_URL', - 'https://download.geofabrik.de/europe/ireland-and-northern-ireland-updates') + 'https://download.geofabrik.de/europe/italy-updates') assert self.call_nominatim() == 1 def test_replication_update_once_no_index(self, update_mock): - assert 0 == self.call_nominatim('--once', '--no-index') + assert self.call_nominatim('--once', '--no-index') == 0 assert str(update_mock.last_args[1]['osm2pgsql']) == 'OSM2PGSQL NOT AVAILABLE' def test_replication_update_custom_osm2pgsql(self, monkeypatch, update_mock): 
monkeypatch.setenv('NOMINATIM_OSM2PGSQL_BINARY', '/secret/osm2pgsql') - assert 0 == self.call_nominatim('--once', '--no-index') + assert self.call_nominatim('--once', '--no-index') == 0 assert str(update_mock.last_args[1]['osm2pgsql']) == '/secret/osm2pgsql' def test_replication_update_custom_threads(self, update_mock): - assert 0 == self.call_nominatim('--once', '--no-index', '--threads', '4') + assert self.call_nominatim('--once', '--no-index', '--threads', '4') == 0 assert update_mock.last_args[1]['threads'] == 4 - def test_replication_update_continuous(self, monkeypatch, init_status, index_mock): + def test_replication_update_continuous(self, monkeypatch, index_mock): states = [nominatim.tools.replication.UpdateState.UP_TO_DATE, nominatim.tools.replication.UpdateState.UP_TO_DATE] monkeypatch.setattr(nominatim.tools.replication, 'update', @@ -127,7 +125,7 @@ class TestCliReplication: assert index_mock.called == 4 - def test_replication_update_continuous_no_change(self, monkeypatch, init_status, index_mock): + def test_replication_update_continuous_no_change(self, monkeypatch, index_mock): states = [nominatim.tools.replication.UpdateState.NO_CHANGES, nominatim.tools.replication.UpdateState.UP_TO_DATE] monkeypatch.setattr(nominatim.tools.replication, 'update', diff --git a/test/python/test_config.py b/test/python/test_config.py index cd828e14..6729f954 100644 --- a/test/python/test_config.py +++ b/test/python/test_config.py @@ -1,8 +1,6 @@ """ Test for loading dotenv configuration. """ -from pathlib import Path - import pytest from nominatim.config import Configuration @@ -91,7 +89,7 @@ def test_get_libpq_dsn_convert_php_special_chars(make_config, monkeypatch, val, def test_get_libpq_dsn_convert_libpq(make_config, monkeypatch): config = make_config() - monkeypatch.setenv('NOMINATIM_DATABASE_DSN', + monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'host=localhost dbname=gis password=foo') assert config.get_libpq_dsn() == 'host=localhost dbname=gis password=foo' @@ -111,7 +109,7 @@ def test_get_bool_empty(make_config): config = make_config() assert config.DATABASE_MODULE_PATH == '' - assert config.get_bool('DATABASE_MODULE_PATH') == False + assert not config.get_bool('DATABASE_MODULE_PATH') @pytest.mark.parametrize("value,result", [('0', 0), ('1', 1), @@ -143,18 +141,18 @@ def test_get_int_empty(make_config): config.get_int('DATABASE_MODULE_PATH') -def test_get_import_style_intern(make_config, monkeypatch): +def test_get_import_style_intern(make_config, src_dir, monkeypatch): config = make_config() monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', 'street') - expected = DEFCFG_DIR / 'import-street.style' + expected = src_dir / 'settings' / 'import-street.style' assert config.get_import_style_file() == expected @pytest.mark.parametrize("value", ['custom', '/foo/bar.stye']) -def test_get_import_style_intern(make_config, monkeypatch, value): +def test_get_import_style_extern(make_config, monkeypatch, value): config = make_config() monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', value) diff --git a/test/python/test_db_async_connection.py b/test/python/test_db_async_connection.py index 330b86f7..7398eac3 100644 --- a/test/python/test_db_async_connection.py +++ b/test/python/test_db_async_connection.py @@ -6,15 +6,14 @@ import concurrent.futures import pytest import psycopg2 -from psycopg2.extras import wait_select from nominatim.db.async_connection import DBConnection, DeadlockHandler @pytest.fixture def conn(temp_db): - with closing(DBConnection('dbname=' + temp_db)) as c: - yield c + with 
closing(DBConnection('dbname=' + temp_db)) as connection: + yield connection @pytest.fixture @@ -106,5 +105,3 @@ def test_deadlock(simple_conns): future.result() assert len(deadlock_check) == 1 - - diff --git a/test/python/test_db_connection.py b/test/python/test_db_connection.py index f10f72d1..41978e59 100644 --- a/test/python/test_db_connection.py +++ b/test/python/test_db_connection.py @@ -7,28 +7,28 @@ import psycopg2 from nominatim.db.connection import connect, get_pg_env @pytest.fixture -def db(temp_db): - with connect('dbname=' + temp_db) as conn: +def db(dsn): + with connect(dsn) as conn: yield conn def test_connection_table_exists(db, table_factory): - assert db.table_exists('foobar') == False + assert not db.table_exists('foobar') table_factory('foobar') - assert db.table_exists('foobar') == True + assert db.table_exists('foobar') def test_connection_index_exists(db, table_factory, temp_db_cursor): - assert db.index_exists('some_index') == False + assert not db.index_exists('some_index') table_factory('foobar') temp_db_cursor.execute('CREATE INDEX some_index ON foobar(id)') - assert db.index_exists('some_index') == True - assert db.index_exists('some_index', table='foobar') == True - assert db.index_exists('some_index', table='bar') == False + assert db.index_exists('some_index') + assert db.index_exists('some_index', table='foobar') + assert not db.index_exists('some_index', table='bar') def test_drop_table_existing(db, table_factory): diff --git a/test/python/test_db_sql_preprocessor.py b/test/python/test_db_sql_preprocessor.py index 6a254ef3..79f82dac 100644 --- a/test/python/test_db_sql_preprocessor.py +++ b/test/python/test_db_sql_preprocessor.py @@ -1,8 +1,6 @@ """ Tests for SQL preprocessing. """ -from pathlib import Path - import pytest @pytest.fixture diff --git a/test/python/test_db_status.py b/test/python/test_db_status.py index a1497317..b6f5a7b1 100644 --- a/test/python/test_db_status.py +++ b/test/python/test_db_status.py @@ -8,10 +8,6 @@ import pytest import nominatim.db.status from nominatim.errors import UsageError -def test_compute_database_date_place_empty(status_table, place_table, temp_db_conn): - with pytest.raises(UsageError): - nominatim.db.status.compute_database_date(temp_db_conn) - OSM_NODE_DATA = """\ @@ -24,7 +20,17 @@ def iso_date(date): .replace(tzinfo=dt.timezone.utc) -def test_compute_database_date_valid(monkeypatch, status_table, place_row, temp_db_conn): +@pytest.fixture(autouse=True) +def setup_status_table(status_table): + pass + + +def test_compute_database_date_place_empty(place_table, temp_db_conn): + with pytest.raises(UsageError): + nominatim.db.status.compute_database_date(temp_db_conn) + + +def test_compute_database_date_valid(monkeypatch, place_row, temp_db_conn): place_row(osm_type='N', osm_id=45673) requested_url = [] @@ -40,7 +46,7 @@ def test_compute_database_date_valid(monkeypatch, status_table, place_row, temp_ assert date == iso_date('2006-01-27T22:09:10') -def test_compute_database_broken_api(monkeypatch, status_table, place_row, temp_db_conn): +def test_compute_database_broken_api(monkeypatch, place_row, temp_db_conn): place_row(osm_type='N', osm_id=45673) requested_url = [] @@ -51,10 +57,10 @@ def test_compute_database_broken_api(monkeypatch, status_table, place_row, temp_ monkeypatch.setattr(nominatim.db.status, "get_url", mock_url) with pytest.raises(UsageError): - date = nominatim.db.status.compute_database_date(temp_db_conn) + nominatim.db.status.compute_database_date(temp_db_conn) -def 
test_set_status_empty_table(status_table, temp_db_conn, temp_db_cursor): +def test_set_status_empty_table(temp_db_conn, temp_db_cursor): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) nominatim.db.status.set_status(temp_db_conn, date=date) @@ -62,7 +68,7 @@ def test_set_status_empty_table(status_table, temp_db_conn, temp_db_cursor): {(date, None, True)} -def test_set_status_filled_table(status_table, temp_db_conn, temp_db_cursor): +def test_set_status_filled_table(temp_db_conn, temp_db_cursor): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) nominatim.db.status.set_status(temp_db_conn, date=date) @@ -75,7 +81,7 @@ def test_set_status_filled_table(status_table, temp_db_conn, temp_db_cursor): {(date, 456, False)} -def test_set_status_missing_date(status_table, temp_db_conn, temp_db_cursor): +def test_set_status_missing_date(temp_db_conn, temp_db_cursor): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) nominatim.db.status.set_status(temp_db_conn, date=date) @@ -87,11 +93,11 @@ def test_set_status_missing_date(status_table, temp_db_conn, temp_db_cursor): {(date, 456, False)} -def test_get_status_empty_table(status_table, temp_db_conn): +def test_get_status_empty_table(temp_db_conn): assert nominatim.db.status.get_status(temp_db_conn) == (None, None, None) -def test_get_status_success(status_table, temp_db_conn): +def test_get_status_success(temp_db_conn): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) nominatim.db.status.set_status(temp_db_conn, date=date, seq=667, indexed=False) @@ -101,7 +107,7 @@ def test_get_status_success(status_table, temp_db_conn): @pytest.mark.parametrize("old_state", [True, False]) @pytest.mark.parametrize("new_state", [True, False]) -def test_set_indexed(status_table, temp_db_conn, temp_db_cursor, old_state, new_state): +def test_set_indexed(temp_db_conn, temp_db_cursor, old_state, new_state): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) nominatim.db.status.set_status(temp_db_conn, date=date, indexed=old_state) nominatim.db.status.set_indexed(temp_db_conn, new_state) @@ -109,18 +115,21 @@ def test_set_indexed(status_table, temp_db_conn, temp_db_cursor, old_state, new_ assert temp_db_cursor.scalar("SELECT indexed FROM import_status") == new_state -def test_set_indexed_empty_status(status_table, temp_db_conn, temp_db_cursor): +def test_set_indexed_empty_status(temp_db_conn, temp_db_cursor): nominatim.db.status.set_indexed(temp_db_conn, True) assert temp_db_cursor.table_rows("import_status") == 0 -def text_log_status(status_table, temp_db_conn): +def test_log_status(temp_db_conn, temp_db_cursor): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) start = dt.datetime.now() - dt.timedelta(hours=1) + nominatim.db.status.set_status(temp_db_conn, date=date, seq=56) nominatim.db.status.log_status(temp_db_conn, start, 'index') + temp_db_conn.commit() + assert temp_db_cursor.table_rows("import_osmosis_log") == 1 - assert temp_db_cursor.scalar("SELECT seq FROM import_osmosis_log") == 56 - assert temp_db_cursor.scalar("SELECT date FROM import_osmosis_log") == date + assert temp_db_cursor.scalar("SELECT batchseq FROM import_osmosis_log") == 56 + assert temp_db_cursor.scalar("SELECT event FROM import_osmosis_log") == 'index' diff --git a/test/python/test_db_utils.py b/test/python/test_db_utils.py index 4a603888..d549b70f 100644 --- a/test/python/test_db_utils.py +++ b/test/python/test_db_utils.py @@ -1,7 +1,6 @@ """ Tests for DB utility 
functions in db.utils """ -import psycopg2 import pytest import nominatim.db.utils as db_utils diff --git a/test/python/test_indexing.py b/test/python/test_indexing.py index ff84e379..9873e7d7 100644 --- a/test/python/test_indexing.py +++ b/test/python/test_indexing.py @@ -2,7 +2,6 @@ Tests for running the indexing. """ import itertools -import psycopg2 import pytest from nominatim.indexer import indexer @@ -64,7 +63,8 @@ class IndexerTestDB: END; $$ LANGUAGE plpgsql STABLE; """) - cur.execute("""CREATE OR REPLACE FUNCTION get_interpolation_address(in_address HSTORE, wayid BIGINT) + cur.execute("""CREATE OR REPLACE FUNCTION + get_interpolation_address(in_address HSTORE, wayid BIGINT) RETURNS HSTORE AS $$ BEGIN RETURN in_address; @@ -120,7 +120,8 @@ class IndexerTestDB: return self.scalar('SELECT count(*) from placex where indexed_status > 0') def osmline_unindexed(self): - return self.scalar('SELECT count(*) from location_property_osmline where indexed_status > 0') + return self.scalar("""SELECT count(*) from location_property_osmline + WHERE indexed_status > 0""") @pytest.fixture @@ -140,37 +141,41 @@ def test_index_all_by_rank(test_db, threads, test_tokenizer): test_db.add_place(rank_address=rank, rank_search=rank) test_db.add_osmline() - assert 31 == test_db.placex_unindexed() - assert 1 == test_db.osmline_unindexed() + assert test_db.placex_unindexed() == 31 + assert test_db.osmline_unindexed() == 1 idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) idx.index_by_rank(0, 30) - assert 0 == test_db.placex_unindexed() - assert 0 == test_db.osmline_unindexed() + assert test_db.placex_unindexed() == 0 + assert test_db.osmline_unindexed() == 0 - assert 0 == test_db.scalar("""SELECT count(*) from placex - WHERE indexed_status = 0 and indexed_date is null""") + assert test_db.scalar("""SELECT count(*) from placex + WHERE indexed_status = 0 and indexed_date is null""") == 0 # ranks come in order of rank address - assert 0 == test_db.scalar(""" + assert test_db.scalar(""" SELECT count(*) FROM placex p WHERE rank_address > 0 AND indexed_date >= (SELECT min(indexed_date) FROM placex o - WHERE p.rank_address < o.rank_address)""") + WHERE p.rank_address < o.rank_address)""") == 0 # placex rank < 30 objects come before interpolations - assert 0 == test_db.scalar( + assert test_db.scalar( """SELECT count(*) FROM placex WHERE rank_address < 30 - AND indexed_date > (SELECT min(indexed_date) FROM location_property_osmline)""") + AND indexed_date > + (SELECT min(indexed_date) FROM location_property_osmline)""") == 0 # placex rank = 30 objects come after interpolations - assert 0 == test_db.scalar( + assert test_db.scalar( """SELECT count(*) FROM placex WHERE rank_address = 30 - AND indexed_date < (SELECT max(indexed_date) FROM location_property_osmline)""") + AND indexed_date < + (SELECT max(indexed_date) FROM location_property_osmline)""") == 0 # rank 0 comes after rank 29 and before rank 30 - assert 0 == test_db.scalar( + assert test_db.scalar( """SELECT count(*) FROM placex WHERE rank_address < 30 - AND indexed_date > (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""") - assert 0 == test_db.scalar( + AND indexed_date > + (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""") == 0 + assert test_db.scalar( """SELECT count(*) FROM placex WHERE rank_address = 30 - AND indexed_date < (SELECT max(indexed_date) FROM placex WHERE rank_address = 0)""") + AND indexed_date < + (SELECT max(indexed_date) FROM placex WHERE rank_address = 0)""") == 0 
@pytest.mark.parametrize("threads", [1, 15]) @@ -179,19 +184,19 @@ def test_index_partial_without_30(test_db, threads, test_tokenizer): test_db.add_place(rank_address=rank, rank_search=rank) test_db.add_osmline() - assert 31 == test_db.placex_unindexed() - assert 1 == test_db.osmline_unindexed() + assert test_db.placex_unindexed() == 31 + assert test_db.osmline_unindexed() == 1 idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) idx.index_by_rank(4, 15) - assert 19 == test_db.placex_unindexed() - assert 1 == test_db.osmline_unindexed() + assert test_db.placex_unindexed() == 19 + assert test_db.osmline_unindexed() == 1 - assert 0 == test_db.scalar(""" + assert test_db.scalar(""" SELECT count(*) FROM placex - WHERE indexed_status = 0 AND not rank_address between 4 and 15""") + WHERE indexed_status = 0 AND not rank_address between 4 and 15""") == 0 @pytest.mark.parametrize("threads", [1, 15]) @@ -200,18 +205,18 @@ def test_index_partial_with_30(test_db, threads, test_tokenizer): test_db.add_place(rank_address=rank, rank_search=rank) test_db.add_osmline() - assert 31 == test_db.placex_unindexed() - assert 1 == test_db.osmline_unindexed() + assert test_db.placex_unindexed() == 31 + assert test_db.osmline_unindexed() == 1 idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) idx.index_by_rank(28, 30) - assert 27 == test_db.placex_unindexed() - assert 0 == test_db.osmline_unindexed() + assert test_db.placex_unindexed() == 27 + assert test_db.osmline_unindexed() == 0 - assert 0 == test_db.scalar(""" + assert test_db.scalar(""" SELECT count(*) FROM placex - WHERE indexed_status = 0 AND rank_address between 1 and 27""") + WHERE indexed_status = 0 AND rank_address between 1 and 27""") == 0 @pytest.mark.parametrize("threads", [1, 15]) def test_index_boundaries(test_db, threads, test_tokenizer): @@ -221,18 +226,18 @@ def test_index_boundaries(test_db, threads, test_tokenizer): test_db.add_place(rank_address=rank, rank_search=rank) test_db.add_osmline() - assert 37 == test_db.placex_unindexed() - assert 1 == test_db.osmline_unindexed() + assert test_db.placex_unindexed() == 37 + assert test_db.osmline_unindexed() == 1 idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) idx.index_boundaries(0, 30) - assert 31 == test_db.placex_unindexed() - assert 1 == test_db.osmline_unindexed() + assert test_db.placex_unindexed() == 31 + assert test_db.osmline_unindexed() == 1 - assert 0 == test_db.scalar(""" + assert test_db.scalar(""" SELECT count(*) FROM placex - WHERE indexed_status = 0 AND class != 'boundary'""") + WHERE indexed_status = 0 AND class != 'boundary'""") == 0 @pytest.mark.parametrize("threads", [1, 15]) @@ -245,8 +250,8 @@ def test_index_postcodes(test_db, threads, test_tokenizer): idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) idx.index_postcodes() - assert 0 == test_db.scalar("""SELECT count(*) FROM location_postcode - WHERE indexed_status != 0""") + assert test_db.scalar("""SELECT count(*) FROM location_postcode + WHERE indexed_status != 0""") == 0 @pytest.mark.parametrize("analyse", [True, False]) @@ -262,10 +267,10 @@ def test_index_full(test_db, analyse, test_tokenizer): idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, 4) idx.index_full(analyse=analyse) - assert 0 == test_db.placex_unindexed() - assert 0 == test_db.osmline_unindexed() - assert 0 == test_db.scalar("""SELECT count(*) FROM location_postcode - WHERE 
indexed_status != 0""") + assert test_db.placex_unindexed() == 0 + assert test_db.osmline_unindexed() == 0 + assert test_db.scalar("""SELECT count(*) FROM location_postcode + WHERE indexed_status != 0""") == 0 @pytest.mark.parametrize("threads", [1, 15]) @@ -278,4 +283,4 @@ def test_index_reopen_connection(test_db, threads, monkeypatch, test_tokenizer): idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) idx.index_by_rank(28, 30) - assert 0 == test_db.placex_unindexed() + assert test_db.placex_unindexed() == 0 diff --git a/test/python/test_tokenizer_factory.py b/test/python/test_tokenizer_factory.py index 69517e94..9dc0b7cb 100644 --- a/test/python/test_tokenizer_factory.py +++ b/test/python/test_tokenizer_factory.py @@ -1,7 +1,6 @@ """ Tests for creating new tokenizers. """ -import importlib import pytest from nominatim.db import properties @@ -10,13 +9,12 @@ from nominatim.errors import UsageError from dummy_tokenizer import DummyTokenizer @pytest.fixture -def test_config(def_config, tmp_path): +def test_config(def_config, tmp_path, property_table, tokenizer_mock): def_config.project_dir = tmp_path return def_config -def test_setup_dummy_tokenizer(temp_db_conn, test_config, - tokenizer_mock, property_table): +def test_setup_dummy_tokenizer(temp_db_conn, test_config): tokenizer = factory.create_tokenizer(test_config) assert isinstance(tokenizer, DummyTokenizer) @@ -26,7 +24,7 @@ def test_setup_dummy_tokenizer(temp_db_conn, test_config, assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy' -def test_setup_tokenizer_dir_exists(test_config, tokenizer_mock, property_table): +def test_setup_tokenizer_dir_exists(test_config): (test_config.project_dir / 'tokenizer').mkdir() tokenizer = factory.create_tokenizer(test_config) @@ -35,21 +33,22 @@ def test_setup_tokenizer_dir_exists(test_config, tokenizer_mock, property_table) assert tokenizer.init_state == "new" -def test_setup_tokenizer_dir_failure(test_config, tokenizer_mock, property_table): +def test_setup_tokenizer_dir_failure(test_config): (test_config.project_dir / 'tokenizer').write_text("foo") with pytest.raises(UsageError): factory.create_tokenizer(test_config) -def test_setup_bad_tokenizer_name(test_config, monkeypatch): +def test_setup_bad_tokenizer_name(def_config, tmp_path, monkeypatch): + def_config.project_dir = tmp_path monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy') with pytest.raises(UsageError): - factory.create_tokenizer(test_config) + factory.create_tokenizer(def_config) -def test_load_tokenizer(temp_db_conn, test_config, - tokenizer_mock, property_table): + +def test_load_tokenizer(test_config): factory.create_tokenizer(test_config) tokenizer = factory.get_tokenizer_for_db(test_config) @@ -58,7 +57,7 @@ def test_load_tokenizer(temp_db_conn, test_config, assert tokenizer.init_state == "loaded" -def test_load_no_tokenizer_dir(test_config, tokenizer_mock, property_table): +def test_load_no_tokenizer_dir(test_config): factory.create_tokenizer(test_config) test_config.project_dir = test_config.project_dir / 'foo' @@ -67,11 +66,10 @@ def test_load_no_tokenizer_dir(test_config, tokenizer_mock, property_table): factory.get_tokenizer_for_db(test_config) -def test_load_missing_propoerty(temp_db_cursor, test_config, tokenizer_mock, property_table): +def test_load_missing_propoerty(temp_db_cursor, test_config): factory.create_tokenizer(test_config) temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties") with pytest.raises(UsageError): factory.get_tokenizer_for_db(test_config) - 
diff --git a/test/python/test_tokenizer_legacy.py b/test/python/test_tokenizer_legacy.py index 2669e608..3cd18fcf 100644 --- a/test/python/test_tokenizer_legacy.py +++ b/test/python/test_tokenizer_legacy.py @@ -46,7 +46,7 @@ def tokenizer_factory(dsn, tmp_path, property_table): @pytest.fixture def tokenizer_setup(tokenizer_factory, test_config, monkeypatch, sql_preprocessor): - monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None) + monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None) tok = tokenizer_factory() tok.init_new_db(test_config) @@ -60,7 +60,7 @@ def analyzer(tokenizer_factory, test_config, monkeypatch, sql_preprocessor, RETURNS INTEGER AS $$ SELECT 342; $$ LANGUAGE SQL; """) - monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None) + monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None) monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();') tok = tokenizer_factory() tok.init_new_db(test_config) @@ -86,16 +86,6 @@ def create_postcode_id(temp_db_cursor): $$ LANGUAGE SQL""") -@pytest.fixture -def create_housenumbers(temp_db_cursor): - temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_housenumbers( - housenumbers TEXT[], - OUT tokens TEXT, OUT normtext TEXT) - AS $$ - SELECT housenumbers::TEXT, array_to_string(housenumbers, ';') - $$ LANGUAGE SQL""") - - @pytest.fixture def make_keywords(temp_db_cursor, temp_db_with_extensions): temp_db_cursor.execute( @@ -105,7 +95,7 @@ def make_keywords(temp_db_cursor, temp_db_with_extensions): def test_init_new(tokenizer_factory, test_config, monkeypatch, temp_db_conn, sql_preprocessor): monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', 'xxvv') - monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None) + monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None) tok = tokenizer_factory() tok.init_new_db(test_config) @@ -119,8 +109,7 @@ def test_init_new(tokenizer_factory, test_config, monkeypatch, assert outfile.stat().st_mode == 33261 -def test_init_module_load_failed(tokenizer_factory, test_config, - monkeypatch, temp_db_conn): +def test_init_module_load_failed(tokenizer_factory, test_config): tok = tokenizer_factory() with pytest.raises(UsageError): @@ -134,7 +123,7 @@ def test_init_module_custom(tokenizer_factory, test_config, (module_dir/ 'nominatim.so').write_text('CUSTOM nomiantim.so') monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', str(module_dir)) - monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None) + monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None) tok = tokenizer_factory() tok.init_new_db(test_config) @@ -154,7 +143,7 @@ def test_update_sql_functions(sql_preprocessor, temp_db_conn, tokenizer_factory, test_config, table_factory, monkeypatch, temp_db_cursor): monkeypatch.setenv('NOMINATIM_MAX_WORD_FREQUENCY', '1133') - monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None) + monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None) tok = tokenizer_factory() tok.init_new_db(test_config) monkeypatch.undo() @@ -174,7 +163,7 @@ def test_update_sql_functions(sql_preprocessor, temp_db_conn, def test_migrate_database(tokenizer_factory, test_config, temp_db_conn, monkeypatch): - monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None) + monkeypatch.setattr(legacy_tokenizer, '_check_module', lambda m, c: None) tok = tokenizer_factory() tok.migrate_database(test_config) @@ -229,8 +218,7 @@ def 
test_update_special_phrase_empty_table(analyzer, word_table, make_standard_n (' strasse', 'strasse', 'highway', 'primary', 'in'))) -def test_update_special_phrase_delete_all(analyzer, word_table, temp_db_cursor, - make_standard_name): +def test_update_special_phrase_delete_all(analyzer, word_table, make_standard_name): word_table.add_special(' foo', 'foo', 'amenity', 'prison', 'in') word_table.add_special(' bar', 'bar', 'highway', 'road', None) @@ -241,8 +229,7 @@ def test_update_special_phrase_delete_all(analyzer, word_table, temp_db_cursor, assert word_table.count_special() == 0 -def test_update_special_phrases_no_replace(analyzer, word_table, temp_db_cursor, - make_standard_name): +def test_update_special_phrases_no_replace(analyzer, word_table, make_standard_name): word_table.add_special(' foo', 'foo', 'amenity', 'prison', 'in') word_table.add_special(' bar', 'bar', 'highway', 'road', None) @@ -260,9 +247,9 @@ def test_update_special_phrase_modify(analyzer, word_table, make_standard_name): assert word_table.count_special() == 2 analyzer.update_special_phrases([ - ('prison', 'amenity', 'prison', 'in'), - ('bar', 'highway', 'road', '-'), - ('garden', 'leisure', 'garden', 'near') + ('prison', 'amenity', 'prison', 'in'), + ('bar', 'highway', 'road', '-'), + ('garden', 'leisure', 'garden', 'near') ], True) assert word_table.get_special() \ @@ -272,43 +259,58 @@ def test_update_special_phrase_modify(analyzer, word_table, make_standard_name): def test_process_place_names(analyzer, make_keywords): - info = analyzer.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}}) assert info['names'] == '{1,2,3}' -@pytest.mark.parametrize('pc', ['12345', 'AB 123', '34-345']) -def test_process_place_postcode(analyzer, create_postcode_id, word_table, pc): - info = analyzer.process_place({'address': {'postcode' : pc}}) +@pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345']) +def test_process_place_postcode(analyzer, create_postcode_id, word_table, pcode): + analyzer.process_place({'address': {'postcode' : pcode}}) - assert word_table.get_postcodes() == {pc, } + assert word_table.get_postcodes() == {pcode, } -@pytest.mark.parametrize('pc', ['12:23', 'ab;cd;f', '123;836']) -def test_process_place_bad_postcode(analyzer, create_postcode_id, word_table, pc): - info = analyzer.process_place({'address': {'postcode' : pc}}) +@pytest.mark.parametrize('pcode', ['12:23', 'ab;cd;f', '123;836']) +def test_process_place_bad_postcode(analyzer, create_postcode_id, word_table, pcode): + analyzer.process_place({'address': {'postcode' : pcode}}) assert not word_table.get_postcodes() -@pytest.mark.parametrize('hnr', ['123a', '1', '101']) -def test_process_place_housenumbers_simple(analyzer, create_housenumbers, hnr): - info = analyzer.process_place({'address': {'housenumber' : hnr}}) +class TestHousenumberName: + + @staticmethod + @pytest.fixture(autouse=True) + def setup_create_housenumbers(temp_db_cursor): + temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_housenumbers( + housenumbers TEXT[], + OUT tokens TEXT, OUT normtext TEXT) + AS $$ + SELECT housenumbers::TEXT, array_to_string(housenumbers, ';') + $$ LANGUAGE SQL""") + + + @staticmethod + @pytest.mark.parametrize('hnr', ['123a', '1', '101']) + def test_process_place_housenumbers_simple(analyzer, hnr): + info = analyzer.process_place({'address': {'housenumber' : hnr}}) - assert info['hnr'] == hnr - assert info['hnr_tokens'].startswith("{") + assert info['hnr'] == hnr + assert info['hnr_tokens'].startswith("{") -def 
test_process_place_housenumbers_lists(analyzer, create_housenumbers): - info = analyzer.process_place({'address': {'conscriptionnumber' : '1; 2;3'}}) + @staticmethod + def test_process_place_housenumbers_lists(analyzer): + info = analyzer.process_place({'address': {'conscriptionnumber' : '1; 2;3'}}) - assert set(info['hnr'].split(';')) == set(('1', '2', '3')) + assert set(info['hnr'].split(';')) == set(('1', '2', '3')) -def test_process_place_housenumbers_duplicates(analyzer, create_housenumbers): - info = analyzer.process_place({'address': {'housenumber' : '134', - 'conscriptionnumber' : '134', - 'streetnumber' : '99a'}}) + @staticmethod + def test_process_place_housenumbers_duplicates(analyzer): + info = analyzer.process_place({'address': {'housenumber' : '134', + 'conscriptionnumber' : '134', + 'streetnumber' : '99a'}}) - assert set(info['hnr'].split(';')) == set(('134', '99a')) + assert set(info['hnr'].split(';')) == set(('134', '99a')) diff --git a/test/python/test_tokenizer_legacy_icu.py b/test/python/test_tokenizer_legacy_icu.py index b3c7874d..798fea37 100644 --- a/test/python/test_tokenizer_legacy_icu.py +++ b/test/python/test_tokenizer_legacy_icu.py @@ -46,14 +46,14 @@ def db_prop(temp_db_conn): return _get_db_property @pytest.fixture -def tokenizer_setup(tokenizer_factory, test_config, monkeypatch, sql_preprocessor): +def tokenizer_setup(tokenizer_factory, test_config): tok = tokenizer_factory() tok.init_new_db(test_config) @pytest.fixture -def analyzer(tokenizer_factory, test_config, monkeypatch, sql_preprocessor, - word_table, temp_db_with_extensions, tmp_path): +def analyzer(tokenizer_factory, test_config, monkeypatch, + temp_db_with_extensions, tmp_path): sql = tmp_path / 'sql' / 'tokenizer' / 'legacy_icu_tokenizer.sql' sql.write_text("SELECT 'a';") @@ -74,17 +74,18 @@ def analyzer(tokenizer_factory, test_config, monkeypatch, sql_preprocessor, @pytest.fixture def getorcreate_term_id(temp_db_cursor): temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_term_id(lookup_term TEXT) - RETURNS INTEGER AS $$ SELECT nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""") + RETURNS INTEGER AS $$ + SELECT nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""") @pytest.fixture def getorcreate_hnr_id(temp_db_cursor): temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_hnr_id(lookup_term TEXT) - RETURNS INTEGER AS $$ SELECT -nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""") + RETURNS INTEGER AS $$ + SELECT -nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""") -def test_init_new(tokenizer_factory, test_config, monkeypatch, db_prop, - sql_preprocessor, place_table, word_table): +def test_init_new(tokenizer_factory, test_config, monkeypatch, db_prop): monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();') tok = tokenizer_factory() @@ -105,10 +106,9 @@ def test_init_from_project(tokenizer_setup, tokenizer_factory): assert tok.abbreviations is not None -def test_update_sql_functions(temp_db_conn, db_prop, temp_db_cursor, +def test_update_sql_functions(db_prop, temp_db_cursor, tokenizer_factory, test_config, table_factory, - monkeypatch, - sql_preprocessor, place_table, word_table): + monkeypatch): monkeypatch.setenv('NOMINATIM_MAX_WORD_FREQUENCY', '1133') tok = tokenizer_factory() tok.init_new_db(test_config) @@ -128,25 +128,25 @@ def test_update_sql_functions(temp_db_conn, db_prop, temp_db_cursor, def test_make_standard_word(analyzer): - with analyzer(abbr=(('STREET', 'ST'), ('tiny', 't'))) as a: - assert a.make_standard_word('tiny street') == 'TINY ST' + with 
analyzer(abbr=(('STREET', 'ST'), ('tiny', 't'))) as anl: + assert anl.make_standard_word('tiny street') == 'TINY ST' - with analyzer(abbr=(('STRASSE', 'STR'), ('STR', 'ST'))) as a: - assert a.make_standard_word('Hauptstrasse') == 'HAUPTST' + with analyzer(abbr=(('STRASSE', 'STR'), ('STR', 'ST'))) as anl: + assert anl.make_standard_word('Hauptstrasse') == 'HAUPTST' def test_make_standard_hnr(analyzer): - with analyzer(abbr=(('IV', '4'),)) as a: - assert a._make_standard_hnr('345') == '345' - assert a._make_standard_hnr('iv') == 'IV' + with analyzer(abbr=(('IV', '4'),)) as anl: + assert anl._make_standard_hnr('345') == '345' + assert anl._make_standard_hnr('iv') == 'IV' def test_update_postcodes_from_db_empty(analyzer, table_factory, word_table): table_factory('location_postcode', 'postcode TEXT', content=(('1234',), ('12 34',), ('AB23',), ('1234',))) - with analyzer() as a: - a.update_postcodes_from_db() + with analyzer() as anl: + anl.update_postcodes_from_db() assert word_table.count() == 3 assert word_table.get_postcodes() == {'1234', '12 34', 'AB23'} @@ -158,26 +158,25 @@ def test_update_postcodes_from_db_add_and_remove(analyzer, table_factory, word_t word_table.add_postcode(' 1234', '1234') word_table.add_postcode(' 5678', '5678') - with analyzer() as a: - a.update_postcodes_from_db() + with analyzer() as anl: + anl.update_postcodes_from_db() assert word_table.count() == 3 assert word_table.get_postcodes() == {'1234', '45BC', 'XX45'} -def test_update_special_phrase_empty_table(analyzer, word_table, temp_db_cursor): - with analyzer() as a: - a.update_special_phrases([ +def test_update_special_phrase_empty_table(analyzer, word_table): + with analyzer() as anl: + anl.update_special_phrases([ ("König bei", "amenity", "royal", "near"), ("Könige", "amenity", "royal", "-"), ("street", "highway", "primary", "in") ], True) - assert temp_db_cursor.row_set("""SELECT word_token, word, class, type, operator - FROM word WHERE class != 'place'""") \ - == set(((' KÖNIG BEI', 'könig bei', 'amenity', 'royal', 'near'), - (' KÖNIGE', 'könige', 'amenity', 'royal', None), - (' ST', 'street', 'highway', 'primary', 'in'))) + assert word_table.get_special() \ + == {(' KÖNIG BEI', 'könig bei', 'amenity', 'royal', 'near'), + (' KÖNIGE', 'könige', 'amenity', 'royal', None), + (' ST', 'street', 'highway', 'primary', 'in')} def test_update_special_phrase_delete_all(analyzer, word_table): @@ -186,8 +185,8 @@ def test_update_special_phrase_delete_all(analyzer, word_table): assert word_table.count_special() == 2 - with analyzer() as a: - a.update_special_phrases([], True) + with analyzer() as anl: + anl.update_special_phrases([], True) assert word_table.count_special() == 0 @@ -198,8 +197,8 @@ def test_update_special_phrases_no_replace(analyzer, word_table): assert word_table.count_special() == 2 - with analyzer() as a: - a.update_special_phrases([], False) + with analyzer() as anl: + anl.update_special_phrases([], False) assert word_table.count_special() == 2 @@ -210,11 +209,11 @@ def test_update_special_phrase_modify(analyzer, word_table): assert word_table.count_special() == 2 - with analyzer() as a: - a.update_special_phrases([ - ('prison', 'amenity', 'prison', 'in'), - ('bar', 'highway', 'road', '-'), - ('garden', 'leisure', 'garden', 'near') + with analyzer() as anl: + anl.update_special_phrases([ + ('prison', 'amenity', 'prison', 'in'), + ('bar', 'highway', 'road', '-'), + ('garden', 'leisure', 'garden', 'near') ], True) assert word_table.get_special() \ @@ -225,50 +224,50 @@ def 
test_update_special_phrase_modify(analyzer, word_table):
 def test_process_place_names(analyzer, getorcreate_term_id):
-    with analyzer() as a:
-        info = a.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})
+    with analyzer() as anl:
+        info = anl.process_place({'name' : {'name' : 'Soft bAr', 'ref': '34'}})

     assert info['names'] == '{1,2,3,4,5,6}'


-@pytest.mark.parametrize('pc', ['12345', 'AB 123', '34-345'])
-def test_process_place_postcode(analyzer, word_table, pc):
-    with analyzer() as a:
-        info = a.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345'])
+def test_process_place_postcode(analyzer, word_table, pcode):
+    with analyzer() as anl:
+        anl.process_place({'address': {'postcode' : pcode}})

-    assert word_table.get_postcodes() == {pc, }
+    assert word_table.get_postcodes() == {pcode, }


-@pytest.mark.parametrize('pc', ['12:23', 'ab;cd;f', '123;836'])
-def test_process_place_bad_postcode(analyzer, word_table, pc):
-    with analyzer() as a:
-        info = a.process_place({'address': {'postcode' : pc}})
+@pytest.mark.parametrize('pcode', ['12:23', 'ab;cd;f', '123;836'])
+def test_process_place_bad_postcode(analyzer, word_table, pcode):
+    with analyzer() as anl:
+        anl.process_place({'address': {'postcode' : pcode}})

     assert not word_table.get_postcodes()


 @pytest.mark.parametrize('hnr', ['123a', '1', '101'])
 def test_process_place_housenumbers_simple(analyzer, hnr, getorcreate_hnr_id):
-    with analyzer() as a:
-        info = a.process_place({'address': {'housenumber' : hnr}})
+    with analyzer() as anl:
+        info = anl.process_place({'address': {'housenumber' : hnr}})

     assert info['hnr'] == hnr.upper()
     assert info['hnr_tokens'] == "{-1}"


 def test_process_place_housenumbers_lists(analyzer, getorcreate_hnr_id):
-    with analyzer() as a:
-        info = a.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})
+    with analyzer() as anl:
+        info = anl.process_place({'address': {'conscriptionnumber' : '1; 2;3'}})

     assert set(info['hnr'].split(';')) == set(('1', '2', '3'))
     assert info['hnr_tokens'] == "{-1,-2,-3}"


 def test_process_place_housenumbers_duplicates(analyzer, getorcreate_hnr_id):
-    with analyzer() as a:
-        info = a.process_place({'address': {'housenumber' : '134',
-                                            'conscriptionnumber' : '134',
-                                            'streetnumber' : '99a'}})
+    with analyzer() as anl:
+        info = anl.process_place({'address': {'housenumber' : '134',
+                                              'conscriptionnumber' : '134',
+                                              'streetnumber' : '99a'}})

     assert set(info['hnr'].split(';')) == set(('134', '99A'))
     assert info['hnr_tokens'] == "{-1,-2}"
diff --git a/test/python/test_tools_admin.py b/test/python/test_tools_admin.py
index 646d6755..bd47e0e4 100644
--- a/test/python/test_tools_admin.py
+++ b/test/python/test_tools_admin.py
@@ -10,7 +10,6 @@ from nominatim.tools import admin
 def create_placex_table(placex_table):
     """ All tests in this module require the placex table to be set up.
     """
-    pass


 def test_analyse_indexing_no_objects(temp_db_conn):
diff --git a/test/python/test_tools_check_database.py b/test/python/test_tools_check_database.py
index a0afb045..aed5cb7e 100644
--- a/test/python/test_tools_check_database.py
+++ b/test/python/test_tools_check_database.py
@@ -7,11 +7,11 @@ from nominatim.tools import check_database as chkdb
 def test_check_database_unknown_db(def_config, monkeypatch):
     monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'pgsql:dbname=fjgkhughwgh2423gsags')

-    assert 1 == chkdb.check_database(def_config)
+    assert chkdb.check_database(def_config) == 1


 def test_check_database_fatal_test(def_config, temp_db):
-    assert 1 == chkdb.check_database(def_config)
+    assert chkdb.check_database(def_config) == 1


 def test_check_conection_good(temp_db_conn, def_config):
@@ -49,14 +49,15 @@ def test_check_tokenizer_missing(temp_db_conn, def_config, tmp_path):
 @pytest.mark.parametrize("check_result,state", [(None, chkdb.CheckState.OK),
                                                 ("Something wrong", chkdb.CheckState.FAIL)])
-def test_check_tokenizer(tokenizer_mock, temp_db_conn, def_config, monkeypatch,
+def test_check_tokenizer(temp_db_conn, def_config, monkeypatch,
                          check_result, state):
     class _TestTokenizer:
-        def check_database(self):
+        @staticmethod
+        def check_database():
             return check_result

     monkeypatch.setattr(chkdb.tokenizer_factory, 'get_tokenizer_for_db',
-                         lambda *a, **k: _TestTokenizer())
+                        lambda *a, **k: _TestTokenizer())

     assert chkdb.check_tokenizer(temp_db_conn, def_config) == state
@@ -81,12 +82,12 @@ def test_check_database_indexes_valid(temp_db_conn, def_config):


 def test_check_tiger_table_disabled(temp_db_conn, def_config, monkeypatch):
-    monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA' , 'no')
+    monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'no')
     assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.NOT_APPLICABLE


 def test_check_tiger_table_enabled(temp_db_cursor, temp_db_conn, def_config, monkeypatch):
-    monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA' , 'yes')
+    monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'yes')
     assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.FAIL

     temp_db_cursor.execute('CREATE TABLE location_property_tiger (place_id int)')
@@ -94,4 +95,3 @@ def test_check_tiger_table_enabled(temp_db_cursor, temp_db_conn, def_config, mon
     temp_db_cursor.execute('INSERT INTO location_property_tiger VALUES (1), (2)')

     assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.OK
-
diff --git a/test/python/test_tools_database_import.py b/test/python/test_tools_database_import.py
index ef406353..621610cf 100644
--- a/test/python/test_tools_database_import.py
+++ b/test/python/test_tools_database_import.py
@@ -1,10 +1,10 @@
 """
 Tests for functions to import a new database.
 """
+from pathlib import Path
+
 import pytest
 import psycopg2
-import sys

-from pathlib import Path
 from nominatim.tools import database_import
 from nominatim.errors import UsageError
@@ -34,9 +34,9 @@ def test_setup_skeleton(src_dir, nonexistant_db, no_partitions):
     try:
         with conn.cursor() as cur:
             cur.execute("SELECT distinct partition FROM country_name")
-            partitions = set([r[0] for r in list(cur)])
+            partitions = set((r[0] for r in list(cur)))
             if no_partitions:
-                assert partitions == set([0])
+                assert partitions == set((0, ))
             else:
                 assert len(partitions) > 10
     finally:
@@ -147,7 +147,7 @@ def test_truncate_database_tables(temp_db_conn, temp_db_cursor, table_factory):

 @pytest.mark.parametrize("threads", (1, 5))
-def test_load_data(dsn, src_dir, place_row, placex_table, osmline_table,
+def test_load_data(dsn, place_row, placex_table, osmline_table,
                    word_table, temp_db_cursor, threads):
     for func in ('precompute_words', 'getorcreate_housenumber_id', 'make_standard_name'):
         temp_db_cursor.execute("""CREATE FUNCTION {} (src TEXT)
diff --git a/test/python/test_tools_exec_utils.py b/test/python/test_tools_exec_utils.py
index e713300e..25ccf163 100644
--- a/test/python/test_tools_exec_utils.py
+++ b/test/python/test_tools_exec_utils.py
@@ -52,41 +52,45 @@ class TestRunLegacyScript:
     def test_run_legacy_return_dont_throw_on_success(self):
         fname = self.mk_script('exit(0);')

-        assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
-                                                 throw_on_fail=True)
+        assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
+                                            throw_on_fail=True) == 0

     def test_run_legacy_use_given_module_path(self):
         fname = self.mk_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == '' ? 0 : 23);")

-        assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=self.testenv)
+        assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0


     def test_run_legacy_do_not_overwrite_module_path(self, monkeypatch):
         monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', 'other')
-        fname = self.mk_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == 'other' ? 0 : 1);")
+        fname = self.mk_script(
+            "exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == 'other' ? 0 : 1);")
-        assert 0 == exec_utils.run_legacy_script(fname, nominatim_env=self.testenv)
+        assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0


 class TestRunApiScript:

+    @staticmethod
     @pytest.fixture(autouse=True)
-    def setup_project_dir(self, tmp_path):
+    def setup_project_dir(tmp_path):
         webdir = tmp_path / 'website'
         webdir.mkdir()
         (webdir / 'test.php').write_text(" 0
     assert temp_db_cursor.table_rows('wikipedia_redirect') > 0
diff --git a/test/python/test_tools_refresh_address_levels.py b/test/python/test_tools_refresh_address_levels.py
index a3df6c9c..2821222c 100644
--- a/test/python/test_tools_refresh_address_levels.py
+++ b/test/python/test_tools_refresh_address_levels.py
@@ -35,7 +35,7 @@ def test_load_ranks_country(temp_db_conn, temp_db_cursor):
                         [{"tags": {"place": {"village": 14}}},
                          {"countries": ['de'],
                           "tags": {"place": {"village": 15}}},
-                         {"countries": ['uk', 'us' ],
+                         {"countries": ['uk', 'us'],
                           "tags": {"place": {"village": 16}}}
                         ])
@@ -62,9 +62,8 @@ def test_load_ranks_default_value(temp_db_conn, temp_db_cursor):

 def test_load_ranks_multiple_keys(temp_db_conn, temp_db_cursor):
     load_address_levels(temp_db_conn, 'levels',
-                        [{"tags":
-                            {"place": {"city": 14},
-                             "boundary": {"administrative2" : 4}}
+                        [{"tags": {"place": {"city": 14},
+                                   "boundary": {"administrative2" : 4}}
                         }])

     assert temp_db_cursor.row_set('SELECT * FROM levels') == \
@@ -75,9 +74,8 @@ def test_load_ranks_address(temp_db_conn, temp_db_cursor):
     load_address_levels(temp_db_conn, 'levels',
-                        [{"tags":
-                            {"place": {"city": 14,
-                                       "town" : [14, 13]}}
+                        [{"tags": {"place": {"city": 14,
+                                             "town" : [14, 13]}}
                         }])

     assert temp_db_cursor.row_set('SELECT * FROM levels') == \
diff --git a/test/python/test_tools_refresh_create_functions.py b/test/python/test_tools_refresh_create_functions.py
index 3f9bccbd..00b863ab 100644
--- a/test/python/test_tools_refresh_create_functions.py
+++ b/test/python/test_tools_refresh_create_functions.py
@@ -31,7 +31,8 @@ def test_create_functions(temp_db_cursor, conn, def_config, sql_tmp_path):

 @pytest.mark.parametrize("dbg,ret", ((True, 43), (False, 22)))
-def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path, dbg, ret):
+def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path,
+                                        dbg, ret):
     sqlfile = sql_tmp_path / 'functions.sql'
     sqlfile.write_text("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
                           AS $$
diff --git a/test/python/test_tools_refresh_setup_website.py b/test/python/test_tools_refresh_setup_website.py
index 9b60c0e5..8946bd1f 100644
--- a/test/python/test_tools_refresh_setup_website.py
+++ b/test/python/test_tools_refresh_setup_website.py
@@ -1,7 +1,6 @@
 """
 Tests for setting up the website scripts.
 """
-from pathlib import Path
 import subprocess

 import pytest
@@ -9,67 +8,65 @@ import pytest
 from nominatim.tools import refresh

 @pytest.fixture
-def envdir(tmpdir):
-    (tmpdir / 'php').mkdir()
-    (tmpdir / 'php' / 'website').mkdir()
-    return tmpdir
+def test_script(tmp_path):
+    (tmp_path / 'php').mkdir()
+    website_dir = (tmp_path / 'php' / 'website')
+    website_dir.mkdir()

-@pytest.fixture
-def test_script(envdir):
     def _create_file(code):
-        outfile = envdir / 'php' / 'website' / 'reverse-only-search.php'
+        outfile = website_dir / 'reverse-only-search.php'
         outfile.write_text('
 """
+@pytest.fixture(autouse=True)
+def setup_status_table(status_table):
+    pass

 ### init replication

-def test_init_replication_bad_base_url(monkeypatch, status_table, place_row, temp_db_conn):
+def test_init_replication_bad_base_url(monkeypatch, place_row, temp_db_conn):
     place_row(osm_type='N', osm_id=100)

-    monkeypatch.setattr(nominatim.db.status, "get_url", lambda u : OSM_NODE_DATA)
+    monkeypatch.setattr(nominatim.db.status, "get_url", lambda u: OSM_NODE_DATA)

     with pytest.raises(UsageError, match="Failed to reach replication service"):
         nominatim.tools.replication.init_replication(temp_db_conn, 'https://test.io')


-def test_init_replication_success(monkeypatch, status_table, place_row, temp_db_conn, temp_db_cursor):
+def test_init_replication_success(monkeypatch, place_row, temp_db_conn, temp_db_cursor):
     place_row(osm_type='N', osm_id=100)

-    monkeypatch.setattr(nominatim.db.status, "get_url", lambda u : OSM_NODE_DATA)
+    monkeypatch.setattr(nominatim.db.status, "get_url", lambda u: OSM_NODE_DATA)
     monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
                         "timestamp_to_sequence",
                         lambda self, date: 234)
@@ -48,17 +52,17 @@ def test_init_replication_success(monkeypatch, status_table, place_row, temp_db_
 ### checking for updates

-def test_check_for_updates_empty_status_table(status_table, temp_db_conn):
+def test_check_for_updates_empty_status_table(temp_db_conn):
     assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254


-def test_check_for_updates_seq_not_set(status_table, temp_db_conn):
+def test_check_for_updates_seq_not_set(temp_db_conn):
     status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc))

     assert nominatim.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254


-def test_check_for_updates_no_state(monkeypatch, status_table, temp_db_conn):
+def test_check_for_updates_no_state(monkeypatch, temp_db_conn):
     status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=345)

     monkeypatch.setattr(nominatim.tools.replication.ReplicationServer,
@@ -68,7 +72,7 @@ def test_check_for_updates_no_state(monkeypatch, status_table, temp_db_conn):

 @pytest.mark.parametrize("server_sequence,result", [(344, 2), (345, 2), (346, 0)])
-def test_check_for_updates_no_new_data(monkeypatch, status_table, temp_db_conn,
+def test_check_for_updates_no_new_data(monkeypatch, temp_db_conn,
                                        server_sequence, result):
     date = dt.datetime.now(dt.timezone.utc)
     status.set_status(temp_db_conn, date, seq=345)
@@ -85,24 +89,24 @@ def test_check_for_updates_no_new_data(monkeypatch, status_table, temp_db_conn,
 @pytest.fixture
 def update_options(tmpdir):
     return dict(base_url='https://test.io',
-                    indexed_only=False,
-                    update_interval=3600,
-                    import_file=tmpdir / 'foo.osm',
-                    max_diff_size=1)
+                indexed_only=False,
+                update_interval=3600,
+                import_file=tmpdir / 'foo.osm',
+                max_diff_size=1)


-def test_update_empty_status_table(status_table, temp_db_conn):
+def test_update_empty_status_table(temp_db_conn):
     with pytest.raises(UsageError):
         nominatim.tools.replication.update(temp_db_conn, {})


-def test_update_already_indexed(status_table, temp_db_conn):
+def test_update_already_indexed(temp_db_conn):
     status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=34, indexed=False)

     assert nominatim.tools.replication.update(temp_db_conn, dict(indexed_only=True)) \
              == nominatim.tools.replication.UpdateState.MORE_PENDING


-def test_update_no_data_no_sleep(monkeypatch, status_table, temp_db_conn, update_options):
+def test_update_no_data_no_sleep(monkeypatch, temp_db_conn, update_options):
     date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=1)
     status.set_status(temp_db_conn, date, seq=34)
@@ -111,7 +115,7 @@ def test_update_no_data_no_sleep(monkeypatch, status_table, temp_db_conn, update
                         lambda *args, **kwargs: None)

     sleeptime = []
-    monkeypatch.setattr(time, 'sleep', lambda s: sleeptime.append(s))
+    monkeypatch.setattr(time, 'sleep', sleeptime.append)

     assert nominatim.tools.replication.update(temp_db_conn, update_options) \
              == nominatim.tools.replication.UpdateState.NO_CHANGES
@@ -119,7 +123,7 @@ def test_update_no_data_no_sleep(monkeypatch, status_table, temp_db_conn, update
     assert not sleeptime


-def test_update_no_data_sleep(monkeypatch, status_table, temp_db_conn, update_options):
+def test_update_no_data_sleep(monkeypatch, temp_db_conn, update_options):
     date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(minutes=30)
     status.set_status(temp_db_conn, date, seq=34)
@@ -128,7 +132,7 @@ def test_update_no_data_sleep(monkeypatch, status_table, temp_db_conn, update_op

     sleeptime = []
-    monkeypatch.setattr(time, 'sleep', lambda s: sleeptime.append(s))
+    monkeypatch.setattr(time, 'sleep', sleeptime.append)

     assert nominatim.tools.replication.update(temp_db_conn, update_options) \
              == nominatim.tools.replication.UpdateState.NO_CHANGES
diff --git a/test/python/test_tools_sp_csv_loader.py b/test/python/test_tools_sp_csv_loader.py
index 4bf7137e..80d5989e 100644
--- a/test/python/test_tools_sp_csv_loader.py
+++ b/test/python/test_tools_sp_csv_loader.py
@@ -41,9 +41,13 @@ def check_phrases_content(phrases):
         the right phrases of the sp_csv_test.csv special phrases.
     """
     return len(phrases) > 1 \
-           and any(p.p_label == 'Billboard' and p.p_class == 'advertising' and p.p_type == 'billboard'
+           and any(p.p_label == 'Billboard'
+                   and p.p_class == 'advertising'
+                   and p.p_type == 'billboard'
                    and p.p_operator == '-' for p in phrases) \
-           and any(p.p_label == 'Zip Lines' and p.p_class == 'aerialway' and p.p_type == 'zip_line'
+           and any(p.p_label == 'Zip Lines'
+                   and p.p_class == 'aerialway'
+                   and p.p_type == 'zip_line'
                    and p.p_operator == '-' for p in phrases)

 @pytest.fixture
diff --git a/test/python/test_tools_sp_wiki_loader.py b/test/python/test_tools_sp_wiki_loader.py
index 83e56ef5..35b413d3 100644
--- a/test/python/test_tools_sp_wiki_loader.py
+++ b/test/python/test_tools_sp_wiki_loader.py
@@ -9,9 +9,8 @@ def xml_wiki_content(src_dir):
     """ return the content of the static xml test file.
     """
-    xml_test_content_path = (src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt').resolve()
-    with open(xml_test_content_path) as xml_content_reader:
-        return xml_content_reader.read()
+    xml_test_content = src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt'
+    return xml_test_content.read_text()


 @pytest.fixture
@@ -53,4 +52,3 @@ def check_phrases_content(phrases):
            and p.p_operator == '-' for p in phrases) \
            and any(p.p_label == 'Zip Line' and p.p_class == 'aerialway' and p.p_type == 'zip_line'
                    and p.p_operator == '-' for p in phrases)
-
diff --git a/test/python/test_tools_tiger_data.py b/test/python/test_tools_tiger_data.py
index 17dd676c..2e524087 100644
--- a/test/python/test_tools_tiger_data.py
+++ b/test/python/test_tools_tiger_data.py
@@ -1,13 +1,12 @@
 """
 Test for tiger data function
 """
-from pathlib import Path
+import tarfile
 from textwrap import dedent

 import pytest
-import tarfile

-from nominatim.tools import tiger_data, database_import
+from nominatim.tools import tiger_data
 from nominatim.errors import UsageError

 class MockTigerTable:
@@ -42,7 +41,8 @@ def tiger_table(def_config, temp_db_conn, sql_preprocessor,
                            stop INTEGER, interpol TEXT,
                            token_info JSONB, postcode TEXT)
                           RETURNS INTEGER AS $$
-            INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode) RETURNING 1
+            INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode)
+            RETURNING 1
             $$ LANGUAGE SQL;""")
     (def_config.lib_dir.sql / 'tiger_import_finish.sql').write_text(
         """DROP FUNCTION tiger_line_import (linegeo GEOMETRY, in_startnumber INTEGER,
@@ -110,7 +110,7 @@ def test_add_tiger_data_tarfile(def_config, tiger_table, tokenizer_mock,
     tar.add(str(src_dir / 'test' / 'testdb' / 'tiger' / '01001.csv'))
     tar.close()

-    tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, 1,
+    tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, threads,
                               tokenizer_mock())

     assert tiger_table.count() == 6213
@@ -126,7 +126,7 @@ def test_add_tiger_data_bad_tarfile(def_config, tiger_table, tokenizer_mock,

 def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock,
-                                      tmp_path, src_dir):
+                                      tmp_path):
     tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
     tar.add(__file__)
     tar.close()
@@ -135,4 +135,3 @@ def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock,
                               tokenizer_mock())

     assert tiger_table.count() == 0
-
-- 
2.39.5