X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/4abaf712341758f50484fe1fe2764a7a5216de78..0fb8eade136ea03e7853aca0795ca69833c33661:/test/python/test_tools_database_import.py

diff --git a/test/python/test_tools_database_import.py b/test/python/test_tools_database_import.py
index e370e084..aa90f8db 100644
--- a/test/python/test_tools_database_import.py
+++ b/test/python/test_tools_database_import.py
@@ -1,10 +1,10 @@
 """
 Tests for functions to import a new database.
 """
+from pathlib import Path
+
 import pytest
 import psycopg2
-import sys
-from pathlib import Path
 
 from nominatim.tools import database_import
 from nominatim.errors import UsageError
@@ -34,9 +34,9 @@ def test_setup_skeleton(src_dir, nonexistant_db, no_partitions):
     try:
         with conn.cursor() as cur:
             cur.execute("SELECT distinct partition FROM country_name")
-            partitions = set([r[0] for r in list(cur)])
+            partitions = set((r[0] for r in list(cur)))
             if no_partitions:
-                assert partitions == set([0])
+                assert partitions == set((0, ))
             else:
                 assert len(partitions) > 10
     finally:
@@ -67,10 +67,11 @@ def test_create_db_missing_ro_user(nonexistant_db):
         database_import.create_db('dbname=' + nonexistant_db, rouser='sdfwkjkjgdugu2;jgsafkljas;')
 
 
-def test_setup_extensions(temp_db_conn, temp_db_cursor):
+def test_setup_extensions(temp_db_conn, table_factory):
     database_import.setup_extensions(temp_db_conn)
 
-    temp_db_cursor.execute('CREATE TABLE t (h HSTORE, geom GEOMETRY(Geometry, 4326))')
+    # Use table creation to check that hstore and geometry types are available.
+    table_factory('t', 'h HSTORE, geom GEOMETRY(Geometry, 4326)')
 
 
 def test_setup_extensions_old_postgis(temp_db_conn, monkeypatch):
@@ -80,57 +81,61 @@ def test_setup_extensions_old_postgis(temp_db_conn, monkeypatch):
         database_import.setup_extensions(temp_db_conn)
 
 
-def test_import_base_data(src_dir, temp_db, temp_db_cursor):
-    temp_db_cursor.execute('CREATE EXTENSION hstore')
-    temp_db_cursor.execute('CREATE EXTENSION postgis')
-    database_import.import_base_data('dbname=' + temp_db, src_dir / 'data')
+def test_import_base_data(dsn, src_dir, temp_db_with_extensions, temp_db_cursor):
+    database_import.import_base_data(dsn, src_dir / 'data')
+
+    assert temp_db_cursor.table_rows('country_name') > 0
 
-    assert temp_db_cursor.scalar('SELECT count(*) FROM country_name') > 0
 
+def test_import_base_data_ignore_partitions(dsn, src_dir, temp_db_with_extensions,
+                                            temp_db_cursor):
+    database_import.import_base_data(dsn, src_dir / 'data', ignore_partitions=True)
 
-def test_import_base_data_ignore_partitions(src_dir, temp_db, temp_db_cursor):
-    temp_db_cursor.execute('CREATE EXTENSION hstore')
-    temp_db_cursor.execute('CREATE EXTENSION postgis')
-    database_import.import_base_data('dbname=' + temp_db, src_dir / 'data',
-                                     ignore_partitions=True)
+    assert temp_db_cursor.table_rows('country_name') > 0
+    assert temp_db_cursor.table_rows('country_name', where='partition != 0') == 0
 
-    assert temp_db_cursor.scalar('SELECT count(*) FROM country_name') > 0
-    assert temp_db_cursor.scalar('SELECT count(*) FROM country_name WHERE partition != 0') == 0
 
+def test_import_osm_data_simple(table_factory, osm2pgsql_options):
+    table_factory('place', content=((1, ), ))
+
+    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)
+
+
+def test_import_osm_data_multifile(table_factory, tmp_path, osm2pgsql_options):
+    table_factory('place', content=((1, ), ))
+    osm2pgsql_options['osm2pgsql_cache'] = 0
 
-def test_import_osm_data_simple(temp_db_cursor,osm2pgsql_options):
-    temp_db_cursor.execute('CREATE TABLE place (id INT)')
-    temp_db_cursor.execute('INSERT INTO place values (1)')
+    files = [tmp_path / 'file1.osm', tmp_path / 'file2.osm']
+    for f in files:
+        f.write_text('test')
 
-    database_import.import_osm_data('file.pdf', osm2pgsql_options)
+    database_import.import_osm_data(files, osm2pgsql_options)
 
 
-def test_import_osm_data_simple_no_data(temp_db_cursor,osm2pgsql_options):
-    temp_db_cursor.execute('CREATE TABLE place (id INT)')
+def test_import_osm_data_simple_no_data(table_factory, osm2pgsql_options):
+    table_factory('place')
 
     with pytest.raises(UsageError, match='No data.*'):
-        database_import.import_osm_data('file.pdf', osm2pgsql_options)
+        database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)
 
 
-def test_import_osm_data_drop(temp_db_conn, temp_db_cursor, tmp_path, osm2pgsql_options):
-    temp_db_cursor.execute('CREATE TABLE place (id INT)')
-    temp_db_cursor.execute('CREATE TABLE planet_osm_nodes (id INT)')
-    temp_db_cursor.execute('INSERT INTO place values (1)')
+def test_import_osm_data_drop(table_factory, temp_db_conn, tmp_path, osm2pgsql_options):
+    table_factory('place', content=((1, ), ))
+    table_factory('planet_osm_nodes')
 
     flatfile = tmp_path / 'flatfile'
     flatfile.write_text('touch')
 
     osm2pgsql_options['flatnode_file'] = str(flatfile.resolve())
 
-    database_import.import_osm_data('file.pdf', osm2pgsql_options, drop=True)
+    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options, drop=True)
 
     assert not flatfile.exists()
     assert not temp_db_conn.table_exists('planet_osm_nodes')
 
 
-def test_import_osm_data_default_cache(temp_db_cursor,osm2pgsql_options):
-    temp_db_cursor.execute('CREATE TABLE place (id INT)')
-    temp_db_cursor.execute('INSERT INTO place values (1)')
+def test_import_osm_data_default_cache(table_factory, osm2pgsql_options):
+    table_factory('place', content=((1, ), ))
 
     osm2pgsql_options['osm2pgsql_cache'] = 0
 
@@ -153,7 +158,7 @@ def test_truncate_database_tables(temp_db_conn, temp_db_cursor, table_factory):
 
 
 @pytest.mark.parametrize("threads", (1, 5))
-def test_load_data(dsn, src_dir, place_row, placex_table, osmline_table,
+def test_load_data(dsn, place_row, placex_table, osmline_table,
                    word_table, temp_db_cursor, threads):
     for func in ('precompute_words', 'getorcreate_housenumber_id', 'make_standard_name'):
         temp_db_cursor.execute("""CREATE FUNCTION {} (src TEXT)
@@ -186,7 +191,7 @@ def test_create_country_names(temp_db_with_extensions, temp_db_conn, temp_db_cur
 
     assert len(tokenizer.analyser_cache['countries']) == 2
 
-    result_set = {k: set(v) for k, v in tokenizer.analyser_cache['countries']}
+    result_set = {k: set(v.values()) for k, v in tokenizer.analyser_cache['countries']}
 
     if languages:
         assert result_set == {'us' : set(('us', 'us1', 'United States')),
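
The rewritten tests above call a table_factory fixture with a table name, an optional column definition, and optional row content. The sketch below is only an illustration of that calling convention, not the fixture actually defined in Nominatim's test conftest.py; it assumes a pre-existing temp_db_cursor fixture (a psycopg2 cursor on the temporary test database), as used elsewhere in these tests.

    import pytest

    @pytest.fixture
    def table_factory(temp_db_cursor):
        """Create a table with optional rows, e.g. table_factory('place', content=((1, ), ))."""
        def mk_table(name, definition='id INT', content=None):
            # Create the table with the given column definition (default: a single id column).
            temp_db_cursor.execute('CREATE TABLE {} ({})'.format(name, definition))
            if content:
                # Insert the given rows; each element of 'content' is one row tuple.
                placeholders = ', '.join(['%s'] * len(content[0]))
                temp_db_cursor.executemany(
                    'INSERT INTO {} VALUES ({})'.format(name, placeholders), content)
        return mk_table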