1 """
2 Tests for functions to import a new database.
3 """
4 import pytest
5 import psycopg2
6 import sys
7 from pathlib import Path
8
9 from nominatim.tools import database_import
10 from nominatim.errors import UsageError
11
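# Note: generic fixtures such as temp_db, temp_db_conn, temp_db_cursor,
# table_factory, place_row and osm2pgsql_options are not defined in this
# module; they are expected to come from the shared conftest of the test suite.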
@pytest.fixture
def nonexistant_db():
    """ Return the name of a database that is guaranteed not to exist
        when the test starts and clean it up again afterwards.
    """
    dbname = 'test_nominatim_python_unittest'

    conn = psycopg2.connect(database='postgres')

    # DROP DATABASE cannot run inside a transaction block, so switch the
    # connection to autocommit first.
    conn.set_isolation_level(0)
    with conn.cursor() as cur:
        cur.execute('DROP DATABASE IF EXISTS {}'.format(dbname))

    yield dbname

    with conn.cursor() as cur:
        cur.execute('DROP DATABASE IF EXISTS {}'.format(dbname))

    conn.close()

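# setup_database_skeleton() must create the database itself and import the base
# data; with no_partitions only partition 0 should show up in country_name.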
@pytest.mark.parametrize("no_partitions", (True, False))
def test_setup_skeleton(src_dir, nonexistant_db, no_partitions):
    database_import.setup_database_skeleton('dbname=' + nonexistant_db,
                                            src_dir / 'data', no_partitions)

    conn = psycopg2.connect(database=nonexistant_db)

    try:
        with conn.cursor() as cur:
            cur.execute("SELECT distinct partition FROM country_name")
            partitions = {r[0] for r in cur}
            if no_partitions:
                assert partitions == {0}
            else:
                assert len(partitions) > 10
    finally:
        conn.close()


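# create_db() only creates an empty database; the optional read-only user must
# already exist as a PostgreSQL role ('www-data' is presumed to be present in
# the test environment).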
def test_create_db_success(nonexistant_db):
    database_import.create_db('dbname=' + nonexistant_db, rouser='www-data')

    conn = psycopg2.connect(database=nonexistant_db)
    conn.close()


def test_create_db_already_exists(temp_db):
    with pytest.raises(UsageError):
        database_import.create_db('dbname=' + temp_db)


def test_create_db_unsupported_version(nonexistant_db, monkeypatch):
    monkeypatch.setattr(database_import, 'POSTGRESQL_REQUIRED_VERSION', (100, 4))

    with pytest.raises(UsageError, match='PostgreSQL server is too old.'):
        database_import.create_db('dbname=' + nonexistant_db)


def test_create_db_missing_ro_user(nonexistant_db):
    with pytest.raises(UsageError, match='Missing read-only user.'):
        database_import.create_db('dbname=' + nonexistant_db, rouser='sdfwkjkjgdugu2;jgsafkljas;')


def test_setup_extensions(temp_db_conn, temp_db_cursor):
    database_import.setup_extensions(temp_db_conn)

    temp_db_cursor.execute('CREATE TABLE t (h HSTORE, geom GEOMETRY(Geometry, 4326))')


def test_setup_extensions_old_postgis(temp_db_conn, monkeypatch):
    monkeypatch.setattr(database_import, 'POSTGIS_REQUIRED_VERSION', (50, 50))

    with pytest.raises(UsageError, match='PostGIS version is too old.'):
        database_import.setup_extensions(temp_db_conn)


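# import_base_data() loads the static data (e.g. country_name) from the given
# data directory; the hstore and postgis extensions must already be installed,
# which is why the tests create them explicitly first.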
def test_import_base_data(src_dir, temp_db, temp_db_cursor):
    temp_db_cursor.execute('CREATE EXTENSION hstore')
    temp_db_cursor.execute('CREATE EXTENSION postgis')
    database_import.import_base_data('dbname=' + temp_db, src_dir / 'data')

    assert temp_db_cursor.scalar('SELECT count(*) FROM country_name') > 0


def test_import_base_data_ignore_partitions(src_dir, temp_db, temp_db_cursor):
    temp_db_cursor.execute('CREATE EXTENSION hstore')
    temp_db_cursor.execute('CREATE EXTENSION postgis')
    database_import.import_base_data('dbname=' + temp_db, src_dir / 'data',
                                     ignore_partitions=True)

    assert temp_db_cursor.scalar('SELECT count(*) FROM country_name') > 0
    assert temp_db_cursor.scalar('SELECT count(*) FROM country_name WHERE partition != 0') == 0


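# import_osm_data() verifies after the osm2pgsql run (presumably a no-op with
# the osm2pgsql_options test fixture) that the place table contains data: one
# pre-inserted row makes the check pass, an empty table must raise a UsageError
# (see the test below).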
def test_import_osm_data_simple(temp_db_cursor, osm2pgsql_options):
    temp_db_cursor.execute('CREATE TABLE place (id INT)')
    temp_db_cursor.execute('INSERT INTO place values (1)')

    database_import.import_osm_data('file.pdf', osm2pgsql_options)


def test_import_osm_data_simple_no_data(temp_db_cursor, osm2pgsql_options):
    temp_db_cursor.execute('CREATE TABLE place (id INT)')

    with pytest.raises(UsageError, match='No data.*'):
        database_import.import_osm_data('file.pdf', osm2pgsql_options)


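# With drop=True the flatnode file and the intermediate planet_osm_* tables
# must be gone once the import has finished.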
def test_import_osm_data_drop(temp_db_conn, temp_db_cursor, tmp_path, osm2pgsql_options):
    temp_db_cursor.execute('CREATE TABLE place (id INT)')
    temp_db_cursor.execute('CREATE TABLE planet_osm_nodes (id INT)')
    temp_db_cursor.execute('INSERT INTO place values (1)')

    flatfile = tmp_path / 'flatfile'
    flatfile.write_text('touch')

    osm2pgsql_options['flatnode_file'] = str(flatfile.resolve())

    database_import.import_osm_data('file.pdf', osm2pgsql_options, drop=True)

    assert not flatfile.exists()
    assert not temp_db_conn.table_exists('planet_osm_nodes')


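# An osm2pgsql_cache of 0 presumably leaves it to import_osm_data() to pick a
# default cache size; Path(__file__) is simply an input file that exists on disk.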
def test_import_osm_data_default_cache(temp_db_cursor, osm2pgsql_options):
    temp_db_cursor.execute('CREATE TABLE place (id INT)')
    temp_db_cursor.execute('INSERT INTO place values (1)')

    osm2pgsql_options['osm2pgsql_cache'] = 0

    database_import.import_osm_data(Path(__file__), osm2pgsql_options)


def test_truncate_database_tables(temp_db_conn, temp_db_cursor, table_factory):
    tables = ('placex', 'place_addressline', 'location_area',
              'location_area_country',
              'location_property_tiger', 'location_property_osmline',
              'location_postcode', 'search_name', 'location_road_23')
    for table in tables:
        table_factory(table, content=((1, ), (2, ), (3, )))
        assert temp_db_cursor.table_rows(table) == 3

    database_import.truncate_data_tables(temp_db_conn)

    for table in tables:
        assert temp_db_cursor.table_rows(table) == 0


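# load_data() needs a couple of auxiliary SQL functions in the database; they
# are replaced with trivial stubs here so the test can run against the bare
# test schema.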
@pytest.mark.parametrize("threads", (1, 5))
def test_load_data(dsn, src_dir, place_row, placex_table, osmline_table,
                   word_table, temp_db_cursor, threads):
    for func in ('precompute_words', 'getorcreate_housenumber_id', 'make_standard_name'):
        temp_db_cursor.execute("""CREATE FUNCTION {} (src TEXT)
                                  RETURNS TEXT AS $$ SELECT 'a'::TEXT $$ LANGUAGE SQL
                               """.format(func))
    for oid in range(100, 130):
        place_row(osm_id=oid)
    place_row(osm_type='W', osm_id=342, cls='place', typ='houses',
              geom='SRID=4326;LINESTRING(0 0, 10 10)')

    database_import.load_data(dsn, threads)

    assert temp_db_cursor.table_rows('placex') == 30
    assert temp_db_cursor.table_rows('location_property_osmline') == 1


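# Without a language list every name:XX variant is added to the country tokens;
# with a restricted list (here ' fr,en') variants in other languages such as
# name:af are left out.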
@pytest.mark.parametrize("languages", (None, ' fr,en'))
def test_create_country_names(temp_db_with_extensions, temp_db_conn, temp_db_cursor,
                              table_factory, tokenizer_mock, languages):

    table_factory('country_name', 'country_code varchar(2), name hstore',
                  content=(('us', '"name"=>"us1","name:af"=>"us2"'),
                           ('fr', '"name"=>"Fra", "name:en"=>"Fren"')))

    assert temp_db_cursor.scalar("SELECT count(*) FROM country_name") == 2

    tokenizer = tokenizer_mock()

    database_import.create_country_names(temp_db_conn, tokenizer, languages)

    assert len(tokenizer.analyser_cache['countries']) == 2

    result_set = {k: set(v) for k, v in tokenizer.analyser_cache['countries']}

    if languages:
        assert result_set == {'us': {'us', 'us1', 'United States'},
                              'fr': {'fr', 'Fra', 'Fren'}}
    else:
        assert result_set == {'us': {'us', 'us1', 'us2', 'United States'},
                              'fr': {'fr', 'Fra', 'Fren'}}