test/python/tools/test_database_import.py
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2025 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for functions to import a new database.
"""
from pathlib import Path

import pytest
import pytest_asyncio  # noqa
import psycopg
from psycopg import sql as pysql

from nominatim_db.tools import database_import
from nominatim_db.errors import UsageError


class TestDatabaseSetup:
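    """Tests for setting up a fresh project database with setup_database_skeleton()."""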
    DBNAME = 'test_nominatim_python_unittest'

    @pytest.fixture(autouse=True)
    def setup_nonexistent_db(self):
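        """Ensure the test database does not exist before or after each test."""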
        with psycopg.connect(dbname='postgres', autocommit=True) as conn:
            with conn.cursor() as cur:
                cur.execute(f'DROP DATABASE IF EXISTS {self.DBNAME}')

            yield True

            with conn.cursor() as cur:
                cur.execute(f'DROP DATABASE IF EXISTS {self.DBNAME}')

    @pytest.fixture
    def cursor(self):
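        """Provide a cursor on the test database."""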
        with psycopg.connect(dbname=self.DBNAME) as conn:
            with conn.cursor() as cur:
                yield cur

    def conn(self):
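        """Open a new connection to the test database."""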
        return psycopg.connect(dbname=self.DBNAME)

    def test_setup_skeleton(self):
        database_import.setup_database_skeleton(f'dbname={self.DBNAME}')

        # Check that all extensions are set up.
        with self.conn() as conn:
            with conn.cursor() as cur:
                cur.execute('CREATE TABLE t (h HSTORE, geom GEOMETRY(Geometry, 4326))')

    def test_unsupported_pg_version(self, monkeypatch):
        monkeypatch.setattr(database_import, 'POSTGRESQL_REQUIRED_VERSION', (100, 4))

        with pytest.raises(UsageError, match='PostgreSQL server is too old.'):
            database_import.setup_database_skeleton(f'dbname={self.DBNAME}')

    def test_create_db_explicit_ro_user(self):
        database_import.setup_database_skeleton(f'dbname={self.DBNAME}',
                                                rouser='postgres')

    def test_create_db_missing_ro_user(self):
        with pytest.raises(UsageError, match='Missing read-only user.'):
            database_import.setup_database_skeleton(f'dbname={self.DBNAME}',
                                                    rouser='sdfwkjkjgdugu2;jgsafkljas;')

    def test_setup_extensions_old_postgis(self, monkeypatch):
        monkeypatch.setattr(database_import, 'POSTGIS_REQUIRED_VERSION', (50, 50))

        with pytest.raises(UsageError, match='PostGIS is too old.'):
            database_import.setup_database_skeleton(f'dbname={self.DBNAME}')


def test_setup_skeleton_already_exists(temp_db):
    with pytest.raises(UsageError):
        database_import.setup_database_skeleton(f'dbname={temp_db}')


def test_import_osm_data_simple(table_factory, osm2pgsql_options, capfd):
    table_factory('place', content=((1, ), ))

    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)
    captured = capfd.readouterr()

    assert '--create' in captured.out
    assert '--output gazetteer' in captured.out
    assert f'--style {osm2pgsql_options["osm2pgsql_style"]}' in captured.out
    assert f'--number-processes {osm2pgsql_options["threads"]}' in captured.out
    assert f'--cache {osm2pgsql_options["osm2pgsql_cache"]}' in captured.out
    assert 'file.pbf' in captured.out


def test_import_osm_data_multifile(table_factory, tmp_path, osm2pgsql_options, capfd):
    table_factory('place', content=((1, ), ))
    osm2pgsql_options['osm2pgsql_cache'] = 0

    files = [tmp_path / 'file1.osm', tmp_path / 'file2.osm']
    for f in files:
        f.write_text('test')

    database_import.import_osm_data(files, osm2pgsql_options)
    captured = capfd.readouterr()

    assert 'file1.osm' in captured.out
    assert 'file2.osm' in captured.out


def test_import_osm_data_simple_no_data(table_factory, osm2pgsql_options):
    table_factory('place')

    with pytest.raises(UsageError, match='No data imported'):
        database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)


def test_import_osm_data_simple_ignore_no_data(table_factory, osm2pgsql_options):
    table_factory('place')

    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options,
                                    ignore_errors=True)


def test_import_osm_data_drop(table_factory, temp_db_cursor, tmp_path, osm2pgsql_options):
    table_factory('place', content=((1, ), ))
    table_factory('planet_osm_nodes')

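    # Create a dummy flat node file; importing with drop=True must remove it.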
    flatfile = tmp_path / 'flatfile'
    flatfile.write_text('touch')

    osm2pgsql_options['flatnode_file'] = str(flatfile.resolve())

    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options, drop=True)

    assert not flatfile.exists()
    assert not temp_db_cursor.table_exists('planet_osm_nodes')


def test_import_osm_data_default_cache(table_factory, osm2pgsql_options, capfd):
    table_factory('place', content=((1, ), ))

    osm2pgsql_options['osm2pgsql_cache'] = 0

    database_import.import_osm_data(Path(__file__), osm2pgsql_options)
    captured = capfd.readouterr()

    assert f'--cache {osm2pgsql_options["osm2pgsql_cache"]}' in captured.out


@pytest.mark.parametrize("with_search", (True, False))
def test_truncate_database_tables(temp_db_conn, temp_db_cursor, table_factory, with_search):
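    # Data tables that truncate_data_tables() is expected to empty.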
    tables = ['placex', 'place_addressline', 'location_area',
              'location_area_country',
              'location_property_tiger', 'location_property_osmline',
              'location_postcode', 'location_road_23']
    if with_search:
        tables.append('search_name')

    for table in tables:
        table_factory(table, content=((1, ), (2, ), (3, )))
        assert temp_db_cursor.table_rows(table) == 3

    database_import.truncate_data_tables(temp_db_conn)

    for table in tables:
        assert temp_db_cursor.table_rows(table) == 0


@pytest.mark.parametrize("threads", (1, 5))
@pytest.mark.asyncio
async def test_load_data(dsn, place_row, placex_table, osmline_table,
                         temp_db_cursor, threads):
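    # Create trivial stand-ins for the SQL functions needed while loading data.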
    for func in ('precompute_words', 'getorcreate_housenumber_id', 'make_standard_name'):
        temp_db_cursor.execute(pysql.SQL("""CREATE FUNCTION {} (src TEXT)
                                            RETURNS TEXT AS $$ SELECT 'a'::TEXT $$ LANGUAGE SQL
                                         """).format(pysql.Identifier(func)))
    for oid in range(100, 130):
        place_row(osm_id=oid)
    place_row(osm_type='W', osm_id=342, cls='place', typ='houses',
              geom='SRID=4326;LINESTRING(0 0, 10 10)')

    await database_import.load_data(dsn, threads)

    assert temp_db_cursor.table_rows('placex') == 30
    assert temp_db_cursor.table_rows('location_property_osmline') == 1


class TestSetupSQL:
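    """Tests for creating tables, triggers and indices from SQL files."""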

    @pytest.fixture(autouse=True)
    def init_env(self, temp_db, tmp_path, def_config, sql_preprocessor_cfg):
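        """Point the configuration's SQL library directory to a temporary path."""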
        def_config.lib_dir.sql = tmp_path / 'sql'
        def_config.lib_dir.sql.mkdir()

        self.config = def_config

    def write_sql(self, fname, content):
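        """Write the given content to an SQL file in the test's SQL directory."""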
        (self.config.lib_dir.sql / fname).write_text(content)

    @pytest.mark.parametrize("reverse", [True, False])
    def test_create_tables(self, temp_db_conn, temp_db_cursor, reverse):
        self.write_sql('tables.sql',
                       """CREATE FUNCTION test() RETURNS bool
                          AS $$ SELECT {{db.reverse_only}} $$ LANGUAGE SQL""")

        database_import.create_tables(temp_db_conn, self.config, reverse)

        assert temp_db_cursor.scalar('SELECT test()') == reverse

    def test_create_table_triggers(self, temp_db_conn, temp_db_cursor):
        self.write_sql('table-triggers.sql',
                       """CREATE FUNCTION test() RETURNS TEXT
                          AS $$ SELECT 'a'::text $$ LANGUAGE SQL""")

        database_import.create_table_triggers(temp_db_conn, self.config)

        assert temp_db_cursor.scalar('SELECT test()') == 'a'

    def test_create_partition_tables(self, temp_db_conn, temp_db_cursor):
        self.write_sql('partition-tables.src.sql',
                       """CREATE FUNCTION test() RETURNS TEXT
                          AS $$ SELECT 'b'::text $$ LANGUAGE SQL""")

        database_import.create_partition_tables(temp_db_conn, self.config)

        assert temp_db_cursor.scalar('SELECT test()') == 'b'

    @pytest.mark.parametrize("drop", [True, False])
    @pytest.mark.asyncio
    async def test_create_search_indices(self, temp_db_conn, temp_db_cursor, drop):
        self.write_sql('indices.sql',
                       """CREATE FUNCTION test() RETURNS bool
                          AS $$ SELECT {{drop}} $$ LANGUAGE SQL""")

        await database_import.create_search_indices(temp_db_conn, self.config, drop)

        assert temp_db_cursor.scalar('SELECT test()') == drop