test/python/tools/test_database_import.py
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for functions to import a new database.
"""
from pathlib import Path

import pytest
import pytest_asyncio
import psycopg
from psycopg import sql as pysql

from nominatim_db.tools import database_import
from nominatim_db.errors import UsageError

class TestDatabaseSetup:
    DBNAME = 'test_nominatim_python_unittest'

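    # Make sure the test database does not exist before each test and
    # drop it again after the test has run.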
    @pytest.fixture(autouse=True)
    def setup_nonexistant_db(self):
        with psycopg.connect(dbname='postgres', autocommit=True) as conn:
            with conn.cursor() as cur:
                cur.execute(f'DROP DATABASE IF EXISTS {self.DBNAME}')

            yield True

            with conn.cursor() as cur:
                cur.execute(f'DROP DATABASE IF EXISTS {self.DBNAME}')


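    # Cursor on the database created by the test.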
    @pytest.fixture
    def cursor(self):
        with psycopg.connect(dbname=self.DBNAME) as conn:
            with conn.cursor() as cur:
                yield cur


    def conn(self):
        return psycopg.connect(dbname=self.DBNAME)


    def test_setup_skeleton(self):
        database_import.setup_database_skeleton(f'dbname={self.DBNAME}')

        # Check that all extensions are set up.
        with self.conn() as conn:
            with conn.cursor() as cur:
                cur.execute('CREATE TABLE t (h HSTORE, geom GEOMETRY(Geometry, 4326))')


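    # A required PostgreSQL version far beyond any released server must be rejected.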
    def test_unsupported_pg_version(self, monkeypatch):
        monkeypatch.setattr(database_import, 'POSTGRESQL_REQUIRED_VERSION', (100, 4))

        with pytest.raises(UsageError, match='PostgreSQL server is too old.'):
            database_import.setup_database_skeleton(f'dbname={self.DBNAME}')


    def test_create_db_explicit_ro_user(self):
        database_import.setup_database_skeleton(f'dbname={self.DBNAME}',
                                                rouser='postgres')


    def test_create_db_missing_ro_user(self):
        with pytest.raises(UsageError, match='Missing read-only user.'):
            database_import.setup_database_skeleton(f'dbname={self.DBNAME}',
                                                    rouser='sdfwkjkjgdugu2;jgsafkljas;')


    def test_setup_extensions_old_postgis(self, monkeypatch):
        monkeypatch.setattr(database_import, 'POSTGIS_REQUIRED_VERSION', (50, 50))

        with pytest.raises(UsageError, match='PostGIS is too old.'):
            database_import.setup_database_skeleton(f'dbname={self.DBNAME}')


def test_setup_skeleton_already_exists(temp_db):
    with pytest.raises(UsageError):
        database_import.setup_database_skeleton(f'dbname={temp_db}')


def test_import_osm_data_simple(table_factory, osm2pgsql_options, capfd):
    table_factory('place', content=((1, ), ))

    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)
    captured = capfd.readouterr()

    assert '--create' in captured.out
    assert '--output gazetteer' in captured.out
    assert f'--style {osm2pgsql_options["osm2pgsql_style"]}' in captured.out
    assert f'--number-processes {osm2pgsql_options["threads"]}' in captured.out
    assert f'--cache {osm2pgsql_options["osm2pgsql_cache"]}' in captured.out
    assert 'file.pbf' in captured.out


def test_import_osm_data_multifile(table_factory, tmp_path, osm2pgsql_options, capfd):
    table_factory('place', content=((1, ), ))
    osm2pgsql_options['osm2pgsql_cache'] = 0

    files = [tmp_path / 'file1.osm', tmp_path / 'file2.osm']
    for f in files:
        f.write_text('test')

    database_import.import_osm_data(files, osm2pgsql_options)
    captured = capfd.readouterr()

    assert 'file1.osm' in captured.out
    assert 'file2.osm' in captured.out


def test_import_osm_data_simple_no_data(table_factory, osm2pgsql_options):
    table_factory('place')

    with pytest.raises(UsageError, match='No data imported'):
        database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)


def test_import_osm_data_simple_ignore_no_data(table_factory, osm2pgsql_options):
    table_factory('place')

    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options,
                                    ignore_errors=True)


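# With drop=True the flatnode file and the raw osm2pgsql import tables
# must be gone after the import.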
def test_import_osm_data_drop(table_factory, temp_db_cursor, tmp_path, osm2pgsql_options):
    table_factory('place', content=((1, ), ))
    table_factory('planet_osm_nodes')

    flatfile = tmp_path / 'flatfile'
    flatfile.write_text('touch')

    osm2pgsql_options['flatnode_file'] = str(flatfile.resolve())

    database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options, drop=True)

    assert not flatfile.exists()
    assert not temp_db_cursor.table_exists('planet_osm_nodes')


def test_import_osm_data_default_cache(table_factory, osm2pgsql_options, capfd):
    table_factory('place', content=((1, ), ))

    osm2pgsql_options['osm2pgsql_cache'] = 0

    database_import.import_osm_data(Path(__file__), osm2pgsql_options)
    captured = capfd.readouterr()

    assert f'--cache {osm2pgsql_options["osm2pgsql_cache"]}' in captured.out


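# All data tables must be emptied. search_name may be absent (e.g. in a
# reverse-only setup), so both variants are tested.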
@pytest.mark.parametrize("with_search", (True, False))
def test_truncate_database_tables(temp_db_conn, temp_db_cursor, table_factory, with_search):
    tables = ['placex', 'place_addressline', 'location_area',
              'location_area_country',
              'location_property_tiger', 'location_property_osmline',
              'location_postcode', 'location_road_23']
    if with_search:
        tables.append('search_name')

    for table in tables:
        table_factory(table, content=((1, ), (2, ), (3, )))
        assert temp_db_cursor.table_rows(table) == 3

    database_import.truncate_data_tables(temp_db_conn)

    for table in tables:
        assert temp_db_cursor.table_rows(table) == 0


@pytest.mark.parametrize("threads", (1, 5))
@pytest.mark.asyncio
async def test_load_data(dsn, place_row, placex_table, osmline_table,
                         temp_db_cursor, threads):
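    # Provide stand-ins for SQL functions used during loading, so that
    # load_data() can run against a bare test database.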
    for func in ('precompute_words', 'getorcreate_housenumber_id', 'make_standard_name'):
        temp_db_cursor.execute(pysql.SQL("""CREATE FUNCTION {} (src TEXT)
                                            RETURNS TEXT AS $$ SELECT 'a'::TEXT $$ LANGUAGE SQL
                                         """).format(pysql.Identifier(func)))
    for oid in range(100, 130):
        place_row(osm_id=oid)
    place_row(osm_type='W', osm_id=342, cls='place', typ='houses',
              geom='SRID=4326;LINESTRING(0 0, 10 10)')

    await database_import.load_data(dsn, threads)

    assert temp_db_cursor.table_rows('placex') == 30
    assert temp_db_cursor.table_rows('location_property_osmline') == 1


class TestSetupSQL:

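    # Redirect the SQL library directory to a temporary directory, so that
    # each test can supply its own SQL file.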
    @pytest.fixture(autouse=True)
    def init_env(self, temp_db, tmp_path, def_config, sql_preprocessor_cfg):
        def_config.lib_dir.sql = tmp_path / 'sql'
        def_config.lib_dir.sql.mkdir()

        self.config = def_config


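    # Helper to place a SQL file into the temporary library directory.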
    def write_sql(self, fname, content):
        (self.config.lib_dir.sql / fname).write_text(content)


    @pytest.mark.parametrize("reverse", [True, False])
    def test_create_tables(self, temp_db_conn, temp_db_cursor, reverse):
        self.write_sql('tables.sql',
                       """CREATE FUNCTION test() RETURNS bool
                          AS $$ SELECT {{db.reverse_only}} $$ LANGUAGE SQL""")

        database_import.create_tables(temp_db_conn, self.config, reverse)

        assert temp_db_cursor.scalar('SELECT test()') == reverse


    def test_create_table_triggers(self, temp_db_conn, temp_db_cursor):
        self.write_sql('table-triggers.sql',
                       """CREATE FUNCTION test() RETURNS TEXT
                          AS $$ SELECT 'a'::text $$ LANGUAGE SQL""")

        database_import.create_table_triggers(temp_db_conn, self.config)

        assert temp_db_cursor.scalar('SELECT test()') == 'a'


    def test_create_partition_tables(self, temp_db_conn, temp_db_cursor):
        self.write_sql('partition-tables.src.sql',
                       """CREATE FUNCTION test() RETURNS TEXT
                          AS $$ SELECT 'b'::text $$ LANGUAGE SQL""")

        database_import.create_partition_tables(temp_db_conn, self.config)

        assert temp_db_cursor.scalar('SELECT test()') == 'b'


    @pytest.mark.parametrize("drop", [True, False])
    @pytest.mark.asyncio
    async def test_create_search_indices(self, temp_db_conn, temp_db_cursor, drop):
        self.write_sql('indices.sql',
                       """CREATE FUNCTION test() RETURNS bool
                          AS $$ SELECT {{drop}} $$ LANGUAGE SQL""")

        await database_import.create_search_indices(temp_db_conn, self.config, drop)

        assert temp_db_cursor.scalar('SELECT test()') == drop