# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for the Tiger data import functions.
"""
import tarfile
from textwrap import dedent

import pytest

from nominatim_db.tools import tiger_data, freeze
from nominatim_db.errors import UsageError
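

# Mock of the target 'tiger' table. The tiger_table fixture below swaps the
# real import SQL for a stub tiger_line_import() that writes into this table.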
class MockTigerTable:

    def __init__(self, conn):
        self.conn = conn
        with conn.cursor() as cur:
            cur.execute("""CREATE TABLE tiger (linegeo GEOMETRY, start INTEGER,
                                               stop INTEGER, interpol TEXT,
                                               token_info JSONB, postcode TEXT)""")

            # We need this table to determine if the database is frozen or not
            cur.execute("CREATE TABLE place (number INTEGER)")

    def count(self):
        with self.conn.cursor() as cur:
            return cur.scalar("SELECT count(*) FROM tiger")

    def row(self):
        with self.conn.cursor() as cur:
            cur.execute("SELECT * FROM tiger LIMIT 1")
            return cur.fetchone()
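

# Set up the mock tiger table and replace the tiger import start/finish SQL
# with minimal stubs so that imported lines end up in the mock table.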
@pytest.fixture
def tiger_table(def_config, temp_db_conn, sql_preprocessor,
                temp_db_with_extensions, tmp_path):
    def_config.lib_dir.sql = tmp_path / 'sql'
    def_config.lib_dir.sql.mkdir()

    (def_config.lib_dir.sql / 'tiger_import_start.sql').write_text(
        """CREATE OR REPLACE FUNCTION tiger_line_import(linegeo GEOMETRY, start INTEGER,
                                                        stop INTEGER, interpol TEXT,
                                                        token_info JSONB, postcode TEXT)
           RETURNS INTEGER AS $$
            INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode)
            RETURNING 1
           $$ LANGUAGE SQL;""")
    (def_config.lib_dir.sql / 'tiger_import_finish.sql').write_text(
        """DROP FUNCTION tiger_line_import (linegeo GEOMETRY, in_startnumber INTEGER,
                                 in_endnumber INTEGER, interpolationtype TEXT,
                                 token_info JSONB, in_postcode TEXT);""")

    return MockTigerTable(temp_db_conn)
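

# Factory fixture that writes single-row CSV files in the Tiger CSV format
# (from;to;interpolation;street;city;state;postcode;geometry).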
@pytest.fixture
def csv_factory(tmp_path):
    def _mk_file(fname, hnr_from=1, hnr_to=9, interpol='odd', street='Main St',
                 city='Newtown', state='AL', postcode='12345',
                 geometry='LINESTRING(-86.466995 32.428956,-86.466923 32.428933)'):
        (tmp_path / (fname + '.csv')).write_text(dedent("""\
        from;to;interpolation;street;city;state;postcode;geometry
        {};{};{};{};{};{};{};{}
        """.format(hnr_from, hnr_to, interpol, street, city, state,
                   postcode, geometry)))

    return _mk_file
@pytest.mark.parametrize("threads", (1, 5))
def test_add_tiger_data(def_config, src_dir, tiger_table, tokenizer_mock, threads):
    tiger_data.add_tiger_data(str(src_dir / 'test' / 'testdb' / 'tiger'),
                              def_config, threads, tokenizer_mock())

    assert tiger_table.count() == 6213
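

# Importing into a frozen database must be refused with a UsageError.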
def test_add_tiger_data_database_frozen(def_config, temp_db_conn, tiger_table, tokenizer_mock,
                                        tmp_path):
    freeze.drop_update_tables(temp_db_conn)

    with pytest.raises(UsageError) as excinfo:
        tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())

    assert "database frozen" in str(excinfo.value)

    assert tiger_table.count() == 0
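

# A directory without any CSV files imports nothing.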
def test_add_tiger_data_no_files(def_config, tiger_table, tokenizer_mock,
                                 tmp_path):
    tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())

    assert tiger_table.count() == 0
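

# A CSV file with unparsable content is skipped without importing anything.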
def test_add_tiger_data_bad_file(def_config, tiger_table, tokenizer_mock,
                                 tmp_path):
    csvfile = tmp_path / '1010.csv'
    csvfile.write_text("""Random text""")

    tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())

    assert tiger_table.count() == 0
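

# Rows with non-numeric house numbers are dropped; only the valid row is kept.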
def test_add_tiger_data_hnr_nan(def_config, tiger_table, tokenizer_mock,
                                csv_factory, tmp_path):
    csv_factory('file1', hnr_from=99)
    csv_factory('file2', hnr_from='L12')
    csv_factory('file3', hnr_to='12.4')

    tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())

    assert tiger_table.count() == 1
    assert tiger_table.row()['start'] == 99
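

# The importer also accepts CSV files packed into a tar.gz archive.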
@pytest.mark.parametrize("threads", (1, 5))
def test_add_tiger_data_tarfile(def_config, tiger_table, tokenizer_mock,
                                tmp_path, src_dir, threads):
    tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
    tar.add(str(src_dir / 'test' / 'testdb' / 'tiger' / '01001.csv'))
    tar.close()

    tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, threads,
                              tokenizer_mock())

    assert tiger_table.count() == 6213
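

# A file that is not a valid tar archive must raise a UsageError.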
def test_add_tiger_data_bad_tarfile(def_config, tiger_table, tokenizer_mock,
                                    tmp_path):
    tarball = tmp_path / 'sample.tar.gz'
    tarball.write_text("""Random text""")

    with pytest.raises(UsageError):
        tiger_data.add_tiger_data(str(tarball), def_config, 1, tokenizer_mock())
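

# An archive that contains no CSV files imports nothing.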
def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock,
                                      tmp_path):
    tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
    tar.close()

    tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, 1,
                              tokenizer_mock())

    assert tiger_table.count() == 0