Functions for importing tiger data and handling tarfile and directory files
import logging
import os
import selectors
import tarfile

from nominatim.db.connection import connect
from nominatim.db.async_connection import DBConnection
from nominatim.db.sql_preprocessor import SQLPreprocessor
# Module-level logger (root logger; handlers are configured by the application).
LOG = logging.getLogger()
def handle_tarfile_or_directory(data_dir):
    """ Handles tarfile or directory for importing tiger data

        Collects the SQL files to import from `data_dir`, which may be
        either a gzipped tarball (path ending in '.tar.gz') or a plain
        directory of .sql files.

        Returns a tuple (sql_files, tar): `sql_files` is the list of SQL
        files (tar member objects for a tarball, absolute-ish joined paths
        for a directory) and `tar` is the open tarfile object, or None
        when reading from a directory. Returns (None, None) when no SQL
        files were found.
    """
    tar = None
    if data_dir.endswith('.tar.gz'):
        # Tarball: keep the handle open so the caller can extract members.
        tar = tarfile.open(data_dir)
        sql_files = [i for i in tar.getmembers() if i.name.endswith('.sql')]
        LOG.warning("Found %d SQL files in tarfile with path %s", len(sql_files), data_dir)
        if not sql_files:
            LOG.warning("Tiger data import selected but no files in tarfile's path %s", data_dir)
            return None, None
    else:
        # Plain directory: pick up every .sql file at the top level.
        files = os.listdir(data_dir)
        sql_files = [os.path.join(data_dir, i) for i in files if i.endswith('.sql')]
        LOG.warning("Found %d SQL files in path %s", len(sql_files), data_dir)
        if not sql_files:
            LOG.warning("Tiger data import selected but no files found in path %s", data_dir)
            return None, None

    return sql_files, tar
def handle_threaded_sql_statements(sel, file):
    """ Handles sql statement with multiplexing

        Reads `file` line by line (one SQL statement per line) and hands
        each statement to the next idle connection registered with the
        selector `sel`. Returns when the file is exhausted; failing
        statements are logged and skipped rather than aborting the import.
    """
    lines = 0
    end_of_file = False
    # Using pool of database connections to execute sql statements
    while not end_of_file:
        for key, _ in sel.select(1):
            conn = key.data
            try:
                if conn.is_done():
                    sql_query = file.readline()
                    lines += 1
                    if not sql_query:
                        # Empty read means EOF: stop feeding the pool.
                        end_of_file = True
                        break
                    conn.perform(sql_query)
                    if lines == 1000:
                        # Lightweight progress indicator, one dot per 1000 statements.
                        print('. ', end='', flush=True)
                        lines = 0
            except Exception as exc: # pylint: disable=broad-except
                LOG.info('Wrong SQL statement: %s', exc)
def handle_unregister_connection_pool(sel, place_threads):
    """ Handles unregistering pool of connections

        Drains the selector: waits for each of the `place_threads`
        registered connections to finish its last statement, then
        unregisters and closes it. Errors from the final statement are
        logged but do not abort the shutdown.
    """
    while place_threads > 0:
        for key, _ in sel.select(1):
            conn = key.data
            sel.unregister(conn)
            try:
                # Block until the connection's outstanding query completes.
                conn.wait()
            except Exception as exc: # pylint: disable=broad-except
                LOG.info('Wrong SQL statement: %s', exc)
            conn.close()
            place_threads -= 1
def add_tiger_data(data_dir, config, threads):
    """ Import tiger data from directory or tar file `data dir`.

        Runs the start SQL, feeds every .sql file found by
        handle_tarfile_or_directory() through a pool of <threads - 1>
        (at least 1) asynchronous database connections, then runs the
        finishing SQL that creates the indexes. Does nothing when no
        SQL files are found.
    """
    dsn = config.get_libpq_dsn()
    sql_files, tar = handle_tarfile_or_directory(data_dir)

    if not sql_files:
        # Nothing to import; handle_tarfile_or_directory already logged why.
        return

    with connect(dsn) as conn:
        sql = SQLPreprocessor(conn, config, config.lib_dir.sql)
        sql.run_sql_file(conn, 'tiger_import_start.sql')

    # Reading sql_files and then for each file line handling
    # sql_query in <threads - 1> chunks.
    sel = selectors.DefaultSelector()
    place_threads = max(1, threads - 1)

    # Creates a pool of database connections
    for _ in range(place_threads):
        conn = DBConnection(dsn)
        conn.connect()
        sel.register(conn, selectors.EVENT_WRITE, conn)

    for sql_file in sql_files:
        if not tar:
            file = open(sql_file)
        else:
            file = tar.extractfile(sql_file)

        handle_threaded_sql_statements(sel, file)
        # Close each SQL file once it has been fully fed to the pool.
        file.close()

    # Unregistering pool of database connections
    handle_unregister_connection_pool(sel, place_threads)

    if tar:
        tar.close()
    print('\n')
    LOG.warning("Creating indexes on Tiger data")
    with connect(dsn) as conn:
        sql = SQLPreprocessor(conn, config, config.lib_dir.sql)
        sql.run_sql_file(conn, 'tiger_import_finish.sql')