import tarfile
import selectors
-from ..db.connection import connect
-from ..db.async_connection import DBConnection
-from ..db.sql_preprocessor import SQLPreprocessor
+from nominatim.db.connection import connect
+from nominatim.db.async_connection import DBConnection
+from nominatim.db.sql_preprocessor import SQLPreprocessor
LOG = logging.getLogger()
@@ ... @@ def handle_threaded_sql_statements(sel, file):
            except Exception as exc: # pylint: disable=broad-except
                LOG.info('Wrong SQL statement: %s', exc)
-
-def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
-    """ Import tiger data from directory or tar file
+def handle_unregister_connection_pool(sel, place_threads):
+    """ Handles unregistering the pool of connections
     """
+    while place_threads > 0:
+        for key, _ in sel.select(1):
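+            # key.data holds the DBConnection registered with the selector.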
+            conn = key.data
+            sel.unregister(conn)
+            try:
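+                # wait() blocks until the pending statement has finished
+                # (it raises on errors, hence the guard below).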
+                conn.wait()
+            except Exception as exc: # pylint: disable=broad-except
+                LOG.info('Wrong SQL statement: %s', exc)
+            conn.close()
+            place_threads -= 1
+
+def add_tiger_data(data_dir, config, threads):
+    """ Import tiger data from directory or tar file `data_dir`.
+    """
+    dsn = config.get_libpq_dsn()
    sql_files, tar = handle_tarfile_or_directory(data_dir)
    if not sql_files:
        return
    with connect(dsn) as conn:
-        sql = SQLPreprocessor(conn, config, sqllib_dir)
+        sql = SQLPreprocessor(conn, config)
        sql.run_sql_file(conn, 'tiger_import_start.sql')
    # Read each sql file and feed its statements line by line
    # to the pool of database connections.
@@ ... @@
        handle_threaded_sql_statements(sel, file)
    # Unregistering pool of database connections
-    while place_threads > 0:
-        for key, _ in sel.select(1):
-            conn = key.data
-            sel.unregister(conn)
-            conn.wait()
-            conn.close()
-            place_threads -= 1
+    handle_unregister_connection_pool(sel, place_threads)
    if tar:
        tar.close()
    print('\n')
    LOG.warning("Creating indexes on Tiger data")
    with connect(dsn) as conn:
-        sql = SQLPreprocessor(conn, config, sqllib_dir)
+        sql = SQLPreprocessor(conn, config)
        sql.run_sql_file(conn, 'tiger_import_finish.sql')