"""
Implementation of the 'import' subcommand.
"""
import logging
from pathlib import Path

import psutil

from nominatim.db.connection import connect
from nominatim.db import status, properties
from nominatim.version import NOMINATIM_VERSION
from nominatim.errors import UsageError

# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports to avoid eventually unused imports.
# pylint: disable=E0012,C0415

LOG = logging.getLogger()


class SetupAll:
    """
    Create a new Nominatim database from an OSM file.
    """
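
    # Typical invocation, assuming the standard 'nominatim' command-line
    # entry point (illustrative example only):
    #   nominatim import --osm-file planet.osm.pbf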

    @staticmethod
    def add_args(parser):
        group_name = parser.add_argument_group('Required arguments')
        group = group_name.add_mutually_exclusive_group(required=True)
        group.add_argument('--osm-file', metavar='FILE',
                           help='OSM file to be imported.')
        group.add_argument('--continue', dest='continue_at',
                           choices=['load-data', 'indexing', 'db-postprocess'],
                           help='Continue an import that was interrupted.')
        group = parser.add_argument_group('Optional arguments')
        group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                           help='Size of cache to be used by osm2pgsql (in MB)')
        group.add_argument('--reverse-only', action='store_true',
                           help='Do not create tables and indexes for searching')
        group.add_argument('--no-partitions', action='store_true',
                           help=("Do not partition search indices "
                                 "(speeds up import of single country extracts)"))
        group.add_argument('--no-updates', action='store_true',
                           help="Do not keep tables that are only needed for "
                                "updating the database later")
        group = parser.add_argument_group('Expert options')
        group.add_argument('--ignore-errors', action='store_true',
                           help='Continue import even when errors in SQL are present')
        group.add_argument('--index-noanalyse', action='store_true',
                           help='Do not perform analyse operations during index')

    @staticmethod
    def run(args): # pylint: disable=too-many-statements
        from ..tools import database_import
        from ..tools import refresh
        from ..indexer.indexer import Indexer
        from ..tools import postcodes

        if args.osm_file and not Path(args.osm_file).is_file():
            LOG.fatal("OSM file '%s' does not exist.", args.osm_file)
            raise UsageError('Cannot access file.')
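
        # Stage 1: set up the database and import the OSM data. Only run
        # for fresh imports, i.e. skipped with any --continue option.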
        if args.continue_at is None:
            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
                                                    args.data_dir,
                                                    args.no_partitions,
                                                    rouser=args.config.DATABASE_WEBUSER)

            LOG.warning('Installing database module')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.install_module(args.module_dir, args.project_dir,
                                               args.config.DATABASE_MODULE_PATH,
                                               conn=conn)
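
            # The (0, 1) arguments of osm2pgsql_options() are the fallback
            # cache size and thread count used when the corresponding
            # command-line options are not given.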
            LOG.warning('Importing OSM data file')
            database_import.import_osm_data(Path(args.osm_file),
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.no_updates,
                                            ignore_errors=args.ignore_errors)
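
            # SQL functions are (re-)created in several passes, interleaved
            # with the table, trigger and partition setup that depends on them.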
            with connect(args.config.get_libpq_dsn()) as conn:
                LOG.warning('Create functions (1st pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create tables')
                database_import.create_tables(conn, args.config,
                                              reverse_only=args.reverse_only)
                refresh.load_address_levels_from_file(conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
                LOG.warning('Create functions (2nd pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create table triggers')
                database_import.create_table_triggers(conn, args.config)
                LOG.warning('Create partition tables')
                database_import.create_partition_tables(conn, args.config)
                LOG.warning('Create functions (3rd pass)')
                refresh.create_functions(conn, args.config, False, False)

            LOG.warning('Importing wikipedia importance data')
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.error('Wikipedia importance dump file not found. '
                          'Will be using default importances.')
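
        # Stage 2: load the data into the placex table. Run on fresh imports
        # and with '--continue load-data'.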
        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn, args.config.MAX_WORD_FREQUENCY)
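
            # --threads falls back to the number of CPUs; psutil.cpu_count()
            # may return None, hence the final fallback to a single thread.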
            LOG.warning('Load data into placex table')
            database_import.load_data(args.config.get_libpq_dsn(),
                                      args.data_dir,
                                      args.threads or psutil.cpu_count() or 1)

            LOG.warning('Calculate postcodes')
            postcodes.import_postcodes(args.config.get_libpq_dsn(), args.project_dir)
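
        # Stage 3: index all places. Skipped only with '--continue
        # db-postprocess'. When resuming a partially finished indexing run,
        # first restore the support index for unindexed places.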
        if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
            if args.continue_at is not None and args.continue_at != 'load-data':
                with connect(args.config.get_libpq_dsn()) as conn:
                    SetupAll._create_pending_index(conn, args.config.TABLESPACE_ADDRESS_INDEX)
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(),
                              args.threads or psutil.cpu_count() or 1)
            indexer.index_full(analyse=not args.index_noanalyse)
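
        # The remaining steps always run, even with '--continue db-postprocess'.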
        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn, args.config,
                                                  drop=args.no_updates)
            LOG.warning('Create search index for default country names.')
            database_import.create_country_names(conn, args.config)

        webdir = args.project_dir / 'website'
        LOG.warning('Setup website at %s', webdir)
        refresh.setup_website(webdir, args.config)
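
        # Finally, record the data timestamp and the software version, so
        # that later update runs know the state of the database.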
        with connect(args.config.get_libpq_dsn()) as conn:
            try:
                dbdate = status.compute_database_date(conn)
                status.set_status(conn, dbdate)
                LOG.info('Database is at %s.', dbdate)
            except Exception as exc: # pylint: disable=broad-except
                LOG.error('Cannot determine date of database: %s', exc)

            properties.set_property(conn, 'database_version',
                                    '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        return 0

    @staticmethod
    def _create_pending_index(conn, tablespace):
        """ Add a supporting index for finding places still to be indexed.

            This index is normally only created at the end of the import
            process for later updates. When indexing was only partially
            done, this index greatly speeds up skipping over the already
            indexed data.
        """
        if conn.index_exists('idx_placex_pendingsector'):
            return

        with conn.cursor() as cur:
            LOG.warning('Creating support index')
            if tablespace:
                tablespace = 'TABLESPACE ' + tablespace
            cur.execute("""CREATE INDEX idx_placex_pendingsector
                           ON placex USING BTREE (rank_address,geometry_sector)
                           {} WHERE indexed_status > 0
                        """.format(tablespace))