# SPDX-License-Identifier: GPL-2.0-only
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2022 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'import' subcommand.
"""
from typing import Optional
import argparse
import logging
from pathlib import Path

import psutil

from nominatim.config import Configuration
from nominatim.db.connection import connect
from nominatim.db import status, properties
from nominatim.tokenizer.base import AbstractTokenizer
from nominatim.version import NOMINATIM_VERSION
from nominatim.clicmd.args import NominatimArgs
from nominatim.errors import UsageError
# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports to avoid loading modules that may end up unused.
# pylint: disable=C0415

LOG = logging.getLogger()

class SetupAll:
    """\
    Create a new Nominatim database from an OSM file.

    This sub-command sets up a new Nominatim database from scratch starting
    with creating a new database in PostgreSQL. The user running this command
    needs superuser rights on the database.
    """

    def add_args(self, parser: argparse.ArgumentParser) -> None:
        group_name = parser.add_argument_group('Required arguments')
        group1 = group_name.add_argument_group()
        group1.add_argument('--osm-file', metavar='FILE', action='append',
                            help='OSM file to be imported'
                                 ' (repeat for importing multiple files)',
                            default=None)
        group1.add_argument('--continue', dest='continue_at',
                            choices=['import-from-file', 'load-data', 'indexing', 'db-postprocess'],
                            help='Continue an import that was interrupted',
                            default=None)
        group2 = parser.add_argument_group('Optional arguments')
        group2.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                            help='Size of cache to be used by osm2pgsql (in MB)')
        group2.add_argument('--reverse-only', action='store_true',
                            help='Do not create tables and indexes for searching')
        group2.add_argument('--no-partitions', action='store_true',
                            help=("Do not partition search indices "
                                  "(speeds up import of single country extracts)"))
        group2.add_argument('--no-updates', action='store_true',
                            help="Do not keep tables that are only needed for "
                                 "updating the database later")
        group2.add_argument('--offline', action='store_true',
                            help="Do not attempt to load any additional data from the internet")
        group3 = parser.add_argument_group('Expert options')
        group3.add_argument('--ignore-errors', action='store_true',
                            help='Continue import even when errors in SQL are present')
        group3.add_argument('--index-noanalyse', action='store_true',
                            help='Do not perform analyse operations during index (expert only)')
        group3.add_argument('--prepare-database', action='store_true',
                            help='Create the database but do not import any data')


    def run(self, args: NominatimArgs) -> int:  # pylint: disable=too-many-statements
        from ..data import country_info
        from ..tools import database_import, refresh, postcodes, freeze
        from ..indexer.indexer import Indexer

        num_threads = args.threads or psutil.cpu_count() or 1

        country_info.setup_country_config(args.config)
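
        # Consistency checks on the option combination before any work starts.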
        if args.osm_file is None and args.continue_at is None and not args.prepare_database:
            raise UsageError("No input files (use --osm-file).")

        if args.osm_file is not None and args.continue_at not in ('import-from-file', None):
            raise UsageError(f"Cannot use --continue {args.continue_at} and --osm-file together.")

        if args.continue_at is not None and args.prepare_database:
            raise UsageError("Cannot use --continue and --prepare-database together.")

        if args.continue_at in (None, 'import-from-file'):
            files = args.get_osm_file_list()
            if not files and not args.prepare_database:
                raise UsageError("No input files (use --osm-file).")

            if args.prepare_database or args.continue_at is None:
                LOG.warning('Creating database')
                database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
                                                        rouser=args.config.DATABASE_WEBUSER)
                if args.prepare_database:
                    return 0

            if args.continue_at in ('import-from-file', None):
                # Check if the correct plugins are installed.
                database_import.check_existing_database_plugins(args.config.get_libpq_dsn())
                LOG.warning('Setting up country tables')
                country_info.setup_country_tables(args.config.get_libpq_dsn(),
                                                  args.config.lib_dir.data,
                                                  args.no_partitions)

                LOG.warning('Importing OSM data file')
                database_import.import_osm_data(files,
                                                args.osm2pgsql_options(0, 1),
                                                drop=args.no_updates,
                                                ignore_errors=args.ignore_errors)

                LOG.warning('Importing wikipedia importance data')
                data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
                if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                     data_path) > 0:
                    LOG.error('Wikipedia importance dump file not found. '
                              'Calculating importance values of locations will not '
                              'use Wikipedia importance data.')

                LOG.warning('Importing secondary importance raster data')
                if refresh.import_secondary_importance(args.config.get_libpq_dsn(),
                                                       args.project_dir) != 0:
                    LOG.error('Secondary importance file not imported. '
                              'Falling back to default ranking.')

                self._setup_tables(args.config, args.reverse_only)
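
        # The membership tests on continue_at implement the resume logic:
        # a stage runs on a fresh import (continue_at is None) and whenever
        # the import is resumed at this or any earlier stage.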
        if args.continue_at in ('import-from-file', 'load-data', None):
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn)
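
            # Copy the raw OSM data from the intermediate place table into
            # the final search tables, using several connections in parallel.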
            LOG.warning('Load data into placex table')
            database_import.load_data(args.config.get_libpq_dsn(), num_threads)
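
        # All later stages need the tokenizer, which computes and stores the
        # normalised search terms. Create a fresh one or load the existing one,
        # depending on the resume point.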
        LOG.warning("Setting up tokenizer")
        tokenizer = self._get_tokenizer(args.continue_at, args.config)

        if args.continue_at in ('import-from-file', 'load-data', None):
            LOG.warning('Calculate postcodes')
            postcodes.update_postcodes(args.config.get_libpq_dsn(),
                                       args.project_dir, tokenizer)
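
        # Indexing computes the address parts and search terms for each place;
        # it is typically the longest-running part of the import.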
        if args.continue_at in ('import-from-file', 'load-data', 'indexing', None):
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(), tokenizer, num_threads)
            indexer.index_full(analyse=not args.index_noanalyse)
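
        # The post-processing steps below run unconditionally, including when
        # resuming with '--continue db-postprocess'.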
        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn, args.config,
                                                  drop=args.no_updates,
                                                  threads=num_threads)
            LOG.warning('Create search index for default country names.')
            country_info.create_country_names(conn, tokenizer,
                                              args.config.get_str_list('LANGUAGES'))
            if args.no_updates:
                freeze.drop_update_tables(conn)
        tokenizer.finalize_import(args.config)

        LOG.warning('Recompute word counts')
        tokenizer.update_statistics()

        webdir = args.project_dir / 'website'
        LOG.warning('Setup website at %s', webdir)
        with connect(args.config.get_libpq_dsn()) as conn:
            refresh.setup_website(webdir, args.config, conn)

        self._finalize_database(args.config.get_libpq_dsn(), args.offline)

        return 0


    def _setup_tables(self, config: Configuration, reverse_only: bool) -> None:
        """ Set up the basic database layout: tables, indexes and functions.
        """
        from ..tools import database_import, refresh
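
        # SQL functions and tables reference each other, which is why
        # create_functions runs in several passes: the first pass provides
        # what table and trigger creation needs, and the later passes recreate
        # the functions that refer to the tables and partitions created
        # in between.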
        with connect(config.get_libpq_dsn()) as conn:
            LOG.warning('Create functions (1st pass)')
            refresh.create_functions(conn, config, False, False)
            LOG.warning('Create tables')
            database_import.create_tables(conn, config, reverse_only=reverse_only)
            refresh.load_address_levels_from_config(conn, config)
            LOG.warning('Create functions (2nd pass)')
            refresh.create_functions(conn, config, False, False)
            LOG.warning('Create table triggers')
            database_import.create_table_triggers(conn, config)
            LOG.warning('Create partition tables')
            database_import.create_partition_tables(conn, config)
            LOG.warning('Create functions (3rd pass)')
            refresh.create_functions(conn, config, False, False)


    def _get_tokenizer(self, continue_at: Optional[str],
                       config: Configuration) -> AbstractTokenizer:
        """ Set up a new tokenizer or load an already initialised one.
        """
        from ..tokenizer import factory as tokenizer_factory

        if continue_at in ('import-from-file', 'load-data', None):
            # (Re)initialise the tokenizer data.
            return tokenizer_factory.create_tokenizer(config)

        # Just load the tokenizer.
        return tokenizer_factory.get_tokenizer_for_db(config)


    def _finalize_database(self, dsn: str, offline: bool) -> None:
        """ Determine the database date and set the status accordingly.
        """
        with connect(dsn) as conn:
            if not offline:
                try:
                    dbdate = status.compute_database_date(conn)
                    status.set_status(conn, dbdate)
                    LOG.info('Database is at %s.', dbdate)
                except Exception as exc:  # pylint: disable=broad-except
                    LOG.error('Cannot determine date of database: %s', exc)

            properties.set_property(conn, 'database_version', str(NOMINATIM_VERSION))