# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'import' subcommand.
"""
from typing import Optional
import argparse
import asyncio
import logging
from pathlib import Path

import psutil

from ..errors import UsageError
from ..config import Configuration
from ..db.connection import connect
from ..db import status, properties
from ..tokenizer.base import AbstractTokenizer
from ..version import NOMINATIM_VERSION
from .args import NominatimArgs

LOG = logging.getLogger()


class SetupAll:
    """\
    Create a new Nominatim database from an OSM file.

    This sub-command sets up a new Nominatim database from scratch starting
    with creating a new database in PostgreSQL. The user running this command
    needs superuser rights on the database.
    """

    def add_args(self, parser: argparse.ArgumentParser) -> None:
        group1 = parser.add_argument_group('Required arguments')
        group1.add_argument('--osm-file', metavar='FILE', action='append',
                            help='OSM file to be imported'
                                 ' (repeat for importing multiple files)',
                            default=None)
        group1.add_argument('--continue', dest='continue_at',
                            choices=['import-from-file', 'load-data', 'indexing', 'db-postprocess'],
                            help='Continue an import that was interrupted',
                            default=None)
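        # The stages accepted by --continue above are listed in pipeline order;
        # the import resumes at the given stage and runs all later stages.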
        group2 = parser.add_argument_group('Optional arguments')
        group2.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                            help='Size of cache to be used by osm2pgsql (in MB)')
        group2.add_argument('--reverse-only', action='store_true',
                            help='Do not create tables and indexes for searching')
        group2.add_argument('--no-partitions', action='store_true',
                            help="Do not partition search indices "
                                 "(speeds up import of single country extracts)")
        group2.add_argument('--no-updates', action='store_true',
                            help="Do not keep tables that are only needed for "
                                 "updating the database later")
        group2.add_argument('--offline', action='store_true',
                            help="Do not attempt to load any additional data from the internet")
        group3 = parser.add_argument_group('Expert options')
        group3.add_argument('--ignore-errors', action='store_true',
                            help='Continue import even when errors in SQL are present')
        group3.add_argument('--index-noanalyse', action='store_true',
                            help='Do not perform analyse operations during index (expert only)')
        group3.add_argument('--prepare-database', action='store_true',
                            help='Create the database but do not import any data')

    def run(self, args: NominatimArgs) -> int:
        if args.osm_file is None and args.continue_at is None and not args.prepare_database:
            raise UsageError("No input files (use --osm-file).")

        if args.osm_file is not None and args.continue_at not in ('import-from-file', None):
            raise UsageError(f"Cannot use --continue {args.continue_at} and --osm-file together.")

        if args.continue_at is not None and args.prepare_database:
            raise UsageError(
                "Cannot use --continue and --prepare-database together.")

        return asyncio.run(self.async_run(args))

    async def async_run(self, args: NominatimArgs) -> int:
        from ..data import country_info
        from ..tools import database_import, postcodes, freeze
        from ..indexer.indexer import Indexer

        num_threads = args.threads or psutil.cpu_count() or 1
        country_info.setup_country_config(args.config)

        if args.prepare_database or args.continue_at is None:
            LOG.warning('Creating database')
            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
                                                    rouser=args.config.DATABASE_WEBUSER)
            if args.prepare_database:
                return 0
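
        # continue_at is None for a fresh import, so each of the stage blocks
        # below also runs when no --continue stage was given.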
        if args.continue_at in (None, 'import-from-file'):
            self._base_import(args)

        if args.continue_at in ('import-from-file', 'load-data', None):
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn)

            LOG.warning('Load data into placex table')
            await database_import.load_data(args.config.get_libpq_dsn(), num_threads)

        LOG.warning("Setting up tokenizer")
        tokenizer = self._get_tokenizer(args.continue_at, args.config)

        if args.continue_at in ('import-from-file', 'load-data', None):
            LOG.warning('Calculate postcodes')
            postcodes.update_postcodes(args.config.get_libpq_dsn(),
                                       args.project_dir, tokenizer)

        if args.continue_at in ('import-from-file', 'load-data', 'indexing', None):
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(), tokenizer, num_threads)
            await indexer.index_full(analyse=not args.index_noanalyse)
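
        # Post-processing is not guarded by a stage check, so it also runs
        # for --continue db-postprocess.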
        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            await database_import.create_search_indices(conn, args.config,
                                                        drop=args.no_updates,
                                                        threads=num_threads)
            LOG.warning('Create search index for default country names.')
            country_info.create_country_names(conn, tokenizer,
                                              args.config.get_str_list('LANGUAGES'))
            if args.no_updates:
                freeze.drop_update_tables(conn)
        tokenizer.finalize_import(args.config)

        LOG.warning('Recompute word counts')
        tokenizer.update_statistics(args.config, threads=num_threads)

        self._finalize_database(args.config.get_libpq_dsn(), args.offline)

        return 0

    def _base_import(self, args: NominatimArgs) -> None:
        from ..tools import database_import, refresh
        from ..data import country_info

        files = args.get_osm_file_list()
        if not files:
            raise UsageError("No input files (use --osm-file).")

        if args.continue_at in ('import-from-file', None):
            # Check if the correct plugins are installed
            database_import.check_existing_database_plugins(args.config.get_libpq_dsn())
            LOG.warning('Setting up country tables')
            country_info.setup_country_tables(args.config.get_libpq_dsn(),
                                              args.config.lib_dir.data,
                                              args.no_partitions)

            LOG.warning('Importing OSM data file')
            database_import.import_osm_data(files,
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.no_updates,
                                            ignore_errors=args.ignore_errors)

            LOG.warning('Importing wikipedia importance data')
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.error('Wikipedia importance dump file not found. '
                          'Calculating importance values of locations will not '
                          'use Wikipedia importance data.')

            LOG.warning('Importing secondary importance raster data')
            if refresh.import_secondary_importance(args.config.get_libpq_dsn(),
                                                   args.project_dir) != 0:
                LOG.error('Secondary importance file not imported. '
                          'Falling back to default ranking.')

            self._setup_tables(args.config, args.reverse_only)

    def _setup_tables(self, config: Configuration, reverse_only: bool) -> None:
        """ Set up the basic database layout: tables, indexes and functions.
        """
        from ..tools import database_import, refresh

        with connect(config.get_libpq_dsn()) as conn:
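            # create_functions is run repeatedly: some functions depend on
            # tables and configuration set up in the steps in between.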
            LOG.warning('Create functions (1st pass)')
            refresh.create_functions(conn, config, False, False)
            LOG.warning('Create tables')
            database_import.create_tables(conn, config, reverse_only=reverse_only)
            refresh.load_address_levels_from_config(conn, config)
            LOG.warning('Create functions (2nd pass)')
            refresh.create_functions(conn, config, False, False)
            LOG.warning('Create table triggers')
            database_import.create_table_triggers(conn, config)
            LOG.warning('Create partition tables')
            database_import.create_partition_tables(conn, config)
            LOG.warning('Create functions (3rd pass)')
            refresh.create_functions(conn, config, False, False)

    def _get_tokenizer(self, continue_at: Optional[str],
                       config: Configuration) -> AbstractTokenizer:
        """ Set up a new tokenizer or load an already initialised one.
        """
        from ..tokenizer import factory as tokenizer_factory

        if continue_at in ('import-from-file', 'load-data', None):
            # (re)initialise the tokenizer data
            return tokenizer_factory.create_tokenizer(config)

        # just load the tokenizer
        return tokenizer_factory.get_tokenizer_for_db(config)

    def _finalize_database(self, dsn: str, offline: bool) -> None:
        """ Determine the database date and set the status accordingly.
        """
        with connect(dsn) as conn:
            properties.set_property(conn, 'database_version', str(NOMINATIM_VERSION))

            try:
                dbdate = status.compute_database_date(conn, offline)
                status.set_status(conn, dbdate)
                LOG.info('Database is at %s.', dbdate)
            except Exception as exc:
                LOG.error('Cannot determine date of database: %s', exc)