X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/ed3dd81d049bc3e0833bfd0a81155d0a52987221..38369ca3cfe6e52bb6f7589c714a04294497520e:/nominatim/clicmd/setup.py

diff --git a/nominatim/clicmd/setup.py b/nominatim/clicmd/setup.py
index b4dde6fe..1a651157 100644
--- a/nominatim/clicmd/setup.py
+++ b/nominatim/clicmd/setup.py
@@ -15,10 +15,10 @@ from pathlib import Path
 import psutil
 
 from nominatim.config import Configuration
-from nominatim.db.connection import connect, Connection
+from nominatim.db.connection import connect
 from nominatim.db import status, properties
 from nominatim.tokenizer.base import AbstractTokenizer
-from nominatim.version import version_str
+from nominatim.version import NOMINATIM_VERSION
 from nominatim.clicmd.args import NominatimArgs
 from nominatim.errors import UsageError
 
@@ -59,12 +59,16 @@ class SetupAll:
                             help="Do not keep tables that are only needed for "
                                  "updating the database later")
         group2.add_argument('--offline', action='store_true',
-                           help="Do not attempt to load any additional data from the internet")
+                            help="Do not attempt to load any additional data from the internet")
         group3 = parser.add_argument_group('Expert options')
         group3.add_argument('--ignore-errors', action='store_true',
                             help='Continue import even when errors in SQL are present')
         group3.add_argument('--index-noanalyse', action='store_true',
                             help='Do not perform analyse operations during index (expert only)')
+        group3.add_argument('--no-superuser', action='store_true',
+                            help='Do not attempt to create the database')
+        group3.add_argument('--prepare-database', action='store_true',
+                            help='Create the database but do not import any data')
 
 
     def run(self, args: NominatimArgs) -> int: # pylint: disable=too-many-statements
@@ -81,29 +85,46 @@ class SetupAll:
             if not files:
                 raise UsageError("No input files (use --osm-file).")
 
-            LOG.warning('Creating database')
-            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
-                                                    rouser=args.config.DATABASE_WEBUSER)
-
-            LOG.warning('Setting up country tables')
-            country_info.setup_country_tables(args.config.get_libpq_dsn(),
-                                              args.data_dir,
-                                              args.no_partitions)
-
-            LOG.warning('Importing OSM data file')
-            database_import.import_osm_data(files,
-                                            args.osm2pgsql_options(0, 1),
-                                            drop=args.no_updates,
-                                            ignore_errors=args.ignore_errors)
-
-            self._setup_tables(args.config, args.reverse_only)
-
-            LOG.warning('Importing wikipedia importance data')
-            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
-            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
-                                                 data_path) > 0:
-                LOG.error('Wikipedia importance dump file not found. '
-                          'Will be using default importances.')
+            if args.no_superuser and args.prepare_database:
+                raise UsageError("Cannot use --no-superuser and --prepare-database together.")
+
+            if args.prepare_database or self._is_complete_import(args):
+                LOG.warning('Creating database')
+                database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
+                                                        rouser=args.config.DATABASE_WEBUSER)
+
+                if not self._is_complete_import(args):
+                    return 0
+
+            if not args.prepare_database or args.no_superuser or self._is_complete_import(args):
+                # Check if the correct plugins are installed
+                database_import.check_existing_database_plugins(args.config.get_libpq_dsn())
+                LOG.warning('Setting up country tables')
+                country_info.setup_country_tables(args.config.get_libpq_dsn(),
+                                                  args.config.lib_dir.data,
+                                                  args.no_partitions)
+
+                LOG.warning('Importing OSM data file')
+                database_import.import_osm_data(files,
+                                                args.osm2pgsql_options(0, 1),
+                                                drop=args.no_updates,
+                                                ignore_errors=args.ignore_errors)
+
+                LOG.warning('Importing wikipedia importance data')
+                data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
+                if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
+                                                     data_path) > 0:
+                    LOG.error('Wikipedia importance dump file not found. '
+                              'Calculating importance values of locations will not '
+                              'use Wikipedia importance data.')
+
+                LOG.warning('Importing secondary importance raster data')
+                if refresh.import_secondary_importance(args.config.get_libpq_dsn(),
+                                                       args.project_dir) != 0:
+                    LOG.error('Secondary importance file not imported. '
+                              'Falling back to default ranking.')
+
+                self._setup_tables(args.config, args.reverse_only)
 
         if args.continue_at is None or args.continue_at == 'load-data':
             LOG.warning('Initialise tables')
@@ -122,9 +143,6 @@ class SetupAll:
                                        args.project_dir, tokenizer)
 
         if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
-            if args.continue_at is not None and args.continue_at != 'load-data':
-                with connect(args.config.get_libpq_dsn()) as conn:
-                    self._create_pending_index(conn, args.config.TABLESPACE_ADDRESS_INDEX)
             LOG.warning('Indexing places')
             indexer = Indexer(args.config.get_libpq_dsn(), tokenizer, num_threads)
             indexer.index_full(analyse=not args.index_noanalyse)
@@ -152,6 +170,11 @@ class SetupAll:
         self._finalize_database(args.config.get_libpq_dsn(), args.offline)
 
         return 0
+
+    def _is_complete_import(self, args: NominatimArgs) -> bool:
+        """ Determine if the import is complete or if only the database should be prepared.
+        """
+        return not args.no_superuser and not args.prepare_database
 
 
     def _setup_tables(self, config: Configuration, reverse_only: bool) -> None:
@@ -189,27 +212,6 @@ class SetupAll:
         return tokenizer_factory.get_tokenizer_for_db(config)
 
 
-    def _create_pending_index(self, conn: Connection, tablespace: str) -> None:
-        """ Add a supporting index for finding places still to be indexed.
-
-            This index is normally created at the end of the import process
-            for later updates. When indexing was partially done, then this
-            index can greatly improve speed going through already indexed data.
-        """
-        if conn.index_exists('idx_placex_pendingsector'):
-            return
-
-        with conn.cursor() as cur:
-            LOG.warning('Creating support index')
-            if tablespace:
-                tablespace = 'TABLESPACE ' + tablespace
-            cur.execute(f"""CREATE INDEX idx_placex_pendingsector
-                            ON placex USING BTREE (rank_address,geometry_sector)
-                            {tablespace} WHERE indexed_status > 0
-                         """)
-            conn.commit()
-
-
     def _finalize_database(self, dsn: str, offline: bool) -> None:
         """ Determine the database date and set the status accordingly.
         """
@@ -222,4 +224,4 @@ class SetupAll:
                 except Exception as exc: # pylint: disable=broad-except
                     LOG.error('Cannot determine date of database: %s', exc)
 
-            properties.set_property(conn, 'database_version', version_str())
+            properties.set_property(conn, 'database_version', str(NOMINATIM_VERSION))