Fixed ci-tests, osm-file flag
[nominatim.git] / nominatim / clicmd / setup.py
index 6ffa7afb8babb91c58d7f40c0af16cfbb32f110d..4df14bc519608f0281cea6ea8f1b60fe11d07934 100644 (file)
@@ -15,10 +15,10 @@ from pathlib import Path
 import psutil
 
 from nominatim.config import Configuration
-from nominatim.db.connection import connect, Connection
+from nominatim.db.connection import connect
 from nominatim.db import status, properties
 from nominatim.tokenizer.base import AbstractTokenizer
-from nominatim.version import version_str
+from nominatim.version import NOMINATIM_VERSION
 from nominatim.clicmd.args import NominatimArgs
 from nominatim.errors import UsageError
 
@@ -59,12 +59,16 @@ class SetupAll:
                            help="Do not keep tables that are only needed for "
                                 "updating the database later")
         group2.add_argument('--offline', action='store_true',
-                           help="Do not attempt to load any additional data from the internet")
+                            help="Do not attempt to load any additional data from the internet")
         group3 = parser.add_argument_group('Expert options')
         group3.add_argument('--ignore-errors', action='store_true',
                            help='Continue import even when errors in SQL are present')
         group3.add_argument('--index-noanalyse', action='store_true',
                            help='Do not perform analyse operations during index (expert only)')
+        group3.add_argument('--only-import-data', action='store_true',
+                            help='Do not attempt to create the database')
+        group3.add_argument('--only-prepare-database', action='store_true',
+                            help='Create the database but do not import any data')
 
 
     def run(self, args: NominatimArgs) -> int: # pylint: disable=too-many-statements
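
Note on the two new expert options: they are meant to be mutually exclusive, and the check added further down in run() rejects a combined invocation with a UsageError. A minimal standalone sketch of that contract, using plain argparse rather than Nominatim's own CLI wiring (the parser setup below is illustrative only, not part of the patch):

    import argparse

    # Illustrative stand-in for the real command definition; only the two new
    # flags and the exclusivity rule introduced by this change are shown.
    parser = argparse.ArgumentParser(prog='nominatim import')
    group = parser.add_argument_group('Expert options')
    group.add_argument('--only-import-data', action='store_true',
                       help='Do not attempt to create the database')
    group.add_argument('--only-prepare-database', action='store_true',
                       help='Create the database but do not import any data')

    args = parser.parse_args(['--only-prepare-database'])
    if args.only_import_data and args.only_prepare_database:
        # The real code raises nominatim.errors.UsageError instead.
        parser.error('Cannot use --only-import-data and --only-prepare-database together.')
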
@@ -72,36 +76,59 @@ class SetupAll:
         from ..tools import database_import, refresh, postcodes, freeze
         from ..indexer.indexer import Indexer
 
+        num_threads = args.threads or psutil.cpu_count() or 1
+
         country_info.setup_country_config(args.config)
 
         if args.continue_at is None:
             files = args.get_osm_file_list()
-            if not files:
+            if not files and not args.only_prepare_database:
                 raise UsageError("No input files (use --osm-file).")
 
-            LOG.warning('Creating database')
-            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
-                                                    rouser=args.config.DATABASE_WEBUSER)
-
-            LOG.warning('Setting up country tables')
-            country_info.setup_country_tables(args.config.get_libpq_dsn(),
-                                              args.data_dir,
-                                              args.no_partitions)
-
-            LOG.warning('Importing OSM data file')
-            database_import.import_osm_data(files,
-                                            args.osm2pgsql_options(0, 1),
-                                            drop=args.no_updates,
-                                            ignore_errors=args.ignore_errors)
-
-            self._setup_tables(args.config, args.reverse_only)
-
-            LOG.warning('Importing wikipedia importance data')
-            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
-            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
-                                                 data_path) > 0:
-                LOG.error('Wikipedia importance dump file not found. '
-                          'Will be using default importances.')
+            if args.only_import_data and args.only_prepare_database:
+                raise UsageError(
+                    "Cannot use --only-import-data and --only-prepare-database together."
+                )
+
+            if args.only_prepare_database or self._is_complete_import(args):
+                LOG.warning('Creating database')
+                database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
+                                                        rouser=args.config.DATABASE_WEBUSER)
+
+                if not self._is_complete_import(args):
+                    return 0
+
+            if not args.only_prepare_database or \
+                    args.only_import_data or \
+                    self._is_complete_import(args):
+                # Check if the correct plugins are installed
+                database_import.check_existing_database_plugins(args.config.get_libpq_dsn())
+                LOG.warning('Setting up country tables')
+                country_info.setup_country_tables(args.config.get_libpq_dsn(),
+                                                  args.config.lib_dir.data,
+                                                  args.no_partitions)
+
+                LOG.warning('Importing OSM data file')
+                database_import.import_osm_data(files,
+                                                args.osm2pgsql_options(0, 1),
+                                                drop=args.no_updates,
+                                                ignore_errors=args.ignore_errors)
+
+                LOG.warning('Importing wikipedia importance data')
+                data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
+                if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
+                                                     data_path) > 0:
+                    LOG.error('Wikipedia importance dump file not found. '
+                              'Calculating importance values of locations will not '
+                              'use Wikipedia importance data.')
+
+                LOG.warning('Importing secondary importance raster data')
+                if refresh.import_secondary_importance(args.config.get_libpq_dsn(),
+                                                       args.project_dir) != 0:
+                    LOG.error('Secondary importance file not imported. '
+                              'Falling back to default ranking.')
+
+                self._setup_tables(args.config, args.reverse_only)
 
         if args.continue_at is None or args.continue_at == 'load-data':
             LOG.warning('Initialise tables')
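
Taken together, the rewritten block above gives three paths through the first phase of run(): the default run creates the database skeleton and imports data, --only-prepare-database stops right after the skeleton exists, and --only-import-data skips skeleton creation and goes straight to the import steps. A condensed sketch of that branching (the function and step names below are made up for illustration and are not part of the patch):

    def plan_first_phase(only_import_data: bool, only_prepare_database: bool) -> list:
        # Mirrors the branching in run(): both flags together are rejected,
        # --only-prepare-database ends after the skeleton, --only-import-data
        # assumes the database already exists.
        if only_import_data and only_prepare_database:
            raise ValueError('mutually exclusive flags')
        steps = []
        if not only_import_data:
            steps.append('create database skeleton')
        if not only_prepare_database:
            steps += ['check plugins', 'country tables', 'import OSM data',
                      'importance data', 'table setup']
        return steps

    assert plan_first_phase(False, True) == ['create database skeleton']
    assert plan_first_phase(True, False)[0] == 'check plugins'
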
@@ -109,8 +136,7 @@ class SetupAll:
                 database_import.truncate_data_tables(conn)
 
             LOG.warning('Load data into placex table')
-            database_import.load_data(args.config.get_libpq_dsn(),
-                                      args.threads or psutil.cpu_count() or 1)
+            database_import.load_data(args.config.get_libpq_dsn(), num_threads)
 
         LOG.warning("Setting up tokenizer")
         tokenizer = self._get_tokenizer(args.continue_at, args.config)
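
The worker count is now computed once at the top of run() as num_threads and reused for data loading, indexing and index creation. The fallback chain on its own, for reference (psutil.cpu_count() can return None, hence the final default of 1):

    import psutil

    def resolve_threads(requested=None):
        # requested corresponds to args.threads, which is None unless
        # --threads was given on the command line.
        return requested or psutil.cpu_count() or 1
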
@@ -121,18 +147,15 @@ class SetupAll:
                                        args.project_dir, tokenizer)
 
         if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
-            if args.continue_at is not None and args.continue_at != 'load-data':
-                with connect(args.config.get_libpq_dsn()) as conn:
-                    self._create_pending_index(conn, args.config.TABLESPACE_ADDRESS_INDEX)
             LOG.warning('Indexing places')
-            indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
-                              args.threads or psutil.cpu_count() or 1)
+            indexer = Indexer(args.config.get_libpq_dsn(), tokenizer, num_threads)
             indexer.index_full(analyse=not args.index_noanalyse)
 
         LOG.warning('Post-process tables')
         with connect(args.config.get_libpq_dsn()) as conn:
             database_import.create_search_indices(conn, args.config,
-                                                  drop=args.no_updates)
+                                                  drop=args.no_updates,
+                                                  threads=num_threads)
             LOG.warning('Create search index for default country names.')
             country_info.create_country_names(conn, tokenizer,
                                               args.config.get_str_list('LANGUAGES'))
@@ -152,6 +175,11 @@ class SetupAll:
 
         return 0
 
+    def _is_complete_import(self, args: NominatimArgs) -> bool:
+        """ Check whether a full import should be done, i.e. neither
+            --only-import-data nor --only-prepare-database was given.
+        """
+        return not args.only_import_data and not args.only_prepare_database
+
 
     def _setup_tables(self, config: Configuration, reverse_only: bool) -> None:
         """ Set up the basic database layout: tables, indexes and functions.
@@ -188,27 +216,6 @@ class SetupAll:
         return tokenizer_factory.get_tokenizer_for_db(config)
 
 
-    def _create_pending_index(self, conn: Connection, tablespace: str) -> None:
-        """ Add a supporting index for finding places still to be indexed.
-
-            This index is normally created at the end of the import process
-            for later updates. When indexing was partially done, then this
-            index can greatly improve speed going through already indexed data.
-        """
-        if conn.index_exists('idx_placex_pendingsector'):
-            return
-
-        with conn.cursor() as cur:
-            LOG.warning('Creating support index')
-            if tablespace:
-                tablespace = 'TABLESPACE ' + tablespace
-            cur.execute(f"""CREATE INDEX idx_placex_pendingsector
-                            ON placex USING BTREE (rank_address,geometry_sector)
-                            {tablespace} WHERE indexed_status > 0
-                         """)
-        conn.commit()
-
-
     def _finalize_database(self, dsn: str, offline: bool) -> None:
         """ Determine the database date and set the status accordingly.
         """
@@ -221,4 +228,4 @@ class SetupAll:
                 except Exception as exc: # pylint: disable=broad-except
                     LOG.error('Cannot determine date of database: %s', exc)
 
-            properties.set_property(conn, 'database_version', version_str())
+            properties.set_property(conn, 'database_version', str(NOMINATIM_VERSION))