# SPDX-License-Identifier: GPL-2.0-only
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2022 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'admin' subcommand.
"""
import logging
import argparse
import random

from nominatim.db.connection import connect
from nominatim.clicmd.args import NominatimArgs
import nominatim.api as napi

# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports to avoid eventually unused imports.
# pylint: disable=E0012,C0415

# Module-level logger; uses the root logger so CLI-wide logging config applies.
LOG = logging.getLogger()
class AdminFuncs:
    """\
    Analyse and maintain the database.
    """

    def add_args(self, parser: argparse.ArgumentParser) -> None:
        """ Set up the command-line arguments for the admin subcommand.

            Exactly one of the task flags must be given; extra option groups
            refine the cache-warming and index-analysis tasks.
        """
        group = parser.add_argument_group('Admin tasks')
        # The admin tasks are mutually exclusive: exactly one must be chosen.
        objs = group.add_mutually_exclusive_group(required=True)
        objs.add_argument('--warm', action='store_true',
                          help='Warm database caches for search and reverse queries')
        objs.add_argument('--check-database', action='store_true',
                          help='Check that the database is complete and operational')
        objs.add_argument('--migrate', action='store_true',
                          help='Migrate the database to a new software version')
        objs.add_argument('--analyse-indexing', action='store_true',
                          help='Print performance analysis of the indexing process')
        objs.add_argument('--collect-os-info', action="store_true",
                          help="Generate a report about the host system information")
        group = parser.add_argument_group('Arguments for cache warming')
        group.add_argument('--search-only', action='store_const', dest='target',
                           const='search',
                           help="Only pre-warm tables for search queries")
        group.add_argument('--reverse-only', action='store_const', dest='target',
                           const='reverse',
                           help="Only pre-warm tables for reverse queries")
        group = parser.add_argument_group('Arguments for index analysis')
        # An analysis target may be given either by OSM id or by place id,
        # but not both.
        mgroup = group.add_mutually_exclusive_group()
        mgroup.add_argument('--osm-id', type=str,
                            help='Analyse indexing of the given OSM object')
        mgroup.add_argument('--place-id', type=int,
                            help='Analyse indexing of the given Nominatim object')

    def run(self, args: 'NominatimArgs') -> int:
        """ Dispatch to the selected admin task.

            Returns the process exit code: 0 on success, the task's own
            code for tasks that report one, 1 if no known task was selected
            (argparse's required group should make that unreachable).
        """
        if args.warm:
            return self._warm(args)

        if args.check_database:
            LOG.warning('Checking database')
            from ..tools import check_database
            return check_database.check_database(args.config)

        if args.analyse_indexing:
            LOG.warning('Analysing performance of indexing function')
            from ..tools import admin
            admin.analyse_indexing(args.config, osm_id=args.osm_id, place_id=args.place_id)
            return 0

        if args.migrate:
            LOG.warning('Checking for necessary database migrations')
            from ..tools import migration
            return migration.migrate(args.config, args)

        if args.collect_os_info:
            LOG.warning("Reporting System Information")
            from ..tools import collect_os_info
            collect_os_info.report_system_information(args.config)
            return 0

        return 1

    def _warm(self, args: 'NominatimArgs') -> int:
        """ Pre-warm database caches by issuing random reverse queries
            and searches for the most frequent words. Always closes the
            API handle, even when a query raises.
        """
        LOG.warning('Warming database caches')

        api = napi.NominatimAPI(args.project_dir)

        try:
            if args.target != 'reverse':
                # NOTE(review): the tuple is passed straight to the API as a
                # coordinate pair; presumably (x, y) = (lon, lat), in which
                # case the ranges here look swapped — confirm against napi.
                for _ in range(1000):
                    api.reverse((random.uniform(-90, 90), random.uniform(-180, 180)),
                                address_details=True)

            if args.target != 'search':
                from ..tokenizer import factory as tokenizer_factory

                tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
                # Search for the 1000 most frequent words to populate caches.
                with connect(args.config.get_libpq_dsn()) as conn:
                    words = tokenizer.most_frequent_words(conn, 1000)

                for word in words:
                    api.search(word)
        finally:
            api.close()

        return 0