rename use of category as POI search to near_item

diff --git a/nominatim/clicmd/admin.py b/nominatim/clicmd/admin.py
index 1ed0ac9b6eccba56a975daaac57eb519251e8c80..9557dc988c94712f446f8c21c8f167d69639747f 100644
--- a/nominatim/clicmd/admin.py
+++ b/nominatim/clicmd/admin.py
@@ -8,8 +8,12 @@
 Implementation of the 'admin' subcommand.
 """
 import logging
+import argparse
+import random
 
-from nominatim.tools.exec_utils import run_legacy_script
+from nominatim.db.connection import connect
+from nominatim.clicmd.args import NominatimArgs
+import nominatim.api as napi
 
 # Do not repeat documentation of subcommand classes.
 # pylint: disable=C0111
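
The changed imports mirror the functional change further down: the legacy warm.php helper run through run_legacy_script() is gone, and cache warming now goes through the Python frontend in nominatim.api instead. As a quick orientation, here is a minimal sketch of that frontend as it is used below; the project directory path is a placeholder, and only the constructor, search() and close() calls that actually appear in this diff are assumed:

    from pathlib import Path
    import nominatim.api as napi

    # Open the API against an existing project directory (placeholder path).
    api = napi.NominatimAPI(Path('/srv/nominatim-project'))
    try:
        results = api.search('Berlin')    # arbitrary free-text warm-up query
    finally:
        api.close()                       # always release the database connections
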
@@ -18,13 +22,13 @@ from nominatim.tools.exec_utils import run_legacy_script
 
 LOG = logging.getLogger()
 
+
 class AdminFuncs:
     """\
     Analyse and maintain the database.
     """
 
-    @staticmethod
-    def add_args(parser):
+    def add_args(self, parser: argparse.ArgumentParser) -> None:
         group = parser.add_argument_group('Admin tasks')
         objs = group.add_mutually_exclusive_group(required=True)
         objs.add_argument('--warm', action='store_true',
@@ -35,6 +39,10 @@ class AdminFuncs:
                           help='Migrate the database to a new software version')
         objs.add_argument('--analyse-indexing', action='store_true',
                           help='Print performance analysis of the indexing process')
+        objs.add_argument('--collect-os-info', action="store_true",
+                          help="Generate a report about the host system information")
+        objs.add_argument('--clean-deleted', action='store', metavar='AGE',
+                          help='Clean up deleted relations')
         group = parser.add_argument_group('Arguments for cache warming')
         group.add_argument('--search-only', action='store_const', dest='target',
                            const='search',
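
The two options added in this hunk join the existing mutually exclusive group, so exactly one admin task can be selected per invocation; --clean-deleted additionally carries an AGE value that is handed on to the cleanup routine in run() below. A standalone sketch of just this argparse pattern (not the full Nominatim parser; the '1 month' string is only an assumed example of an AGE value):

    import argparse

    parser = argparse.ArgumentParser(prog='nominatim admin')
    objs = parser.add_mutually_exclusive_group(required=True)
    objs.add_argument('--collect-os-info', action='store_true')
    objs.add_argument('--clean-deleted', action='store', metavar='AGE')

    args = parser.parse_args(['--clean-deleted', '1 month'])
    print(args.clean_deleted)        # -> '1 month'
    # Selecting both options at once makes argparse exit with an error,
    # because they share a required, mutually exclusive group.
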
@@ -49,10 +57,11 @@ class AdminFuncs:
         mgroup.add_argument('--place-id', type=int,
                             help='Analyse indexing of the given Nominatim object')
 
-    @staticmethod
-    def run(args):
+
+    def run(self, args: NominatimArgs) -> int:
+        # pylint: disable=too-many-return-statements
         if args.warm:
-            return AdminFuncs._warm(args)
+            return self._warm(args)
 
         if args.check_database:
             LOG.warning('Checking database')
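
Together with the earlier hunk, this turns add_args() and run() from static methods into ordinary typed instance methods. The annotations suggest the command is now handled and type-checked as an object implementing a small add_args()/run() protocol rather than a collection of static functions. A hypothetical, self-contained sketch of that two-method shape (ExampleCommand and its flag are illustrative only, not part of Nominatim):

    import argparse

    class ExampleCommand:
        """Subcommand object: declares its options, then executes them."""

        def add_args(self, parser: argparse.ArgumentParser) -> None:
            parser.add_argument('--verbose', action='store_true')

        def run(self, args: argparse.Namespace) -> int:
            return 0 if args.verbose else 1

    parser = argparse.ArgumentParser()
    cmd = ExampleCommand()
    cmd.add_args(parser)
    exit_code = cmd.run(parser.parse_args(['--verbose']))   # -> 0
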
@@ -70,15 +79,45 @@ class AdminFuncs:
             from ..tools import migration
             return migration.migrate(args.config, args)
 
+        if args.collect_os_info:
+            LOG.warning("Reporting System Information")
+            from ..tools import collect_os_info
+            collect_os_info.report_system_information(args.config)
+            return 0
+
+        if args.clean_deleted:
+            LOG.warning('Cleaning up deleted relations')
+            from ..tools import admin
+            admin.clean_deleted_relations(args.config, age=args.clean_deleted)
+            return 0
+
         return 1
 
 
-    @staticmethod
-    def _warm(args):
+    def _warm(self, args: NominatimArgs) -> int:
         LOG.warning('Warming database caches')
-        params = ['warm.php']
-        if args.target == 'reverse':
-            params.append('--reverse-only')
-        if args.target == 'search':
-            params.append('--search-only')
-        return run_legacy_script(*params, nominatim_env=args)
+
+        api = napi.NominatimAPI(args.project_dir)
+
+        try:
+            if args.target != 'search':
+                for _ in range(1000):
+                    api.reverse((random.uniform(-90, 90), random.uniform(-180, 180)),
+                                address_details=True)
+
+            if args.target != 'reverse':
+                from ..tokenizer import factory as tokenizer_factory
+
+                tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
+                with connect(args.config.get_libpq_dsn()) as conn:
+                    if conn.table_exists('search_name'):
+                        words = tokenizer.most_frequent_words(conn, 1000)
+                    else:
+                        words = []
+
+                for word in words:
+                    api.search(word)
+        finally:
+            api.close()
+
+        return 0
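
The rewritten _warm() above replaces the old warm.php invocation with direct frontend calls: up to 1000 random reverse lookups unless only search warming was requested, and forward searches for the most frequent index words unless only reverse warming was requested, with the word lookup skipped entirely when no search_name table exists (for example on reverse-only imports). That guard can be read in isolation as a small helper; the sketch below is only an illustrative rearrangement of the code above, and frequent_words() is a hypothetical name:

    from nominatim.db.connection import connect

    def frequent_words(config, tokenizer, limit=1000):
        # Return the most frequent search words, or nothing if the
        # installation has no search index (reverse-only import).
        with connect(config.get_libpq_dsn()) as conn:
            if conn.table_exists('search_name'):
                return tokenizer.most_frequent_words(conn, limit)
        return []
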