X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/faeee7528f710dc98bf14a08e592137d3e6d37f2..19360a95522828c733e4ab174da9a9147ae308bf:/nominatim/clicmd/admin.py?ds=sidebyside

diff --git a/nominatim/clicmd/admin.py b/nominatim/clicmd/admin.py
index a84b0db2..9557dc98 100644
--- a/nominatim/clicmd/admin.py
+++ b/nominatim/clicmd/admin.py
@@ -41,6 +41,8 @@ class AdminFuncs:
                           help='Print performance analysis of the indexing process')
         objs.add_argument('--collect-os-info', action="store_true",
                           help="Generate a report about the host system information")
+        objs.add_argument('--clean-deleted', action='store', metavar='AGE',
+                          help='Clean up deleted relations')
         group = parser.add_argument_group('Arguments for cache warming')
         group.add_argument('--search-only', action='store_const', dest='target',
                            const='search',
@@ -55,7 +57,9 @@ class AdminFuncs:
         mgroup.add_argument('--place-id', type=int,
                             help='Analyse indexing of the given Nominatim object')
 
+
     def run(self, args: NominatimArgs) -> int:
+        # pylint: disable=too-many-return-statements
         if args.warm:
             return self._warm(args)
 
@@ -81,6 +85,12 @@ class AdminFuncs:
             collect_os_info.report_system_information(args.config)
             return 0
 
+        if args.clean_deleted:
+            LOG.warning('Cleaning up deleted relations')
+            from ..tools import admin
+            admin.clean_deleted_relations(args.config, age=args.clean_deleted)
+            return 0
+
         return 1
 
 
@@ -89,19 +99,25 @@ class AdminFuncs:
 
         api = napi.NominatimAPI(args.project_dir)
 
-        if args.target != 'reverse':
-            for _ in range(1000):
-                api.reverse((random.uniform(-90, 90), random.uniform(-180, 180)),
-                            address_details=True)
-
-        if args.target != 'search':
-            from ..tokenizer import factory as tokenizer_factory
-
-            tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
-            with connect(args.config.get_libpq_dsn()) as conn:
-                words = tokenizer.most_frequent_words(conn, 1000)
-
-            for word in words:
-                api.search(word)
+        try:
+            if args.target != 'search':
+                for _ in range(1000):
+                    api.reverse((random.uniform(-90, 90), random.uniform(-180, 180)),
+                                address_details=True)
+
+            if args.target != 'reverse':
+                from ..tokenizer import factory as tokenizer_factory
+
+                tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
+                with connect(args.config.get_libpq_dsn()) as conn:
+                    if conn.table_exists('search_name'):
+                        words = tokenizer.most_frequent_words(conn, 1000)
+                    else:
+                        words = []
+
+                for word in words:
+                    api.search(word)
+        finally:
+            api.close()
 
         return 0
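
Note: the tools side of this change (nominatim/tools/admin.py) is not part of this diff, so the body of
clean_deleted_relations() called by the new --clean-deleted branch is not shown here. For orientation
only, a minimal sketch under stated assumptions follows. It assumes the AGE value is passed through as a
PostgreSQL interval literal (e.g. '1 month') and that deleted objects are tracked in a hypothetical
deleted_relations table with a deleted_at timestamp; neither detail is confirmed by this diff.

    # Hypothetical sketch only; not the actual nominatim.tools.admin implementation.
    # Table and column names below are placeholders chosen for illustration.
    from nominatim.config import Configuration
    from nominatim.db.connection import connect


    def clean_deleted_relations(config: Configuration, age: str) -> None:
        """ Drop tracked deleted objects that are older than 'age'.
            'age' is assumed to be a PostgreSQL interval literal such as '1 month'.
        """
        with connect(config.get_libpq_dsn()) as conn:
            with conn.cursor() as cur:
                # 'deleted_relations' and 'deleted_at' are assumed names, not real schema.
                cur.execute("""DELETE FROM deleted_relations
                                WHERE deleted_at < now() - %s::interval""",
                            (age, ))
            conn.commit()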