help='Print performance analysis of the indexing process')
objs.add_argument('--collect-os-info', action="store_true",
                  help="Generate a report about the host system information")
# NOTE(review): stripped leftover '+' diff markers from the two lines below;
# statement content is unchanged from the patch's added side.
objs.add_argument('--clean-deleted', action='store', metavar='AGE',
                  help='Clean up deleted relations')
group = parser.add_argument_group('Arguments for cache warming')
group.add_argument('--search-only', action='store_const', dest='target',
                   const='search',
                   help="Only pre-warm tables for search queries")
# NOTE(review): the original chunk was truncated here — the '--search-only'
# call had no help/closing paren and 'mgroup' was used without being defined.
# Reconstructed the conventional counterpart option and the group definition;
# confirm against the upstream file.
group.add_argument('--reverse-only', action='store_const', dest='target',
                   const='reverse',
                   help="Only pre-warm tables for reverse queries")
mgroup = parser.add_argument_group('Arguments for index analysis')
mgroup.add_argument('--place-id', type=int,
                    help='Analyse indexing of the given Nominatim object')
def run(self, args: NominatimArgs) -> int:
    """Dispatch to the admin sub-function selected on the command line.

    Returns 0 when an action ran successfully and 1 when no action
    was requested.
    """
    # pylint: disable=too-many-return-statements
    if args.warm:
        return self._warm(args)

    # Bug fix: this call was previously unguarded and followed by an
    # unconditional 'return 0', which made every later branch unreachable.
    if args.collect_os_info:
        collect_os_info.report_system_information(args.config)
        return 0

    if args.clean_deleted:
        LOG.warning('Cleaning up deleted relations')
        # Imported lazily so other admin commands don't pay the import cost.
        from ..tools import admin
        admin.clean_deleted_relations(args.config, age=args.clean_deleted)
        return 0

    return 1
api = napi.NominatimAPI(args.project_dir)
- if args.target != 'reverse':
- for _ in range(1000):
- api.reverse((random.uniform(-90, 90), random.uniform(-180, 180)),
- address_details=True)
-
- if args.target != 'search':
- from ..tokenizer import factory as tokenizer_factory
-
- tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
- with connect(args.config.get_libpq_dsn()) as conn:
- words = tokenizer.most_frequent_words(conn, 1000)
-
- for word in words:
- api.search(word)
+ try:
+ if args.target != 'search':
+ for _ in range(1000):
+ api.reverse((random.uniform(-90, 90), random.uniform(-180, 180)),
+ address_details=True)
+
+ if args.target != 'reverse':
+ from ..tokenizer import factory as tokenizer_factory
+
+ tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
+ with connect(args.config.get_libpq_dsn()) as conn:
+ if conn.table_exists('search_name'):
+ words = tokenizer.most_frequent_words(conn, 1000)
+ else:
+ words = []
+
+ for word in words:
+ api.search(word)
+ finally:
+ api.close()
return 0