X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/c3f1d34b714c0410652bfa2e28fe3f6e5f9ea1aa..a7a920a9a5b55bf4290b184f05898a8589c95b40:/nominatim/clicmd/refresh.py?ds=sidebyside

diff --git a/nominatim/clicmd/refresh.py b/nominatim/clicmd/refresh.py
index 4fbdfebb..5eac53da 100644
--- a/nominatim/clicmd/refresh.py
+++ b/nominatim/clicmd/refresh.py
@@ -7,10 +7,15 @@
 """
 Implementation of 'refresh' subcommand.
 """
+from typing import Tuple, Optional
+import argparse
 import logging
 from pathlib import Path
 
+from nominatim.config import Configuration
 from nominatim.db.connection import connect
+from nominatim.tokenizer.base import AbstractTokenizer
+from nominatim.clicmd.args import NominatimArgs
 
 # Do not repeat documentation of subcommand classes.
 # pylint: disable=C0111
@@ -19,12 +24,12 @@ from nominatim.db.connection import connect
 LOG = logging.getLogger()
 
 
-def _parse_osm_object(obj):
+def _parse_osm_object(obj: str) -> Tuple[str, int]:
     """ Parse the given argument into a tuple of OSM type and ID.
        Raises an ArgumentError if the format is not recognized.
     """
     if len(obj) < 2 or obj[0].lower() not in 'nrw' or not obj[1:].isdigit():
-        raise ArgumentError("Expect OSM object id of form [N|W|R].")
+        raise argparse.ArgumentTypeError("Cannot parse OSM ID. Expect format: [N|W|R].")
 
     return (obj[0].upper(), int(obj[1:]))
 
@@ -41,11 +46,10 @@ class UpdateRefresh:
     Warning: the 'update' command must not be run in parallel with other update
              commands like 'replication' or 'add-data'.
     """
-    def __init__(self):
-        self.tokenizer = None
+    def __init__(self) -> None:
+        self.tokenizer: Optional[AbstractTokenizer] = None
 
-    @staticmethod
-    def add_args(parser):
+    def add_args(self, parser: argparse.ArgumentParser) -> None:
         group = parser.add_argument_group('Data arguments')
         group.add_argument('--postcodes', action='store_true',
                            help='Update postcode centroid table')
@@ -59,6 +63,8 @@ class UpdateRefresh:
                            help='Update the PL/pgSQL functions in the database')
         group.add_argument('--wiki-data', action='store_true',
                            help='Update Wikipedia/data importance numbers')
+        group.add_argument('--secondary-importance', action='store_true',
+                           help='Update secondary importance raster data')
         group.add_argument('--importance', action='store_true',
                            help='Recompute place importances (expensive!)')
         group.add_argument('--website', action='store_true',
@@ -79,10 +85,11 @@ class UpdateRefresh:
                            help='Enable debug warning statements in functions')
 
 
-    def run(self, args):
+    def run(self, args: NominatimArgs) -> int: #pylint: disable=too-many-branches, too-many-statements
         from ..tools import refresh, postcodes
         from ..indexer.indexer import Indexer
+        need_function_refresh = args.functions
 
         if args.postcodes:
             if postcodes.can_compute(args.config.get_libpq_dsn()):
@@ -104,30 +111,47 @@ class UpdateRefresh:
 
         if args.word_counts:
             LOG.warning('Recompute word statistics')
-            self._get_tokenizer(args.config).update_statistics()
+            self._get_tokenizer(args.config).update_statistics(args.config,
+                                                               threads=args.threads or 1)
 
         if args.address_levels:
             LOG.warning('Updating address levels')
             with connect(args.config.get_libpq_dsn()) as conn:
                 refresh.load_address_levels_from_config(conn, args.config)
 
-        if args.functions:
-            LOG.warning('Create functions')
+        # Attention: must come BEFORE functions
+        if args.secondary_importance:
             with connect(args.config.get_libpq_dsn()) as conn:
-                refresh.create_functions(conn, args.config,
-                                         args.diffs, args.enable_debug_statements)
-                self._get_tokenizer(args.config).update_sql_functions(args.config)
+                # If the table did not exist before, then the importance code
+                # needs to be enabled.
+                if not conn.table_exists('secondary_importance'):
+                    args.functions = True
+
+            LOG.warning('Import secondary importance raster data from %s', args.project_dir)
+            if refresh.import_secondary_importance(args.config.get_libpq_dsn(),
+                                                   args.project_dir) > 0:
+                LOG.fatal('FATAL: Cannot update secondary importance raster data')
+                return 1
+            need_function_refresh = True
 
         if args.wiki_data:
             data_path = Path(args.config.WIKIPEDIA_DATA_PATH
                              or args.project_dir)
-            LOG.warning('Import wikipdia article importance from %s', data_path)
+            LOG.warning('Import wikipedia article importance from %s', data_path)
             if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                  data_path) > 0:
-                LOG.fatal('FATAL: Wikipedia importance dump file not found')
+                LOG.fatal('FATAL: Wikipedia importance file not found in %s', data_path)
                 return 1
+            need_function_refresh = True
+
+        if need_function_refresh:
+            LOG.warning('Create functions')
+            with connect(args.config.get_libpq_dsn()) as conn:
+                refresh.create_functions(conn, args.config,
+                                         args.diffs, args.enable_debug_statements)
+                self._get_tokenizer(args.config).update_sql_functions(args.config)
 
-        # Attention: importance MUST come after wiki data import.
+        # Attention: importance MUST come after wiki data import and after functions.
         if args.importance:
             LOG.warning('Update importance values for database')
             with connect(args.config.get_libpq_dsn()) as conn:
@@ -154,7 +178,7 @@ class UpdateRefresh:
 
         return 0
 
-    def _get_tokenizer(self, config):
+    def _get_tokenizer(self, config: Configuration) -> AbstractTokenizer:
         if self.tokenizer is None:
             from ..tokenizer import factory as tokenizer_factory
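
A minimal sketch of the control-flow pattern this diff introduces in run(): need_function_refresh starts from the --functions option and is also raised after the secondary-importance and Wikipedia imports succeed, so the function rebuild runs at most once and only after the data it depends on, in line with the "must come BEFORE functions" comment above. The names below (run_refresh, import_raster, import_wiki, and create_functions as a plain callable) are hypothetical stand-ins for illustration, not Nominatim's API.

    # Hypothetical, self-contained illustration of the "refresh functions once"
    # flag pattern; the callables are placeholders supplied by the caller.
    from typing import Callable

    def run_refresh(want_functions: bool, want_raster: bool, want_wiki: bool,
                    import_raster: Callable[[], int],
                    import_wiki: Callable[[], int],
                    create_functions: Callable[[], None]) -> int:
        need_function_refresh = want_functions

        if want_raster:
            if import_raster() > 0:           # non-zero return signals a failed import
                return 1
            need_function_refresh = True      # new raster data needs regenerated functions

        if want_wiki:
            if import_wiki() > 0:
                return 1
            need_function_refresh = True

        if need_function_refresh:
            create_functions()                # executed at most once per invocation

        return 0

For example, run_refresh(False, True, False, lambda: 0, lambda: 0, lambda: None) imports only the raster data and still triggers exactly one function rebuild afterwards.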