X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/de37dc9300ecb94e35878238cf4def1324b6a9e4..9ff98073dbeec61be69dec511d5ddaffc9fda6a2:/nominatim/clicmd/refresh.py

diff --git a/nominatim/clicmd/refresh.py b/nominatim/clicmd/refresh.py
index 8e69caca..969998ad 100644
--- a/nominatim/clicmd/refresh.py
+++ b/nominatim/clicmd/refresh.py
@@ -4,8 +4,7 @@ Implementation of 'refresh' subcommand.
 import logging
 from pathlib import Path
 
-from ..db.connection import connect
-from ..tools.exec_utils import run_legacy_script
+from nominatim.db.connection import connect
 
 # Do not repeat documentation of subcommand classes.
 # pylint: disable=C0111
@@ -20,6 +19,8 @@ class UpdateRefresh:
     These functions must not be run in parallel with other update commands.
     """
 
+    def __init__(self):
+        self.tokenizer = None
 
     @staticmethod
     def add_args(parser):
@@ -44,45 +45,70 @@ class UpdateRefresh:
         group.add_argument('--enable-debug-statements', action='store_true',
                            help='Enable debug warning statements in functions')
 
-    @staticmethod
-    def run(args):
-        from ..tools import refresh
+
+    def run(self, args):
+        from ..tools import refresh, postcodes
+        from ..indexer.indexer import Indexer
+
 
         if args.postcodes:
-            LOG.warning("Update postcodes centroid")
-            conn = connect(args.config.get_libpq_dsn())
-            refresh.update_postcodes(conn, args.sqllib_dir)
-            conn.close()
+            if postcodes.can_compute(args.config.get_libpq_dsn()):
+                LOG.warning("Update postcodes centroid")
+                tokenizer = self._get_tokenizer(args.config)
+                postcodes.update_postcodes(args.config.get_libpq_dsn(),
+                                           args.project_dir, tokenizer)
+                indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
+                                  args.threads or 1)
+                indexer.index_postcodes()
+            else:
+                LOG.error("The place table doesn't exist. "
+                          "Postcode updates on a frozen database are not possible.")
 
         if args.word_counts:
             LOG.warning('Recompute frequency of full-word search terms')
-            conn = connect(args.config.get_libpq_dsn())
-            refresh.recompute_word_counts(conn, args.sqllib_dir)
-            conn.close()
+            refresh.recompute_word_counts(args.config.get_libpq_dsn(), args.sqllib_dir)
 
         if args.address_levels:
             cfg = Path(args.config.ADDRESS_LEVEL_CONFIG)
             LOG.warning('Updating address levels from %s', cfg)
-            conn = connect(args.config.get_libpq_dsn())
-            refresh.load_address_levels_from_file(conn, cfg)
-            conn.close()
+            with connect(args.config.get_libpq_dsn()) as conn:
+                refresh.load_address_levels_from_file(conn, cfg)
 
         if args.functions:
             LOG.warning('Create functions')
-            conn = connect(args.config.get_libpq_dsn())
-            refresh.create_functions(conn, args.config, args.sqllib_dir,
-                                     args.diffs, args.enable_debug_statements)
-            conn.close()
+            with connect(args.config.get_libpq_dsn()) as conn:
+                refresh.create_functions(conn, args.config,
+                                         args.diffs, args.enable_debug_statements)
+                self._get_tokenizer(args.config).update_sql_functions(args.config)
 
         if args.wiki_data:
-            run_legacy_script('setup.php', '--import-wikipedia-articles',
-                              nominatim_env=args, throw_on_fail=True)
+            data_path = Path(args.config.WIKIPEDIA_DATA_PATH
+                             or args.project_dir)
+            LOG.warning('Import wikipedia article importance from %s', data_path)
+            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
+                                                 data_path) > 0:
+                LOG.fatal('FATAL: Wikipedia importance dump file not found')
+                return 1
+
         # Attention: importance MUST come after wiki data import.
         if args.importance:
-            run_legacy_script('update.php', '--recompute-importance',
-                              nominatim_env=args, throw_on_fail=True)
+            LOG.warning('Update importance values for database')
+            with connect(args.config.get_libpq_dsn()) as conn:
+                refresh.recompute_importance(conn)
+
         if args.website:
-            run_legacy_script('setup.php', '--setup-website',
-                              nominatim_env=args, throw_on_fail=True)
+            webdir = args.project_dir / 'website'
+            LOG.warning('Setting up website directory at %s', webdir)
+            with connect(args.config.get_libpq_dsn()) as conn:
+                refresh.setup_website(webdir, args.config, conn)
 
         return 0
+
+
+    def _get_tokenizer(self, config):
+        if self.tokenizer is None:
+            from ..tokenizer import factory as tokenizer_factory
+
+            self.tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
+
+        return self.tokenizer
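
Note on the tokenizer handling introduced by this diff: run() now obtains the tokenizer through self._get_tokenizer(), which loads it on first use and caches it on the command instance, so the postcode update and the SQL function refresh share one tokenizer and the lookup via tokenizer_factory.get_tokenizer_for_db() runs at most once per invocation. Below is a minimal, self-contained sketch of that lazy-initialization pattern; ExpensiveTokenizer, load_tokenizer and RefreshCommand are hypothetical stand-ins for illustration, not part of the Nominatim API.

# Sketch of the lazy, cached lookup pattern behind _get_tokenizer().
# All names here are hypothetical stand-ins; the real code calls
# tokenizer_factory.get_tokenizer_for_db(config) instead.

class ExpensiveTokenizer:
    """Stand-in for a tokenizer that is costly to construct."""

    def update_sql_functions(self, config):
        print('refreshing tokenizer SQL functions for', config)


def load_tokenizer(config):
    # In Nominatim this step reads the tokenizer name from the database
    # and imports the matching module, which is why it is deferred.
    print('expensive tokenizer setup for', config)
    return ExpensiveTokenizer()


class RefreshCommand:
    def __init__(self):
        self.tokenizer = None          # nothing loaded until first needed

    def _get_tokenizer(self, config):
        if self.tokenizer is None:
            self.tokenizer = load_tokenizer(config)
        return self.tokenizer


cmd = RefreshCommand()
first = cmd._get_tokenizer('dbname=nominatim')
second = cmd._get_tokenizer('dbname=nominatim')
assert first is second                 # setup ran exactly once

The same deferral is why the imports of refresh, postcodes and Indexer sit inside run() rather than at module level: subcommands that never touch them do not pay their import cost.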