# SPDX-License-Identifier: GPL-2.0-only
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2022 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of 'refresh' subcommand.
"""
import logging
from pathlib import Path

from nominatim.db.connection import connect

# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports to avoid potentially unused imports.
# pylint: disable=E0012,C0415

LOG = logging.getLogger()
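
# Subcommand classes like this one are expected to expose the add_args()
# and run() hooks that the Nominatim command-line dispatcher calls.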

class UpdateRefresh:
    """\
    Recompute auxiliary data used by the indexing process.

    This sub-command updates various static data and functions in the database.
    It usually needs to be run after changing various aspects of the
    configuration. The configuration documentation will mention the exact
    command to use in such cases.

    Warning: this command must not be run in parallel with other update
             commands like 'replication' or 'add-data'.
    """

    def __init__(self):
        self.tokenizer = None

    @staticmethod
    def add_args(parser):
        group = parser.add_argument_group('Data arguments')
        group.add_argument('--postcodes', action='store_true',
                           help='Update postcode centroid table')
        group.add_argument('--word-counts', action='store_true',
                           help='Compute frequency of full-word search terms')
        group.add_argument('--address-levels', action='store_true',
                           help='Reimport address level configuration')
        group.add_argument('--functions', action='store_true',
                           help='Update the PL/pgSQL functions in the database')
        group.add_argument('--wiki-data', action='store_true',
                           help='Update Wikipedia/data importance numbers')
        group.add_argument('--importance', action='store_true',
                           help='Recompute place importances (expensive!)')
        group.add_argument('--website', action='store_true',
                           help='Refresh the directory that serves the scripts for the web API')
        group = parser.add_argument_group('Arguments for function refresh')
        group.add_argument('--no-diff-updates', action='store_false', dest='diffs',
                           help='Do not enable code for propagating updates')
        group.add_argument('--enable-debug-statements', action='store_true',
                           help='Enable debug warning statements in functions')
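
    # run() returns the process exit code: 0 on success, 1 if a required
    # data file could not be found.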
    def run(self, args):
        from ..tools import refresh, postcodes
        from ..indexer.indexer import Indexer
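
        # Each block below runs only when its command-line flag was given;
        # several refresh steps may be combined in a single invocation.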
        if args.postcodes:
            if postcodes.can_compute(args.config.get_libpq_dsn()):
                LOG.warning("Update postcodes centroid")
                tokenizer = self._get_tokenizer(args.config)
                postcodes.update_postcodes(args.config.get_libpq_dsn(),
                                           args.project_dir, tokenizer)
                indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
                                  args.threads or 1)
                indexer.index_postcodes()
            else:
                LOG.error("The place table doesn't exist. "
                          "Postcode updates on a frozen database are not possible.")
        if args.word_counts:
            LOG.warning('Recompute word statistics')
            self._get_tokenizer(args.config).update_statistics()
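
        # The address level configuration assigns search and address ranks
        # to place types; reloading refreshes the copy kept in the database.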
        if args.address_levels:
            LOG.warning('Updating address levels')
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.load_address_levels_from_config(conn, args.config)
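
        # The --no-diff-updates and --enable-debug-statements flags (see
        # add_args above) control how the PL/pgSQL functions are generated.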
        if args.functions:
            LOG.warning('Create functions')
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.create_functions(conn, args.config,
                                         args.diffs, args.enable_debug_statements)
                self._get_tokenizer(args.config).update_sql_functions(args.config)
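
        # WIKIPEDIA_DATA_PATH may be left unset, in which case the importance
        # dump is expected in the project directory.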
        if args.wiki_data:
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH
                             or args.project_dir)
            LOG.warning('Import Wikipedia article importance from %s', data_path)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.fatal('FATAL: Wikipedia importance dump file not found')
                return 1

        # Attention: importance MUST come after wiki data import.
        if args.importance:
            LOG.warning('Update importance values for database')
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.recompute_importance(conn)
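
        # The website directory holds the scripts through which the web API
        # is served; they are rewritten here to match the current configuration.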
        if args.website:
            webdir = args.project_dir / 'website'
            LOG.warning('Setting up website directory at %s', webdir)
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.setup_website(webdir, args.config, conn)

        return 0
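
    # The tokenizer instance is created lazily and cached, since only some
    # of the refresh steps above actually need it.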
    def _get_tokenizer(self, config):
        if self.tokenizer is None:
            from ..tokenizer import factory as tokenizer_factory

            self.tokenizer = tokenizer_factory.get_tokenizer_for_db(config)

        return self.tokenizer