"""
Implementation of 'refresh' subcommand.
"""
import logging
from pathlib import Path

from nominatim.db.connection import connect

# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports so that imports only used by some refresh
# operations are not loaded unnecessarily.
# pylint: disable=E0012,C0415

LOG = logging.getLogger()


class UpdateRefresh:
    """\
    Recompute auxiliary data used by the indexing process.

    These functions must not be run in parallel with other update commands.
    """

    def __init__(self):
        self.tokenizer = None

    @staticmethod
    def add_args(parser):
        group = parser.add_argument_group('Data arguments')
        group.add_argument('--postcodes', action='store_true',
                           help='Update postcode centroid table')
        group.add_argument('--word-counts', action='store_true',
                           help='Compute frequency of full-word search terms')
        group.add_argument('--address-levels', action='store_true',
                           help='Reimport address level configuration')
        group.add_argument('--functions', action='store_true',
                           help='Update the PL/pgSQL functions in the database')
        group.add_argument('--wiki-data', action='store_true',
                           help='Update Wikipedia/data importance numbers')
        group.add_argument('--importance', action='store_true',
                           help='Recompute place importances (expensive!)')
        group.add_argument('--website', action='store_true',
                           help='Refresh the directory that serves the scripts for the web API')

        group = parser.add_argument_group('Arguments for function refresh')
        group.add_argument('--no-diff-updates', action='store_false', dest='diffs',
                           help='Do not enable code for propagating updates')
        group.add_argument('--enable-debug-statements', action='store_true',
                           help='Enable debug warning statements in functions')
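
    # Illustrative invocations for the flags defined above, assuming the
    # standard 'nominatim refresh' command-line entry point; flags may be
    # combined freely in a single call:
    #
    #   nominatim refresh --postcodes
    #   nominatim refresh --functions --enable-debug-statements
    #   nominatim refresh --wiki-data --importance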

    def run(self, args):
        from ..tools import refresh, postcodes
        from ..indexer.indexer import Indexer

        if args.postcodes:
            LOG.warning('Update postcode centroids')
            tokenizer = self._get_tokenizer(args.config)
            postcodes.update_postcodes(args.config.get_libpq_dsn(),
                                       args.project_dir, tokenizer)
            indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
                              args.threads or 1)
            indexer.index_postcodes()

        if args.word_counts:
            LOG.warning('Recompute frequency of full-word search terms')
            refresh.recompute_word_counts(args.config.get_libpq_dsn(), args.sqllib_dir)

        if args.address_levels:
            cfg = Path(args.config.ADDRESS_LEVEL_CONFIG)
            LOG.warning('Updating address levels from %s', cfg)
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.load_address_levels_from_file(conn, cfg)

        if args.functions:
            LOG.warning('Create functions')
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.create_functions(conn, args.config,
                                         args.diffs, args.enable_debug_statements)
                self._get_tokenizer(args.config).update_sql_functions(args.config)

        if args.wiki_data:
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH
                             or args.project_dir)
            LOG.warning('Import Wikipedia article importance from %s', data_path)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.fatal('FATAL: Wikipedia importance dump file not found')
                return 1

        # Attention: importance MUST come after wiki data import.
        if args.importance:
            LOG.warning('Update importance values for database')
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.recompute_importance(conn)

        if args.website:
            webdir = args.project_dir / 'website'
            LOG.warning('Setting up website directory at %s', webdir)
            with connect(args.config.get_libpq_dsn()) as conn:
                refresh.setup_website(webdir, args.config, conn)

        return 0

    def _get_tokenizer(self, config):
        # Load the tokenizer lazily, so it is only set up when one of the
        # refresh operations actually needs it.
        if self.tokenizer is None:
            from ..tokenizer import factory as tokenizer_factory

            self.tokenizer = tokenizer_factory.get_tokenizer_for_db(config)

        return self.tokenizer
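

# A minimal sketch of how a subcommand class like UpdateRefresh can be hooked
# into argparse and dispatched. The wiring below is illustrative only and
# assumes an args object that also carries the attributes used in run()
# (config, project_dir, sqllib_dir, threads); it is not the project's actual
# CLI plumbing.
#
#   import argparse
#
#   parser = argparse.ArgumentParser(prog='nominatim')
#   subs = parser.add_subparsers(dest='subcommand')
#   refresh_parser = subs.add_parser('refresh')
#   cmd = UpdateRefresh()
#   cmd.add_args(refresh_parser)
#
#   args = parser.parse_args(['refresh', '--word-counts'])
#   exit_code = cmd.run(args)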