group = parser.add_argument_group('Data arguments')
group.add_argument('--postcodes', action='store_true',
                   help='Update postcode centroid table')
+ group.add_argument('--word-tokens', action='store_true',
+                    help='Clean up search terms')
group.add_argument('--word-counts', action='store_true',
                   help='Compute frequency of full-word search terms')
group.add_argument('--address-levels', action='store_true',
                   help='Reimport address level configuration')
LOG.error("The place table doesn't exist. "
"Postcode updates on a frozen database is not possible.")
+ if args.word_tokens:
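+     # Let the tokenizer clean up stale search terms; what exactly
+     # gets removed is up to the tokenizer in use.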
+     LOG.warning('Updating word tokens')
+     tokenizer = self._get_tokenizer(args.config)
+     tokenizer.update_word_tokens()
+
if args.word_counts:
    LOG.warning('Recompute word statistics')
    self._get_tokenizer(args.config).update_statistics()
if args.website:
    webdir = args.project_dir / 'website'
    LOG.warning('Setting up website directory at %s', webdir)
+     # This is a little bit hacky: call the tokenizer setup so that
+     # the tokenizer directory gets repopulated as well, in case it
+     # wasn't there yet.
+     self._get_tokenizer(args.config)
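+     # The tokenizer instance itself is not used; instantiating it is
+     # what repopulates the tokenizer directory.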
    with connect(args.config.get_libpq_dsn()) as conn:
        refresh.setup_website(webdir, args.config, conn)