For the moment, this only identifies outdated housenumber tokens and logs how many there are; the tokens themselves are not deleted yet.
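Once merged, the clean-up can be triggered by hand. Assuming the argument group below belongs to the refresh subcommand (as its existing --word-counts neighbour suggests), the invocation would be:

    nominatim refresh --word-tokens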
group = parser.add_argument_group('Data arguments')
group.add_argument('--postcodes', action='store_true',
                   help='Update postcode centroid table')
+ group.add_argument('--word-tokens', action='store_true',
+                    help='Clean up search terms')
group.add_argument('--word-counts', action='store_true',
                   help='Compute frequency of full-word search terms')
group.add_argument('--address-levels', action='store_true',
LOG.error("The place table doesn't exist. "
          "Postcode updates on a frozen database are not possible.")
+ if args.word_tokens:
+     tokenizer = self._get_tokenizer(args.config)
+     tokenizer.update_word_tokens()
+
if args.word_counts:
    LOG.warning('Recompute word statistics')
    self._get_tokenizer(args.config).update_statistics()
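The _get_tokenizer() helper is not part of this diff. For context, a plausible sketch of what it does, assuming it lazily instantiates the tokenizer for the database via the tokenizer factory module:

    def _get_tokenizer(self, config):
        # Assumed implementation, not from this diff: create the tokenizer
        # on first use and cache it for subsequent calls.
        if self.tokenizer is None:
            from ..tokenizer import factory as tokenizer_factory
            self.tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
        return self.tokenizer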
"""
+ @abstractmethod
+ def update_word_tokens(self) -> None:
+     """ Do house-keeping on the tokenizer's internal data structures.
+         Remove unused word tokens, re-sort data, etc.
+     """
+
+
@abstractmethod
def name_analyzer(self) -> AbstractAnalyzer:
    """ Create a new analyzer for tokenizing names and queries
conn.commit()
+ def _cleanup_housenumbers(self):
+     """ Remove unused house numbers.
+     """
+     with connect(self.dsn) as conn:
+         # Pass 1: collect housenumber tokens that no search_name entry
+         # references any more. Purely numeric tokens of up to 6 characters
+         # are left alone.
+         with conn.cursor(name="hnr_counter") as cur:
+             cur.execute("""SELECT word_id, word_token FROM word
+                            WHERE type = 'H'
+                              AND NOT EXISTS(SELECT * FROM search_name
+                                             WHERE ARRAY[word.word_id] && name_vector)
+                              AND (char_length(word_token) > 6
+                                   OR word_token not similar to '\\d+')
+                         """)
+             candidates = {token: wid for wid, token in cur}
+         # Pass 2: drop from the candidate list every housenumber that still
+         # appears in placex. Note that placex may store multiple
+         # housenumbers as a ';'-separated list.
+         with conn.cursor(name="hnr_counter") as cur:
+             cur.execute("""SELECT housenumber FROM placex
+                            WHERE housenumber is not null
+                              AND (char_length(housenumber) > 6
+                                   OR housenumber not similar to '\\d+')
+                         """)
+             for row in cur:
+                 for hnr in row[0].split(';'):
+                     candidates.pop(hnr, None)
+     LOG.info("There are %s outdated housenumbers.", len(candidates))
+
+
+ def update_word_tokens(self):
+     """ Remove unused tokens.
+     """
+     LOG.info("Cleaning up housenumber tokens.")
+     self._cleanup_housenumbers()
+     LOG.info("Tokenizer house-keeping done.")
+
+
def name_analyzer(self):
    """ Create a new analyzer for tokenizing names and queries
        using this tokenizer. Analyzers are context managers and should
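To make the two-pass pruning above concrete, here is the same logic over toy in-memory data (no database needed; the values are made up for illustration):

    # Pass 1 result: unreferenced housenumber tokens, mapped token -> word_id.
    candidates = {'1234567': 101, '12a': 102, '34b': 103}
    # Pass 2 input: housenumbers still present in placex,
    # possibly as ';'-separated lists.
    placex_housenumbers = ['12a;34b']

    for row in placex_housenumbers:
        for hnr in row.split(';'):
            candidates.pop(hnr, None)   # still in use -> not outdated

    print(candidates)   # {'1234567': 101} remains as outdated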
cur.drop_table("word_frequencies")
conn.commit()
+
+ def update_word_tokens(self):
+     """ No house-keeping implemented for the legacy tokenizer.
+     """
+     LOG.info("No tokenizer clean-up available.")
+
+
def name_analyzer(self):
    """ Create a new analyzer for tokenizing names and queries
        using this tokenizer. Analyzers are context managers and should
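For completeness, a sketch of driving the new hook directly from Python, assuming the factory function that the CLI layer uses to obtain the tokenizer for an existing database:

    from nominatim.tokenizer import factory as tokenizer_factory

    # `config` is a loaded Nominatim configuration. The call dispatches
    # polymorphically: the ICU tokenizer prunes housenumber tokens, the
    # legacy tokenizer merely logs that no clean-up is available.
    tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
    tokenizer.update_word_tokens()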