+        return token_info.to_dict()
+
+
+    def _process_place_address(self, token_info: '_TokenInfo',
+                               address: Sequence[PlaceName]) -> None:
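+        """ Add the tokens for the address parts of the place to the
+            given token set.
+        """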
+        for item in address:
+            if item.kind == 'postcode':
+                token_info.set_postcode(self._add_postcode(item))
+            elif item.kind == 'housenumber':
+                token_info.add_housenumber(*self._compute_housenumber_token(item))
+            elif item.kind == 'street':
+                token_info.add_street(self._retrieve_full_tokens(item.name))
+            elif item.kind == 'place':
+                if not item.suffix:
+                    token_info.add_place(self._compute_partial_tokens(item.name))
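+            # Any other kind becomes a generic address term, except for
+            # internal kinds (leading '_'), suffixed names and the
+            # special kinds 'country', 'full' and 'inclusion'.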
+            elif not item.kind.startswith('_') and not item.suffix and \
+                 item.kind not in ('country', 'full', 'inclusion'):
+                token_info.add_address_term(item.kind,
+                                            self._compute_partial_tokens(item.name))
+
+
+    def _compute_housenumber_token(self, hnr: PlaceName) -> Tuple[Optional[int], Optional[str]]:
+        """ Normalize the housenumber and return the word token and the
+            canonical form.
+        """
+        assert self.conn is not None
+        analyzer = self.token_analysis.analysis.get('@housenumber')
+        result: Tuple[Optional[int], Optional[str]] = (None, None)
+
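+        # Check the cache first so that the database function is only
+        # called for housenumbers that have not been seen before.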
+        if analyzer is None:
+            # When no custom analyzer is set, simply normalize and transliterate.
+            norm_name = self._search_normalized(hnr.name)
+            if norm_name:
+                result = self._cache.housenumbers.get(norm_name, result)
+                if result[0] is None:
+                    with self.conn.cursor() as cur:
+                        hid = cur.scalar("SELECT getorcreate_hnr_id(%s)", (norm_name, ))
+
+                    result = hid, norm_name
+                    self._cache.housenumbers[norm_name] = result
+        else:
+            # Otherwise use the analyzer to determine the canonical name.
+            # Per convention we use the first variant as the 'lookup name', the
+            # name that gets saved in the housenumber field of the place.
+            word_id = analyzer.get_canonical_id(hnr)
+            if word_id:
+                result = self._cache.housenumbers.get(word_id, result)
+                if result[0] is None:
+                    variants = analyzer.compute_variants(word_id)
+                    if variants:
+                        with self.conn.cursor() as cur:
+                            hid = cur.scalar("SELECT create_analyzed_hnr_id(%s, %s)",
+                                             (word_id, list(variants)))
+                        result = hid, variants[0]
+                        self._cache.housenumbers[word_id] = result
+
+        return result
+
+
+    def _compute_partial_tokens(self, name: str) -> List[int]:
+        """ Normalize the given term, split it into partial words and return
+            the token list for them.
+        """
+        assert self.conn is not None
+        norm_name = self._search_normalized(name)
+
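+        # Resolve as many partial words as possible from the cache and
+        # remember the ones that need a database lookup.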
+        tokens = []
+        need_lookup = []
+        for partial in norm_name.split():
+            token = self._cache.partials.get(partial)
+            if token:
+                tokens.append(token)
+            else:
+                need_lookup.append(partial)
+
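+        # Create or look up the remaining tokens in a single batched
+        # database query and add them to the cache.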
+        if need_lookup:
+            with self.conn.cursor() as cur:
+                cur.execute("""SELECT word, getorcreate_partial_word(word)
+                               FROM unnest(%s) word""",
+                            (need_lookup, ))
+
+                for partial, token in cur:
+                    assert token is not None
+                    tokens.append(token)
+                    self._cache.partials[partial] = token
+
+        return tokens
+