Merge remote-tracking branch 'upstream/master'
[nominatim.git] / nominatim / tokenizer / icu_tokenizer.py
index ea6e5d3cca5a9d063cd69b89c214f1d5e9699526..90caec1c9041697f02fec06c62960d08eaf01dcb 100644 (file)
@@ -409,16 +409,18 @@ class LegacyICUNameAnalyzer(AbstractAnalyzer):
     def _process_place_address(self, token_info, address):
         hnrs = []
         addr_terms = []
+        streets = []
         for item in address:
             if item.kind == 'postcode':
                 self._add_postcode(item.name)
             elif item.kind in ('housenumber', 'streetnumber', 'conscriptionnumber'):
                 hnrs.append(item.name)
             elif item.kind == 'street':
-                token_info.add_street(self._compute_partial_tokens(item.name))
+                streets.extend(self._retrieve_full_tokens(item.name))
             elif item.kind == 'place':
-                token_info.add_place(self._compute_partial_tokens(item.name))
-            elif not item.kind.startswith('_') and \
+                if not item.suffix:
+                    token_info.add_place(self._compute_partial_tokens(item.name))
+            elif not item.kind.startswith('_') and not item.suffix and \
                  item.kind not in ('country', 'full'):
                 addr_terms.append((item.kind, self._compute_partial_tokens(item.name)))
 
@@ -429,6 +431,9 @@ class LegacyICUNameAnalyzer(AbstractAnalyzer):
         if addr_terms:
             token_info.add_address_terms(addr_terms)
 
+        if streets:
+            token_info.add_street(streets)
+
 
     def _compute_partial_tokens(self, name):
         """ Normalize the given term, split it into partial words and return
@@ -458,6 +463,26 @@ class LegacyICUNameAnalyzer(AbstractAnalyzer):
         return tokens
 
 
+    def _retrieve_full_tokens(self, name):
+        """ Get the full name token for the given name, if it exists.
+            The name is only retrieved for the standard analyser.
+        """
+        norm_name = self._search_normalized(name)
+
+        # return cached if possible
+        if norm_name in self._cache.fulls:
+            return self._cache.fulls[norm_name]
+
+        with self.conn.cursor() as cur:
+            cur.execute("SELECT word_id FROM word WHERE word_token = %s and type = 'W'",
+                        (norm_name, ))
+            full = [row[0] for row in cur]
+
+        self._cache.fulls[norm_name] = full
+
+        return full
+
+
     def _compute_name_tokens(self, names):
         """ Computes the full name and partial name tokens for the given
             dictionary of names.
@@ -561,8 +586,7 @@ class _TokenInfo:
     def add_street(self, tokens):
         """ Add addr:street match terms.
         """
-        if tokens:
-            self.data['street'] = self._mk_array(tokens)
+        self.data['street'] = self._mk_array(tokens)
 
 
     def add_place(self, tokens):
@@ -591,6 +615,7 @@ class _TokenCache:
     def __init__(self):
         self.names = {}
         self.partials = {}
+        self.fulls = {}
         self.postcodes = set()
         self.housenumbers = {}
 
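
For reference, here is a minimal standalone sketch of the lookup-with-cache pattern that the new _retrieve_full_tokens method implements. The word table layout and the type = 'W' marker for full-name tokens are taken from the query above; the class and function names below are illustrative stand-ins, not part of Nominatim's API, and the sketch assumes a psycopg2-style database connection.

    # Hypothetical sketch, not Nominatim code: demonstrates memoizing full-name
    # token lookups so each distinct normalized name hits the database once.
    class FullTokenCache:
        """ Maps a normalized name to the word ids of its full-name tokens. """

        def __init__(self):
            self.fulls = {}


    def lookup_full_tokens(conn, cache, norm_name):
        """ Return the word ids of full-name ('W') tokens matching norm_name,
            serving repeated lookups from the given cache.
        """
        if norm_name in cache.fulls:
            return cache.fulls[norm_name]

        with conn.cursor() as cur:
            # type = 'W' selects full-name tokens in the word table.
            cur.execute("SELECT word_id FROM word WHERE word_token = %s and type = 'W'",
                        (norm_name, ))
            full = [row[0] for row in cur]

        cache.fulls[norm_name] = full
        return full

A repeated call such as lookup_full_tokens(conn, cache, 'main street') then costs a dictionary lookup instead of a database round trip, which is the point of adding the fulls dictionary to _TokenCache.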