disallow category tokens in the middle of a query string
diff --git a/nominatim/api/search/geocoder.py b/nominatim/api/search/geocoder.py
index 91c45b65a76e977f93cc770f6303557211af470d..27e4d91ea6ae412a5b359bf06a65bdc165b8fdc1 100644
--- a/nominatim/api/search/geocoder.py
+++ b/nominatim/api/search/geocoder.py
@@ -64,7 +64,7 @@ class ForwardGeocoder:
                     log().table_dump('Searches for assignment',
                                      _dump_searches(searches, query, num_searches))
                 num_searches = len(searches)
-            searches.sort(key=lambda s: s.penalty)
+            searches.sort(key=lambda s: (s.penalty, s.SEARCH_PRIO))
 
         return query, searches
 
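
Note on the hunk above: searches with equal penalty are now tie-broken by search type. A minimal sketch of the idea, assuming SEARCH_PRIO is a class-level integer on each search class that makes cheaper strategies sort first; the class names and values below are illustrative, not the real Nominatim hierarchy:

    # Illustrative only: SEARCH_PRIO as a class attribute used as a tie-breaker.
    class PlaceSearch:
        SEARCH_PRIO = 0              # assumed: runs before POI searches on equal penalty
        def __init__(self, penalty: float) -> None:
            self.penalty = penalty

    class PoiSearch:
        SEARCH_PRIO = 2              # assumed value
        def __init__(self, penalty: float) -> None:
            self.penalty = penalty

    searches = [PoiSearch(0.3), PlaceSearch(0.3), PlaceSearch(0.1)]
    searches.sort(key=lambda s: (s.penalty, s.SEARCH_PRIO))
    # resulting order: PlaceSearch(0.1), PlaceSearch(0.3), PoiSearch(0.3)
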
@@ -79,12 +79,13 @@ class ForwardGeocoder:
 
         end_time = dt.datetime.now() + self.timeout
 
-        min_ranking = 1000.0
+        min_ranking = searches[0].penalty + 2.0
         prev_penalty = 0.0
         for i, search in enumerate(searches):
             if search.penalty > prev_penalty and (search.penalty > min_ranking or i > 20):
                 break
             log().table_dump(f"{i + 1}. Search", _dump_searches([search], query))
+            log().var_dump('Params', self.params)
             lookup_results = await search.lookup(self.conn, self.params)
             for result in lookup_results:
                 rhash = (result.source_table, result.place_id,
@@ -94,7 +95,7 @@ class ForwardGeocoder:
                     prevresult.accuracy = min(prevresult.accuracy, result.accuracy)
                 else:
                     results[rhash] = result
-                min_ranking = min(min_ranking, result.ranking + 0.5, search.penalty + 0.3)
+                min_ranking = min(min_ranking, result.accuracy * 1.2)
             log().result_dump('Results', ((r.accuracy, r) for r in lookup_results))
             prev_penalty = search.penalty
             if dt.datetime.now() >= end_time:
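
Taken together, the two hunks above change how the early-exit cutoff is derived: it starts at the best search's penalty plus 2.0 and is then tightened from the accuracy of returned results instead of fixed offsets. A self-contained sketch of that loop shape, with stand-in objects replacing the real async search and result classes:

    # Sketch of the cutoff logic after this change; Search/Result here are
    # stand-ins, not the real Nominatim classes.
    from dataclasses import dataclass

    @dataclass
    class Result:
        key: tuple
        accuracy: float

    @dataclass
    class Search:
        penalty: float
        hits: list

    def run(searches: list) -> dict:
        results: dict = {}
        min_ranking = searches[0].penalty + 2.0     # start relative to the best search
        prev_penalty = 0.0
        for i, search in enumerate(searches):
            if search.penalty > prev_penalty and (search.penalty > min_ranking or i > 20):
                break                               # remaining searches cannot improve the result set
            for result in search.hits:              # stands in for 'await search.lookup(...)'
                if result.key in results:
                    results[result.key].accuracy = min(results[result.key].accuracy,
                                                       result.accuracy)
                else:
                    results[result.key] = result
                min_ranking = min(min_ranking, result.accuracy * 1.2)
            prev_penalty = search.penalty
        return results
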
@@ -140,7 +141,8 @@ class ForwardGeocoder:
                or (result.importance is not None and result.importance < 0):
                 continue
             distance = 0.0
-            norm = self.query_analyzer.normalize_text(result.display_name)
+            norm = self.query_analyzer.normalize_text(' '.join((result.display_name,
+                                                                result.country_code or '')))
             words = set((w for w in norm.split(' ') if w))
             if not words:
                 continue
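
The last hunk feeds the result's country code into the normalised word set used for scoring, so a query term that only matches the country can still count as a hit. A hedged sketch of that step, with normalize_text reduced to plain lower-casing instead of the analyzer's real normalisation rules:

    # normalize_text() simplified to lower-casing for illustration only.
    def normalize_text(text: str) -> str:
        return text.lower()

    display_name = 'Birmingham, West Midlands'
    country_code = 'gb'

    norm = normalize_text(' '.join((display_name, country_code or '')))
    words = set(w for w in norm.split(' ') if w)
    # 'gb' is now part of the word set, so the country code can contribute
    # to the word-overlap scoring that follows in _rerank_by_query().
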