git.openstreetmap.org Git - nominatim.git/commitdiff
remove useless check
author    Sarah Hoffmann <lonvia@denofr.de>
          Wed, 21 Jun 2023 09:56:39 +0000 (11:56 +0200)
committer Sarah Hoffmann <lonvia@denofr.de>
          Wed, 21 Jun 2023 09:56:39 +0000 (11:56 +0200)
Found by new mypy version.
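
The dropped guard had the shape "A and (A or B)", with A being
"repl.ttype != qmod.TokenType.HOUSENUMBER" and B being
"len(tlist.tokens[0].lookup_word) <= 3"; by the absorption law this reduces
to just "A", so the parenthesized clause could never change the outcome. The
length limit is hoisted into the enclosing elif instead. A minimal sketch of
the identity (plain Python, variable names are illustrative only):

    from itertools import product

    # "A and (A or B)" evaluates the same as plain "A" for every truth assignment.
    for a, b in product((False, True), repeat=2):
        assert (a and (a or b)) == a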

nominatim/api/search/icu_tokenizer.py
nominatim/api/search/legacy_tokenizer.py
test/python/api/search/test_icu_query_analyzer.py
test/python/api/search/test_legacy_query_analyzer.py

nominatim/api/search/icu_tokenizer.py
index 17e679057ecb5a4eb63daeb08f38e361f19a3ada..9bd16e1d85350de6d9879bd95af1c2d809f047b1 100644
@@ -248,12 +248,11 @@ class ICUQueryAnalyzer(AbstractQueryAnalyzer):
                        and (repl.ttype != qmod.TokenType.HOUSENUMBER
                             or len(tlist.tokens[0].lookup_word) > 4):
                         repl.add_penalty(0.39)
-            elif tlist.ttype == qmod.TokenType.HOUSENUMBER:
+            elif tlist.ttype == qmod.TokenType.HOUSENUMBER \
+                 and len(tlist.tokens[0].lookup_word) <= 3:
                 if any(c.isdigit() for c in tlist.tokens[0].lookup_word):
                     for repl in node.starting:
-                        if repl.end == tlist.end and repl.ttype != qmod.TokenType.HOUSENUMBER \
-                           and (repl.ttype != qmod.TokenType.HOUSENUMBER
-                                or len(tlist.tokens[0].lookup_word) <= 3):
+                        if repl.end == tlist.end and repl.ttype != qmod.TokenType.HOUSENUMBER:
                             repl.add_penalty(0.5 - tlist.tokens[0].penalty)
             elif tlist.ttype not in (qmod.TokenType.COUNTRY, qmod.TokenType.PARTIAL):
                 norm = parts[i].normalized
nominatim/api/search/legacy_tokenizer.py
index 96975704f0ce996fc754a3130e0ebbfe2a1e0722..3346584ccd1b35b4e74e4725ee079cb54e45a905 100644
@@ -233,12 +233,11 @@ class LegacyQueryAnalyzer(AbstractQueryAnalyzer):
                        and (repl.ttype != qmod.TokenType.HOUSENUMBER
                             or len(tlist.tokens[0].lookup_word) > 4):
                         repl.add_penalty(0.39)
-            elif tlist.ttype == qmod.TokenType.HOUSENUMBER:
+            elif tlist.ttype == qmod.TokenType.HOUSENUMBER \
+                 and len(tlist.tokens[0].lookup_word) <= 3:
                 if any(c.isdigit() for c in tlist.tokens[0].lookup_word):
                     for repl in node.starting:
-                        if repl.end == tlist.end and repl.ttype != qmod.TokenType.HOUSENUMBER \
-                           and (repl.ttype != qmod.TokenType.HOUSENUMBER
-                                or len(tlist.tokens[0].lookup_word) <= 3):
+                        if repl.end == tlist.end and repl.ttype != qmod.TokenType.HOUSENUMBER:
                             repl.add_penalty(0.5 - tlist.tokens[0].penalty)
 
 
test/python/api/search/test_icu_query_analyzer.py
index 78cd2c4d7beb2c2e13cf5b1634ef84d0ec2ae796..faf8137526106a0e5db77b325c0fa73f30b94a05 100644
@@ -118,10 +118,10 @@ async def test_penalty_postcodes_and_housenumbers(conn, term, order):
 
     assert query.num_token_slots() == 1
 
-    torder = [(tl.tokens[0].penalty, tl.ttype) for tl in query.nodes[0].starting]
+    torder = [(tl.tokens[0].penalty, tl.ttype.name) for tl in query.nodes[0].starting]
     torder.sort()
 
-    assert [t[1] for t in torder] == [TokenType[o] for o in order]
+    assert [t[1] for t in torder] == order
 
 @pytest.mark.asyncio
 async def test_category_words_only_at_beginning(conn):
test/python/api/search/test_legacy_query_analyzer.py
index c21158531b3549ec4d54369ff587b7f377810f19..cdea6ede7cd842fa0c0f25b6915b8c0fcb2f0fe9 100644
@@ -195,11 +195,10 @@ async def test_penalty_postcodes_and_housenumbers(conn, term, order):
 
     assert query.num_token_slots() == 1
 
-    torder = [(tl.tokens[0].penalty, tl.ttype) for tl in query.nodes[0].starting]
-    print(query.nodes[0].starting)
+    torder = [(tl.tokens[0].penalty, tl.ttype.name) for tl in query.nodes[0].starting]
     torder.sort()
 
-    assert [t[1] for t in torder] == [TokenType[o] for o in order]
+    assert [t[1] for t in torder] == order
 
 
 @pytest.mark.asyncio