X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/8c7140d92b7a679ae5ef5bb3655c69bd39b7dfeb..bc7adbae2bc8ebc61bca3800155d070908502dd9:/test/python/api/search/test_db_search_builder.py

diff --git a/test/python/api/search/test_db_search_builder.py b/test/python/api/search/test_db_search_builder.py
index 87d75261..68f71298 100644
--- a/test/python/api/search/test_db_search_builder.py
+++ b/test/python/api/search/test_db_search_builder.py
@@ -31,7 +31,9 @@ def make_query(*args):
         for end, ttype, tinfo in tlist:
             for tid, word in tinfo:
                 q.add_token(TokenRange(start, end), ttype,
-                            MyToken(0.5 if ttype == TokenType.PARTIAL else 0.0, tid, 1, word, True))
+                            MyToken(penalty=0.5 if ttype == TokenType.PARTIAL else 0.0,
+                                    token=tid, count=1, addr_count=1,
+                                    lookup_word=word, is_indexed=True))
 
     return q
 
@@ -395,14 +397,14 @@ def make_counted_searches(name_part, name_full, address_part, address_full,
     q.add_node(BreakType.END, PhraseType.NONE)
 
     q.add_token(TokenRange(0, 1), TokenType.PARTIAL,
-                MyToken(0.5, 1, name_part, 'name_part', True))
+                MyToken(0.5, 1, name_part, 1, 'name_part', True))
     q.add_token(TokenRange(0, 1), TokenType.WORD,
-                MyToken(0, 101, name_full, 'name_full', True))
+                MyToken(0, 101, name_full, 1, 'name_full', True))
     for i in range(num_address_parts):
         q.add_token(TokenRange(i + 1, i + 2), TokenType.PARTIAL,
-                    MyToken(0.5, 2, address_part, 'address_part', True))
+                    MyToken(0.5, 2, address_part, 1, 'address_part', True))
         q.add_token(TokenRange(i + 1, i + 2), TokenType.WORD,
-                    MyToken(0, 102, address_full, 'address_full', True))
+                    MyToken(0, 102, address_full, 1, 'address_full', True))
 
     builder = SearchBuilder(q, SearchDetails())
 
@@ -420,8 +422,8 @@ def test_infrequent_partials_in_name():
     assert len(search.lookups) == 2
     assert len(search.rankings) == 2
 
-    assert set((l.column, l.lookup_type) for l in search.lookups) == \
-            {('name_vector', 'lookup_all'), ('nameaddress_vector', 'restrict')}
+    assert set((l.column, l.lookup_type.__name__) for l in search.lookups) == \
+            {('name_vector', 'LookupAll'), ('nameaddress_vector', 'Restrict')}
 
 
 def test_frequent_partials_in_name_and_address():
@@ -432,10 +434,10 @@ def test_frequent_partials_in_name_and_address():
     assert all(isinstance(s, dbs.PlaceSearch) for s in searches)
 
     searches.sort(key=lambda s: s.penalty)
-    assert set((l.column, l.lookup_type) for l in searches[0].lookups) == \
-            {('name_vector', 'lookup_any'), ('nameaddress_vector', 'restrict')}
-    assert set((l.column, l.lookup_type) for l in searches[1].lookups) == \
-            {('nameaddress_vector', 'lookup_all'), ('name_vector', 'lookup_all')}
+    assert set((l.column, l.lookup_type.__name__) for l in searches[0].lookups) == \
+            {('name_vector', 'LookupAny'), ('nameaddress_vector', 'Restrict')}
+    assert set((l.column, l.lookup_type.__name__) for l in searches[1].lookups) == \
+            {('nameaddress_vector', 'LookupAll'), ('name_vector', 'LookupAll')}
 
 
 def test_too_frequent_partials_in_name_and_address():
@@ -446,5 +448,5 @@ def test_too_frequent_partials_in_name_and_address():
     assert all(isinstance(s, dbs.PlaceSearch) for s in searches)
 
     searches.sort(key=lambda s: s.penalty)
-    assert set((l.column, l.lookup_type) for l in searches[0].lookups) == \
-            {('name_vector', 'lookup_any'), ('nameaddress_vector', 'restrict')}
+    assert set((l.column, l.lookup_type.__name__) for l in searches[0].lookups) == \
+            {('name_vector', 'LookupAny'), ('nameaddress_vector', 'Restrict')}