X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/a7f5c6c8f54eea4e3d9746141b2e0ac2d5722a4a..a2a44b875a02db284d0f67eb9a20506622da2257:/test/python/api/search/test_api_search_query.py

diff --git a/test/python/api/search/test_api_search_query.py b/test/python/api/search/test_api_search_query.py
index 69a17412..7154ae08 100644
--- a/test/python/api/search/test_api_search_query.py
+++ b/test/python/api/search/test_api_search_query.py
@@ -2,14 +2,14 @@
 #
 # This file is part of Nominatim. (https://nominatim.org)
 #
-# Copyright (C) 2023 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
 # For a full list of authors see the git log.
 """
 Tests for tokenized query data structures.
 """
 import pytest
 
-from nominatim.api.search import query
+from nominatim_api.search import query
 
 
 class MyToken(query.Token):
@@ -18,7 +18,8 @@ class MyToken(query.Token):
 
 
 def mktoken(tid: int):
-    return MyToken(3.0, tid, 1, 'foo', True)
+    return MyToken(penalty=3.0, token=tid, count=1, addr_count=1,
+                   lookup_word='foo', is_indexed=True)
 
 
 @pytest.mark.parametrize('ptype,ttype', [('NONE', 'WORD'),
@@ -106,11 +107,11 @@ def test_query_struct_amenity_single_word():
     q.add_node(query.BreakType.END, query.PhraseType.NONE)
 
     q.add_token(query.TokenRange(0, 1), query.TokenType.PARTIAL, mktoken(1))
-    q.add_token(query.TokenRange(0, 1), query.TokenType.CATEGORY, mktoken(2))
+    q.add_token(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM, mktoken(2))
     q.add_token(query.TokenRange(0, 1), query.TokenType.QUALIFIER, mktoken(3))
 
     assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL)) == 1
-    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.CATEGORY)) == 1
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM)) == 1
     assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.QUALIFIER)) == 0
 
@@ -121,14 +122,14 @@ def test_query_struct_amenity_two_words():
 
     for trange in [(0, 1), (1, 2)]:
         q.add_token(query.TokenRange(*trange), query.TokenType.PARTIAL, mktoken(1))
-        q.add_token(query.TokenRange(*trange), query.TokenType.CATEGORY, mktoken(2))
+        q.add_token(query.TokenRange(*trange), query.TokenType.NEAR_ITEM, mktoken(2))
         q.add_token(query.TokenRange(*trange), query.TokenType.QUALIFIER, mktoken(3))
 
     assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL)) == 1
-    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.CATEGORY)) == 0
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM)) == 0
     assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.QUALIFIER)) == 1
 
     assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.PARTIAL)) == 1
-    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.CATEGORY)) == 0
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.NEAR_ITEM)) == 0
     assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.QUALIFIER)) == 1
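
Note: the sketch below restates the post-change test helper for readability and is not part of the diff itself. The import path (nominatim_api.search), the keyword-argument constructor call, and the CATEGORY -> NEAR_ITEM rename are taken directly from the '+' lines above; the body of get_category and the trailing attribute checks are assumptions about the surrounding test file and the Token dataclass.

# Minimal sketch, assuming the renamed nominatim_api package is importable.
from nominatim_api.search import query


class MyToken(query.Token):
    """Dummy token providing a get_category() implementation for the tests."""

    def get_category(self):
        return 'this', 'that'   # placeholder category tuple, not from the diff


def mktoken(tid: int):
    # Keyword arguments replace the old positional call; addr_count is the
    # newly required field in this change.
    return MyToken(penalty=3.0, token=tid, count=1, addr_count=1,
                   lookup_word='foo', is_indexed=True)


if __name__ == '__main__':
    tok = mktoken(42)
    # Token is assumed to be a dataclass, so the arguments read back as fields.
    assert tok.token == 42
    assert tok.addr_count == 1

Spelling out the keyword arguments makes the newly added addr_count field visible at every call site, which appears to be the point of rewriting the helper this way rather than extending the positional call.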