X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/10a5424a71022a787902b86ddcefedb8688bb2b5..c7d80a2cc8cacb7dba95f023c2f480d25f7bf6b1:/test/python/api/search/test_icu_query_analyzer.py

diff --git a/test/python/api/search/test_icu_query_analyzer.py b/test/python/api/search/test_icu_query_analyzer.py
index 6a17e32a..7f88879c 100644
--- a/test/python/api/search/test_icu_query_analyzer.py
+++ b/test/python/api/search/test_icu_query_analyzer.py
@@ -2,7 +2,7 @@
 #
 # This file is part of Nominatim. (https://nominatim.org)
 #
-# Copyright (C) 2023 by the Nominatim developer community.
+# Copyright (C) 2024 by the Nominatim developer community.
 # For a full list of authors see the git log.
 """
 Tests for query analyzer for ICU tokenizer.
@@ -12,10 +12,10 @@ from pathlib import Path
 import pytest
 import pytest_asyncio
 
-from nominatim.api import NominatimAPIAsync
-from nominatim.api.search.query import Phrase, PhraseType, TokenType, BreakType
-import nominatim.api.search.icu_tokenizer as tok
-from nominatim.api.logging import set_log_output, get_and_disable
+from nominatim_api import NominatimAPIAsync
+from nominatim_api.search.query import Phrase, PhraseType, TokenType, BreakType
+import nominatim_api.search.icu_tokenizer as tok
+from nominatim_api.logging import set_log_output, get_and_disable
 
 async def add_word(conn, word_id, word_token, wtype, word, info = None):
     t = conn.t.meta.tables['word']
@@ -40,10 +40,9 @@ async def conn(table_factory):
     table_factory('word',
                   definition='word_id INT, word_token TEXT, type TEXT, word TEXT, info JSONB')
 
-    api = NominatimAPIAsync(Path('/invalid'), {})
-    async with api.begin() as conn:
-        yield conn
-    await api.close()
+    async with NominatimAPIAsync(Path('/invalid'), {}) as api:
+        async with api.begin() as conn:
+            yield conn
 
 
 @pytest.mark.asyncio
@@ -138,6 +137,19 @@ async def test_category_words_only_at_beginning(conn):
     assert not query.nodes[2].starting
 
 
+@pytest.mark.asyncio
+async def test_freestanding_qualifier_words_become_category(conn):
+    ana = await tok.create_query_analyzer(conn)
+
+    await add_word(conn, 1, 'foo', 'S', 'FOO', {'op': '-'})
+
+    query = await ana.analyze_query(make_phrase('foo'))
+
+    assert query.num_token_slots() == 1
+    assert len(query.nodes[0].starting) == 1
+    assert query.nodes[0].starting[0].ttype == TokenType.NEAR_ITEM
+
+
 @pytest.mark.asyncio
 async def test_qualifier_words(conn):
     ana = await tok.create_query_analyzer(conn)