# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for query analyzer for ICU tokenizer.
"""
from pathlib import Path

import pytest
import pytest_asyncio

from nominatim_api import NominatimAPIAsync
from nominatim_api.search.query import Phrase, PhraseType, TokenType, BreakType
import nominatim_api.search.icu_tokenizer as tok
from nominatim_api.logging import set_log_output, get_and_disable

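# Helper that inserts a single row into the mock 'word' table set up by the
# 'conn' fixture below.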
async def add_word(conn, word_id, word_token, wtype, word, info=None):
    t = conn.t.meta.tables['word']
    await conn.execute(t.insert(), {'word_id': word_id,
                                    'word_token': word_token,
                                    'type': wtype,
                                    'word': word,
                                    'info': info})

def make_phrase(query):
    return [Phrase(PhraseType.NONE, s) for s in query.split(',')]

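# Fixture setting up a test database containing the normalisation and
# transliteration properties and an empty 'word' token table for the analyzer.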
@pytest_asyncio.fixture
async def conn(table_factory):
    """ Create an asynchronous SQLAlchemy engine for the test DB.
    """
    table_factory('nominatim_properties',
                  definition='property TEXT, value TEXT',
                  content=(('tokenizer_import_normalisation', ':: lower();'),
                           ('tokenizer_import_transliteration', "'1' > '/1/'; 'ä' > 'ä '")))
    table_factory('word',
                  definition='word_id INT, word_token TEXT, type TEXT, word TEXT, info JSONB')

    async with NominatimAPIAsync(Path('/invalid'), {}) as api:
        async with api.begin() as conn:
            yield conn

@pytest.mark.asyncio
async def test_empty_phrase(conn):
    ana = await tok.create_query_analyzer(conn)

    query = await ana.analyze_query([])

    assert len(query.source) == 0
    assert query.num_token_slots() == 0

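# The known term 'foo' produces a starting token, the unknown 'BAR' does not,
# but both still occupy a token slot.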
@pytest.mark.asyncio
async def test_single_phrase_with_unknown_terms(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'foo', 'w', 'FOO')

    query = await ana.analyze_query(make_phrase('foo BAR'))

    assert len(query.source) == 1
    assert query.source[0].ptype == PhraseType.NONE
    assert query.source[0].text == 'foo bar'

    assert query.num_token_slots() == 2
    assert len(query.nodes[0].starting) == 1
    assert not query.nodes[1].starting

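# Comma-separated input is analysed as two separate phrases.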
@pytest.mark.asyncio
async def test_multiple_phrases(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'one', 'w', 'one')
    await add_word(conn, 2, 'two', 'w', 'two')
    await add_word(conn, 100, 'one two', 'W', 'one two')
    await add_word(conn, 3, 'three', 'w', 'three')

    query = await ana.analyze_query(make_phrase('one two,three'))

    assert len(query.source) == 2

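# The transliteration rule "'ä' > 'ä '" from the conn fixture inserts a space,
# so the single input word 'mäfo' is split into two tokens joined by a
# TOKEN break.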
@pytest.mark.asyncio
async def test_splitting_in_transliteration(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'mä', 'W', 'ma')
    await add_word(conn, 2, 'fo', 'W', 'fo')

    query = await ana.analyze_query(make_phrase('mäfo'))

    assert query.num_token_slots() == 2
    assert query.nodes[0].starting
    assert query.nodes[1].starting
    assert query.nodes[1].btype == BreakType.TOKEN

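# The same term is entered as postcode, house number, partial and full word;
# the penalties must rank the candidate readings differently for long and
# short digit-only terms.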
@pytest.mark.asyncio
@pytest.mark.parametrize('term,order', [('23456', ['POSTCODE', 'HOUSENUMBER', 'WORD', 'PARTIAL']),
                                        ('3', ['HOUSENUMBER', 'POSTCODE', 'WORD', 'PARTIAL'])
                                       ])
async def test_penalty_postcodes_and_housenumbers(conn, term, order):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, term, 'P', None)
    await add_word(conn, 2, term, 'H', term)
    await add_word(conn, 3, term, 'w', term)
    await add_word(conn, 4, term, 'W', term)

    query = await ana.analyze_query(make_phrase(term))

    assert query.num_token_slots() == 1

    torder = [(tl.tokens[0].penalty, tl.ttype.name) for tl in query.nodes[0].starting]
    torder.sort()

    assert [t[1] for t in torder] == order

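# Category (near-item) words are only accepted at the beginning of the query;
# the trailing 'foo' gets no token at all.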
@pytest.mark.asyncio
async def test_category_words_only_at_beginning(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'foo', 'S', 'FOO', {'op': 'in'})
    await add_word(conn, 2, 'bar', 'w', 'BAR')

    query = await ana.analyze_query(make_phrase('foo BAR foo'))

    assert query.num_token_slots() == 3
    assert len(query.nodes[0].starting) == 1
    assert query.nodes[0].starting[0].ttype == TokenType.NEAR_ITEM
    assert not query.nodes[2].starting

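# A qualifier word standing on its own is reinterpreted as a near-item search.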
@pytest.mark.asyncio
async def test_freestanding_qualifier_words_become_category(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'foo', 'S', 'FOO', {'op': '-'})

    query = await ana.analyze_query(make_phrase('foo'))

    assert query.num_token_slots() == 1
    assert len(query.nodes[0].starting) == 1
    assert query.nodes[0].starting[0].ttype == TokenType.NEAR_ITEM

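# In combination with other words, the qualifier keeps the QUALIFIER token
# type at the beginning, in the middle and at the end of the query.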
@pytest.mark.asyncio
async def test_qualifier_words(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'foo', 'S', None, {'op': '-'})
    await add_word(conn, 2, 'bar', 'w', None)

    query = await ana.analyze_query(make_phrase('foo BAR foo BAR foo'))

    assert query.num_token_slots() == 5
    assert set(t.ttype for t in query.nodes[0].starting) == {TokenType.QUALIFIER}
    assert set(t.ttype for t in query.nodes[2].starting) == {TokenType.QUALIFIER}
    assert set(t.ttype for t in query.nodes[4].starting) == {TokenType.QUALIFIER}

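# '23' is in the word table (token 1); '466' is unknown but still receives an
# artificial house-number token (token 0), while '99834' and '34a' get none.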
@pytest.mark.asyncio
async def test_add_unknown_housenumbers(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, '23', 'H', '23')

    query = await ana.analyze_query(make_phrase('466 23 99834 34a'))

    assert query.num_token_slots() == 4
    assert query.nodes[0].starting[0].ttype == TokenType.HOUSENUMBER
    assert len(query.nodes[0].starting[0].tokens) == 1
    assert query.nodes[0].starting[0].tokens[0].token == 0
    assert query.nodes[1].starting[0].ttype == TokenType.HOUSENUMBER
    assert len(query.nodes[1].starting[0].tokens) == 1
    assert query.nodes[1].starting[0].tokens[0].token == 1
    assert not query.nodes[2].starting
    assert not query.nodes[3].starting

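# Running the analyzer with debug logging enabled must produce output in both
# the text and the HTML log format.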
@pytest.mark.asyncio
@pytest.mark.parametrize('logtype', ['text', 'html'])
async def test_log_output(conn, logtype):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'foo', 'w', 'FOO')

    set_log_output(logtype)
    await ana.analyze_query(make_phrase('foo'))

    assert get_and_disable()