# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for query analyzer for ICU tokenizer.
"""
from pathlib import Path

import pytest
import pytest_asyncio

from nominatim_api import NominatimAPIAsync
from nominatim_api.search.query import Phrase, PhraseType, TokenType, BreakType
import nominatim_api.search.icu_tokenizer as tok
from nominatim_api.logging import set_log_output, get_and_disable
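

# Helper for filling the tokenizer's 'word' table. The type codes used in
# these tests are 'w' (partial word), 'W' (full word), 'H' (housenumber),
# 'P' (postcode) and 'S' (special term carrying an 'op' entry in its info).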
async def add_word(conn, word_id, word_token, wtype, word, info=None):
    t = conn.t.meta.tables['word']
    await conn.execute(t.insert(), {'word_id': word_id,
                                    'word_token': word_token,
                                    'type': wtype,
                                    'word': word,
                                    'info': info})
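

# Turn a query string into a list of Phrase objects, splitting on commas.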
def make_phrase(query):
    return [Phrase(PhraseType.NONE, s) for s in query.split(',')]


@pytest_asyncio.fixture
async def conn(table_factory):
    """ Create an asynchronous SQLAlchemy engine for the test DB.
    """
    table_factory('nominatim_properties',
                  definition='property TEXT, value TEXT',
                  content=(('tokenizer_import_normalisation', ':: lower();'),
                           ('tokenizer_import_transliteration', "'1' > '/1/'; 'ä' > 'ä '")))
    table_factory('word',
                  definition='word_id INT, word_token TEXT, type TEXT, word TEXT, info JSONB')

    api = NominatimAPIAsync(Path('/invalid'), {})
    async with api.begin() as conn:
        yield conn
    await api.close()


@pytest.mark.asyncio
async def test_empty_phrase(conn):
    ana = await tok.create_query_analyzer(conn)

    query = await ana.analyze_query([])

    assert len(query.source) == 0
    assert query.num_token_slots() == 0


@pytest.mark.asyncio
async def test_single_phrase_with_unknown_terms(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'foo', 'w', 'FOO')

    query = await ana.analyze_query(make_phrase('foo BAR'))

    assert len(query.source) == 1
    assert query.source[0].ptype == PhraseType.NONE
    assert query.source[0].text == 'foo bar'

    assert query.num_token_slots() == 2
    assert len(query.nodes[0].starting) == 1
    assert not query.nodes[1].starting


@pytest.mark.asyncio
async def test_multiple_phrases(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'one', 'w', 'one')
    await add_word(conn, 2, 'two', 'w', 'two')
    await add_word(conn, 100, 'one two', 'W', 'one two')
    await add_word(conn, 3, 'three', 'w', 'three')

    query = await ana.analyze_query(make_phrase('one two,three'))

    assert len(query.source) == 2
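

# The fixture's transliteration rule "'ä' > 'ä '" appends a space after 'ä',
# so a single input term containing 'ä' is expected to split into two tokens.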
@pytest.mark.asyncio
async def test_splitting_in_transliteration(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'mä', 'W', 'ma')
    await add_word(conn, 2, 'fo', 'W', 'fo')

    query = await ana.analyze_query(make_phrase('mäfo'))

    assert query.num_token_slots() == 2
    assert query.nodes[0].starting
    assert query.nodes[1].starting
    assert query.nodes[1].btype == BreakType.TOKEN
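

# A five-digit term is expected to rank the postcode reading first, while a
# single digit ranks the housenumber reading first.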
@pytest.mark.asyncio
@pytest.mark.parametrize('term,order', [('23456', ['POSTCODE', 'HOUSENUMBER', 'WORD', 'PARTIAL']),
                                        ('3', ['HOUSENUMBER', 'POSTCODE', 'WORD', 'PARTIAL'])
                                       ])
async def test_penalty_postcodes_and_housenumbers(conn, term, order):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, term, 'P', None)
    await add_word(conn, 2, term, 'H', term)
    await add_word(conn, 3, term, 'w', term)
    await add_word(conn, 4, term, 'W', term)

    query = await ana.analyze_query(make_phrase(term))

    assert query.num_token_slots() == 1

    torder = [(tl.tokens[0].penalty, tl.ttype.name) for tl in query.nodes[0].starting]
    torder.sort()

    assert [t[1] for t in torder] == order


@pytest.mark.asyncio
async def test_category_words_only_at_beginning(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'foo', 'S', 'FOO', {'op': 'in'})
    await add_word(conn, 2, 'bar', 'w', 'BAR')

    query = await ana.analyze_query(make_phrase('foo BAR foo'))

    assert query.num_token_slots() == 3
    assert len(query.nodes[0].starting) == 1
    assert query.nodes[0].starting[0].ttype == TokenType.NEAR_ITEM
    assert not query.nodes[2].starting


@pytest.mark.asyncio
async def test_freestanding_qualifier_words_become_category(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'foo', 'S', 'FOO', {'op': '-'})

    query = await ana.analyze_query(make_phrase('foo'))

    assert query.num_token_slots() == 1
    assert len(query.nodes[0].starting) == 1
    assert query.nodes[0].starting[0].ttype == TokenType.NEAR_ITEM


@pytest.mark.asyncio
async def test_qualifier_words(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'foo', 'S', None, {'op': '-'})
    await add_word(conn, 2, 'bar', 'w', None)

    query = await ana.analyze_query(make_phrase('foo BAR foo BAR foo'))

    assert query.num_token_slots() == 5
    assert set(t.ttype for t in query.nodes[0].starting) == {TokenType.QUALIFIER}
    assert set(t.ttype for t in query.nodes[2].starting) == {TokenType.QUALIFIER}
    assert set(t.ttype for t in query.nodes[4].starting) == {TokenType.QUALIFIER}
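

# Only '23' has a real entry in the word table below; '466' still receives a
# housenumber token (with token id 0), while the remaining terms get none.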
@pytest.mark.asyncio
async def test_add_unknown_housenumbers(conn):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, '23', 'H', '23')

    query = await ana.analyze_query(make_phrase('466 23 99834 34a'))

    assert query.num_token_slots() == 4
    assert query.nodes[0].starting[0].ttype == TokenType.HOUSENUMBER
    assert len(query.nodes[0].starting[0].tokens) == 1
    assert query.nodes[0].starting[0].tokens[0].token == 0
    assert query.nodes[1].starting[0].ttype == TokenType.HOUSENUMBER
    assert len(query.nodes[1].starting[0].tokens) == 1
    assert query.nodes[1].starting[0].tokens[0].token == 1
    assert not query.nodes[2].starting
    assert not query.nodes[3].starting


@pytest.mark.asyncio
@pytest.mark.parametrize('logtype', ['text', 'html'])
async def test_log_output(conn, logtype):
    ana = await tok.create_query_analyzer(conn)

    await add_word(conn, 1, 'foo', 'w', 'FOO')

    set_log_output(logtype)
    await ana.analyze_query(make_phrase('foo'))

    assert get_and_disable()