def make_query(*args):
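+    """ Build a QueryStruct for testing from token descriptions.
+
+        Each positional argument is a list of tuples
+        (end_node, token_type, [(token_id, word), ...]) describing the
+        tokens that start at the node with the same index as the argument.
+    """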
- q = None
+ q = QueryStruct([Phrase(PhraseType.NONE, '')])
- for tlist in args:
- if q is None:
- q = QueryStruct([Phrase(PhraseType.NONE, '')])
- else:
- q.add_node(BreakType.WORD, PhraseType.NONE)
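+    # Create all query nodes up front: one WORD break per node up to the
+    # highest end index used by any token, followed by the closing END node.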
+ for _ in range(max(inner[0] for tlist in args for inner in tlist)):
+ q.add_node(BreakType.WORD, PhraseType.NONE)
+ q.add_node(BreakType.END, PhraseType.NONE)
- start = len(q.nodes) - 1
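+    # The position of each argument determines the start node of its tokens.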
+ for start, tlist in enumerate(args):
for end, ttype, tinfo in tlist:
for tid, word in tinfo:
q.add_token(TokenRange(start, end), ttype,
MyToken(0.5 if ttype == TokenType.PARTIAL else 0.0, tid, 1, word, True))
- q.add_node(BreakType.END, PhraseType.NONE)
return q
assert set(search.countries.values) == {'en'}
-def test_country_search_with_confllicting_country_restriction():
+def test_country_search_with_conflicting_country_restriction():
q = make_query([(1, TokenType.COUNTRY, [(2, 'de'), (3, 'en')])])
builder = SearchBuilder(q, SearchDetails.from_kwargs({'countries': 'fr'}))
@pytest.mark.parametrize('kwargs', [{'viewbox': '0,0,1,1', 'bounded_viewbox': True},
{'near': '10,10'}])
-def test_category_only(kwargs):
- q = make_query([(1, TokenType.CATEGORY, [(2, 'foo')])])
+def test_near_item_only(kwargs):
+ q = make_query([(1, TokenType.NEAR_ITEM, [(2, 'foo')])])
builder = SearchBuilder(q, SearchDetails.from_kwargs(kwargs))
- searches = list(builder.build(TokenAssignment(category=TokenRange(0, 1))))
+ searches = list(builder.build(TokenAssignment(near_item=TokenRange(0, 1))))
assert len(searches) == 1
search = searches[0]
assert isinstance(search, dbs.PoiSearch)
- assert search.categories.values == [('this', 'that')]
+ assert search.qualifiers.values == [('this', 'that')]
@pytest.mark.parametrize('kwargs', [{'viewbox': '0,0,1,1'},
{}])
-def test_category_skipped(kwargs):
- q = make_query([(1, TokenType.CATEGORY, [(2, 'foo')])])
+def test_near_item_skipped(kwargs):
+ q = make_query([(1, TokenType.NEAR_ITEM, [(2, 'foo')])])
builder = SearchBuilder(q, SearchDetails.from_kwargs(kwargs))
- searches = list(builder.build(TokenAssignment(category=TokenRange(0, 1))))
+ searches = list(builder.build(TokenAssignment(near_item=TokenRange(0, 1))))
assert len(searches) == 0
def test_name_only_near_search():
- q = make_query([(1, TokenType.CATEGORY, [(88, 'g')])],
+ q = make_query([(1, TokenType.NEAR_ITEM, [(88, 'g')])],
[(2, TokenType.PARTIAL, [(1, 'a')]),
(2, TokenType.WORD, [(100, 'a')])])
builder = SearchBuilder(q, SearchDetails())
searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2),
- category=TokenRange(0, 1))))
+ near_item=TokenRange(0, 1))))
assert len(searches) == 1
search = searches[0]
+ assert isinstance(search, dbs.PlaceSearch)
+ assert search.qualifiers.values == [('foo', 'bar')]
+
+
+def test_name_with_near_item_search_with_category_mismatch():
+ q = make_query([(1, TokenType.NEAR_ITEM, [(88, 'g')])],
+ [(2, TokenType.PARTIAL, [(1, 'a')]),
+ (2, TokenType.WORD, [(100, 'a')])])
+ builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar')]}))
+
+ searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2),
+ near_item=TokenRange(0, 1))))
+
+ assert len(searches) == 0
+
+
+def test_name_with_near_item_search_with_category_match():
+ q = make_query([(1, TokenType.NEAR_ITEM, [(88, 'g')])],
+ [(2, TokenType.PARTIAL, [(1, 'a')]),
+ (2, TokenType.WORD, [(100, 'a')])])
+ builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar'),
+ ('this', 'that')]}))
+
+ searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2),
+ near_item=TokenRange(0, 1))))
+
+ assert len(searches) == 1
+ search = searches[0]
+
assert isinstance(search, dbs.NearSearch)
assert isinstance(search.search, dbs.PlaceSearch)
+
+
+def test_name_with_qualifier_search_with_category_mismatch():
+ q = make_query([(1, TokenType.QUALIFIER, [(88, 'g')])],
+ [(2, TokenType.PARTIAL, [(1, 'a')]),
+ (2, TokenType.WORD, [(100, 'a')])])
+ builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar')]}))
+
+ searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2),
+ qualifier=TokenRange(0, 1))))
+
+ assert len(searches) == 0
+
+
+def test_name_with_qualifier_search_with_category_match():
+ q = make_query([(1, TokenType.QUALIFIER, [(88, 'g')])],
+ [(2, TokenType.PARTIAL, [(1, 'a')]),
+ (2, TokenType.WORD, [(100, 'a')])])
+ builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar'),
+ ('this', 'that')]}))
+
+ searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2),
+ qualifier=TokenRange(0, 1))))
+
+ assert len(searches) == 1
+ search = searches[0]
+
+ assert isinstance(search, dbs.PlaceSearch)
+ assert search.qualifiers.values == [('this', 'that')]
+
+
def test_name_only_search_with_countries():
q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]),
(1, TokenType.WORD, [(100, 'a')])])
assert len(search.lookups) == 2
assert len(search.rankings) == 2
- assert set((l.column, l.lookup_type) for l in search.lookups) == \
- {('name_vector', 'lookup_all'), ('nameaddress_vector', 'restrict')}
-
-
-def test_frequent_partials_in_name_but_not_in_address():
- searches = make_counted_searches(10000, 1, 1, 1, num_address_parts=4)
-
- assert len(searches) == 1
- search = searches[0]
-
- assert isinstance(search, dbs.PlaceSearch)
- assert len(search.lookups) == 2
- assert len(search.rankings) == 2
-
- assert set((l.column, l.lookup_type) for l in search.lookups) == \
- {('nameaddress_vector', 'lookup_all'), ('name_vector', 'restrict')}
+ assert set((l.column, l.lookup_type.__name__) for l in search.lookups) == \
+ {('name_vector', 'LookupAll'), ('nameaddress_vector', 'Restrict')}
def test_frequent_partials_in_name_and_address():
assert all(isinstance(s, dbs.PlaceSearch) for s in searches)
searches.sort(key=lambda s: s.penalty)
- assert set((l.column, l.lookup_type) for l in searches[0].lookups) == \
- {('name_vector', 'lookup_any'), ('nameaddress_vector', 'restrict')}
- assert set((l.column, l.lookup_type) for l in searches[1].lookups) == \
- {('nameaddress_vector', 'lookup_all'), ('name_vector', 'lookup_all')}
+ assert set((l.column, l.lookup_type.__name__) for l in searches[0].lookups) == \
+ {('name_vector', 'LookupAny'), ('nameaddress_vector', 'Restrict')}
+ assert set((l.column, l.lookup_type.__name__) for l in searches[1].lookups) == \
+ {('nameaddress_vector', 'LookupAll'), ('name_vector', 'LookupAll')}
def test_too_frequent_partials_in_name_and_address():
- searches = make_counted_searches(10000, 1, 10000, 1)
+ searches = make_counted_searches(20000, 1, 10000, 1)
assert len(searches) == 1
assert all(isinstance(s, dbs.PlaceSearch) for s in searches)
searches.sort(key=lambda s: s.penalty)
- assert set((l.column, l.lookup_type) for l in searches[0].lookups) == \
- {('name_vector', 'lookup_any'), ('nameaddress_vector', 'restrict')}
+ assert set((l.column, l.lookup_type.__name__) for l in searches[0].lookups) == \
+ {('name_vector', 'LookupAny'), ('nameaddress_vector', 'Restrict')}