lookup_word='foo')
-@pytest.mark.parametrize('ptype,ttype', [('NONE', 'WORD'),
-                                         ('AMENITY', 'QUALIFIER'),
-                                         ('STREET', 'PARTIAL'),
-                                         ('CITY', 'WORD'),
-                                         ('COUNTRY', 'COUNTRY'),
-                                         ('POSTCODE', 'POSTCODE')])
+@pytest.mark.parametrize('ptype,ttype', [('NONE', 'W'),
+                                         ('AMENITY', 'Q'),
+                                         ('STREET', 'w'),
+                                         ('CITY', 'W'),
+                                         ('COUNTRY', 'C'),
+                                         ('POSTCODE', 'P')])
def test_phrase_compatible(ptype, ttype):
-    assert query.PhraseType[ptype].compatible_with(query.TokenType[ttype], False)
+    assert query.PhraseType[ptype].compatible_with(ttype, False)
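
# The table above implies that this change replaces the query.TokenType
# enum with plain one-letter string constants. A minimal sketch of the
# assumed new module-level definitions in the query module, with values
# read off the parametrize table (TOKEN_NEAR_ITEM also appears further
# down in the diff, but its value is not visible here):
TOKEN_WORD = 'W'
TOKEN_PARTIAL = 'w'
TOKEN_COUNTRY = 'C'
TOKEN_POSTCODE = 'P'
TOKEN_QUALIFIER = 'Q'
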
@pytest.mark.parametrize('ptype', ['COUNTRY', 'POSTCODE'])
def test_phrase_incompatible(ptype):
-    assert not query.PhraseType[ptype].compatible_with(query.TokenType.PARTIAL, True)
+    assert not query.PhraseType[ptype].compatible_with(query.TOKEN_PARTIAL, True)

def test_query_node_empty():
    qn = query.QueryNode(query.BREAK_PHRASE, query.PhraseType.NONE)

-    assert not qn.has_tokens(3, query.TokenType.PARTIAL)
-    assert qn.get_tokens(3, query.TokenType.WORD) is None
+    assert not qn.has_tokens(3, query.TOKEN_PARTIAL)
+    assert qn.get_tokens(3, query.TOKEN_WORD) is None

def test_query_node_with_content():
    qn = query.QueryNode(query.BREAK_PHRASE, query.PhraseType.NONE)
-    qn.starting.append(query.TokenList(2, query.TokenType.PARTIAL, [mktoken(100), mktoken(101)]))
-    qn.starting.append(query.TokenList(2, query.TokenType.WORD, [mktoken(1000)]))
+    qn.starting.append(query.TokenList(2, query.TOKEN_PARTIAL, [mktoken(100), mktoken(101)]))
+    qn.starting.append(query.TokenList(2, query.TOKEN_WORD, [mktoken(1000)]))

-    assert not qn.has_tokens(3, query.TokenType.PARTIAL)
-    assert not qn.has_tokens(2, query.TokenType.COUNTRY)
-    assert qn.has_tokens(2, query.TokenType.PARTIAL)
-    assert qn.has_tokens(2, query.TokenType.WORD)
+    assert not qn.has_tokens(3, query.TOKEN_PARTIAL)
+    assert not qn.has_tokens(2, query.TOKEN_COUNTRY)
+    assert qn.has_tokens(2, query.TOKEN_PARTIAL)
+    assert qn.has_tokens(2, query.TOKEN_WORD)

-    assert qn.get_tokens(3, query.TokenType.PARTIAL) is None
-    assert qn.get_tokens(2, query.TokenType.COUNTRY) is None
-    assert len(qn.get_tokens(2, query.TokenType.PARTIAL)) == 2
-    assert len(qn.get_tokens(2, query.TokenType.WORD)) == 1
+    assert qn.get_tokens(3, query.TOKEN_PARTIAL) is None
+    assert qn.get_tokens(2, query.TOKEN_COUNTRY) is None
+    assert len(qn.get_tokens(2, query.TOKEN_PARTIAL)) == 2
+    assert len(qn.get_tokens(2, query.TOKEN_WORD)) == 1

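# The asserts above pin down the lookup contract after the rename: entries
# are keyed by end position plus a TOKEN_* string, and a miss returns None.
# A hedged sketch of such a get_tokens() over the TokenList entries shown
# above; the field names end/ttype/tokens are assumptions, not confirmed
# by this diff:
def get_tokens_sketch(node, end, ttype):
    for tlist in node.starting:
        if tlist.end == end and tlist.ttype == ttype:
            return tlist.tokens
    return None
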
def test_query_struct_empty():
    q = query.QueryStruct([])

    assert q.num_token_slots() == 0


def test_query_struct_with_tokens():
    q = query.QueryStruct([query.Phrase(query.PhraseType.NONE, 'foo bar')])
    q.add_node(query.BREAK_WORD, query.PhraseType.NONE)
    q.add_node(query.BREAK_END, query.PhraseType.NONE)

    assert q.num_token_slots() == 2

-    q.add_token(query.TokenRange(0, 1), query.TokenType.PARTIAL, mktoken(1))
-    q.add_token(query.TokenRange(1, 2), query.TokenType.PARTIAL, mktoken(2))
-    q.add_token(query.TokenRange(1, 2), query.TokenType.WORD, mktoken(99))
-    q.add_token(query.TokenRange(1, 2), query.TokenType.WORD, mktoken(98))
+    q.add_token(query.TokenRange(0, 1), query.TOKEN_PARTIAL, mktoken(1))
+    q.add_token(query.TokenRange(1, 2), query.TOKEN_PARTIAL, mktoken(2))
+    q.add_token(query.TokenRange(1, 2), query.TOKEN_WORD, mktoken(99))
+    q.add_token(query.TokenRange(1, 2), query.TOKEN_WORD, mktoken(98))

-    assert q.get_tokens(query.TokenRange(0, 2), query.TokenType.WORD) == []
-    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.WORD)) == 2
+    assert q.get_tokens(query.TokenRange(0, 2), query.TOKEN_WORD) == []
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TOKEN_WORD)) == 2

    partials = q.get_partials_list(query.TokenRange(0, 2))

    assert len(partials) == 2
    assert [t.token for t in partials] == [1, 2]


def test_query_struct_incompatible_token():
    q = query.QueryStruct([query.Phrase(query.PhraseType.COUNTRY, 'foo bar')])
    q.add_node(query.BREAK_WORD, query.PhraseType.COUNTRY)
    q.add_node(query.BREAK_END, query.PhraseType.NONE)

-    q.add_token(query.TokenRange(0, 1), query.TokenType.PARTIAL, mktoken(1))
-    q.add_token(query.TokenRange(1, 2), query.TokenType.COUNTRY, mktoken(100))
+    q.add_token(query.TokenRange(0, 1), query.TOKEN_PARTIAL, mktoken(1))
+    q.add_token(query.TokenRange(1, 2), query.TOKEN_COUNTRY, mktoken(100))

-    assert q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL) == []
-    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.COUNTRY)) == 1
+    assert q.get_tokens(query.TokenRange(0, 1), query.TOKEN_PARTIAL) == []
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TOKEN_COUNTRY)) == 1

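# The empty lookup for the PARTIAL token above suggests that add_token()
# validates the new TOKEN_* string against the phrase type of the target
# node and silently drops misfits. A hedged sketch of that guard; the
# function name and the node.ptype attribute are assumptions, the real
# code presumably stores TokenList entries as in the tests above and
# derives the full-phrase flag instead of passing False:
def add_token_sketch(node, ttype, token):
    # drop e.g. a partial term inside a dedicated COUNTRY phrase
    if node.ptype.compatible_with(ttype, False):
        node.starting.append(token)
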
def test_query_struct_amenity_single_word():
    q = query.QueryStruct([query.Phrase(query.PhraseType.AMENITY, 'bar')])
    q.add_node(query.BREAK_END, query.PhraseType.NONE)

-    q.add_token(query.TokenRange(0, 1), query.TokenType.PARTIAL, mktoken(1))
-    q.add_token(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM, mktoken(2))
-    q.add_token(query.TokenRange(0, 1), query.TokenType.QUALIFIER, mktoken(3))
+    q.add_token(query.TokenRange(0, 1), query.TOKEN_PARTIAL, mktoken(1))
+    q.add_token(query.TokenRange(0, 1), query.TOKEN_NEAR_ITEM, mktoken(2))
+    q.add_token(query.TokenRange(0, 1), query.TOKEN_QUALIFIER, mktoken(3))

-    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL)) == 1
-    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM)) == 1
-    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.QUALIFIER)) == 0
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TOKEN_PARTIAL)) == 1
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TOKEN_NEAR_ITEM)) == 1
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TOKEN_QUALIFIER)) == 0

def test_query_struct_amenity_two_words():
    q = query.QueryStruct([query.Phrase(query.PhraseType.AMENITY, 'foo bar')])
    q.add_node(query.BREAK_WORD, query.PhraseType.AMENITY)
    q.add_node(query.BREAK_END, query.PhraseType.NONE)

    for trange in [(0, 1), (1, 2)]:
-        q.add_token(query.TokenRange(*trange), query.TokenType.PARTIAL, mktoken(1))
-        q.add_token(query.TokenRange(*trange), query.TokenType.NEAR_ITEM, mktoken(2))
-        q.add_token(query.TokenRange(*trange), query.TokenType.QUALIFIER, mktoken(3))
+        q.add_token(query.TokenRange(*trange), query.TOKEN_PARTIAL, mktoken(1))
+        q.add_token(query.TokenRange(*trange), query.TOKEN_NEAR_ITEM, mktoken(2))
+        q.add_token(query.TokenRange(*trange), query.TOKEN_QUALIFIER, mktoken(3))

-    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL)) == 1
-    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM)) == 0
-    assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.QUALIFIER)) == 1
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TOKEN_PARTIAL)) == 1
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TOKEN_NEAR_ITEM)) == 0
+    assert len(q.get_tokens(query.TokenRange(0, 1), query.TOKEN_QUALIFIER)) == 1

-    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.PARTIAL)) == 1
-    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.NEAR_ITEM)) == 0
-    assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.QUALIFIER)) == 1
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TOKEN_PARTIAL)) == 1
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TOKEN_NEAR_ITEM)) == 0
+    assert len(q.get_tokens(query.TokenRange(1, 2), query.TOKEN_QUALIFIER)) == 1
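
# For contrast, a minimal, self-contained sketch of the before/after
# pattern this whole diff applies: an enum replaced by module-level
# string constants. The member list and old enum values here are
# illustrative; the diff only shows how call sites change.
from enum import Enum, auto

class TokenType(Enum):   # old style, removed by the change
    WORD = auto()
    PARTIAL = auto()

TOKEN_WORD = 'W'         # new style: plain one-letter strings
TOKEN_PARTIAL = 'w'

# call sites change from query.TokenType.WORD to query.TOKEN_WORD;
# plain strings keep hashing and equality semantics, so dict keys and
# comparisons keep working without the enum class
tokens = {TOKEN_WORD: [], TOKEN_PARTIAL: []}
assert TOKEN_WORD in tokens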