git.openstreetmap.org Git - nominatim.git/blobdiff - test/python/tokenizer/token_analysis/test_generic.py
Merge pull request #2641 from lonvia/reinit-tokenizer-dir
[nominatim.git] / test / python / tokenizer / token_analysis / test_generic.py
index 9b008cc5ee47042a90fe2b3ba54cb13915d4e8b6..afbd5e9bf813590ff6537f4893fd8325b48f1d09 100644 (file)
@@ -32,8 +32,9 @@ def make_analyser(*variants, variant_only=False):
         rules['mode'] = 'variant-only'
     config = module.configure(rules, DEFAULT_NORMALIZATION)
     trans = Transliterator.createFromRules("test_trans", DEFAULT_TRANSLITERATION)
+    norm = Transliterator.createFromRules("test_norm", DEFAULT_NORMALIZATION)
 
-    return module.create(trans, config)
+    return module.create(norm, trans, config)
 
 
 def get_normalized_variants(proc, name):
@@ -45,8 +46,9 @@ def test_no_variants():
     rules = { 'analyzer': 'generic' }
     config = module.configure(rules, DEFAULT_NORMALIZATION)
     trans = Transliterator.createFromRules("test_trans", DEFAULT_TRANSLITERATION)
+    norm = Transliterator.createFromRules("test_norm", DEFAULT_NORMALIZATION)
 
-    proc = module.create(trans, config)
+    proc = module.create(norm, trans, config)
 
     assert get_normalized_variants(proc, '大德!') == ['dà dé']