def create(dsn, data_dir):
    """ Create a new instance of the tokenizer provided by this module.
    return DummyTokenizer(dsn, data_dir)  # hand the caller the dummy implementation
def __init__(self, dsn, data_dir):  # dummy tokenizer: remembers its arguments and init state
    self.data_dir = data_dir    # stored as-is; never interpreted by the dummy
    self.init_state = None      # set to "new" or "loaded" by the init_* methods below
    self.analyser_cache = {}    # shared dict handed to every DummyNameAnalyzer
def init_new_db(self, *args, **kwargs):
    """Mark the tokenizer as initialised for a freshly created database.

    Any positional/keyword arguments are accepted and ignored.
    Must be the first init_* method called: fails if another
    initialisation path has already run.
    """
    # PEP 8: comparisons to the None singleton use identity, not equality.
    assert self.init_state is None
    self.init_state = "new"
def init_from_project(self):
    """Mark the tokenizer as initialised from an existing project directory.

    Must be the first init_* method called: fails if another
    initialisation path has already run.
    """
    # PEP 8: comparisons to the None singleton use identity, not equality.
    assert self.init_state is None
    self.init_state = "loaded"
def finalize_import(self, _):  # post-import hook; the single argument is intentionally ignored (named "_")
def name_analyzer(self):
    """Hand out a new analyzer bound to this tokenizer's shared cache dict."""
    shared = self.analyser_cache
    return DummyNameAnalyzer(shared)
class DummyNameAnalyzer:
    """Name-analyzer stand-in that records the calls it receives in a shared cache dict."""
def __exit__(self, exc_type, exc_value, traceback):  # standard context-manager exit signature
46 def __init__(self, cache):
47 self.analyser_cache = cache
48 cache['countries'] = []
def normalize_postcode(self, postcode):  # postcode-normalisation hook; NOTE(review): body not visible in this view
def update_postcodes_from_db(self):  # interface hook; presumably refreshes postcodes from the DB — body not visible here
def update_special_phrases(self, phrases):
    """Record the supplied special phrases in the shared cache for later inspection."""
    self.analyser_cache.update({'special_phrases': phrases})
def add_country_names(self, code, names):
    """Append one (country-code, names) record to the shared country log."""
    record = (code, names)
    self.analyser_cache['countries'].append(record)
66 def process_place(self, place):