def create(dsn, data_dir):
    """ Create a new instance of the tokenizer provided by this module.
    """
    return DummyTokenizer(dsn, data_dir)


class DummyTokenizer:
    """ Tokenizer stub that records how it was initialised and shares a
        cache with the name analyzers it hands out, so tests can inspect both.
    """

    def __init__(self, dsn, data_dir):
        self.dsn = dsn
        self.data_dir = data_dir
        self.init_state = None
        self.analyser_cache = {}

    def init_new_db(self, *args, **kwargs):
        assert self.init_state is None
        self.init_state = "new"

    def init_from_project(self):
        assert self.init_state is None
        self.init_state = "loaded"

    def name_analyzer(self):
        return DummyNameAnalyzer(self.analyser_cache)


class DummyNameAnalyzer:

    def __init__(self, cache):
        self.analyser_cache = cache
        cache['countries'] = []

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        pass

    def add_postcodes_from_db(self):
        pass

    def update_special_phrases(self, phrases):
        self.analyser_cache['special_phrases'] = phrases

    def add_country_names(self, code, names):
        self.analyser_cache['countries'].append((code, names))

    def process_place(self, place):
        # Dummy analysis: no token information is computed for the place.
        return {}
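

# A minimal usage sketch (an assumption, not part of the original module):
# roughly how test code might drive the dummy tokenizer and then check the
# shared analyser_cache. The dsn and data_dir values below are hypothetical.
if __name__ == '__main__':
    tokenizer = create('dbname=test_nominatim', '/tmp/tokenizer-data')
    tokenizer.init_new_db()

    with tokenizer.name_analyzer() as analyzer:
        analyzer.update_special_phrases(['special phrase'])
        analyzer.add_country_names('de', {'name': 'Deutschland'})

    assert tokenizer.init_state == "new"
    assert tokenizer.analyser_cache['special_phrases'] == ['special phrase']
    assert tokenizer.analyser_cache['countries'] == [('de', {'name': 'Deutschland'})]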