fix style issue found by flake8
[nominatim.git] / src / nominatim_db / tokenizer / icu_rule_loader.py
index 0aca2921b966848d87a9e30146592fbe1f104a42..978739c351c1483661ce69fc6e4a4fd8f76ef1ad 100644 (file)
@@ -61,7 +61,6 @@ class ICURuleLoader:
         # Load optional sanitizer rule set.
         self.sanitizer_rules = rules.get('sanitizers', [])
 
-
     def load_config_from_db(self, conn: Connection) -> None:
         """ Get previously saved parts of the configuration from the
             database.
@@ -81,7 +80,6 @@ class ICURuleLoader:
             self.analysis_rules = []
         self._setup_analysis()
 
-
     def save_config_to_db(self, conn: Connection) -> None:
         """ Save the part of the configuration that cannot be changed into
             the database.
@@ -90,20 +88,17 @@ class ICURuleLoader:
         set_property(conn, DBCFG_IMPORT_TRANS_RULES, self.transliteration_rules)
         set_property(conn, DBCFG_IMPORT_ANALYSIS_RULES, json.dumps(self.analysis_rules))
 
-
     def make_sanitizer(self) -> PlaceSanitizer:
         """ Create a place sanitizer from the configured rules.
         """
         return PlaceSanitizer(self.sanitizer_rules, self.config)
 
-
     def make_token_analysis(self) -> ICUTokenAnalysis:
         """ Create a token analyser from the reviouly loaded rules.
         """
         return ICUTokenAnalysis(self.normalization_rules,
                                 self.transliteration_rules, self.analysis)
 
-
     def get_search_rules(self) -> str:
         """ Return the ICU rules to be used during search.
             The rules combine normalization and transliteration.
@@ -116,23 +111,20 @@ class ICURuleLoader:
         rules.write(self.transliteration_rules)
         return rules.getvalue()
 
-
     def get_normalization_rules(self) -> str:
         """ Return rules for normalisation of a term.
         """
         return self.normalization_rules
 
-
     def get_transliteration_rules(self) -> str:
         """ Return the rules for converting a string into its asciii representation.
         """
         return self.transliteration_rules
 
-
     def _setup_analysis(self) -> None:
         """ Process the rules used for creating the various token analyzers.
         """
-        self.analysis: Dict[Optional[str], TokenAnalyzerRule]  = {}
+        self.analysis: Dict[Optional[str], TokenAnalyzerRule] = {}
 
         if not isinstance(self.analysis_rules, list):
             raise UsageError("Configuration section 'token-analysis' must be a list.")
@@ -140,7 +132,7 @@ class ICURuleLoader:
         norm = Transliterator.createFromRules("rule_loader_normalization",
                                               self.normalization_rules)
         trans = Transliterator.createFromRules("rule_loader_transliteration",
-                                              self.transliteration_rules)
+                                               self.transliteration_rules)
 
         for section in self.analysis_rules:
             name = section.get('id', None)
@@ -154,7 +146,6 @@ class ICURuleLoader:
             self.analysis[name] = TokenAnalyzerRule(section, norm, trans,
                                                     self.config)
 
-
     @staticmethod
     def _cfg_to_icu_rules(rules: Mapping[str, Any], section: str) -> str:
         """ Load an ICU ruleset from the given section. If the section is a
@@ -189,7 +180,6 @@ class TokenAnalyzerRule:
         self.config = self._analysis_mod.configure(rules, normalizer,
                                                    transliterator)
 
-
     def create(self, normalizer: Any, transliterator: Any) -> Analyzer:
         """ Create a new analyser instance for the given rule.
         """