+# Database properties under which the imported ICU rules are stored.
+DBCFG_IMPORT_NORM_RULES = "tokenizer_import_normalisation"
+DBCFG_IMPORT_TRANS_RULES = "tokenizer_import_transliteration"
+DBCFG_IMPORT_ANALYSIS_RULES = "tokenizer_import_analysis_rules"
+
+
+def _get_section(rules: Mapping[str, Any], section: str) -> Any:
+ """ Get the section named 'section' from the rules. If the section does
+ not exist, raise a usage error with a meaningful message.
+ """
+ if section not in rules:
+ LOG.fatal("Section '%s' not found in tokenizer config.", section)
+ raise UsageError("Syntax error in tokenizer configuration file.")
+
+ return rules[section]
+
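For reviewers who want to poke at the lookup behaviour in isolation, here is a minimal, self-contained sketch; the stand-in `UsageError` class, the example section names and the plain dict used in place of the parsed tokenizer configuration are assumptions for illustration, not part of this change:

```python
from typing import Any, Mapping


class UsageError(Exception):
    """Stand-in for Nominatim's UsageError."""


def _get_section(rules: Mapping[str, Any], section: str) -> Any:
    """Simplified copy of the helper above, without the LOG.fatal call."""
    if section not in rules:
        raise UsageError(f"Section '{section}' not found in tokenizer config.")
    return rules[section]


# Example rules; the section names and ICU rule strings are illustrative only.
rules = {
    "normalization": ":: lower ();",
    "transliteration": ":: Latin; :: Ascii;",
}

print(_get_section(rules, "normalization"))    # -> ":: lower ();"

try:
    _get_section(rules, "token-analysis")      # missing section
except UsageError as err:
    print(err)                                 # -> usage error is raised
```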