From b2722650d48ab719ca5f6e8606629c22b02941d9 Mon Sep 17 00:00:00 2001
From: Sarah Hoffmann
Date: Tue, 18 May 2021 16:28:21 +0200
Subject: [PATCH] do not hide errors when importing tokenizer

Explicitly check for the tokenizer source file to check that
the name is correct. We can't use the import error for that
because it hides other import errors like a missing
library.

Fixes #2327.
---
 nominatim/tokenizer/factory.py | 10 ++++++----
 test/python/conftest.py        |  3 ++-
 2 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/nominatim/tokenizer/factory.py b/nominatim/tokenizer/factory.py
index e0c06293..069672d4 100644
--- a/nominatim/tokenizer/factory.py
+++ b/nominatim/tokenizer/factory.py
@@ -15,6 +15,7 @@ normalizer module is installed, when the tokenizer is created.
 """
 import logging
 import importlib
+from pathlib import Path
 
 from ..errors import UsageError
 from ..db import properties
@@ -25,12 +26,13 @@ LOG = logging.getLogger()
 def _import_tokenizer(name):
     """ Load the tokenizer.py module from project directory.
     """
-    try:
-        return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
-    except ModuleNotFoundError as exp:
+    src_file = Path(__file__).parent / (name + '_tokenizer.py')
+    if not src_file.is_file():
         LOG.fatal("No tokenizer named '%s' available. "
                   "Check the setting of NOMINATIM_TOKENIZER.", name)
-        raise UsageError('Tokenizer not found') from exp
+        raise UsageError('Tokenizer not found')
+
+    return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
 
 
 def create_tokenizer(config, init_db=True, module_name=None):
diff --git a/test/python/conftest.py b/test/python/conftest.py
index 923e6876..a3249474 100644
--- a/test/python/conftest.py
+++ b/test/python/conftest.py
@@ -17,6 +17,7 @@ from nominatim.config import Configuration
 from nominatim.db import connection
 from nominatim.db.sql_preprocessor import SQLPreprocessor
 from nominatim.db import properties
+import nominatim.tokenizer.factory
 
 import dummy_tokenizer
 import mocks
@@ -273,7 +274,7 @@ def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
     def _import_dummy(module, *args, **kwargs):
         return dummy_tokenizer
 
-    monkeypatch.setattr(importlib, "import_module", _import_dummy)
+    monkeypatch.setattr(nominatim.tokenizer.factory, "_import_tokenizer", _import_dummy)
     properties.set_property(temp_db_conn, 'tokenizer', 'dummy')
 
     def _create_tokenizer():
-- 
2.39.5