diff --git a/nominatim/tokenizer/legacy_tokenizer.py b/nominatim/tokenizer/legacy_tokenizer.py
index 2d28a8b29891623e72e7f8c2f0f7b7c9cafbd160..93808cc39f3407458bb2d570d2a8740128f2c168 100644
--- a/nominatim/tokenizer/legacy_tokenizer.py
+++ b/nominatim/tokenizer/legacy_tokenizer.py
@@ -210,7 +210,7 @@ class LegacyTokenizer(AbstractTokenizer):
             self._save_config(conn, config)
 
 
-    def update_statistics(self, _: Configuration) -> None:
+    def update_statistics(self, config: Configuration, threads: int = 1) -> None:
         """ Recompute the frequency of full words.
         """
         with connect(self.dsn) as conn:
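
A hedged usage sketch of the new update_statistics() signature follows; the Configuration setup, the constructor arguments and the thread count are assumptions for illustration only, not taken from this diff.

    # Hypothetical caller sketch, not part of this diff. Assumes a reachable
    # Nominatim database; Configuration(None) (no project directory) and the
    # LegacyTokenizer constructor arguments are illustrative guesses, not
    # verified by the hunk above.
    from pathlib import Path

    from nominatim.config import Configuration
    from nominatim.tokenizer.legacy_tokenizer import LegacyTokenizer

    config = Configuration(None)
    tokenizer = LegacyTokenizer('dbname=nominatim', Path('tokenizer-data'))
    tokenizer.update_statistics(config, threads=2)   # new signature; the lines shown above do not use `threads`

Accepting config and threads presumably keeps the legacy tokenizer's signature aligned with the shared AbstractTokenizer interface, even though the word-frequency recount itself runs inside the database.
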
@@ -269,15 +269,16 @@ class LegacyTokenizer(AbstractTokenizer):
     def _install_php(self, config: Configuration, overwrite: bool = True) -> None:
         """ Install the php script for the tokenizer.
         """
-        php_file = self.data_dir / "tokenizer.php"
-
-        if not php_file.exists() or overwrite:
-            php_file.write_text(dedent(f"""\
-                <?php
-                @define('CONST_Max_Word_Frequency', {config.MAX_WORD_FREQUENCY});
-                @define('CONST_Term_Normalization_Rules', "{config.TERM_NORMALIZATION}");
-                require_once('{config.lib_dir.php}/tokenizer/legacy_tokenizer.php');
-                """), encoding='utf-8')
+        if config.lib_dir.php is not None:
+            php_file = self.data_dir / "tokenizer.php"
+
+            if not php_file.exists() or overwrite:
+                php_file.write_text(dedent(f"""\
+                    <?php
+                    @define('CONST_Max_Word_Frequency', {config.MAX_WORD_FREQUENCY});
+                    @define('CONST_Term_Normalization_Rules', "{config.TERM_NORMALIZATION}");
+                    require_once('{config.lib_dir.php}/tokenizer/legacy_tokenizer.php');
+                    """), encoding='utf-8')
 
 
     def _init_db_tables(self, config: Configuration) -> None:
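
The second hunk only adds a guard: when config.lib_dir.php is unset, no tokenizer.php is written at all. Below is a minimal stand-alone sketch of that behaviour; the helper name and its parameters are hypothetical, not part of the Nominatim API.

    # Stand-alone illustration of the guard added above; `install_php_stub` and
    # its parameters are hypothetical, not Nominatim code.
    from pathlib import Path
    from textwrap import dedent
    from typing import Optional

    def install_php_stub(data_dir: Path, php_lib_dir: Optional[Path],
                         max_word_freq: int, term_normalization: str,
                         overwrite: bool = True) -> None:
        if php_lib_dir is None:
            return  # no PHP frontend configured, so there is nothing to install

        php_file = data_dir / "tokenizer.php"
        if not php_file.exists() or overwrite:
            php_file.write_text(dedent(f"""\
                <?php
                @define('CONST_Max_Word_Frequency', {max_word_freq});
                @define('CONST_Term_Normalization_Rules', "{term_normalization}");
                require_once('{php_lib_dir}/tokenizer/legacy_tokenizer.php');
                """), encoding='utf-8')

Skipping the write presumably lets installations that run without the PHP frontend, where lib_dir.php is None, complete tokenizer setup without referencing a directory that does not exist.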