Merge remote-tracking branch 'upstream/master'

diff --git a/nominatim/clicmd/replication.py b/nominatim/clicmd/replication.py
index f9c5561a5c53edd099bb0855e9a58562335e3514..4c8cd44e2a77d351015600b3112bc4dca8d9f8ca 100644
--- a/nominatim/clicmd/replication.py
+++ b/nominatim/clicmd/replication.py
@@ -6,9 +6,9 @@ import logging
 import socket
 import time
 
-from ..db import status
-from ..db.connection import connect
-from ..errors import UsageError
+from nominatim.db import status
+from nominatim.db.connection import connect
+from nominatim.errors import UsageError
 
 LOG = logging.getLogger()
 
@@ -55,8 +55,7 @@ class UpdateReplication:
             replication.init_replication(conn, base_url=args.config.REPLICATION_URL)
             if args.update_functions:
                 LOG.warning("Create functions")
-                refresh.create_functions(conn, args.config, args.sqllib_dir,
-                                         True, False)
+                refresh.create_functions(conn, args.config, True, False)
         return 0
 
 
@@ -84,6 +83,7 @@ class UpdateReplication:
     def _update(args):
         from ..tools import replication
         from ..indexer.indexer import Indexer
+        from ..tokenizer import factory as tokenizer_factory
 
         params = args.osm2pgsql_options(default_cache=2000, default_threads=1)
         params.update(base_url=args.config.REPLICATION_URL,
@@ -93,7 +93,7 @@ class UpdateReplication:
                       indexed_only=not args.once)
 
         # Sanity check to not overwhelm the Geofabrik servers.
-        if 'download.geofabrik.de'in params['base_url']\
+        if 'download.geofabrik.de' in params['base_url']\
            and params['update_interval'] < 86400:
             LOG.fatal("Update interval too low for download.geofabrik.de.\n"
                       "Please check install documentation "
@@ -107,6 +107,8 @@ class UpdateReplication:
                 raise UsageError("Bad argument '--no-index'.")
             recheck_interval = args.config.get_int('REPLICATION_RECHECK_INTERVAL')
 
+        tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
+
         while True:
             with connect(args.config.get_libpq_dsn()) as conn:
                 start = dt.datetime.now(dt.timezone.utc)
@@ -114,10 +116,11 @@ class UpdateReplication:
                 if state is not replication.UpdateState.NO_CHANGES:
                     status.log_status(conn, start, 'import')
                 batchdate, _, _ = status.get_status(conn)
+                conn.commit()
 
             if state is not replication.UpdateState.NO_CHANGES and args.do_index:
                 index_start = dt.datetime.now(dt.timezone.utc)
-                indexer = Indexer(args.config.get_libpq_dsn(),
+                indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
                                   args.threads or 1)
                 indexer.index_boundaries(0, 30)
                 indexer.index_by_rank(0, 30)
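
The tokenizer-related hunks above belong together: the factory import, the single get_tokenizer_for_db() call placed before the update loop, and the extra argument to Indexer. A minimal sketch of how these pieces fit, reusing the names from this diff; the wrapper function and its parameters are illustrative only:

from nominatim.indexer.indexer import Indexer
from nominatim.tokenizer import factory as tokenizer_factory

def run_update_loop(config, threads=1):
    # Load the tokenizer that matches the existing database once, outside the
    # endless replication loop, so every indexing pass reuses the same instance.
    tokenizer = tokenizer_factory.get_tokenizer_for_db(config)

    while True:
        # ... fetch and apply the next replication batch here ...
        indexer = Indexer(config.get_libpq_dsn(), tokenizer, threads)
        indexer.index_boundaries(0, 30)
        indexer.index_by_rank(0, 30)
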
@@ -125,6 +128,7 @@ class UpdateReplication:
                 with connect(args.config.get_libpq_dsn()) as conn:
                     status.set_indexed(conn, True)
                     status.log_status(conn, index_start, 'index')
+                    conn.commit()
             else:
                 index_start = None
 
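
Both conn.commit() additions follow the same pattern: the connect() context manager used here apparently does not commit when the block exits, so status rows written inside the with block have to be committed explicitly before the connection is closed. A minimal sketch of the pattern for the indexing case, assuming only the interfaces used above; the wrapper function is illustrative:

import datetime as dt

from nominatim.db import status
from nominatim.db.connection import connect

def record_index_run(dsn):
    index_start = dt.datetime.now(dt.timezone.utc)
    # ... indexing happens here ...
    with connect(dsn) as conn:
        status.set_indexed(conn, True)
        status.log_status(conn, index_start, 'index')
        # Persist the status update explicitly; leaving the block does not
        # commit on its own.
        conn.commit()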