from typing import cast
import argparse
import asyncio
import logging

import psutil

from .args import NominatimArgs
from ..db.connection import connect
from ..tools.freeze import is_frozen
# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports to avoid eventually unused imports.
# pylint: disable=E0012,C0415
# Module-level logger shared by this command module.
LOG = logging.getLogger()
class UpdateAddData:
    """\
    Add additional data from a file or an online source.

    The command can also be used to add external non-OSM data to the
    database. At the moment the only supported format is TIGER housenumber
    data. See the online documentation at
    https://nominatim.org/release-docs/latest/customize/Tiger/
    for more information.
    """

    def add_args(self, parser: argparse.ArgumentParser) -> None:
        # NOTE(review): only the '--socket-timeout' option was visible in
        # this chunk; the argument groups and options defined before it
        # (including the original definition of 'group2') are not shown
        # here — confirm against the full file.
        group2 = parser.add_argument_group('Extra arguments')
        group2.add_argument('--socket-timeout', dest='socket_timeout', type=int, default=60,
                            help='Set timeout for file downloads')
def run(self, args: NominatimArgs) -> int:
- from ..tokenizer import factory as tokenizer_factory
- from ..tools import tiger_data, add_osm_data
+ from ..tools import add_osm_data
+
+ with connect(args.config.get_libpq_dsn()) as conn:
+ if is_frozen(conn):
+ print('Database is marked frozen. New data can\'t be added.')
+ return 1
if args.tiger_data:
- tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
- return tiger_data.add_tiger_data(args.tiger_data,
- args.config,
- args.threads or psutil.cpu_count() or 1,
- tokenizer)
+ return asyncio.run(self._add_tiger_data(args))
osm2pgsql_params = args.osm2pgsql_options(default_cache=1000, default_threads=1)
if args.file or args.diff:
osm2pgsql_params)
return 0
+ async def _add_tiger_data(self, args: NominatimArgs) -> int:
+ from ..tokenizer import factory as tokenizer_factory
+ from ..tools import tiger_data
+
+ assert args.tiger_data
+
+ tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
+ return await tiger_data.add_tiger_data(args.tiger_data,
+ args.config,
+ args.threads or psutil.cpu_count() or 1,
+ tokenizer)