"""
def add_args(self, parser: argparse.ArgumentParser) -> None:
- group_name = parser.add_argument_group('Required arguments')
- group1 = group_name.add_mutually_exclusive_group(required=True)
+ group1 = parser.add_argument_group('Required arguments')
group1.add_argument('--osm-file', metavar='FILE', action='append',
help='OSM file to be imported'
- ' (repeat for importing multiple files)')
+ ' (repeat for importing multiple files)',
+ default=None)
group1.add_argument('--continue', dest='continue_at',
- choices=['load-data', 'indexing', 'db-postprocess'],
- help='Continue an import that was interrupted')
+ choices=['import-from-file', 'load-data', 'indexing', 'db-postprocess'],
+ help='Continue an import that was interrupted',
+ default=None)
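+ # 'import-from-file' is the new entry point that pairs with
+ # --prepare-database: it resumes with the OSM data import into a
+ # database that has already been created.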
group2 = parser.add_argument_group('Optional arguments')
group2.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
help='Size of cache to be used by osm2pgsql (in MB)')
group3 = parser.add_argument_group('Expert options')
group3.add_argument('--ignore-errors', action='store_true',
                    help='Continue import even when errors in SQL are present')
group3.add_argument('--index-noanalyse', action='store_true',
help='Do not perform analyse operations during index (expert only)')
- group3.add_argument('--no-superuser', action='store_true',
- help='Do not attempt to create the database')
+ group3.add_argument('--prepare-database', action='store_true',
+ help='Create the database but do not import any data')
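+ # Taken together, the new options allow a two-step setup, e.g.
+ # (assuming the standard 'nominatim import' entry point):
+ #   nominatim import --prepare-database
+ #   nominatim import --osm-file planet.osm.pbf --continue import-from-file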
- def run(self, args: NominatimArgs) -> int: # pylint: disable=too-many-statements
+ def run(self, args: NominatimArgs) -> int: # pylint: disable=too-many-statements, too-many-branches
from ..data import country_info
from ..tools import database_import, refresh, postcodes, freeze
from ..indexer.indexer import Indexer
country_info.setup_country_config(args.config)
- if args.continue_at is None:
+ if args.osm_file is None and args.continue_at is None and not args.prepare_database:
+ raise UsageError("No input files (use --osm-file).")
+
+ if args.osm_file is not None and args.continue_at not in ('import-from-file', None):
+ raise UsageError(f"Cannot use --continue {args.continue_at} and --osm-file together.")
+
+ if args.continue_at is not None and args.prepare_database:
+     raise UsageError("Cannot use --continue and --prepare-database together.")
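+ # The checks above reject unsupported flag combinations, so the
+ # stages below can assume a consistent set of arguments.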
+
+ if args.prepare_database or args.continue_at is None:
+ LOG.warning('Creating database')
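+ # rouser presumably names the read-only frontend user taken from
+ # the DATABASE_WEBUSER setting.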
+ database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
+ rouser=args.config.DATABASE_WEBUSER)
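+ # With --prepare-database the run stops once the empty database
+ # exists; a later run with --continue import-from-file picks up
+ # from this point.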
+ if args.prepare_database:
+ return 0
+
+ if args.continue_at in ('import-from-file', None):
files = args.get_osm_file_list()
if not files:
raise UsageError("No input files (use --osm-file).")
- if not args.no_superuser:
- LOG.warning('Creating database')
- database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
- rouser=args.config.DATABASE_WEBUSER)
-
- LOG.warning('Setting up country tables')
- country_info.setup_country_tables(args.config.get_libpq_dsn(),
- args.config.lib_dir.data,
- args.no_partitions)
-
- LOG.warning('Importing OSM data file')
- database_import.import_osm_data(files,
- args.osm2pgsql_options(0, 1),
- drop=args.no_updates,
- ignore_errors=args.ignore_errors)
-
- LOG.warning('Importing wikipedia importance data')
- data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
- if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
- data_path) > 0:
- LOG.error('Wikipedia importance dump file not found. '
- 'Calculating importance values of locations will not '
- 'use Wikipedia importance data.')
-
- LOG.warning('Importing secondary importance raster data')
- if refresh.import_secondary_importance(args.config.get_libpq_dsn(),
- args.project_dir) != 0:
- LOG.error('Secondary importance file not imported. '
- 'Falling back to default ranking.')
-
- self._setup_tables(args.config, args.reverse_only)
-
- if args.continue_at is None or args.continue_at == 'load-data':
+ if args.continue_at in ('import-from-file', None):
+ # Check if the correct plugins are installed
+ database_import.check_existing_database_plugins(args.config.get_libpq_dsn())
+ LOG.warning('Setting up country tables')
+ country_info.setup_country_tables(args.config.get_libpq_dsn(),
+ args.config.lib_dir.data,
+ args.no_partitions)
+
+ LOG.warning('Importing OSM data file')
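+ # drop=args.no_updates discards data that is only needed for
+ # applying replication updates later.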
+ database_import.import_osm_data(files,
+ args.osm2pgsql_options(0, 1),
+ drop=args.no_updates,
+ ignore_errors=args.ignore_errors)
+
+ LOG.warning('Importing wikipedia importance data')
+ data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
+ if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
+ data_path) > 0:
+ LOG.error('Wikipedia importance dump file not found. '
+ 'Calculating importance values of locations will not '
+ 'use Wikipedia importance data.')
+
+ LOG.warning('Importing secondary importance raster data')
+ if refresh.import_secondary_importance(args.config.get_libpq_dsn(),
+ args.project_dir) != 0:
+ LOG.error('Secondary importance file not imported. '
+ 'Falling back to default ranking.')
+
+ self._setup_tables(args.config, args.reverse_only)
+
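+ # Each stage check lists its own entry point plus all earlier ones,
+ # so continuing at an early stage also runs every later stage.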
+ if args.continue_at in ('import-from-file', 'load-data', None):
LOG.warning('Initialise tables')
with connect(args.config.get_libpq_dsn()) as conn:
database_import.truncate_data_tables(conn)
LOG.warning("Setting up tokenizer")
tokenizer = self._get_tokenizer(args.continue_at, args.config)
- if args.continue_at is None or args.continue_at == 'load-data':
+ if args.continue_at in ('import-from-file', 'load-data', None):
LOG.warning('Calculate postcodes')
postcodes.update_postcodes(args.config.get_libpq_dsn(),
args.project_dir, tokenizer)
- if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
+ if args.continue_at in ('import-from-file', 'load-data', 'indexing', None):
LOG.warning('Indexing places')
indexer = Indexer(args.config.get_libpq_dsn(), tokenizer, num_threads)
indexer.index_full(analyse=not args.index_noanalyse)
tokenizer.finalize_import(args.config)
LOG.warning('Recompute word counts')
- tokenizer.update_statistics()
+ tokenizer.update_statistics(args.config)
webdir = args.project_dir / 'website'
LOG.warning('Setup website at %s', webdir)
"""
from ..tokenizer import factory as tokenizer_factory
- if continue_at is None or continue_at == 'load-data':
+ if continue_at in ('import-from-file', 'load-data', None):
# (re)initialise the tokenizer data
return tokenizer_factory.create_tokenizer(config)