"""
Implementation of the 'import' subcommand.
"""
import logging
from pathlib import Path

import psutil

from nominatim.db.connection import connect
from nominatim.db import status, properties
from nominatim.version import NOMINATIM_VERSION
from nominatim.errors import UsageError

# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports to avoid potentially unused imports.
# pylint: disable=E0012,C0415

LOG = logging.getLogger()


class SetupAll:
    """\
    Create a new Nominatim database from an OSM file.
    """

    @staticmethod
    def add_args(parser):
        group_name = parser.add_argument_group('Required arguments')
        group = group_name.add_mutually_exclusive_group(required=True)
        group.add_argument('--osm-file', metavar='FILE',
                           help='OSM file to be imported.')
        group.add_argument('--continue', dest='continue_at',
                           choices=['load-data', 'indexing', 'db-postprocess'],
                           help='Continue an import that was interrupted')
        group = parser.add_argument_group('Optional arguments')
        group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                           help='Size of cache to be used by osm2pgsql (in MB)')
        group.add_argument('--reverse-only', action='store_true',
                           help='Do not create tables and indexes for searching')
        group.add_argument('--no-partitions', action='store_true',
                           help=("Do not partition search indices "
                                 "(speeds up import of single country extracts)"))
        group.add_argument('--no-updates', action='store_true',
                           help="Do not keep tables that are only needed for "
                                "updating the database later")
        group = parser.add_argument_group('Expert options')
        group.add_argument('--ignore-errors', action='store_true',
                           help='Continue the import even when errors in SQL are present')
        group.add_argument('--index-noanalyse', action='store_true',
                           help='Do not perform analyse operations during indexing')


    @staticmethod
    def run(args): # pylint: disable=too-many-statements
        from ..tools import database_import
        from ..tools import refresh
        from ..indexer.indexer import Indexer
        from ..tools import postcodes
        from ..tokenizer import factory as tokenizer_factory
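
        # Fail early when the given OSM input file does not exist.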
        if args.osm_file and not Path(args.osm_file).is_file():
            LOG.fatal("OSM file '%s' does not exist.", args.osm_file)
            raise UsageError('Cannot access file.')
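
        # On a fresh import, create the database skeleton and load the
        # OSM data with osm2pgsql.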
        if args.continue_at is None:
            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
                                                    args.data_dir,
                                                    args.no_partitions,
                                                    rouser=args.config.DATABASE_WEBUSER)

            LOG.warning('Importing OSM data file')
            database_import.import_osm_data(Path(args.osm_file),
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.no_updates,
                                            ignore_errors=args.ignore_errors)
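
            # Set up the database schema: SQL functions, tables, triggers
            # and partition tables. Functions are (re)created in several
            # passes as the tables and triggers they depend on become
            # available.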
            with connect(args.config.get_libpq_dsn()) as conn:
                LOG.warning('Create functions (1st pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create tables')
                database_import.create_tables(conn, args.config,
                                              reverse_only=args.reverse_only)
                refresh.load_address_levels_from_file(conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
                LOG.warning('Create functions (2nd pass)')
                refresh.create_functions(conn, args.config, False, False)
                LOG.warning('Create table triggers')
                database_import.create_table_triggers(conn, args.config)
                LOG.warning('Create partition tables')
                database_import.create_partition_tables(conn, args.config)
                LOG.warning('Create functions (3rd pass)')
                refresh.create_functions(conn, args.config, False, False)

            LOG.warning('Importing wikipedia importance data')
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.error('Wikipedia importance dump file not found. '
                          'Will be using default importances.')
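
        # 'load-data' stage: move the imported OSM data into the placex
        # table, which is the basis for search and address computation.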
        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn, args.config.MAX_WORD_FREQUENCY)

            LOG.warning('Load data into placex table')
            database_import.load_data(args.config.get_libpq_dsn(),
                                      args.data_dir,
                                      args.threads or psutil.cpu_count() or 1)
110 LOG.warning("Setting up tokenizer")
111 tokenizer = tokenizer_factory.create_tokenizer(args.config)

        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Calculate postcodes')
            postcodes.import_postcodes(args.config.get_libpq_dsn(), args.project_dir)
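
        # 'indexing' stage: compute addresses and search terms for all
        # places in the database.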
        if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
            if args.continue_at is not None and args.continue_at != 'load-data':
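                # When resuming directly at the indexing step, the helper
                # index for finding unindexed places is missing. Recreate
                # it to speed up skipping over already indexed rows.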
                with connect(args.config.get_libpq_dsn()) as conn:
                    SetupAll._create_pending_index(conn, args.config.TABLESPACE_ADDRESS_INDEX)
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(),
                              args.threads or psutil.cpu_count() or 1)
            indexer.index_full(analyse=not args.index_noanalyse)

        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn, args.config,
                                                  drop=args.no_updates)
            LOG.warning('Create search index for default country names.')
            database_import.create_country_names(conn, args.config)
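
        # Create the scripts for the web frontend in the project directory.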
        webdir = args.project_dir / 'website'
        LOG.warning('Setup website at %s', webdir)
        refresh.setup_website(webdir, args.config)
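
        # Finally, record the data timestamp and the database version, so
        # that replication updates know where to continue.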
        with connect(args.config.get_libpq_dsn()) as conn:
            try:
                dbdate = status.compute_database_date(conn)
                status.set_status(conn, dbdate)
                LOG.info('Database is at %s.', dbdate)
            except Exception as exc: # pylint: disable=broad-except
                LOG.error('Cannot determine date of database: %s', exc)

            properties.set_property(conn, 'database_version',
                                    '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        return 0


    @staticmethod
    def _create_pending_index(conn, tablespace):
        """ Add a supporting index for finding places still to be indexed.

            This index is normally created at the end of the import process
            for later updates. When indexing was only partially done, this
            index can greatly speed up going through the already indexed
            data.
        """
        if conn.index_exists('idx_placex_pendingsector'):
            return

        with conn.cursor() as cur:
            LOG.warning('Creating support index')
            if tablespace:
                tablespace = 'TABLESPACE ' + tablespace
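            # The WHERE clause makes this a partial index covering only
            # the rows that still await indexing (indexed_status > 0).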
            cur.execute("""CREATE INDEX idx_placex_pendingsector
                           ON placex USING BTREE (rank_address,geometry_sector)
                           {} WHERE indexed_status > 0
                        """.format(tablespace))
        conn.commit()