nominatim/clicmd/setup.py
1 """
2 Implementation of the 'import' subcommand.
3 """
import logging
from pathlib import Path

import psutil

from nominatim.db.connection import connect
from nominatim.db import status, properties
from nominatim.version import NOMINATIM_VERSION
from nominatim.errors import UsageError

# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports to avoid loading modules that may end up unused.
# pylint: disable=E0012,C0415

LOG = logging.getLogger()

class SetupAll:
    """\
    Create a new Nominatim database from an OSM file.
    """

    @staticmethod
    def add_args(parser):
        required_group = parser.add_argument_group('Required arguments')
        group = required_group.add_mutually_exclusive_group(required=True)
        group.add_argument('--osm-file', metavar='FILE',
                           help='OSM file to be imported.')
        group.add_argument('--continue', dest='continue_at',
                           choices=['load-data', 'indexing', 'db-postprocess'],
                           help='Continue an import that was interrupted.')
        group = parser.add_argument_group('Optional arguments')
        group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                           help='Size of cache to be used by osm2pgsql (in MB).')
        group.add_argument('--reverse-only', action='store_true',
                           help='Do not create tables and indexes for searching.')
        group.add_argument('--no-partitions', action='store_true',
                           help=('Do not partition search indices '
                                 '(speeds up import of single country extracts).'))
        group.add_argument('--no-updates', action='store_true',
                           help='Do not keep tables that are only needed for '
                                'updating the database later.')
        group = parser.add_argument_group('Expert options')
        group.add_argument('--ignore-errors', action='store_true',
                           help='Continue the import even when errors in SQL are present.')
        group.add_argument('--index-noanalyse', action='store_true',
                           help='Do not perform analyse operations during indexing.')


    @staticmethod
    def run(args): # pylint: disable=too-many-statements
        from ..tools import database_import, postcodes, refresh
        from ..indexer.indexer import Indexer

        if args.osm_file and not Path(args.osm_file).is_file():
            LOG.fatal("OSM file '%s' does not exist.", args.osm_file)
            raise UsageError('Cannot access file.')

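        # A fresh import (no --continue given) runs every phase below;
        # --continue skips straight to the named phase of an interrupted run.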
        if args.continue_at is None:
            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
                                                    args.data_dir,
                                                    args.no_partitions,
                                                    rouser=args.config.DATABASE_WEBUSER)

            LOG.warning('Installing database module')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.install_module(args.module_dir, args.project_dir,
                                               args.config.DATABASE_MODULE_PATH,
                                               conn=conn)

            LOG.warning('Importing OSM data file')
            database_import.import_osm_data(Path(args.osm_file),
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.no_updates,
                                            ignore_errors=args.ignore_errors)

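            # The SQL functions are created in three passes because functions,
            # tables, triggers and partition tables reference each other:
            # each pass resolves dependencies introduced by the previous step.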
            with connect(args.config.get_libpq_dsn()) as conn:
                LOG.warning('Create functions (1st pass)')
                refresh.create_functions(conn, args.config, args.sqllib_dir,
                                         False, False)
                LOG.warning('Create tables')
                database_import.create_tables(conn, args.config, args.sqllib_dir,
                                              reverse_only=args.reverse_only)
                refresh.load_address_levels_from_file(conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
                LOG.warning('Create functions (2nd pass)')
                refresh.create_functions(conn, args.config, args.sqllib_dir,
                                         False, False)
                LOG.warning('Create table triggers')
                database_import.create_table_triggers(conn, args.config, args.sqllib_dir)
                LOG.warning('Create partition tables')
                database_import.create_partition_tables(conn, args.config, args.sqllib_dir)
                LOG.warning('Create functions (3rd pass)')
                refresh.create_functions(conn, args.config, args.sqllib_dir,
                                         False, False)

            LOG.warning('Importing wikipedia importance data')
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.error('Wikipedia importance dump file not found. '
                          'Falling back to default importances.')

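        # Each of the remaining phases also runs for all later --continue
        # entry points, e.g. '--continue load-data' still performs indexing
        # and post-processing afterwards.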
        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn, args.config.MAX_WORD_FREQUENCY)

            LOG.warning('Load data into placex table')
            database_import.load_data(args.config.get_libpq_dsn(),
                                      args.data_dir,
                                      args.threads or psutil.cpu_count() or 1)

            LOG.warning('Calculate postcodes')
            postcodes.import_postcodes(args.config.get_libpq_dsn(), args.project_dir)

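        # When resuming an interrupted indexing run, re-create the helper
        # index first so that rows which are already indexed can be skipped
        # cheaply (see _create_pending_index below).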
        if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
            if args.continue_at is not None and args.continue_at != 'load-data':
                with connect(args.config.get_libpq_dsn()) as conn:
                    SetupAll._create_pending_index(conn, args.config.TABLESPACE_ADDRESS_INDEX)
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(),
                              args.threads or psutil.cpu_count() or 1)
            indexer.index_full(analyse=not args.index_noanalyse)

        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn, args.config,
                                                  args.sqllib_dir,
                                                  drop=args.no_updates)
            LOG.warning('Create search index for default country names.')
            database_import.create_country_names(conn, args.config)

        webdir = args.project_dir / 'website'
        LOG.warning('Setup website at %s', webdir)
        refresh.setup_website(webdir, args.phplib_dir, args.config)

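        # Finally, record the data timestamp and database format version so
        # that later update runs know what they are working with.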
        with connect(args.config.get_libpq_dsn()) as conn:
            try:
                dbdate = status.compute_database_date(conn)
                status.set_status(conn, dbdate)
                LOG.info('Database is at %s.', dbdate)
            except Exception as exc: # pylint: disable=broad-except
                LOG.error('Cannot determine date of database: %s', exc)

            properties.set_property(conn, 'database_version',
                                    '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        return 0


    @staticmethod
    def _create_pending_index(conn, tablespace):
        """ Add a supporting index for finding places that still need indexing.

            This index is normally created at the end of the import process
            for later updates. When indexing was only partially done, creating
            it early greatly speeds up skipping over already indexed data.
        """
        if conn.index_exists('idx_placex_pendingsector'):
            return

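        # Partial index: only rows that are still waiting to be indexed
        # (indexed_status > 0) are covered, so it stays small while
        # indexing proceeds.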
        with conn.cursor() as cur:
            LOG.warning('Creating support index')
            # Guard against an unset tablespace ending up as 'None' in the SQL.
            tablespace = 'TABLESPACE ' + tablespace if tablespace else ''
            cur.execute("""CREATE INDEX idx_placex_pendingsector
                           ON placex USING BTREE (rank_address,geometry_sector)
                           {} WHERE indexed_status > 0
                        """.format(tablespace))
        conn.commit()