1 """
2 Implementation of the 'import' subcommand.
3 """
4 import logging
5 from pathlib import Path
6
7 import psutil
8
9 from nominatim.db.connection import connect
10 from nominatim.db import status, properties
11 from nominatim.version import NOMINATIM_VERSION
12
13 # Do not repeat documentation of subcommand classes.
14 # pylint: disable=C0111
# Using non-top-level imports to avoid loading modules that may not be needed.
# pylint: disable=E0012,C0415

LOG = logging.getLogger()

class SetupAll:
    """\
    Create a new Nominatim database from an OSM file.
    """
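    # Example invocation (illustrative only; the full option set is the
    # one defined in add_args() below):
    #
    #   nominatim import --osm-file planet.osm.pbf --osm2pgsql-cache 8000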

    @staticmethod
    def add_args(parser):
        group_name = parser.add_argument_group('Required arguments')
        group = group_name.add_mutually_exclusive_group(required=True)
        group.add_argument('--osm-file', metavar='FILE', action='append',
                           help='OSM file to be imported'
                                ' (repeat for importing multiple files)')
        group.add_argument('--continue', dest='continue_at',
                           choices=['load-data', 'indexing', 'db-postprocess'],
                           help='Continue an import that was interrupted')
        group = parser.add_argument_group('Optional arguments')
        group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
                           help='Size of cache to be used by osm2pgsql (in MB)')
        group.add_argument('--reverse-only', action='store_true',
                           help='Do not create tables and indexes for searching')
        group.add_argument('--no-partitions', action='store_true',
                           help=("Do not partition search indices "
                                 "(speeds up import of single country extracts)"))
        group.add_argument('--no-updates', action='store_true',
                           help="Do not keep tables that are only needed for "
                                "updating the database later")
        group = parser.add_argument_group('Expert options')
        group.add_argument('--ignore-errors', action='store_true',
                           help='Continue import even when errors in SQL are present')
        group.add_argument('--index-noanalyse', action='store_true',
                           help='Do not perform analyse operations during indexing')


    @staticmethod
    def run(args):
        from ..tools import database_import, refresh, postcodes, freeze, country_info
        from ..indexer.indexer import Indexer

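        # The import proceeds in stages: OSM data import, 'load-data',
        # 'indexing' and 'db-postprocess'. '--continue <stage>' resumes an
        # interrupted import at the named stage, skipping everything before it.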
        if args.continue_at is None:
            files = args.get_osm_file_list()

            LOG.warning('Creating database')
            database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
                                                    rouser=args.config.DATABASE_WEBUSER)

            LOG.warning('Setting up country tables')
            country_info.setup_country_tables(args.config.get_libpq_dsn(),
                                              args.data_dir,
                                              args.no_partitions)

            LOG.warning('Importing OSM data file')
            database_import.import_osm_data(files,
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.no_updates,
                                            ignore_errors=args.ignore_errors)

            SetupAll._setup_tables(args.config, args.reverse_only)

            LOG.warning('Importing wikipedia importance data')
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                LOG.error('Wikipedia importance dump file not found. '
                          'Will be using default importances.')

        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(conn)

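            # psutil.cpu_count() may return None when the number of CPUs
            # cannot be determined, hence the final 'or 1' fallback.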
            LOG.warning('Load data into placex table')
            database_import.load_data(args.config.get_libpq_dsn(),
                                      args.threads or psutil.cpu_count() or 1)

        LOG.warning("Setting up tokenizer")
        tokenizer = SetupAll._get_tokenizer(args.continue_at, args.config)

        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Calculate postcodes')
            postcodes.update_postcodes(args.config.get_libpq_dsn(),
                                       args.project_dir, tokenizer)

        if args.continue_at is None or args.continue_at in ('load-data', 'indexing'):
            if args.continue_at is not None and args.continue_at != 'load-data':
                with connect(args.config.get_libpq_dsn()) as conn:
                    SetupAll._create_pending_index(conn, args.config.TABLESPACE_ADDRESS_INDEX)
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
                              args.threads or psutil.cpu_count() or 1)
            indexer.index_full(analyse=not args.index_noanalyse)

        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn, args.config,
                                                  drop=args.no_updates)
            LOG.warning('Create search index for default country names.')
            country_info.create_country_names(conn, tokenizer,
                                              args.config.LANGUAGES)
            conn.commit()
            if args.no_updates:
                freeze.drop_update_tables(conn)
        tokenizer.finalize_import(args.config)


        webdir = args.project_dir / 'website'
        LOG.warning('Setup website at %s', webdir)
        with connect(args.config.get_libpq_dsn()) as conn:
            refresh.setup_website(webdir, args.config, conn)

        with connect(args.config.get_libpq_dsn()) as conn:
            SetupAll._set_database_date(conn)
            properties.set_property(conn, 'database_version',
                                    '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        return 0

    @staticmethod
    def _setup_tables(config, reverse_only):
        """ Set up the basic database layout: tables, indexes and functions.
        """
        from ..tools import database_import, refresh

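        # create_functions() is called three times below: the first pass
        # supplies the functions needed for creating the tables, the later
        # passes recreate them once the tables and partition tables they
        # reference exist (an inference from the call order, not a
        # documented contract).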
        with connect(config.get_libpq_dsn()) as conn:
            LOG.warning('Create functions (1st pass)')
            refresh.create_functions(conn, config, False, False)
            LOG.warning('Create tables')
            database_import.create_tables(conn, config, reverse_only=reverse_only)
            refresh.load_address_levels_from_file(conn, Path(config.ADDRESS_LEVEL_CONFIG))
            LOG.warning('Create functions (2nd pass)')
            refresh.create_functions(conn, config, False, False)
            LOG.warning('Create table triggers')
            database_import.create_table_triggers(conn, config)
            LOG.warning('Create partition tables')
            database_import.create_partition_tables(conn, config)
            LOG.warning('Create functions (3rd pass)')
            refresh.create_functions(conn, config, False, False)


    @staticmethod
    def _get_tokenizer(continue_at, config):
        """ Set up a new tokenizer or load an already initialised one.
        """
        from ..tokenizer import factory as tokenizer_factory

        if continue_at is None or continue_at == 'load-data':
            # (re)initialise the tokenizer data
            return tokenizer_factory.create_tokenizer(config)

        # just load the tokenizer
        return tokenizer_factory.get_tokenizer_for_db(config)

    @staticmethod
    def _create_pending_index(conn, tablespace):
        """ Add a supporting index for finding places still to be indexed.

            This index is normally created at the end of the import process
            for use by later updates. When an import is resumed after partial
            indexing, the index greatly speeds up skipping over the data that
            has already been indexed.
        """
        if conn.index_exists('idx_placex_pendingsector'):
            return

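        # This creates a partial index: the WHERE clause restricts it to
        # rows that still await indexing (indexed_status > 0), keeping
        # the index small.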
        with conn.cursor() as cur:
            LOG.warning('Creating support index')
            if tablespace:
                tablespace = 'TABLESPACE ' + tablespace
            cur.execute("""CREATE INDEX idx_placex_pendingsector
                           ON placex USING BTREE (rank_address,geometry_sector)
                           {} WHERE indexed_status > 0
                        """.format(tablespace))
        conn.commit()


    @staticmethod
    def _set_database_date(conn):
        """ Determine the database date and set the status accordingly.
        """
        try:
            dbdate = status.compute_database_date(conn)
            status.set_status(conn, dbdate)
            LOG.info('Database is at %s.', dbdate)
        except Exception as exc: # pylint: disable=broad-except
            LOG.error('Cannot determine date of database: %s', exc)