2 Command-line interface to the Nominatim functions for import, update,
3 database administration and querying.
11 from pathlib import Path
13 from .config import Configuration
14 from .tools.exec_utils import run_legacy_script, run_api_script
15 from .db.connection import connect
16 from .db import status
17 from .errors import UsageError
19 LOG = logging.getLogger()
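# _num_system_cpus() below prefers os.sched_getaffinity(), which reflects the
# CPUs this process is actually allowed to run on; it falls back to
# os.cpu_count() when that information is not available.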
21 def _num_system_cpus():
23 cpus = len(os.sched_getaffinity(0))
24 except NotImplementedError:
27 return cpus or os.cpu_count()
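# CommandlineParser bundles the shared argparse setup; the individual command
# classes defined further down are registered with it in nominatim() at the end
# of this module.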
30 class CommandlineParser:
31 """ Wraps some of the common functions for parsing the command line
32 and setting up subcommands.
34 def __init__(self, prog, description):
35 self.parser = argparse.ArgumentParser(
37 description=description,
38 formatter_class=argparse.RawDescriptionHelpFormatter)
40 self.subs = self.parser.add_subparsers(title='available commands',
43 # Arguments added to every sub-command
44 self.default_args = argparse.ArgumentParser(add_help=False)
45 group = self.default_args.add_argument_group('Default arguments')
46 group.add_argument('-h', '--help', action='help',
47 help='Show this help message and exit')
48 group.add_argument('-q', '--quiet', action='store_const', const=0,
49 dest='verbose', default=1,
50 help='Print only error messages')
51 group.add_argument('-v', '--verbose', action='count', default=1,
52 help='Increase verbosity of output')
53 group.add_argument('--project-dir', metavar='DIR', default='.',
54 help='Base directory of the Nominatim installation (default: .)')
55 group.add_argument('-j', '--threads', metavar='NUM', type=int,
56 help='Number of parallel threads to use')
59 def add_subcommand(self, name, cmd):
60 """ Add a subcommand to the parser. The subcommand must be a class
61 with a function add_args() that adds the parameters for the
62 subcommand and a run() function that executes the command.
64 parser = self.subs.add_parser(name, parents=[self.default_args],
65 help=cmd.__doc__.split('\n', 1)[0],
66 description=cmd.__doc__,
67 formatter_class=argparse.RawDescriptionHelpFormatter,
69 parser.set_defaults(command=cmd)
72 def run(self, **kwargs):
73 """ Parse the command line arguments of the program and execute the
74 appropriate subcommand.
76 args = self.parser.parse_args(args=kwargs.get('cli_args'))
78 if args.subcommand is None:
79 self.parser.print_help()
82 for arg in ('module_dir', 'osm2pgsql_path', 'phplib_dir', 'data_dir', 'phpcgi_path'):
83 setattr(args, arg, Path(kwargs[arg]))
84 args.project_dir = Path(args.project_dir).resolve()
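# The verbosity counter maps onto Python logging levels: the default of 1 gives
# WARNING (30), a single -v gives INFO (20), -vv gives DEBUG (10) and -q gives
# ERROR (40).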
86 logging.basicConfig(stream=sys.stderr,
87 format='%(asctime)s: %(message)s',
88 datefmt='%Y-%m-%d %H:%M:%S',
89 level=max(4 - args.verbose, 1) * 10)
91 args.config = Configuration(args.project_dir, args.data_dir / 'settings')
93 log = logging.getLogger()
94 log.warning('Using project directory: %s', str(args.project_dir))
97 return args.command.run(args)
98 except UsageError as exception:
99 if log.isEnabledFor(logging.DEBUG):
100 raise # use Python's exception printing
101 log.fatal('FATAL: %s', exception)
103 # If we get here, then execution has failed in some way.
107 def _osm2pgsql_options_from_args(args, default_cache, default_threads):
108 """ Set up the standard osm2pgsql options from the command line arguments.
110 return dict(osm2pgsql=args.osm2pgsql_path,
111 osm2pgsql_cache=args.osm2pgsql_cache or default_cache,
112 osm2pgsql_style=args.config.get_import_style_file(),
113 threads=args.threads or default_threads,
114 dsn=args.config.get_libpq_dsn(),
115 flatnode_file=args.config.FLATNODE_FILE)
117 ##### Subcommand classes
119 # Each class needs to implement two functions: add_args() adds the CLI parameters
120 # for the subcommand, and run() executes it.
122 # The class documentation doubles as the help text for the command. The
123 # first line is also used in the summary when calling the program without a subcommand.
126 # No need to document the functions each time.
127 # pylint: disable=C0111
128 # Using non-top-level imports to make pyosmium optional for replication only.
129 # pylint: disable=E0012,C0415
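# For illustration only (not one of the real commands below), a minimal
# subcommand following this protocol might look like the following sketch; the
# class name 'AdminPing' and its --timeout option are made up:
#
#   class AdminPing:
#       """ Check that the database connection works.
#       """
#
#       @staticmethod
#       def add_args(parser):
#           parser.add_argument('--timeout', type=int, default=5,
#                               help='Seconds to wait for a connection')
#
#       @staticmethod
#       def run(args):
#           conn = connect(args.config.get_libpq_dsn())
#           conn.close()
#           return 0
#
# It would then be registered with parser.add_subcommand('ping', AdminPing).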
134 Create a new Nominatim database from an OSM file.
138 def add_args(parser):
139 group_name = parser.add_argument_group('Required arguments')
140 group = group_name.add_mutually_exclusive_group(required=True)
141 group.add_argument('--osm-file',
142 help='OSM file to be imported.')
143 group.add_argument('--continue', dest='continue_at',
144 choices=['load-data', 'indexing', 'db-postprocess'],
145 help='Continue an import that was interrupted')
146 group = parser.add_argument_group('Optional arguments')
147 group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
148 help='Size of cache to be used by osm2pgsql (in MB)')
149 group.add_argument('--reverse-only', action='store_true',
150 help='Do not create tables and indexes for searching')
151 group.add_argument('--enable-debug-statements', action='store_true',
152 help='Include debug warning statements in SQL code')
153 group.add_argument('--no-partitions', action='store_true',
154 help="""Do not partition search indices
155 (speeds up import of single country extracts)""")
156 group.add_argument('--no-updates', action='store_true',
157 help="""Do not keep tables that are only needed for
158 updating the database later""")
159 group = parser.add_argument_group('Expert options')
160 group.add_argument('--ignore-errors', action='store_true',
161 help='Continue import even when errors in SQL are present')
162 group.add_argument('--index-noanalyse', action='store_true',
163 help='Do not perform analyse operations during indexing')
168 params = ['setup.php']
170 params.extend(('--all', '--osm-file', args.osm_file))
172 if args.continue_at == 'load-data':
173 params.append('--load-data')
174 if args.continue_at in ('load-data', 'indexing'):
175 params.append('--index')
176 params.extend(('--create-search-indices', '--create-country-names',
178 if args.osm2pgsql_cache:
179 params.extend(('--osm2pgsql-cache', args.osm2pgsql_cache))
180 if args.reverse_only:
181 params.append('--reverse-only')
182 if args.enable_debug_statements:
183 params.append('--enable-debug-statements')
184 if args.no_partitions:
185 params.append('--no-partitions')
187 params.append('--drop')
188 if args.ignore_errors:
189 params.append('--ignore-errors')
190 if args.index_noanalyse:
191 params.append('--index-noanalyse')
193 return run_legacy_script(*params, nominatim_env=args)
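# Illustrative invocation (file name and cache size are just examples):
#
#   nominatim import --osm-file planet-latest.osm.pbf --osm2pgsql-cache 28000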
198 Make database read-only.
200 About half of the data in the Nominatim database is kept only to be able to
201 keep the data up-to-date with new changes made in OpenStreetMap. This
202 command drops all this data and only keeps the part needed for geocoding itself.
205 This command has the same effect as the `--no-updates` option for imports.
209 def add_args(parser):
214 return run_legacy_script('setup.php', '--drop', nominatim_env=args)
217 class SetupSpecialPhrases:
219 Maintain special phrases.
223 def add_args(parser):
224 group = parser.add_argument_group('Input arguments')
225 group.add_argument('--from-wiki', action='store_true',
226 help='Pull special phrases from the OSM wiki.')
227 group = parser.add_argument_group('Output arguments')
228 group.add_argument('-o', '--output', default='-',
229 help="""File to write the preprocessed phrases to.
230 If omitted, it will be written to stdout.""")
234 if args.output != '-':
235 raise NotImplementedError('Only output to stdout is currently implemented.')
236 return run_legacy_script('specialphrases.php', '--wiki-import', nominatim_env=args)
239 class UpdateReplication:
241 Update the database using an online replication service.
245 def add_args(parser):
246 group = parser.add_argument_group('Arguments for initialisation')
247 group.add_argument('--init', action='store_true',
248 help='Initialise the update process')
249 group.add_argument('--no-update-functions', dest='update_functions',
250 action='store_false',
251 help="""Do not update the trigger function to
252 support differential updates.""")
253 group = parser.add_argument_group('Arguments for updates')
254 group.add_argument('--check-for-updates', action='store_true',
255 help='Check if new updates are available and exit')
256 group.add_argument('--once', action='store_true',
257 help="""Download and apply updates only once. When
258 not set, updates are continuously applied""")
259 group.add_argument('--no-index', action='store_false', dest='do_index',
260 help="""Do not index the new data. Only applicable
261 together with --once""")
262 group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
263 help='Size of cache to be used by osm2pgsql (in MB)')
266 def _init_replication(args):
267 from .tools import replication, refresh
269 LOG.warning("Initialising replication updates")
270 conn = connect(args.config.get_libpq_dsn())
271 replication.init_replication(conn, base_url=args.config.REPLICATION_URL)
272 if args.update_functions:
273 LOG.warning("Create functions")
274 refresh.create_functions(conn, args.config, args.data_dir,
281 def _check_for_updates(args):
282 from .tools import replication
284 conn = connect(args.config.get_libpq_dsn())
285 ret = replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL)
290 def _report_update(batchdate, start_import, start_index):
291 def round_time(delta):
292 return dt.timedelta(seconds=int(delta.total_seconds()))
294 end = dt.datetime.now(dt.timezone.utc)
295 LOG.warning("Update completed. Import: %s. %sTotal: %s. Remaining backlog: %s.",
296 round_time((start_index or end) - start_import),
297 "Indexing: {} ".format(round_time(end - start_index))
298 if start_index else '',
299 round_time(end - start_import),
300 round_time(end - batchdate))
304 from .tools import replication
305 from .indexer.indexer import Indexer
307 params = _osm2pgsql_options_from_args(args, 2000, 1)
308 params.update(base_url=args.config.REPLICATION_URL,
309 update_interval=args.config.get_int('REPLICATION_UPDATE_INTERVAL'),
310 import_file=args.project_dir / 'osmosischange.osc',
311 max_diff_size=args.config.get_int('REPLICATION_MAX_DIFF'),
312 indexed_only=not args.once)
314 # Sanity check to not overwhelm the Geofabrik servers.
315 if 'download.geofabrik.de' in params['base_url']\
316 and params['update_interval'] < 86400:
317 LOG.fatal("Update interval too low for download.geofabrik.de.\n"
318 "Please check install documentation "
319 "(https://nominatim.org/release-docs/latest/admin/Import-and-Update#"
320 "setting-up-the-update-process).")
321 raise UsageError("Invalid replication update interval setting.")
324 if not args.do_index:
325 LOG.fatal("Indexing cannot be disabled when running updates continuously.")
326 raise UsageError("Bad argument '--no-index'.")
327 recheck_interval = args.config.get_int('REPLICATION_RECHECK_INTERVAL')
330 conn = connect(args.config.get_libpq_dsn())
331 start = dt.datetime.now(dt.timezone.utc)
332 state = replication.update(conn, params)
333 status.log_status(conn, start, 'import')
334 batchdate, _, _ = status.get_status(conn)
337 if state is not replication.UpdateState.NO_CHANGES and args.do_index:
338 index_start = dt.datetime.now(dt.timezone.utc)
339 indexer = Indexer(args.config.get_libpq_dsn(),
341 indexer.index_boundaries(0, 30)
342 indexer.index_by_rank(0, 30)
344 conn = connect(args.config.get_libpq_dsn())
345 status.set_indexed(conn, True)
346 status.log_status(conn, index_start, 'index')
351 if LOG.isEnabledFor(logging.WARNING):
352 UpdateReplication._report_update(batchdate, start, index_start)
357 if state is replication.UpdateState.NO_CHANGES:
358 LOG.warning("No new changes. Sleeping for %d sec.", recheck_interval)
359 time.sleep(recheck_interval)
366 import osmium # pylint: disable=W0611
367 except ModuleNotFoundError:
368 LOG.fatal("pyosmium not installed. Replication functions not available.\n"
369 "To install pyosmium via pip: pip3 install osmium")
373 return UpdateReplication._init_replication(args)
375 if args.check_for_updates:
376 return UpdateReplication._check_for_updates(args)
378 return UpdateReplication._update(args)
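# Typical replication workflow with this command (illustrative):
#
#   nominatim replication --init     # prepare the database for updates (once)
#   nominatim replication --once     # download and apply a single update batch
#   nominatim replication            # keep downloading and applying updates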
382 Add additional data from a file or an online source.
384 Data is only imported, not indexed. You need to call `nominatim index`
385 afterwards to complete the process.
389 def add_args(parser):
390 group_name = parser.add_argument_group('Source')
391 group = group_name.add_mutually_exclusive_group(required=True)
392 group.add_argument('--file', metavar='FILE',
393 help='Import data from an OSM file')
394 group.add_argument('--diff', metavar='FILE',
395 help='Import data from an OSM diff file')
396 group.add_argument('--node', metavar='ID', type=int,
397 help='Import a single node from the API')
398 group.add_argument('--way', metavar='ID', type=int,
399 help='Import a single way from the API')
400 group.add_argument('--relation', metavar='ID', type=int,
401 help='Import a single relation from the API')
402 group.add_argument('--tiger-data', metavar='DIR',
403 help='Add housenumbers from the US TIGER census database.')
404 group = parser.add_argument_group('Extra arguments')
405 group.add_argument('--use-main-api', action='store_true',
406 help='Use OSM API instead of Overpass to download objects')
411 os.environ['NOMINATIM_TIGER_DATA_PATH'] = args.tiger_data
412 return run_legacy_script('setup.php', '--import-tiger-data', nominatim_env=args)
414 params = ['update.php']
416 params.extend(('--import-file', args.file))
418 params.extend(('--import-diff', args.diff))
420 params.extend(('--import-node', args.node))
422 params.extend(('--import-way', args.way))
424 params.extend(('--import-relation', args.relation))
425 if args.use_main_api:
426 params.append('--use-main-api')
427 return run_legacy_script(*params, nominatim_env=args)
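# Illustrative usage (the file name is made up); indexing has to be triggered
# separately as described in the class documentation:
#
#   nominatim add-data --file extra_data.osm.pbf
#   nominatim index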
432 Reindex all new and modified data.
436 def add_args(parser):
437 group = parser.add_argument_group('Filter arguments')
438 group.add_argument('--boundaries-only', action='store_true',
439 help="""Index only administrative boundaries.""")
440 group.add_argument('--no-boundaries', action='store_true',
441 help="""Index everything except administrative boundaries.""")
442 group.add_argument('--minrank', '-r', type=int, metavar='RANK', default=0,
443 help='Minimum/starting rank')
444 group.add_argument('--maxrank', '-R', type=int, metavar='RANK', default=30,
445 help='Maximum/finishing rank')
449 from .indexer.indexer import Indexer
451 indexer = Indexer(args.config.get_libpq_dsn(),
452 args.threads or _num_system_cpus() or 1)
454 if not args.no_boundaries:
455 indexer.index_boundaries(args.minrank, args.maxrank)
456 if not args.boundaries_only:
457 indexer.index_by_rank(args.minrank, args.maxrank)
459 if not args.no_boundaries and not args.boundaries_only \
460 and args.minrank == 0 and args.maxrank == 30:
461 conn = connect(args.config.get_libpq_dsn())
462 status.set_indexed(conn, True)
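# Illustrative usage: reindex everything from rank 26 upwards (roughly street
# level and more detailed objects):
#
#   nominatim index --minrank 26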
470 Recompute auxiliary data used by the indexing process.
472 These functions must not be run in parallel with other update commands.
476 def add_args(parser):
477 group = parser.add_argument_group('Data arguments')
478 group.add_argument('--postcodes', action='store_true',
479 help='Update postcode centroid table')
480 group.add_argument('--word-counts', action='store_true',
481 help='Compute frequency of full-word search terms')
482 group.add_argument('--address-levels', action='store_true',
483 help='Reimport address level configuration')
484 group.add_argument('--functions', action='store_true',
485 help='Update the PL/pgSQL functions in the database')
486 group.add_argument('--wiki-data', action='store_true',
487 help='Update Wikipedia/Wikidata importance numbers.')
488 group.add_argument('--importance', action='store_true',
489 help='Recompute place importances (expensive!)')
490 group.add_argument('--website', action='store_true',
491 help='Refresh the directory that serves the scripts for the web API')
492 group = parser.add_argument_group('Arguments for function refresh')
493 group.add_argument('--no-diff-updates', action='store_false', dest='diffs',
494 help='Do not enable code for propagating updates')
495 group.add_argument('--enable-debug-statements', action='store_true',
496 help='Enable debug warning statements in functions')
500 from .tools import refresh
503 LOG.warning("Update postcode centroids")
504 conn = connect(args.config.get_libpq_dsn())
505 refresh.update_postcodes(conn, args.data_dir)
509 LOG.warning('Recompute frequency of full-word search terms')
510 conn = connect(args.config.get_libpq_dsn())
511 refresh.recompute_word_counts(conn, args.data_dir)
514 if args.address_levels:
515 cfg = Path(args.config.ADDRESS_LEVEL_CONFIG)
516 LOG.warning('Updating address levels from %s', cfg)
517 conn = connect(args.config.get_libpq_dsn())
518 refresh.load_address_levels_from_file(conn, cfg)
522 LOG.warning('Create functions')
523 conn = connect(args.config.get_libpq_dsn())
524 refresh.create_functions(conn, args.config, args.data_dir,
525 args.diffs, args.enable_debug_statements)
529 run_legacy_script('setup.php', '--import-wikipedia-articles',
530 nominatim_env=args, throw_on_fail=True)
531 # Attention: importance MUST come after wiki data import.
533 run_legacy_script('update.php', '--recompute-importance',
534 nominatim_env=args, throw_on_fail=True)
536 run_legacy_script('setup.php', '--setup-website',
537 nominatim_env=args, throw_on_fail=True)
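# Illustrative usage: reinstall the PL/pgSQL functions and regenerate the
# website scripts after a configuration change:
#
#   nominatim refresh --functions --website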
542 class AdminCheckDatabase:
544 Check that the database is complete and operational.
548 def add_args(parser):
553 return run_legacy_script('check_import_finished.php', nominatim_env=args)
558 Warm database caches for search and reverse queries.
562 def add_args(parser):
563 group = parser.add_argument_group('Target arguments')
564 group.add_argument('--search-only', action='store_const', dest='target',
566 help="Only pre-warm tables for search queries")
567 group.add_argument('--reverse-only', action='store_const', dest='target',
569 help="Only pre-warm tables for reverse queries")
573 params = ['warm.php']
574 if args.target == 'reverse':
575 params.append('--reverse-only')
576 if args.target == 'search':
577 params.append('--search-only')
578 return run_legacy_script(*params, nominatim_env=args)
583 Export addresses as CSV file from the database.
587 def add_args(parser):
588 group = parser.add_argument_group('Output arguments')
589 group.add_argument('--output-type', default='street',
590 choices=('continent', 'country', 'state', 'county',
591 'city', 'suburb', 'street', 'path'),
592 help='Type of places to output (default: street)')
593 group.add_argument('--output-format',
594 default='street;suburb;city;county;state;country',
595 help="""Semicolon-separated list of address types
596 (see --output-type). Multiple ranks can be
597 merged into one column by simply using a
598 comma-separated list.""")
599 group.add_argument('--output-all-postcodes', action='store_true',
600 help="""List all postcodes for the address instead of
601 just the most likely one""")
602 group.add_argument('--language',
603 help="""Preferred language for output
604 (use local name, if omitted)""")
605 group = parser.add_argument_group('Filter arguments')
606 group.add_argument('--restrict-to-country', metavar='COUNTRY_CODE',
607 help='Export only objects within country')
608 group.add_argument('--restrict-to-osm-node', metavar='ID', type=int,
609 help='Export only children of this OSM node')
610 group.add_argument('--restrict-to-osm-way', metavar='ID', type=int,
611 help='Export only children of this OSM way')
612 group.add_argument('--restrict-to-osm-relation', metavar='ID', type=int,
613 help='Export only children of this OSM relation')
618 params = ['export.php',
619 '--output-type', args.output_type,
620 '--output-format', args.output_format]
621 if args.output_all_postcodes:
622 params.append('--output-all-postcodes')
624 params.extend(('--language', args.language))
625 if args.restrict_to_country:
626 params.extend(('--restrict-to-country', args.restrict_to_country))
627 if args.restrict_to_osm_node:
628 params.extend(('--restrict-to-osm-node', args.restrict_to_osm_node))
629 if args.restrict_to_osm_way:
630 params.extend(('--restrict-to-osm-way', args.restrict_to_osm_way))
631 if args.restrict_to_osm_relation:
632 params.extend(('--restrict-to-osm-relation', args.restrict_to_osm_relation))
634 return run_legacy_script(*params, nominatim_env=args)
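# Illustrative usage (output type and country code are just examples):
#
#   nominatim export --output-type street --restrict-to-country de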
637 ('street', 'housenumber and street'),
638 ('city', 'city, town or village'),
639 ('county', 'county'),
641 ('country', 'country'),
642 ('postalcode', 'postcode')
646 ('addressdetails', 'Include a breakdown of the address into elements.'),
647 ('extratags', """Include additional information if available
648 (e.g. wikipedia link, opening hours)."""),
649 ('namedetails', 'Include a list of alternative names.')
653 ('addressdetails', 'Include a breakdown of the address into elements.'),
654 ('keywords', 'Include a list of name keywords and address keywords.'),
655 ('linkedplaces', 'Include details of places that are linked with this one.'),
656 ('hierarchy', 'Include details of places lower in the address hierarchy.'),
657 ('group_hierarchy', 'Group the places by type.'),
658 ('polygon_geojson', 'Include geometry of result.')
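# The tuples above pair a CLI switch name with its help text. The helper below
# and the API command classes turn each entry into an argparse option and
# forward the chosen values as parameters to the PHP API scripts.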
661 def _add_api_output_arguments(parser):
662 group = parser.add_argument_group('Output arguments')
663 group.add_argument('--format', default='jsonv2',
664 choices=['xml', 'json', 'jsonv2', 'geojson', 'geocodejson'],
665 help='Format of result')
666 for name, desc in EXTRADATA_PARAMS:
667 group.add_argument('--' + name, action='store_true', help=desc)
669 group.add_argument('--lang', '--accept-language', metavar='LANGS',
670 help='Preferred language order for presenting search results')
671 group.add_argument('--polygon-output',
672 choices=['geojson', 'kml', 'svg', 'text'],
673 help='Output geometry of results as GeoJSON, KML, SVG or WKT.')
674 group.add_argument('--polygon-threshold', type=float, metavar='TOLERANCE',
675 help="""Simplify output geometry.
676 Parameter is difference tolerance in degrees.""")
681 Execute API search query.
685 def add_args(parser):
686 group = parser.add_argument_group('Query arguments')
687 group.add_argument('--query',
688 help='Free-form query string')
689 for name, desc in STRUCTURED_QUERY:
690 group.add_argument('--' + name, help='Structured query: ' + desc)
692 _add_api_output_arguments(parser)
694 group = parser.add_argument_group('Result limitation')
695 group.add_argument('--countrycodes', metavar='CC,..',
696 help='Limit search results to one or more countries.')
697 group.add_argument('--exclude_place_ids', metavar='ID,..',
698 help='List of search objects to be excluded')
699 group.add_argument('--limit', type=int,
700 help='Limit the number of returned results')
701 group.add_argument('--viewbox', metavar='X1,Y1,X2,Y2',
702 help='Preferred area to find search results')
703 group.add_argument('--bounded', action='store_true',
704 help='Strictly restrict results to viewbox area')
706 group = parser.add_argument_group('Other arguments')
707 group.add_argument('--no-dedupe', action='store_false', dest='dedupe',
708 help='Do not remove duplicates from the result list')
714 params = dict(q=args.query)
716 params = {k : getattr(args, k) for k, _ in STRUCTURED_QUERY if getattr(args, k)}
718 for param, _ in EXTRADATA_PARAMS:
719 if getattr(args, param):
721 for param in ('format', 'countrycodes', 'exclude_place_ids', 'limit', 'viewbox'):
722 if getattr(args, param):
723 params[param] = getattr(args, param)
725 params['accept-language'] = args.lang
726 if args.polygon_output:
727 params['polygon_' + args.polygon_output] = '1'
728 if args.polygon_threshold:
729 params['polygon_threshold'] = args.polygon_threshold
731 params['bounded'] = '1'
733 params['dedupe'] = '0'
735 return run_api_script('search', args.project_dir,
736 phpcgi_bin=args.phpcgi_path, params=params)
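# Illustrative query from the command line (the search term is just an example):
#
#   nominatim search --query "Birmingham" --addressdetails --limit 5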
740 Execute API reverse query.
744 def add_args(parser):
745 group = parser.add_argument_group('Query arguments')
746 group.add_argument('--lat', type=float, required=True,
747 help='Latitude of coordinate to look up (in WGS84)')
748 group.add_argument('--lon', type=float, required=True,
749 help='Longitude of coordinate to look up (in WGS84)')
750 group.add_argument('--zoom', type=int,
751 help='Level of detail required for the address')
753 _add_api_output_arguments(parser)
758 params = dict(lat=args.lat, lon=args.lon)
759 if args.zoom is not None:
760 params['zoom'] = args.zoom
762 for param, _ in EXTRADATA_PARAMS:
763 if getattr(args, param):
766 params['format'] = args.format
768 params['accept-language'] = args.lang
769 if args.polygon_output:
770 params['polygon_' + args.polygon_output] = '1'
771 if args.polygon_threshold:
772 params['polygon_threshold'] = args.polygon_threshold
774 return run_api_script('reverse', args.project_dir,
775 phpcgi_bin=args.phpcgi_path, params=params)
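# Illustrative query (coordinates are just an example):
#
#   nominatim reverse --lat 48.137 --lon 11.575 --zoom 18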
780 Execute API lookup query.
784 def add_args(parser):
785 group = parser.add_argument_group('Query arguments')
786 group.add_argument('--id', metavar='OSMID',
787 action='append', required=True, dest='ids',
788 help='OSM id to look up in the format <NRW><id> (may be repeated)')
790 _add_api_output_arguments(parser)
795 params = dict(osm_ids=','.join(args.ids))
797 for param, _ in EXTRADATA_PARAMS:
798 if getattr(args, param):
801 params['format'] = args.format
803 params['accept-language'] = args.lang
804 if args.polygon_output:
805 params['polygon_' + args.polygon_output] = '1'
806 if args.polygon_threshold:
807 params['polygon_threshold'] = args.polygon_threshold
809 return run_api_script('lookup', args.project_dir,
810 phpcgi_bin=args.phpcgi_path, params=params)
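# Illustrative query (the OSM ids are made up; the letter prefix encodes the
# object type as described in the --id help text):
#
#   nominatim lookup --id N123456 --id W987654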
815 Execute API details query.
819 def add_args(parser):
820 group = parser.add_argument_group('Query arguments')
821 objs = group.add_mutually_exclusive_group(required=True)
822 objs.add_argument('--node', '-n', type=int,
823 help="Look up the OSM node with the given ID.")
824 objs.add_argument('--way', '-w', type=int,
825 help="Look up the OSM way with the given ID.")
826 objs.add_argument('--relation', '-r', type=int,
827 help="Look up the OSM relation with the given ID.")
828 objs.add_argument('--place_id', '-p', type=int,
829 help='Database internal identifier of the OSM object to look up.')
830 group.add_argument('--class', dest='object_class',
831 help="""Class type to disambiguate multiple entries
832 of the same object.""")
834 group = parser.add_argument_group('Output arguments')
835 for name, desc in DETAILS_SWITCHES:
836 group.add_argument('--' + name, action='store_true', help=desc)
837 group.add_argument('--lang', '--accept-language', metavar='LANGS',
838 help='Preferred language order for presenting search results')
843 params = dict(osmtype='N', osmid=args.node)
845 params = dict(osmtype='W', osmid=args.way)
847 params = dict(osmtype='R', osmid=args.relation)
849 params = dict(place_id=args.place_id)
850 if args.object_class:
851 params['class'] = args.object_class
852 for name, _ in DETAILS_SWITCHES:
853 params[name] = '1' if getattr(args, name) else '0'
855 return run_api_script('details', args.project_dir,
856 phpcgi_bin=args.phpcgi_path, params=params)
861 Execute API status query.
865 def add_args(parser):
866 group = parser.add_argument_group('API parameters')
867 group.add_argument('--format', default='text', choices=['text', 'json'],
868 help='Format of result')
872 return run_api_script('status', args.project_dir,
873 phpcgi_bin=args.phpcgi_path,
874 params=dict(format=args.format))
877 def nominatim(**kwargs):
879 Command-line tools for importing, updating, administering and
880 querying the Nominatim database.
882 parser = CommandlineParser('nominatim', nominatim.__doc__)
884 parser.add_subcommand('import', SetupAll)
885 parser.add_subcommand('freeze', SetupFreeze)
886 parser.add_subcommand('replication', UpdateReplication)
888 parser.add_subcommand('check-database', AdminCheckDatabase)
889 parser.add_subcommand('warm', AdminWarm)
891 parser.add_subcommand('special-phrases', SetupSpecialPhrases)
893 parser.add_subcommand('add-data', UpdateAddData)
894 parser.add_subcommand('index', UpdateIndex)
895 parser.add_subcommand('refresh', UpdateRefresh)
897 parser.add_subcommand('export', QueryExport)
899 if kwargs.get('phpcgi_path'):
900 parser.add_subcommand('search', APISearch)
901 parser.add_subcommand('reverse', APIReverse)
902 parser.add_subcommand('lookup', APILookup)
903 parser.add_subcommand('details', APIDetails)
904 parser.add_subcommand('status', APIStatus)
906 parser.parser.epilog = 'php-cgi not found. Query commands not available.'
908 return parser.run(**kwargs)