sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev python3-psycopg2 python3-pyosmium python3-dotenv
shell: bash
+ - name: Download dependencies
+ run: |
+ if [ ! -f country_grid.sql.gz ]; then
+ wget --no-verbose https://www.nominatim.org/data/country_grid.sql.gz
+ fi
+ cp country_grid.sql.gz Nominatim/data/country_osm_grid.sql.gz
+ shell: bash
+
- name: Configure
- run: mkdir build && cd build && cmake ..
+ run: mkdir build && cd build && cmake ../Nominatim
shell: bash
- name: Build
run: |
make -j2 all
- ./nominatim refresh --website
+ sudo make install
shell: bash
working-directory: build
-
- - name: Download dependencies
- run: |
- if [ ! -f data/country_osm_grid.sql.gz ]; then
- wget --no-verbose -O data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
- fi
- shell: bash
-
- uses: actions/checkout@v2
with:
submodules: true
+ path: Nominatim
- name: Setup PHP
uses: shivammathur/setup-php@v2
- uses: actions/cache@v2
with:
path: |
- data/country_osm_grid.sql.gz
- monaco-latest.osm.pbf
- key: nominatim-data-${{ steps.get-date.outputs.date }}
+ country_grid.sql.gz
+ key: nominatim-country-data-${{ steps.get-date.outputs.date }}
- - uses: ./.github/actions/setup-postgresql
+ - uses: ./Nominatim/.github/actions/setup-postgresql
with:
postgresql-version: ${{ matrix.postgresql }}
postgis-version: ${{ matrix.postgis }}
- - uses: ./.github/actions/build-nominatim
+ - uses: ./Nominatim/.github/actions/build-nominatim
- name: Install test prerequsites
run: sudo apt-get install -y -qq php-codesniffer pylint python3-pytest python3-behave
- name: PHP linting
run: phpcs --report-width=120 .
+ working-directory: Nominatim
- name: Python linting
run: pylint --extension-pkg-whitelist=osmium nominatim
+ working-directory: Nominatim
- name: PHP unit tests
run: phpunit ./
- working-directory: test/php
+ working-directory: Nominatim/test/php
- name: Python unit tests
run: py.test-3 test/python
+ working-directory: Nominatim
- name: BDD tests
- run: behave -DREMOVE_TEMPLATE=1 --format=progress3
- working-directory: test/bdd
+ run: behave -DREMOVE_TEMPLATE=1 -DBUILDDIR=$GITHUB_WORKSPACE/build --format=progress3
+ working-directory: Nominatim/test/bdd
import:
runs-on: ubuntu-20.04
- uses: actions/checkout@v2
with:
submodules: true
+ path: Nominatim
- name: Get Date
id: get-date
- uses: actions/cache@v2
with:
path: |
- data/country_osm_grid.sql.gz
+ country_grid.sql.gz
+ key: nominatim-country-data-${{ steps.get-date.outputs.date }}
+
+ - uses: actions/cache@v2
+ with:
+ path: |
monaco-latest.osm.pbf
- key: nominatim-data-${{ steps.get-date.outputs.date }}
+ key: nominatim-test-data-${{ steps.get-date.outputs.date }}
- - uses: ./.github/actions/setup-postgresql
+ - uses: ./Nominatim/.github/actions/setup-postgresql
with:
postgresql-version: 13
postgis-version: 3
- - uses: ./.github/actions/build-nominatim
+ - uses: ./Nominatim/.github/actions/build-nominatim
+
+ - name: Clean installation
+ run: rm -rf Nominatim build
+ shell: bash
- - name: Download import data
+ - name: Prepare import environment
run: |
if [ ! -f monaco-latest.osm.pbf ]; then
wget --no-verbose https://download.geofabrik.de/europe/monaco-latest.osm.pbf
fi
+ mkdir data-env
+ cd data-env
shell: bash
- name: Import
- run: |
- mkdir data-env
- cd data-env
- ../build/nominatim import --osm-file ../monaco-latest.osm.pbf
+ run: nominatim import --osm-file ../monaco-latest.osm.pbf
shell: bash
+ working-directory: data-env
- name: Import special phrases
- run: ../build/nominatim special-phrases --from-wiki | psql -d nominatim
+ run: nominatim special-phrases --from-wiki | psql -d nominatim
working-directory: data-env
- name: Check import
- run: ../build/nominatim check-database
+ run: nominatim admin --check-database
working-directory: data-env
- name: Run update
run: |
- ../build/nominatim replication --init
- ../build/nominatim replication --once
+ nominatim replication --init
+ nominatim replication --once
working-directory: data-env
- name: Run reverse-only import
- run : |
- echo 'NOMINATIM_DATABASE_DSN="pgsql:dbname=reverse"' > .env
- ../build/nominatim import --osm-file ../monaco-latest.osm.pbf --reverse-only
+ run : nominatim import --osm-file ../monaco-latest.osm.pbf --reverse-only
working-directory: data-env
+ env:
+ NOMINATIM_DATABASE_DSN: pgsql:dbname=reverse
#-----------------------------------------------------------------------------
if (BUILD_IMPORTER)
+ find_file(COUNTRY_GRID_FILE country_osm_grid.sql.gz
+ PATHS ${PROJECT_SOURCE_DIR}/data
+ NO_DEFAULT_PATH
+ DOC "Location of the country grid file."
+ )
+
+ if (NOT COUNTRY_GRID_FILE)
+ message(FATAL_ERROR "\nYou need to download the country_osm_grid first:\n"
+ " wget -O ${PROJECT_SOURCE_DIR}/data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz")
+ endif()
+
set(CUSTOMSCRIPTS
check_import_finished.php
country_languages.php
if (BUILD_DOCS)
add_subdirectory(docs)
endif()
+
+#-----------------------------------------------------------------------------
+# Installation
+#-----------------------------------------------------------------------------
+
+
+include(GNUInstallDirs)
+set(NOMINATIM_DATADIR ${CMAKE_INSTALL_FULL_DATADIR}/${PROJECT_NAME})
+set(NOMINATIM_LIBDIR ${CMAKE_INSTALL_FULL_LIBDIR}/${PROJECT_NAME})
+set(NOMINATIM_CONFIGDIR ${CMAKE_INSTALL_FULL_SYSCONFDIR}/${PROJECT_NAME})
+
+if (BUILD_IMPORTER)
+ configure_file(${PROJECT_SOURCE_DIR}/cmake/tool-installed.tmpl installed.bin)
+ install(PROGRAMS ${PROJECT_BINARY_DIR}/installed.bin
+ DESTINATION ${CMAKE_INSTALL_BINDIR}
+ RENAME nominatim)
+
+ install(DIRECTORY nominatim
+ DESTINATION ${NOMINATIM_LIBDIR}/lib-python
+ FILES_MATCHING PATTERN "*.py"
+ PATTERN __pycache__ EXCLUDE)
+ install(DIRECTORY lib-sql DESTINATION ${NOMINATIM_LIBDIR})
+
+ install(FILES data/country_name.sql
+ ${COUNTRY_GRID_FILE}
+ data/words.sql
+ DESTINATION ${NOMINATIM_DATADIR})
+endif()
+
+if (BUILD_OSM2PGSQL)
+ install(TARGETS osm2pgsql RUNTIME DESTINATION ${NOMINATIM_LIBDIR})
+endif()
+
+if (BUILD_MODULE)
+ install(PROGRAMS ${PROJECT_BINARY_DIR}/module/nominatim.so
+ DESTINATION ${NOMINATIM_LIBDIR}/module)
+endif()
+
+if (BUILD_API)
+ install(DIRECTORY lib-php DESTINATION ${NOMINATIM_LIBDIR})
+endif()
+
+install(FILES settings/env.defaults
+ settings/address-levels.json
+ settings/phrase_settings.php
+ settings/import-admin.style
+ settings/import-street.style
+ settings/import-address.style
+ settings/import-full.style
+ settings/import-extratags.style
+ DESTINATION ${NOMINATIM_CONFIGDIR})
cd build
cmake ..
make
+ sudo make install
2. Create a project directory, get OSM data and import:
mkdir nominatim-project
cd nominatim-project
- ~/build/nominatim import --osm-file <your planet file>
+ nominatim import --osm-file <your planet file>
3. Point your webserver to the nominatim-project/website directory.
#!@PHP_BIN@ -Cq
<?php
-require('@CMAKE_SOURCE_DIR@/lib/dotenv_loader.php');
+require('@CMAKE_SOURCE_DIR@/lib-php/dotenv_loader.php');
@define('CONST_Default_ModulePath', '@CMAKE_BINARY_DIR@/module');
@define('CONST_Default_Osm2pgsql', '@CMAKE_BINARY_DIR@/osm2pgsql/osm2pgsql');
-@define('CONST_BinDir', '@CMAKE_SOURCE_DIR@/utils');
-@define('CONST_DataDir', '@CMAKE_SOURCE_DIR@');
+@define('CONST_DataDir', '@CMAKE_SOURCE_DIR@/data');
+@define('CONST_SqlDir', '@CMAKE_SOURCE_DIR@/lib-sql');
+@define('CONST_ConfigDir', '@CMAKE_SOURCE_DIR@/settings');
loadDotEnv();
$_SERVER['NOMINATIM_NOMINATIM_TOOL'] = '@CMAKE_BINARY_DIR@/nominatim';
-require_once('@CMAKE_SOURCE_DIR@/lib/admin/@script_source@');
+require_once('@CMAKE_SOURCE_DIR@/lib-php/admin/@script_source@');
--- /dev/null
+#!/usr/bin/env python3
+import sys
+import os
+
+sys.path.insert(1, '@NOMINATIM_LIBDIR@/lib-python')
+
+os.environ['NOMINATIM_NOMINATIM_TOOL'] = os.path.abspath(__file__)
+
+from nominatim import cli
+
+exit(cli.nominatim(module_dir='@NOMINATIM_LIBDIR@/module',
+ osm2pgsql_path='@NOMINATIM_LIBDIR@/osm2pgsql',
+ phplib_dir='@NOMINATIM_LIBDIR@/lib-php',
+ sqllib_dir='@NOMINATIM_LIBDIR@/lib-sql',
+ data_dir='@NOMINATIM_DATADIR@',
+ config_dir='@NOMINATIM_CONFIGDIR@',
+ phpcgi_path='@PHPCGI_BIN@'))
exit(cli.nominatim(module_dir='@CMAKE_BINARY_DIR@/module',
osm2pgsql_path='@CMAKE_BINARY_DIR@/osm2pgsql/osm2pgsql',
- phplib_dir='@CMAKE_SOURCE_DIR@/lib',
- data_dir='@CMAKE_SOURCE_DIR@',
+ phplib_dir='@CMAKE_SOURCE_DIR@/lib-php',
+ sqllib_dir='@CMAKE_SOURCE_DIR@/lib-sql',
+ data_dir='@CMAKE_SOURCE_DIR@/data',
+ config_dir='@CMAKE_SOURCE_DIR@/settings',
phpcgi_path='@PHPCGI_BIN@'))
+++ /dev/null
--- This data contains Ordnance Survey data © Crown copyright and database right 2010.
--- Code-Point Open contains Royal Mail data © Royal Mail copyright and database right 2010.
--- OS data may be used under the terms of the OS OpenData licence:
--- http://www.ordnancesurvey.co.uk/oswebsite/opendata/licence/docs/licence.pdf
-
-SET statement_timeout = 0;
-SET client_encoding = 'UTF8';
-SET standard_conforming_strings = off;
-SET check_function_bodies = false;
-SET client_min_messages = warning;
-SET escape_string_warning = off;
-
-SET search_path = public, pg_catalog;
-
-SET default_tablespace = '';
-
-SET default_with_oids = false;
-
-CREATE TABLE gb_postcode (
- id integer,
- postcode character varying(9),
- geometry geometry,
- CONSTRAINT enforce_dims_geometry CHECK ((st_ndims(geometry) = 2)),
- CONSTRAINT enforce_srid_geometry CHECK ((st_srid(geometry) = 4326))
-);
-
+++ /dev/null
-SET statement_timeout = 0;
-SET client_encoding = 'UTF8';
-SET check_function_bodies = false;
-SET client_min_messages = warning;
-
-SET search_path = public, pg_catalog;
-
-SET default_tablespace = '';
-
-SET default_with_oids = false;
-
-CREATE TABLE us_postcode (
- postcode text,
- x double precision,
- y double precision
-);
the PostgreSQL server itself.
Download and compile Nominatim as per standard instructions. Once done, you find
-the nomrmalization library in `build/module/nominatim.so`. Copy the file to
+the normalization library in `build/module/nominatim.so`. Copy the file to
the database server at a location where it is readable and executable by the
PostgreSQL server process.
The following instructions explain how to create a Nominatim database
from an OSM planet file. It is assumed that you have already successfully
-installed the Nominatim software itself. If this is not the case, return to the
+installed the Nominatim software itself and the `nominatim` tool can be found
+in your `PATH`. If this is not the case, return to the
[installation page](Installation.md).
## Creating the project directory
Before you start the import, you should create a project directory for your
new database installation. This directory receives all data that is related
to a single Nominatim setup: configuration, extra data, etc. Create a project
-directory apart from the Nominatim software:
+directory apart from the Nominatim software and change into the directory:
```
mkdir ~/nominatim-planet
+cd ~/nominatim-planet
```
In the following, we refer to the project directory as `$PROJECT_DIR`. To be
The Nominatim tool assumes per default that the current working directory is
the project directory but you may explicitly state a different directory using
-the `--project-dir` parameter. The following instructions assume that you have
-added the Nominatim build directory to your PATH and run all directories from
-the project directory. If you haven't done yet, add the build directory to your
-path and change to the new project directory:
-
-```
-export PATH=~/Nominatim/build:$PATH
-cd $PROJECT_DIR
-```
-
-Of course, you have to replace the path above with the location of your build
-directory.
+the `--project-dir` parameter. The following instructions assume that you run
+all commands from the project directory.
!!! tip "Migration Tip"
Run this script to verify all required tables and indices got created successfully.
```sh
-nominatim check-database
+nominatim admin --check-database
```
Now you can try out your installation by running:
* [PostGIS](https://postgis.net) (2.2+)
* [Python 3](https://www.python.org/) (3.5+)
* [Psycopg2](https://www.psycopg.org)
+ * [Python Dotenv](https://github.com/theskumar/python-dotenv)
* [PHP](https://php.net) (7.0 or later)
* PHP-pgsql
* PHP-intl (bundled with PHP)
- * [Python Dotenv](https://github.com/theskumar/python-dotenv)
+ * PHP-cgi (for running queries from the command line)
For running continuous updates:
- * [pyosmium](https://osmcode.org/pyosmium/) (with Python 3)
+ * [pyosmium](https://osmcode.org/pyosmium/)
For dependencies for running tests and building documentation, see
the [Development section](../develop/Development-Environment.md).
```
cmake ../Nominatim
make
+sudo make install
+```
+
+Nominatim installs itself into `/usr/local` per default. To choose a different
+installation directory add `-DCMAKE_INSTALL_PREFIX=<install root>` to the
+cmake command. Make sure that the `bin` directory is available in your path
+in that case, e.g.
+
+```
+export PATH=<install root>/bin:$PATH
```
Now continue with [importing the database](Import.md).
* ./utils/setup.php: `import`, `freeze`, `refresh`
* ./utils/update.php: `replication`, `add-data`, `index`, `refresh`
* ./utils/specialphrases.php: `special-phrases`
-* ./utils/check_import_finished.php: `check-database`
-* ./utils/warm.php: `warm`
+* ./utils/check_import_finished.php: `admin`
+* ./utils/warm.php: `admin`
* ./utils/export.php: `export`
Try `nominatim <command> --help` for more information about each subcommand.
function loadDotEnv()
{
$dotenv = new \Symfony\Component\Dotenv\Dotenv();
- $dotenv->load(CONST_DataDir.'/settings/env.defaults');
+ $dotenv->load(CONST_ConfigDir.'/env.defaults');
if (file_exists('.env')) {
$dotenv->load('.env');
// the installed scripts. Neither setting is part of the official
// set of settings.
defined('CONST_DataDir') or define('CONST_DataDir', $_SERVER['NOMINATIM_DATADIR']);
- defined('CONST_BinDir') or define('CONST_BinDir', $_SERVER['NOMINATIM_BINDIR']);
+ defined('CONST_SqlDir') or define('CONST_SqlDir', $_SERVER['NOMINATIM_SQLDIR']);
+ defined('CONST_ConfigDir') or define('CONST_ConfigDir', $_SERVER['NOMINATIM_CONFIGDIR']);
defined('CONST_Default_ModulePath') or define('CONST_Default_ModulePath', $_SERVER['NOMINATIM_DATABASE_MODULE_SRC_PATH']);
}
$sValue = $_SERVER['NOMINATIM_'.$sConfName];
if (!$sValue) {
- return CONST_DataDir.'/settings/'.$sSystemConfig;
+ return CONST_ConfigDir.'/'.$sSystemConfig;
}
return $sValue;
// Try accessing the C module, so we know early if something is wrong
$this->checkModulePresence(); // raises exception on failure
- if (!file_exists(CONST_DataDir.'/data/country_osm_grid.sql.gz')) {
- echo 'Error: you need to download the country_osm_grid first:';
- echo "\n wget -O ".CONST_DataDir."/data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz\n";
- exit(1);
- }
- $this->pgsqlRunScriptFile(CONST_DataDir.'/data/country_name.sql');
- $this->pgsqlRunScriptFile(CONST_DataDir.'/data/country_osm_grid.sql.gz');
- $this->pgsqlRunScriptFile(CONST_DataDir.'/data/gb_postcode_table.sql');
- $this->pgsqlRunScriptFile(CONST_DataDir.'/data/us_postcode_table.sql');
-
- $sPostcodeFilename = CONST_InstallDir.'/gb_postcode_data.sql.gz';
- if (file_exists($sPostcodeFilename)) {
- $this->pgsqlRunScriptFile($sPostcodeFilename);
- } else {
- warn('optional external GB postcode table file ('.$sPostcodeFilename.') not found. Skipping.');
- }
-
- $sPostcodeFilename = CONST_InstallDir.'/us_postcode_data.sql.gz';
- if (file_exists($sPostcodeFilename)) {
- $this->pgsqlRunScriptFile($sPostcodeFilename);
- } else {
- warn('optional external US postcode table file ('.$sPostcodeFilename.') not found. Skipping.');
- }
+ $this->pgsqlRunScriptFile(CONST_DataDir.'/country_name.sql');
+ $this->pgsqlRunScriptFile(CONST_DataDir.'/country_osm_grid.sql.gz');
if ($this->bNoPartitions) {
$this->pgsqlRunScript('update country_name set partition = 0');
{
info('Create Tables');
- $sTemplate = file_get_contents(CONST_DataDir.'/sql/tables.sql');
+ $sTemplate = file_get_contents(CONST_SqlDir.'/tables.sql');
$sTemplate = $this->replaceSqlPatterns($sTemplate);
$this->pgsqlRunScript($sTemplate, false);
{
info('Create Tables');
- $sTemplate = file_get_contents(CONST_DataDir.'/sql/table-triggers.sql');
+ $sTemplate = file_get_contents(CONST_SqlDir.'/table-triggers.sql');
$sTemplate = $this->replaceSqlPatterns($sTemplate);
$this->pgsqlRunScript($sTemplate, false);
{
info('Create Partition Tables');
- $sTemplate = file_get_contents(CONST_DataDir.'/sql/partition-tables.src.sql');
+ $sTemplate = file_get_contents(CONST_SqlDir.'/partition-tables.src.sql');
$sTemplate = $this->replaceSqlPatterns($sTemplate);
$this->pgsqlRunPartitionScript($sTemplate);
// pre-create the word list
if (!$bDisableTokenPrecalc) {
info('Loading word list');
- $this->pgsqlRunScriptFile(CONST_DataDir.'/data/words.sql');
+ $this->pgsqlRunScriptFile(CONST_DataDir.'/words.sql');
}
info('Load Data');
warn('Tiger data import selected but no files found in path '.$sTigerPath);
return;
}
- $sTemplate = file_get_contents(CONST_DataDir.'/sql/tiger_import_start.sql');
+ $sTemplate = file_get_contents(CONST_SqlDir.'/tiger_import_start.sql');
$sTemplate = $this->replaceSqlPatterns($sTemplate);
$this->pgsqlRunScript($sTemplate, false);
}
info('Creating indexes on Tiger data');
- $sTemplate = file_get_contents(CONST_DataDir.'/sql/tiger_import_finish.sql');
+ $sTemplate = file_get_contents(CONST_SqlDir.'/tiger_import_finish.sql');
$sTemplate = $this->replaceSqlPatterns($sTemplate);
$this->pgsqlRunScript($sTemplate, false);
public function calculatePostcodes($bCMDResultAll)
{
info('Calculate Postcodes');
+ $this->pgsqlRunScriptFile(CONST_SqlDir.'/postcode_tables.sql');
+
+ $sPostcodeFilename = CONST_InstallDir.'/gb_postcode_data.sql.gz';
+ if (file_exists($sPostcodeFilename)) {
+ $this->pgsqlRunScriptFile($sPostcodeFilename);
+ } else {
+ warn('optional external GB postcode table file ('.$sPostcodeFilename.') not found. Skipping.');
+ }
+
+ $sPostcodeFilename = CONST_InstallDir.'/us_postcode_data.sql.gz';
+ if (file_exists($sPostcodeFilename)) {
+ $this->pgsqlRunScriptFile($sPostcodeFilename);
+ } else {
+ warn('optional external US postcode table file ('.$sPostcodeFilename.') not found. Skipping.');
+ }
+
+
$this->db()->exec('TRUNCATE location_postcode');
$sSQL = 'INSERT INTO location_postcode';
$this->db()->exec("DROP INDEX $sIndexName;");
}
- $sTemplate = file_get_contents(CONST_DataDir.'/sql/indices.src.sql');
+ $sTemplate = file_get_contents(CONST_SqlDir.'/indices.src.sql');
if (!$this->bDrop) {
- $sTemplate .= file_get_contents(CONST_DataDir.'/sql/indices_updates.src.sql');
+ $sTemplate .= file_get_contents(CONST_SqlDir.'/indices_updates.src.sql');
}
if (!$this->dbReverseOnly()) {
- $sTemplate .= file_get_contents(CONST_DataDir.'/sql/indices_search.src.sql');
+ $sTemplate .= file_get_contents(CONST_SqlDir.'/indices_search.src.sql');
}
$sTemplate = $this->replaceSqlPatterns($sTemplate);
fwrite($rFile, '@define(\'CONST_Debug\', $_GET[\'debug\'] ?? false);'."\n\n");
fwriteConstDef($rFile, 'LibDir', CONST_LibDir);
- fwriteConstDef($rFile, 'DataDir', CONST_DataDir);
- fwriteConstDef($rFile, 'InstallDir', CONST_InstallDir);
fwriteConstDef($rFile, 'Database_DSN', getSetting('DATABASE_DSN'));
fwriteConstDef($rFile, 'Default_Language', getSetting('DEFAULT_LANGUAGE'));
fwriteConstDef($rFile, 'Log_DB', getSettingBool('LOG_DB'));
fwriteConstDef($rFile, 'Use_US_Tiger_Data', getSettingBool('USE_US_TIGER_DATA'));
fwriteConstDef($rFile, 'MapIcon_URL', getSetting('MAPICON_URL'));
- // XXX scripts should go into the library.
- fwrite($rFile, 'require_once(\''.CONST_DataDir.'/website/'.$sScript."');\n");
+ fwrite($rFile, 'require_once(\''.CONST_LibDir.'/website/'.$sScript."');\n");
fclose($rFile);
chmod(CONST_InstallDir.'/website/'.$sScript, 0755);
$sStyle = getSetting('IMPORT_STYLE');
if (in_array($sStyle, array('admin', 'street', 'address', 'full', 'extratags'))) {
- return CONST_DataDir.'/settings/import-'.$sStyle.'.style';
+ return CONST_ConfigDir.'/import-'.$sStyle.'.style';
}
return $sStyle;
--- /dev/null
+DROP TABLE IF EXISTS gb_postcode;
+CREATE TABLE gb_postcode (
+ id integer,
+ postcode character varying(9),
+ geometry geometry,
+ CONSTRAINT enforce_dims_geometry CHECK ((st_ndims(geometry) = 2)),
+ CONSTRAINT enforce_srid_geometry CHECK ((st_srid(geometry) = 4326))
+);
+
+DROP TABLE IF EXISTS us_postcode;
+CREATE TABLE us_postcode (
+ postcode text,
+ x double precision,
+ y double precision
+);
GRANT SELECT ON new_query_log TO "{www-user}" ;
GRANT SELECT ON TABLE country_name TO "{www-user}";
-GRANT SELECT ON TABLE gb_postcode TO "{www-user}";
-GRANT SELECT ON TABLE us_postcode TO "{www-user}";
drop table IF EXISTS word;
CREATE TABLE word (
Command-line interface to the Nominatim functions for import, update,
database administration and querying.
"""
-import datetime as dt
+import logging
import os
-import socket
import sys
-import time
import argparse
-import logging
from pathlib import Path
from .config import Configuration
-from .tools.exec_utils import run_legacy_script, run_api_script, run_php_server
-from .db.connection import connect
-from .db import status
+from .tools.exec_utils import run_legacy_script, run_php_server
from .errors import UsageError
+from . import clicmd
LOG = logging.getLogger()
-def _num_system_cpus():
- try:
- cpus = len(os.sched_getaffinity(0))
- except NotImplementedError:
- cpus = None
-
- return cpus or os.cpu_count()
-
class CommandlineParser:
""" Wraps some of the common functions for parsing the command line
self.parser.print_help()
return 1
- for arg in ('module_dir', 'osm2pgsql_path', 'phplib_dir', 'data_dir', 'phpcgi_path'):
+ for arg in ('module_dir', 'osm2pgsql_path', 'phplib_dir', 'sqllib_dir',
+ 'data_dir', 'config_dir', 'phpcgi_path'):
setattr(args, arg, Path(kwargs[arg]))
args.project_dir = Path(args.project_dir).resolve()
datefmt='%Y-%m-%d %H:%M:%S',
level=max(4 - args.verbose, 1) * 10)
- args.config = Configuration(args.project_dir, args.data_dir / 'settings')
+ args.config = Configuration(args.project_dir, args.config_dir)
log = logging.getLogger()
log.warning('Using project directory: %s', str(args.project_dir))
return 1
-def _osm2pgsql_options_from_args(args, default_cache, default_threads):
- """ Set up the stanadrd osm2pgsql from the command line arguments.
- """
- return dict(osm2pgsql=args.osm2pgsql_path,
- osm2pgsql_cache=args.osm2pgsql_cache or default_cache,
- osm2pgsql_style=args.config.get_import_style_file(),
- threads=args.threads or default_threads,
- dsn=args.config.get_libpq_dsn(),
- flatnode_file=args.config.FLATNODE_FILE)
-
##### Subcommand classes
#
# Each class needs to implement two functions: add_args() adds the CLI parameters
return run_legacy_script('specialphrases.php', '--wiki-import', nominatim_env=args)
-class UpdateReplication:
- """\
- Update the database using an online replication service.
- """
-
- @staticmethod
- def add_args(parser):
- group = parser.add_argument_group('Arguments for initialisation')
- group.add_argument('--init', action='store_true',
- help='Initialise the update process')
- group.add_argument('--no-update-functions', dest='update_functions',
- action='store_false',
- help="""Do not update the trigger function to
- support differential updates.""")
- group = parser.add_argument_group('Arguments for updates')
- group.add_argument('--check-for-updates', action='store_true',
- help='Check if new updates are available and exit')
- group.add_argument('--once', action='store_true',
- help="""Download and apply updates only once. When
- not set, updates are continuously applied""")
- group.add_argument('--no-index', action='store_false', dest='do_index',
- help="""Do not index the new data. Only applicable
- together with --once""")
- group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
- help='Size of cache to be used by osm2pgsql (in MB)')
- group = parser.add_argument_group('Download parameters')
- group.add_argument('--socket-timeout', dest='socket_timeout', type=int, default=60,
- help='Set timeout for file downloads.')
-
- @staticmethod
- def _init_replication(args):
- from .tools import replication, refresh
-
- socket.setdefaulttimeout(args.socket_timeout)
-
- LOG.warning("Initialising replication updates")
- conn = connect(args.config.get_libpq_dsn())
- replication.init_replication(conn, base_url=args.config.REPLICATION_URL)
- if args.update_functions:
- LOG.warning("Create functions")
- refresh.create_functions(conn, args.config, args.data_dir,
- True, False)
- conn.close()
- return 0
-
-
- @staticmethod
- def _check_for_updates(args):
- from .tools import replication
-
- conn = connect(args.config.get_libpq_dsn())
- ret = replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL)
- conn.close()
- return ret
-
- @staticmethod
- def _report_update(batchdate, start_import, start_index):
- def round_time(delta):
- return dt.timedelta(seconds=int(delta.total_seconds()))
-
- end = dt.datetime.now(dt.timezone.utc)
- LOG.warning("Update completed. Import: %s. %sTotal: %s. Remaining backlog: %s.",
- round_time((start_index or end) - start_import),
- "Indexing: {} ".format(round_time(end - start_index))
- if start_index else '',
- round_time(end - start_import),
- round_time(end - batchdate))
-
- @staticmethod
- def _update(args):
- from .tools import replication
- from .indexer.indexer import Indexer
-
- params = _osm2pgsql_options_from_args(args, 2000, 1)
- params.update(base_url=args.config.REPLICATION_URL,
- update_interval=args.config.get_int('REPLICATION_UPDATE_INTERVAL'),
- import_file=args.project_dir / 'osmosischange.osc',
- max_diff_size=args.config.get_int('REPLICATION_MAX_DIFF'),
- indexed_only=not args.once)
-
- # Sanity check to not overwhelm the Geofabrik servers.
- if 'download.geofabrik.de'in params['base_url']\
- and params['update_interval'] < 86400:
- LOG.fatal("Update interval too low for download.geofabrik.de.\n"
- "Please check install documentation "
- "(https://nominatim.org/release-docs/latest/admin/Import-and-Update#"
- "setting-up-the-update-process).")
- raise UsageError("Invalid replication update interval setting.")
-
- if not args.once:
- if not args.do_index:
- LOG.fatal("Indexing cannot be disabled when running updates continuously.")
- raise UsageError("Bad argument '--no-index'.")
- recheck_interval = args.config.get_int('REPLICATION_RECHECK_INTERVAL')
-
- while True:
- conn = connect(args.config.get_libpq_dsn())
- start = dt.datetime.now(dt.timezone.utc)
- state = replication.update(conn, params)
- if state is not replication.UpdateState.NO_CHANGES:
- status.log_status(conn, start, 'import')
- batchdate, _, _ = status.get_status(conn)
- conn.close()
-
- if state is not replication.UpdateState.NO_CHANGES and args.do_index:
- index_start = dt.datetime.now(dt.timezone.utc)
- indexer = Indexer(args.config.get_libpq_dsn(),
- args.threads or 1)
- indexer.index_boundaries(0, 30)
- indexer.index_by_rank(0, 30)
-
- conn = connect(args.config.get_libpq_dsn())
- status.set_indexed(conn, True)
- status.log_status(conn, index_start, 'index')
- conn.close()
- else:
- index_start = None
-
- if LOG.isEnabledFor(logging.WARNING):
- UpdateReplication._report_update(batchdate, start, index_start)
-
- if args.once:
- break
-
- if state is replication.UpdateState.NO_CHANGES:
- LOG.warning("No new changes. Sleeping for %d sec.", recheck_interval)
- time.sleep(recheck_interval)
-
- return state.value
-
- @staticmethod
- def run(args):
- try:
- import osmium # pylint: disable=W0611
- except ModuleNotFoundError:
- LOG.fatal("pyosmium not installed. Replication functions not available.\n"
- "To install pyosmium via pip: pip3 install osmium")
- return 1
-
- if args.init:
- return UpdateReplication._init_replication(args)
-
- if args.check_for_updates:
- return UpdateReplication._check_for_updates(args)
-
- return UpdateReplication._update(args)
-
class UpdateAddData:
"""\
Add additional data from a file or an online source.
return run_legacy_script(*params, nominatim_env=args)
-class UpdateIndex:
- """\
- Reindex all new and modified data.
- """
-
- @staticmethod
- def add_args(parser):
- group = parser.add_argument_group('Filter arguments')
- group.add_argument('--boundaries-only', action='store_true',
- help="""Index only administrative boundaries.""")
- group.add_argument('--no-boundaries', action='store_true',
- help="""Index everything except administrative boundaries.""")
- group.add_argument('--minrank', '-r', type=int, metavar='RANK', default=0,
- help='Minimum/starting rank')
- group.add_argument('--maxrank', '-R', type=int, metavar='RANK', default=30,
- help='Maximum/finishing rank')
-
- @staticmethod
- def run(args):
- from .indexer.indexer import Indexer
-
- indexer = Indexer(args.config.get_libpq_dsn(),
- args.threads or _num_system_cpus() or 1)
-
- if not args.no_boundaries:
- indexer.index_boundaries(args.minrank, args.maxrank)
- if not args.boundaries_only:
- indexer.index_by_rank(args.minrank, args.maxrank)
-
- if not args.no_boundaries and not args.boundaries_only \
- and args.minrank == 0 and args.maxrank == 30:
- conn = connect(args.config.get_libpq_dsn())
- status.set_indexed(conn, True)
- conn.close()
-
- return 0
-
-
-class UpdateRefresh:
- """\
- Recompute auxiliary data used by the indexing process.
-
- These functions must not be run in parallel with other update commands.
- """
-
- @staticmethod
- def add_args(parser):
- group = parser.add_argument_group('Data arguments')
- group.add_argument('--postcodes', action='store_true',
- help='Update postcode centroid table')
- group.add_argument('--word-counts', action='store_true',
- help='Compute frequency of full-word search terms')
- group.add_argument('--address-levels', action='store_true',
- help='Reimport address level configuration')
- group.add_argument('--functions', action='store_true',
- help='Update the PL/pgSQL functions in the database')
- group.add_argument('--wiki-data', action='store_true',
- help='Update Wikipedia/data importance numbers.')
- group.add_argument('--importance', action='store_true',
- help='Recompute place importances (expensive!)')
- group.add_argument('--website', action='store_true',
- help='Refresh the directory that serves the scripts for the web API')
- group = parser.add_argument_group('Arguments for function refresh')
- group.add_argument('--no-diff-updates', action='store_false', dest='diffs',
- help='Do not enable code for propagating updates')
- group.add_argument('--enable-debug-statements', action='store_true',
- help='Enable debug warning statements in functions')
-
- @staticmethod
- def run(args):
- from .tools import refresh
-
- if args.postcodes:
- LOG.warning("Update postcodes centroid")
- conn = connect(args.config.get_libpq_dsn())
- refresh.update_postcodes(conn, args.data_dir)
- conn.close()
-
- if args.word_counts:
- LOG.warning('Recompute frequency of full-word search terms')
- conn = connect(args.config.get_libpq_dsn())
- refresh.recompute_word_counts(conn, args.data_dir)
- conn.close()
-
- if args.address_levels:
- cfg = Path(args.config.ADDRESS_LEVEL_CONFIG)
- LOG.warning('Updating address levels from %s', cfg)
- conn = connect(args.config.get_libpq_dsn())
- refresh.load_address_levels_from_file(conn, cfg)
- conn.close()
-
- if args.functions:
- LOG.warning('Create functions')
- conn = connect(args.config.get_libpq_dsn())
- refresh.create_functions(conn, args.config, args.data_dir,
- args.diffs, args.enable_debug_statements)
- conn.close()
-
- if args.wiki_data:
- run_legacy_script('setup.php', '--import-wikipedia-articles',
- nominatim_env=args, throw_on_fail=True)
- # Attention: importance MUST come after wiki data import.
- if args.importance:
- run_legacy_script('update.php', '--recompute-importance',
- nominatim_env=args, throw_on_fail=True)
- if args.website:
- run_legacy_script('setup.php', '--setup-website',
- nominatim_env=args, throw_on_fail=True)
-
- return 0
-
-
-class AdminCheckDatabase:
- """\
- Check that the database is complete and operational.
- """
-
- @staticmethod
- def add_args(parser):
- pass # No options
-
- @staticmethod
- def run(args):
- return run_legacy_script('check_import_finished.php', nominatim_env=args)
-
-
-class AdminWarm:
- """\
- Warm database caches for search and reverse queries.
- """
-
- @staticmethod
- def add_args(parser):
- group = parser.add_argument_group('Target arguments')
- group.add_argument('--search-only', action='store_const', dest='target',
- const='search',
- help="Only pre-warm tables for search queries")
- group.add_argument('--reverse-only', action='store_const', dest='target',
- const='reverse',
- help="Only pre-warm tables for reverse queries")
-
- @staticmethod
- def run(args):
- params = ['warm.php']
- if args.target == 'reverse':
- params.append('--reverse-only')
- if args.target == 'search':
- params.append('--search-only')
- return run_legacy_script(*params, nominatim_env=args)
-
-
class QueryExport:
"""\
Export addresses as CSV file from the database.
def run(args):
run_php_server(args.server, args.project_dir / 'website')
-STRUCTURED_QUERY = (
- ('street', 'housenumber and street'),
- ('city', 'city, town or village'),
- ('county', 'county'),
- ('state', 'state'),
- ('country', 'country'),
- ('postalcode', 'postcode')
-)
-
-EXTRADATA_PARAMS = (
- ('addressdetails', 'Include a breakdown of the address into elements.'),
- ('extratags', """Include additional information if available
- (e.g. wikipedia link, opening hours)."""),
- ('namedetails', 'Include a list of alternative names.')
-)
-
-DETAILS_SWITCHES = (
- ('addressdetails', 'Include a breakdown of the address into elements.'),
- ('keywords', 'Include a list of name keywords and address keywords.'),
- ('linkedplaces', 'Include a details of places that are linked with this one.'),
- ('hierarchy', 'Include details of places lower in the address hierarchy.'),
- ('group_hierarchy', 'Group the places by type.'),
- ('polygon_geojson', 'Include geometry of result.')
-)
-
-def _add_api_output_arguments(parser):
- group = parser.add_argument_group('Output arguments')
- group.add_argument('--format', default='jsonv2',
- choices=['xml', 'json', 'jsonv2', 'geojson', 'geocodejson'],
- help='Format of result')
- for name, desc in EXTRADATA_PARAMS:
- group.add_argument('--' + name, action='store_true', help=desc)
-
- group.add_argument('--lang', '--accept-language', metavar='LANGS',
- help='Preferred language order for presenting search results')
- group.add_argument('--polygon-output',
- choices=['geojson', 'kml', 'svg', 'text'],
- help='Output geometry of results as a GeoJSON, KML, SVG or WKT.')
- group.add_argument('--polygon-threshold', type=float, metavar='TOLERANCE',
- help="""Simplify output geometry.
- Parameter is difference tolerance in degrees.""")
-
-
-class APISearch:
- """\
- Execute API search query.
- """
-
- @staticmethod
- def add_args(parser):
- group = parser.add_argument_group('Query arguments')
- group.add_argument('--query',
- help='Free-form query string')
- for name, desc in STRUCTURED_QUERY:
- group.add_argument('--' + name, help='Structured query: ' + desc)
-
- _add_api_output_arguments(parser)
-
- group = parser.add_argument_group('Result limitation')
- group.add_argument('--countrycodes', metavar='CC,..',
- help='Limit search results to one or more countries.')
- group.add_argument('--exclude_place_ids', metavar='ID,..',
- help='List of search object to be excluded')
- group.add_argument('--limit', type=int,
- help='Limit the number of returned results')
- group.add_argument('--viewbox', metavar='X1,Y1,X2,Y2',
- help='Preferred area to find search results')
- group.add_argument('--bounded', action='store_true',
- help='Strictly restrict results to viewbox area')
-
- group = parser.add_argument_group('Other arguments')
- group.add_argument('--no-dedupe', action='store_false', dest='dedupe',
- help='Do not remove duplicates from the result list')
-
-
- @staticmethod
- def run(args):
- if args.query:
- params = dict(q=args.query)
- else:
- params = {k : getattr(args, k) for k, _ in STRUCTURED_QUERY if getattr(args, k)}
-
- for param, _ in EXTRADATA_PARAMS:
- if getattr(args, param):
- params[param] = '1'
- for param in ('format', 'countrycodes', 'exclude_place_ids', 'limit', 'viewbox'):
- if getattr(args, param):
- params[param] = getattr(args, param)
- if args.lang:
- params['accept-language'] = args.lang
- if args.polygon_output:
- params['polygon_' + args.polygon_output] = '1'
- if args.polygon_threshold:
- params['polygon_threshold'] = args.polygon_threshold
- if args.bounded:
- params['bounded'] = '1'
- if not args.dedupe:
- params['dedupe'] = '0'
-
- return run_api_script('search', args.project_dir,
- phpcgi_bin=args.phpcgi_path, params=params)
-
-class APIReverse:
- """\
- Execute API reverse query.
- """
-
- @staticmethod
- def add_args(parser):
- group = parser.add_argument_group('Query arguments')
- group.add_argument('--lat', type=float, required=True,
- help='Latitude of coordinate to look up (in WGS84)')
- group.add_argument('--lon', type=float, required=True,
- help='Longitude of coordinate to look up (in WGS84)')
- group.add_argument('--zoom', type=int,
- help='Level of detail required for the address')
-
- _add_api_output_arguments(parser)
-
-
- @staticmethod
- def run(args):
- params = dict(lat=args.lat, lon=args.lon)
- if args.zoom is not None:
- params['zoom'] = args.zoom
-
- for param, _ in EXTRADATA_PARAMS:
- if getattr(args, param):
- params[param] = '1'
- if args.format:
- params['format'] = args.format
- if args.lang:
- params['accept-language'] = args.lang
- if args.polygon_output:
- params['polygon_' + args.polygon_output] = '1'
- if args.polygon_threshold:
- params['polygon_threshold'] = args.polygon_threshold
-
- return run_api_script('reverse', args.project_dir,
- phpcgi_bin=args.phpcgi_path, params=params)
-
-
-class APILookup:
- """\
- Execute API reverse query.
- """
-
- @staticmethod
- def add_args(parser):
- group = parser.add_argument_group('Query arguments')
- group.add_argument('--id', metavar='OSMID',
- action='append', required=True, dest='ids',
- help='OSM id to lookup in format <NRW><id> (may be repeated)')
-
- _add_api_output_arguments(parser)
-
-
- @staticmethod
- def run(args):
- params = dict(osm_ids=','.join(args.ids))
-
- for param, _ in EXTRADATA_PARAMS:
- if getattr(args, param):
- params[param] = '1'
- if args.format:
- params['format'] = args.format
- if args.lang:
- params['accept-language'] = args.lang
- if args.polygon_output:
- params['polygon_' + args.polygon_output] = '1'
- if args.polygon_threshold:
- params['polygon_threshold'] = args.polygon_threshold
-
- return run_api_script('lookup', args.project_dir,
- phpcgi_bin=args.phpcgi_path, params=params)
-
-
-class APIDetails:
- """\
- Execute API lookup query.
- """
-
- @staticmethod
- def add_args(parser):
- group = parser.add_argument_group('Query arguments')
- objs = group.add_mutually_exclusive_group(required=True)
- objs.add_argument('--node', '-n', type=int,
- help="Look up the OSM node with the given ID.")
- objs.add_argument('--way', '-w', type=int,
- help="Look up the OSM way with the given ID.")
- objs.add_argument('--relation', '-r', type=int,
- help="Look up the OSM relation with the given ID.")
- objs.add_argument('--place_id', '-p', type=int,
- help='Database internal identifier of the OSM object to look up.')
- group.add_argument('--class', dest='object_class',
- help="""Class type to disambiguated multiple entries
- of the same object.""")
-
- group = parser.add_argument_group('Output arguments')
- for name, desc in DETAILS_SWITCHES:
- group.add_argument('--' + name, action='store_true', help=desc)
- group.add_argument('--lang', '--accept-language', metavar='LANGS',
- help='Preferred language order for presenting search results')
-
- @staticmethod
- def run(args):
- if args.node:
- params = dict(osmtype='N', osmid=args.node)
- elif args.way:
- params = dict(osmtype='W', osmid=args.node)
- elif args.relation:
- params = dict(osmtype='R', osmid=args.node)
- else:
- params = dict(place_id=args.place_id)
- if args.object_class:
- params['class'] = args.object_class
- for name, _ in DETAILS_SWITCHES:
- params[name] = '1' if getattr(args, name) else '0'
-
- return run_api_script('details', args.project_dir,
- phpcgi_bin=args.phpcgi_path, params=params)
-
-
-class APIStatus:
- """\
- Execute API status query.
- """
-
- @staticmethod
- def add_args(parser):
- group = parser.add_argument_group('API parameters')
- group.add_argument('--format', default='text', choices=['text', 'json'],
- help='Format of result')
-
- @staticmethod
- def run(args):
- return run_api_script('status', args.project_dir,
- phpcgi_bin=args.phpcgi_path,
- params=dict(format=args.format))
-
def nominatim(**kwargs):
"""\
parser.add_subcommand('import', SetupAll)
parser.add_subcommand('freeze', SetupFreeze)
- parser.add_subcommand('replication', UpdateReplication)
-
- parser.add_subcommand('check-database', AdminCheckDatabase)
- parser.add_subcommand('warm', AdminWarm)
+ parser.add_subcommand('replication', clicmd.UpdateReplication)
parser.add_subcommand('special-phrases', SetupSpecialPhrases)
parser.add_subcommand('add-data', UpdateAddData)
- parser.add_subcommand('index', UpdateIndex)
- parser.add_subcommand('refresh', UpdateRefresh)
+ parser.add_subcommand('index', clicmd.UpdateIndex)
+ parser.add_subcommand('refresh', clicmd.UpdateRefresh)
+
+ parser.add_subcommand('admin', clicmd.AdminFuncs)
parser.add_subcommand('export', QueryExport)
parser.add_subcommand('serve', AdminServe)
if kwargs.get('phpcgi_path'):
- parser.add_subcommand('search', APISearch)
- parser.add_subcommand('reverse', APIReverse)
- parser.add_subcommand('lookup', APILookup)
- parser.add_subcommand('details', APIDetails)
- parser.add_subcommand('status', APIStatus)
+ parser.add_subcommand('search', clicmd.APISearch)
+ parser.add_subcommand('reverse', clicmd.APIReverse)
+ parser.add_subcommand('lookup', clicmd.APILookup)
+ parser.add_subcommand('details', clicmd.APIDetails)
+ parser.add_subcommand('status', clicmd.APIStatus)
else:
parser.parser.epilog = 'php-cgi not found. Query commands not available.'
--- /dev/null
+"""
+Subcommand definitions for the command-line tool.
+"""
+
+from .replication import UpdateReplication
+from .api import APISearch, APIReverse, APILookup, APIDetails, APIStatus
+from .index import UpdateIndex
+from .refresh import UpdateRefresh
+from .admin import AdminFuncs
--- /dev/null
+"""
+Implementation of the 'admin' subcommand.
+"""
+from ..tools.exec_utils import run_legacy_script
+from ..db.connection import connect
+
+# Do not repeat documentation of subcommand classes.
+# pylint: disable=C0111
+# Using non-top-level imports to avoid eventually unused imports.
+# pylint: disable=E0012,C0415
+
+class AdminFuncs:
+ """\
+ Analyse and maintain the database.
+ """
+
+ @staticmethod
+ def add_args(parser):
+ group = parser.add_argument_group('Admin task arguments')
+ group.add_argument('--warm', action='store_true',
+ help='Warm database caches for search and reverse queries.')
+ group.add_argument('--check-database', action='store_true',
+ help='Check that the database is complete and operational.')
+ group.add_argument('--analyse-indexing', action='store_true',
+ help='Print performance analysis of the indexing process.')
+ group = parser.add_argument_group('Arguments for cache warming')
+ group.add_argument('--search-only', action='store_const', dest='target',
+ const='search',
+ help="Only pre-warm tables for search queries")
+ group.add_argument('--reverse-only', action='store_const', dest='target',
+ const='reverse',
+ help="Only pre-warm tables for reverse queries")
+        group = parser.add_argument_group('Arguments for index analysis')
+ mgroup = group.add_mutually_exclusive_group()
+ mgroup.add_argument('--osm-id', type=str,
+ help='Analyse indexing of the given OSM object')
+ mgroup.add_argument('--place-id', type=int,
+ help='Analyse indexing of the given Nominatim object')
+
+ @staticmethod
+ def run(args):
+ from ..tools import admin
+ if args.warm:
+ AdminFuncs._warm(args)
+
+ if args.check_database:
+ run_legacy_script('check_import_finished.php', nominatim_env=args)
+
+ if args.analyse_indexing:
+ conn = connect(args.config.get_libpq_dsn())
+ admin.analyse_indexing(conn, osm_id=args.osm_id, place_id=args.place_id)
+ conn.close()
+
+ return 0
+
+
+ @staticmethod
+ def _warm(args):
+ params = ['warm.php']
+ if args.target == 'reverse':
+ params.append('--reverse-only')
+ if args.target == 'search':
+ params.append('--search-only')
+ return run_legacy_script(*params, nominatim_env=args)
--- /dev/null
+"""
+Subcommand definitions for API calls from the command line.
+"""
+import logging
+
+from ..tools.exec_utils import run_api_script
+
+# Do not repeat documentation of subcommand classes.
+# pylint: disable=C0111
+
+LOG = logging.getLogger()
+
+STRUCTURED_QUERY = (
+ ('street', 'housenumber and street'),
+ ('city', 'city, town or village'),
+ ('county', 'county'),
+ ('state', 'state'),
+ ('country', 'country'),
+ ('postalcode', 'postcode')
+)
+
+EXTRADATA_PARAMS = (
+ ('addressdetails', 'Include a breakdown of the address into elements.'),
+ ('extratags', """Include additional information if available
+ (e.g. wikipedia link, opening hours)."""),
+ ('namedetails', 'Include a list of alternative names.')
+)
+
+DETAILS_SWITCHES = (
+ ('addressdetails', 'Include a breakdown of the address into elements.'),
+ ('keywords', 'Include a list of name keywords and address keywords.'),
+    ('linkedplaces', 'Include details of places that are linked with this one.'),
+ ('hierarchy', 'Include details of places lower in the address hierarchy.'),
+ ('group_hierarchy', 'Group the places by type.'),
+ ('polygon_geojson', 'Include geometry of result.')
+)
+
+def _add_api_output_arguments(parser):
+ group = parser.add_argument_group('Output arguments')
+ group.add_argument('--format', default='jsonv2',
+ choices=['xml', 'json', 'jsonv2', 'geojson', 'geocodejson'],
+ help='Format of result')
+ for name, desc in EXTRADATA_PARAMS:
+ group.add_argument('--' + name, action='store_true', help=desc)
+
+ group.add_argument('--lang', '--accept-language', metavar='LANGS',
+ help='Preferred language order for presenting search results')
+ group.add_argument('--polygon-output',
+ choices=['geojson', 'kml', 'svg', 'text'],
+ help='Output geometry of results as a GeoJSON, KML, SVG or WKT.')
+ group.add_argument('--polygon-threshold', type=float, metavar='TOLERANCE',
+ help="""Simplify output geometry.
+ Parameter is difference tolerance in degrees.""")
+
+
+class APISearch:
+ """\
+ Execute API search query.
+ """
+
+ @staticmethod
+ def add_args(parser):
+ group = parser.add_argument_group('Query arguments')
+ group.add_argument('--query',
+ help='Free-form query string')
+ for name, desc in STRUCTURED_QUERY:
+ group.add_argument('--' + name, help='Structured query: ' + desc)
+
+ _add_api_output_arguments(parser)
+
+ group = parser.add_argument_group('Result limitation')
+ group.add_argument('--countrycodes', metavar='CC,..',
+ help='Limit search results to one or more countries.')
+ group.add_argument('--exclude_place_ids', metavar='ID,..',
+ help='List of search object to be excluded')
+ group.add_argument('--limit', type=int,
+ help='Limit the number of returned results')
+ group.add_argument('--viewbox', metavar='X1,Y1,X2,Y2',
+ help='Preferred area to find search results')
+ group.add_argument('--bounded', action='store_true',
+ help='Strictly restrict results to viewbox area')
+
+ group = parser.add_argument_group('Other arguments')
+ group.add_argument('--no-dedupe', action='store_false', dest='dedupe',
+ help='Do not remove duplicates from the result list')
+
+
+ @staticmethod
+ def run(args):
+ if args.query:
+ params = dict(q=args.query)
+ else:
+ params = {k : getattr(args, k) for k, _ in STRUCTURED_QUERY if getattr(args, k)}
+
+ for param, _ in EXTRADATA_PARAMS:
+ if getattr(args, param):
+ params[param] = '1'
+ for param in ('format', 'countrycodes', 'exclude_place_ids', 'limit', 'viewbox'):
+ if getattr(args, param):
+ params[param] = getattr(args, param)
+ if args.lang:
+ params['accept-language'] = args.lang
+ if args.polygon_output:
+ params['polygon_' + args.polygon_output] = '1'
+ if args.polygon_threshold:
+ params['polygon_threshold'] = args.polygon_threshold
+ if args.bounded:
+ params['bounded'] = '1'
+ if not args.dedupe:
+ params['dedupe'] = '0'
+
+ return run_api_script('search', args.project_dir,
+ phpcgi_bin=args.phpcgi_path, params=params)
+
+class APIReverse:
+ """\
+ Execute API reverse query.
+ """
+
+ @staticmethod
+ def add_args(parser):
+ group = parser.add_argument_group('Query arguments')
+ group.add_argument('--lat', type=float, required=True,
+ help='Latitude of coordinate to look up (in WGS84)')
+ group.add_argument('--lon', type=float, required=True,
+ help='Longitude of coordinate to look up (in WGS84)')
+ group.add_argument('--zoom', type=int,
+ help='Level of detail required for the address')
+
+ _add_api_output_arguments(parser)
+
+
+ @staticmethod
+ def run(args):
+ params = dict(lat=args.lat, lon=args.lon)
+ if args.zoom is not None:
+ params['zoom'] = args.zoom
+
+ for param, _ in EXTRADATA_PARAMS:
+ if getattr(args, param):
+ params[param] = '1'
+ if args.format:
+ params['format'] = args.format
+ if args.lang:
+ params['accept-language'] = args.lang
+ if args.polygon_output:
+ params['polygon_' + args.polygon_output] = '1'
+ if args.polygon_threshold:
+ params['polygon_threshold'] = args.polygon_threshold
+
+ return run_api_script('reverse', args.project_dir,
+ phpcgi_bin=args.phpcgi_path, params=params)
+
+
+class APILookup:
+ """\
+    Execute API lookup query.
+ """
+
+ @staticmethod
+ def add_args(parser):
+ group = parser.add_argument_group('Query arguments')
+ group.add_argument('--id', metavar='OSMID',
+ action='append', required=True, dest='ids',
+ help='OSM id to lookup in format <NRW><id> (may be repeated)')
+
+ _add_api_output_arguments(parser)
+
+
+ @staticmethod
+ def run(args):
+ params = dict(osm_ids=','.join(args.ids))
+
+ for param, _ in EXTRADATA_PARAMS:
+ if getattr(args, param):
+ params[param] = '1'
+ if args.format:
+ params['format'] = args.format
+ if args.lang:
+ params['accept-language'] = args.lang
+ if args.polygon_output:
+ params['polygon_' + args.polygon_output] = '1'
+ if args.polygon_threshold:
+ params['polygon_threshold'] = args.polygon_threshold
+
+ return run_api_script('lookup', args.project_dir,
+ phpcgi_bin=args.phpcgi_path, params=params)
+
+
+class APIDetails:
+ """\
+    Execute API details query.
+ """
+
+ @staticmethod
+ def add_args(parser):
+ group = parser.add_argument_group('Query arguments')
+ objs = group.add_mutually_exclusive_group(required=True)
+ objs.add_argument('--node', '-n', type=int,
+ help="Look up the OSM node with the given ID.")
+ objs.add_argument('--way', '-w', type=int,
+ help="Look up the OSM way with the given ID.")
+ objs.add_argument('--relation', '-r', type=int,
+ help="Look up the OSM relation with the given ID.")
+ objs.add_argument('--place_id', '-p', type=int,
+ help='Database internal identifier of the OSM object to look up.')
+ group.add_argument('--class', dest='object_class',
+                           help="""Class type to disambiguate multiple entries
+ of the same object.""")
+
+ group = parser.add_argument_group('Output arguments')
+ for name, desc in DETAILS_SWITCHES:
+ group.add_argument('--' + name, action='store_true', help=desc)
+ group.add_argument('--lang', '--accept-language', metavar='LANGS',
+ help='Preferred language order for presenting search results')
+
+ @staticmethod
+ def run(args):
+ if args.node:
+ params = dict(osmtype='N', osmid=args.node)
+        elif args.way:
+            params = dict(osmtype='W', osmid=args.way)
+        elif args.relation:
+            params = dict(osmtype='R', osmid=args.relation)
+ else:
+ params = dict(place_id=args.place_id)
+ if args.object_class:
+ params['class'] = args.object_class
+ for name, _ in DETAILS_SWITCHES:
+ params[name] = '1' if getattr(args, name) else '0'
+
+ return run_api_script('details', args.project_dir,
+ phpcgi_bin=args.phpcgi_path, params=params)
+
+
+class APIStatus:
+ """\
+ Execute API status query.
+ """
+
+ @staticmethod
+ def add_args(parser):
+ group = parser.add_argument_group('API parameters')
+ group.add_argument('--format', default='text', choices=['text', 'json'],
+ help='Format of result')
+
+ @staticmethod
+ def run(args):
+ return run_api_script('status', args.project_dir,
+ phpcgi_bin=args.phpcgi_path,
+ params=dict(format=args.format))
--- /dev/null
+"""
+Implementation of the 'index' subcommand.
+"""
+import os
+
+from ..db import status
+from ..db.connection import connect
+
+# Do not repeat documentation of subcommand classes.
+# pylint: disable=C0111
+# Using non-top-level imports to avoid eventually unused imports.
+# pylint: disable=E0012,C0415
+
+def _num_system_cpus():
+ try:
+ cpus = len(os.sched_getaffinity(0))
+ except NotImplementedError:
+ cpus = None
+
+ return cpus or os.cpu_count()
+
+
+class UpdateIndex:
+ """\
+ Reindex all new and modified data.
+ """
+
+ @staticmethod
+ def add_args(parser):
+ group = parser.add_argument_group('Filter arguments')
+ group.add_argument('--boundaries-only', action='store_true',
+ help="""Index only administrative boundaries.""")
+ group.add_argument('--no-boundaries', action='store_true',
+ help="""Index everything except administrative boundaries.""")
+ group.add_argument('--minrank', '-r', type=int, metavar='RANK', default=0,
+ help='Minimum/starting rank')
+ group.add_argument('--maxrank', '-R', type=int, metavar='RANK', default=30,
+ help='Maximum/finishing rank')
+
+ @staticmethod
+ def run(args):
+ from ..indexer.indexer import Indexer
+
+ indexer = Indexer(args.config.get_libpq_dsn(),
+ args.threads or _num_system_cpus() or 1)
+
+ if not args.no_boundaries:
+ indexer.index_boundaries(args.minrank, args.maxrank)
+ if not args.boundaries_only:
+ indexer.index_by_rank(args.minrank, args.maxrank)
+
+ if not args.no_boundaries and not args.boundaries_only \
+ and args.minrank == 0 and args.maxrank == 30:
+ conn = connect(args.config.get_libpq_dsn())
+ status.set_indexed(conn, True)
+ conn.close()
+
+ return 0
--- /dev/null
+"""
+Implementation of 'refresh' subcommand.
+"""
+import logging
+from pathlib import Path
+
+from ..db.connection import connect
+from ..tools.exec_utils import run_legacy_script
+
+# Do not repeat documentation of subcommand classes.
+# pylint: disable=C0111
+# Using non-top-level imports to avoid eventually unused imports.
+# pylint: disable=E0012,C0415
+
+LOG = logging.getLogger()
+
+class UpdateRefresh:
+ """\
+ Recompute auxiliary data used by the indexing process.
+
+ These functions must not be run in parallel with other update commands.
+ """
+
+ @staticmethod
+ def add_args(parser):
+ group = parser.add_argument_group('Data arguments')
+ group.add_argument('--postcodes', action='store_true',
+ help='Update postcode centroid table')
+ group.add_argument('--word-counts', action='store_true',
+ help='Compute frequency of full-word search terms')
+ group.add_argument('--address-levels', action='store_true',
+ help='Reimport address level configuration')
+ group.add_argument('--functions', action='store_true',
+ help='Update the PL/pgSQL functions in the database')
+ group.add_argument('--wiki-data', action='store_true',
+ help='Update Wikipedia/data importance numbers.')
+ group.add_argument('--importance', action='store_true',
+ help='Recompute place importances (expensive!)')
+ group.add_argument('--website', action='store_true',
+ help='Refresh the directory that serves the scripts for the web API')
+ group = parser.add_argument_group('Arguments for function refresh')
+ group.add_argument('--no-diff-updates', action='store_false', dest='diffs',
+ help='Do not enable code for propagating updates')
+ group.add_argument('--enable-debug-statements', action='store_true',
+ help='Enable debug warning statements in functions')
+
+ @staticmethod
+ def run(args):
+ from ..tools import refresh
+
+ if args.postcodes:
+ LOG.warning("Update postcodes centroid")
+ conn = connect(args.config.get_libpq_dsn())
+ refresh.update_postcodes(conn, args.sqllib_dir)
+ conn.close()
+
+ if args.word_counts:
+ LOG.warning('Recompute frequency of full-word search terms')
+ conn = connect(args.config.get_libpq_dsn())
+ refresh.recompute_word_counts(conn, args.sqllib_dir)
+ conn.close()
+
+ if args.address_levels:
+ cfg = Path(args.config.ADDRESS_LEVEL_CONFIG)
+ LOG.warning('Updating address levels from %s', cfg)
+ conn = connect(args.config.get_libpq_dsn())
+ refresh.load_address_levels_from_file(conn, cfg)
+ conn.close()
+
+ if args.functions:
+ LOG.warning('Create functions')
+ conn = connect(args.config.get_libpq_dsn())
+ refresh.create_functions(conn, args.config, args.sqllib_dir,
+ args.diffs, args.enable_debug_statements)
+ conn.close()
+
+ if args.wiki_data:
+ run_legacy_script('setup.php', '--import-wikipedia-articles',
+ nominatim_env=args, throw_on_fail=True)
+ # Attention: importance MUST come after wiki data import.
+ if args.importance:
+ run_legacy_script('update.php', '--recompute-importance',
+ nominatim_env=args, throw_on_fail=True)
+ if args.website:
+ run_legacy_script('setup.php', '--setup-website',
+ nominatim_env=args, throw_on_fail=True)
+
+ return 0
--- /dev/null
+"""
+Implementation of the 'replication' sub-command.
+"""
+import datetime as dt
+import logging
+import socket
+import time
+
+from ..db import status
+from ..db.connection import connect
+from ..errors import UsageError
+
+LOG = logging.getLogger()
+
+# Do not repeat documentation of subcommand classes.
+# pylint: disable=C0111
+# Using non-top-level imports to make pyosmium optional for replication only.
+# pylint: disable=E0012,C0415
+
+def _osm2pgsql_options_from_args(args, default_cache, default_threads):
+ """ Set up the standard osm2pgsql from the command line arguments.
+ """
+ return dict(osm2pgsql=args.osm2pgsql_path,
+ osm2pgsql_cache=args.osm2pgsql_cache or default_cache,
+ osm2pgsql_style=args.config.get_import_style_file(),
+ threads=args.threads or default_threads,
+ dsn=args.config.get_libpq_dsn(),
+ flatnode_file=args.config.FLATNODE_FILE)
+
+
+class UpdateReplication:
+ """\
+ Update the database using an online replication service.
+ """
+
+ @staticmethod
+ def add_args(parser):
+ group = parser.add_argument_group('Arguments for initialisation')
+ group.add_argument('--init', action='store_true',
+ help='Initialise the update process')
+ group.add_argument('--no-update-functions', dest='update_functions',
+ action='store_false',
+ help="""Do not update the trigger function to
+ support differential updates.""")
+ group = parser.add_argument_group('Arguments for updates')
+ group.add_argument('--check-for-updates', action='store_true',
+ help='Check if new updates are available and exit')
+ group.add_argument('--once', action='store_true',
+ help="""Download and apply updates only once. When
+ not set, updates are continuously applied""")
+ group.add_argument('--no-index', action='store_false', dest='do_index',
+ help="""Do not index the new data. Only applicable
+ together with --once""")
+ group.add_argument('--osm2pgsql-cache', metavar='SIZE', type=int,
+ help='Size of cache to be used by osm2pgsql (in MB)')
+ group = parser.add_argument_group('Download parameters')
+ group.add_argument('--socket-timeout', dest='socket_timeout', type=int, default=60,
+ help='Set timeout for file downloads.')
+
+ @staticmethod
+ def _init_replication(args):
+ from ..tools import replication, refresh
+
+ LOG.warning("Initialising replication updates")
+ conn = connect(args.config.get_libpq_dsn())
+ replication.init_replication(conn, base_url=args.config.REPLICATION_URL)
+ if args.update_functions:
+ LOG.warning("Create functions")
+ refresh.create_functions(conn, args.config, args.sqllib_dir,
+ True, False)
+ conn.close()
+ return 0
+
+
+ @staticmethod
+ def _check_for_updates(args):
+ from ..tools import replication
+
+ conn = connect(args.config.get_libpq_dsn())
+ ret = replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL)
+ conn.close()
+ return ret
+
+ @staticmethod
+ def _report_update(batchdate, start_import, start_index):
+ def round_time(delta):
+ return dt.timedelta(seconds=int(delta.total_seconds()))
+
+ end = dt.datetime.now(dt.timezone.utc)
+ LOG.warning("Update completed. Import: %s. %sTotal: %s. Remaining backlog: %s.",
+ round_time((start_index or end) - start_import),
+ "Indexing: {} ".format(round_time(end - start_index))
+ if start_index else '',
+ round_time(end - start_import),
+ round_time(end - batchdate))
+
+ @staticmethod
+ def _update(args):
+ from ..tools import replication
+ from ..indexer.indexer import Indexer
+
+ params = _osm2pgsql_options_from_args(args, 2000, 1)
+ params.update(base_url=args.config.REPLICATION_URL,
+ update_interval=args.config.get_int('REPLICATION_UPDATE_INTERVAL'),
+ import_file=args.project_dir / 'osmosischange.osc',
+ max_diff_size=args.config.get_int('REPLICATION_MAX_DIFF'),
+ indexed_only=not args.once)
+
+ # Sanity check to not overwhelm the Geofabrik servers.
+        if 'download.geofabrik.de' in params['base_url']\
+ and params['update_interval'] < 86400:
+ LOG.fatal("Update interval too low for download.geofabrik.de.\n"
+ "Please check install documentation "
+ "(https://nominatim.org/release-docs/latest/admin/Import-and-Update#"
+ "setting-up-the-update-process).")
+ raise UsageError("Invalid replication update interval setting.")
+
+ if not args.once:
+ if not args.do_index:
+ LOG.fatal("Indexing cannot be disabled when running updates continuously.")
+ raise UsageError("Bad argument '--no-index'.")
+ recheck_interval = args.config.get_int('REPLICATION_RECHECK_INTERVAL')
+
+ while True:
+ conn = connect(args.config.get_libpq_dsn())
+ start = dt.datetime.now(dt.timezone.utc)
+ state = replication.update(conn, params)
+ if state is not replication.UpdateState.NO_CHANGES:
+ status.log_status(conn, start, 'import')
+ batchdate, _, _ = status.get_status(conn)
+ conn.close()
+
+ if state is not replication.UpdateState.NO_CHANGES and args.do_index:
+ index_start = dt.datetime.now(dt.timezone.utc)
+ indexer = Indexer(args.config.get_libpq_dsn(),
+ args.threads or 1)
+ indexer.index_boundaries(0, 30)
+ indexer.index_by_rank(0, 30)
+
+ conn = connect(args.config.get_libpq_dsn())
+ status.set_indexed(conn, True)
+ status.log_status(conn, index_start, 'index')
+ conn.close()
+ else:
+ index_start = None
+
+ if LOG.isEnabledFor(logging.WARNING):
+ UpdateReplication._report_update(batchdate, start, index_start)
+
+ if args.once:
+ break
+
+ if state is replication.UpdateState.NO_CHANGES:
+ LOG.warning("No new changes. Sleeping for %d sec.", recheck_interval)
+ time.sleep(recheck_interval)
+
+
+ @staticmethod
+ def run(args):
+ socket.setdefaulttimeout(args.socket_timeout)
+
+ if args.init:
+ return UpdateReplication._init_replication(args)
+
+ if args.check_for_updates:
+ return UpdateReplication._check_for_updates(args)
+
+ UpdateReplication._update(args)
+ return 0
--- /dev/null
+"""
+Functions for database analysis and maintenance.
+"""
+import logging
+
+from ..errors import UsageError
+
+LOG = logging.getLogger()
+
+def analyse_indexing(conn, osm_id=None, place_id=None):
+ """ Analyse indexing of a single Nominatim object.
+ """
+ with conn.cursor() as cur:
+ if osm_id:
+ osm_type = osm_id[0].upper()
+ if osm_type not in 'NWR' or not osm_id[1:].isdigit():
+ LOG.fatal('OSM ID must be of form <N|W|R><id>. Got: %s', osm_id)
+ raise UsageError("OSM ID parameter badly formatted")
+ cur.execute('SELECT place_id FROM placex WHERE osm_type = %s AND osm_id = %s',
+ (osm_type, osm_id[1:]))
+
+ if cur.rowcount < 1:
+ LOG.fatal("OSM object %s not found in database.", osm_id)
+ raise UsageError("OSM object not found")
+
+ place_id = cur.fetchone()[0]
+
+ if place_id is None:
+ LOG.fatal("No OSM object given to index.")
+ raise UsageError("OSM object not found")
+
+ cur.execute("update placex set indexed_status = 2 where place_id = %s",
+ (place_id, ))
+
+ cur.execute("""SET auto_explain.log_min_duration = '0';
+ SET auto_explain.log_analyze = 'true';
+ SET auto_explain.log_nested_statements = 'true';
+ LOAD 'auto_explain';
+ SET client_min_messages = LOG;
+ SET log_min_messages = FATAL""")
+
+ cur.execute("update placex set indexed_status = 0 where place_id = %s",
+ (place_id, ))
+
+ # we do not want to keep the results
+ conn.rollback()
+
+ for msg in conn.notices:
+ print(msg)
env = nominatim_env.config.get_os_env()
env['NOMINATIM_DATADIR'] = str(nominatim_env.data_dir)
- env['NOMINATIM_BINDIR'] = str(nominatim_env.data_dir / 'utils')
+ env['NOMINATIM_SQLDIR'] = str(nominatim_env.sqllib_dir)
+ env['NOMINATIM_CONFIGDIR'] = str(nominatim_env.config_dir)
env['NOMINATIM_DATABASE_MODULE_SRC_PATH'] = nominatim_env.module_dir
if not env['NOMINATIM_OSM2PGSQL_BINARY']:
env['NOMINATIM_OSM2PGSQL_BINARY'] = nominatim_env.osm2pgsql_path
from ..db.utils import execute_file
-def update_postcodes(conn, datadir):
+def update_postcodes(conn, sql_dir):
""" Recalculate postcode centroids and add, remove and update entries in the
     location_postcode table. `conn` is an open connection to the database.
"""
- execute_file(conn, datadir / 'sql' / 'update-postcodes.sql')
+ execute_file(conn, sql_dir / 'update-postcodes.sql')
-def recompute_word_counts(conn, datadir):
+def recompute_word_counts(conn, sql_dir):
""" Compute the frequency of full-word search terms.
"""
- execute_file(conn, datadir / 'sql' / 'words_from_search_name.sql')
+ execute_file(conn, sql_dir / 'words_from_search_name.sql')
def _add_address_level_rows_from_entry(rows, entry):
return replace_partition_string(sql, sorted(partitions))
-def create_functions(conn, config, data_dir,
+def create_functions(conn, config, sql_dir,
enable_diff_updates=True, enable_debug=False):
""" (Re)create the PL/pgSQL functions.
"""
- sql_dir = data_dir / 'sql'
-
sql = _get_standard_function_sql(conn, config, sql_dir,
enable_diff_updates, enable_debug)
sql += _get_partition_function_sql(conn, sql_dir)
import logging
import time
-from osmium.replication.server import ReplicationServer
-from osmium import WriteHandler
-
from ..db import status
from .exec_utils import run_osm2pgsql
from ..errors import UsageError
+try:
+ from osmium.replication.server import ReplicationServer
+ from osmium import WriteHandler
+except ModuleNotFoundError as exc:
+ logging.getLogger().fatal("pyosmium not installed. Replication functions not available.\n"
+ "To install pyosmium via pip: pip3 install osmium")
+ raise UsageError("replication tools not available") from exc
+
LOG = logging.getLogger()
def init_replication(conn, base_url):
-Subproject commit a65ab49f777b9785726117971d3a4140436d70aa
+Subproject commit 497476d56f7c1fcbbdb95b363293de6ce0feac00
+++ /dev/null
-CREATE OR REPLACE FUNCTION hstore(k text, v text) RETURNS HSTORE
- AS $$
-DECLARE
-BEGIN
- RETURN k => v;
-END;
-$$
-LANGUAGE plpgsql IMMUTABLE;
}
$covfilter = new SebastianBergmann\CodeCoverage\Filter();
-$covfilter->addDirectoryToWhitelist($_SERVER['COV_PHP_DIR'].'/lib');
+$covfilter->addDirectoryToWhitelist($_SERVER['COV_PHP_DIR'].'/lib-php');
$covfilter->addDirectoryToWhitelist($_SERVER['COV_PHP_DIR'].'/website');
$coverage = new SebastianBergmann\CodeCoverage\CodeCoverage(null, $covfilter);
$coverage->start($_SERVER['COV_TEST_NAME']);
self.test_env['NOMINATIM_FLATNODE_FILE'] = ''
self.test_env['NOMINATIM_IMPORT_STYLE'] = 'full'
self.test_env['NOMINATIM_USE_US_TIGER_DATA'] = 'yes'
- self.test_env['NOMINATIM_DATADIR'] = self.src_dir
- self.test_env['NOMINATIM_BINDIR'] = self.src_dir / 'utils'
- self.test_env['NOMINATIM_DATABASE_MODULE_PATH'] = self.build_dir / 'module'
+ self.test_env['NOMINATIM_DATADIR'] = self.src_dir / 'data'
+ self.test_env['NOMINATIM_SQLDIR'] = self.src_dir / 'lib-sql'
+ self.test_env['NOMINATIM_CONFIGDIR'] = self.src_dir / 'settings'
+ self.test_env['NOMINATIM_DATABASE_MODULE_SRC_PATH'] = self.build_dir / 'module'
self.test_env['NOMINATIM_OSM2PGSQL_BINARY'] = self.build_dir / 'osm2pgsql' / 'osm2pgsql'
self.test_env['NOMINATIM_NOMINATIM_TOOL'] = self.build_dir / 'nominatim'
if self.server_module_path:
self.test_env['NOMINATIM_DATABASE_MODULE_PATH'] = self.server_module_path
+ else:
+ # avoid module being copied into the temporary environment
+ self.test_env['NOMINATIM_DATABASE_MODULE_PATH'] = self.build_dir / 'module'
if self.website_dir is not None:
self.website_dir.cleanup()
""" Run one of the Nominatim utility scripts with the given arguments.
"""
cmd = ['/usr/bin/env', 'php', '-Cq']
- cmd.append((Path(self.src_dir) / 'lib' / 'admin' / '{}.php'.format(script)).resolve())
+ cmd.append((Path(self.src_dir) / 'lib-php' / 'admin' / '{}.php'.format(script)).resolve())
cmd.extend(['--' + x for x in args])
for k, v in kwargs.items():
cmd.extend(('--' + k.replace('_', '-'), str(v)))
""" Query directly via PHP script.
"""
cmd = ['/usr/bin/env', 'php']
- cmd.append(context.nominatim.src_dir / 'lib' / 'admin' / 'query.php')
+ cmd.append(context.nominatim.src_dir / 'lib-php' / 'admin' / 'query.php')
if query:
cmd.extend(['--search', query])
# add more parameters in table form
<?php
- @define('CONST_LibDir', '../../lib');
+ @define('CONST_LibDir', '../../lib-php');
@define('CONST_DataDir', '../..');
@define('CONST_Debug', true);
</testsuites>
<filter>
<whitelist>
- <directory>../../lib/</directory>
+ <directory>../../lib-php/</directory>
</whitelist>
</filter>
geom or 'SRID=4326;POINT(0 0 )'))
return _insert
+
+@pytest.fixture
+def placex_table(temp_db_with_extensions, temp_db_conn):
+    """ Create an empty version of the placex table.
+ """
+ with temp_db_conn.cursor() as cur:
+ cur.execute("""CREATE TABLE placex (
+ place_id BIGINT NOT NULL,
+ parent_place_id BIGINT,
+ linked_place_id BIGINT,
+ importance FLOAT,
+ indexed_date TIMESTAMP,
+ geometry_sector INTEGER,
+ rank_address SMALLINT,
+ rank_search SMALLINT,
+ partition SMALLINT,
+ indexed_status SMALLINT,
+ osm_id int8,
+ osm_type char(1),
+ class text,
+ type text,
+ name hstore,
+ admin_level smallint,
+ address hstore,
+ extratags hstore,
+ geometry Geometry(Geometry,4326),
+ wikipedia TEXT,
+ country_code varchar(2),
+ housenumber TEXT,
+ postcode TEXT,
+ centroid GEOMETRY(Geometry, 4326))
+ """)
+ temp_db_conn.commit()
+
+
+
import time
import nominatim.cli
+import nominatim.clicmd.api
+import nominatim.clicmd.refresh
+import nominatim.clicmd.admin
import nominatim.indexer.indexer
import nominatim.tools.refresh
import nominatim.tools.replication
def call_nominatim(*args):
return nominatim.cli.nominatim(module_dir='build/module',
osm2pgsql_path='build/osm2pgsql/osm2pgsql',
- phplib_dir='lib',
+ phplib_dir='lib-php',
data_dir='.',
phpcgi_path='/usr/bin/php-cgi',
+ sqllib_dir='lib-sql',
+ config_dir='settings',
cli_args=args)
class MockParamCapture:
monkeypatch.setattr(nominatim.cli, 'run_legacy_script', mock)
return mock
-@pytest.fixture
-def mock_run_api(monkeypatch):
- mock = MockParamCapture()
- monkeypatch.setattr(nominatim.cli, 'run_api_script', mock)
- return mock
-
def test_cli_help(capsys):
""" Running nominatim tool without arguments prints help.
(('special-phrases',), 'specialphrases'),
(('add-data', '--tiger-data', 'tiger'), 'setup'),
(('add-data', '--file', 'foo.osm'), 'update'),
- (('check-database',), 'check_import_finished'),
- (('warm',), 'warm'),
(('export',), 'export')
])
def test_legacy_commands_simple(mock_run_legacy, command, script):
assert mock_run_legacy.last_args[0] == script + '.php'
+@pytest.mark.parametrize("params", [('--warm', ),
+ ('--warm', '--reverse-only'),
+ ('--warm', '--search-only'),
+ ('--check-database', )])
+def test_admin_command_legacy(monkeypatch, params):
+ mock_run_legacy = MockParamCapture()
+ monkeypatch.setattr(nominatim.clicmd.admin, 'run_legacy_script', mock_run_legacy)
+
+ assert 0 == call_nominatim('admin', *params)
+
+ assert mock_run_legacy.called == 1
+
+@pytest.mark.parametrize("func, params", [('analyse_indexing', ('--analyse-indexing', ))])
+def test_admin_command_tool(temp_db, monkeypatch, func, params):
+ mock = MockParamCapture()
+ monkeypatch.setattr(nominatim.tools.admin, func, mock)
+
+ assert 0 == call_nominatim('admin', *params)
+ assert mock.called == 1
+
@pytest.mark.parametrize("name,oid", [('file', 'foo.osm'), ('diff', 'foo.osc'),
('node', 12), ('way', 8), ('relation', 32)])
def test_add_data_command(mock_run_legacy, name, oid):
('importance', ('update.php', '--recompute-importance')),
('website', ('setup.php', '--setup-website')),
])
-def test_refresh_legacy_command(mock_run_legacy, temp_db, command, params):
+def test_refresh_legacy_command(monkeypatch, temp_db, command, params):
+ mock_run_legacy = MockParamCapture()
+ monkeypatch.setattr(nominatim.clicmd.refresh, 'run_legacy_script', mock_run_legacy)
+
assert 0 == call_nominatim('refresh', '--' + command)
assert mock_run_legacy.called == 1
assert func_mock.called == 1
-def test_refresh_importance_computed_after_wiki_import(mock_run_legacy, temp_db):
+def test_refresh_importance_computed_after_wiki_import(monkeypatch, temp_db):
+ mock_run_legacy = MockParamCapture()
+ monkeypatch.setattr(nominatim.clicmd.refresh, 'run_legacy_script', mock_run_legacy)
+
assert 0 == call_nominatim('refresh', '--importance', '--wiki-data')
assert mock_run_legacy.called == 2
assert call_nominatim('replication') == 1
-@pytest.mark.parametrize("state, retval", [
- (nominatim.tools.replication.UpdateState.UP_TO_DATE, 0),
- (nominatim.tools.replication.UpdateState.NO_CHANGES, 3)
- ])
+@pytest.mark.parametrize("state", [nominatim.tools.replication.UpdateState.UP_TO_DATE,
+ nominatim.tools.replication.UpdateState.NO_CHANGES])
def test_replication_update_once_no_index(monkeypatch, temp_db, temp_db_conn,
- status_table, state, retval):
+ status_table, state):
status.set_status(temp_db_conn, date=dt.datetime.now(dt.timezone.utc), seq=1)
func_mock = MockParamCapture(retval=state)
monkeypatch.setattr(nominatim.tools.replication, 'update', func_mock)
- assert retval == call_nominatim('replication', '--once', '--no-index')
+ assert 0 == call_nominatim('replication', '--once', '--no-index')
def test_replication_update_continuous(monkeypatch, temp_db_conn, status_table):
('details', '--place_id', '10001'),
('status',)
])
-def test_api_commands_simple(mock_run_api, params):
+def test_api_commands_simple(monkeypatch, params):
+ mock_run_api = MockParamCapture()
+ monkeypatch.setattr(nominatim.clicmd.api, 'run_api_script', mock_run_api)
+
assert 0 == call_nominatim(*params)
assert mock_run_api.called == 1
--- /dev/null
+"""
+Tests for maintenance and analysis functions.
+"""
+import pytest
+
+from nominatim.db.connection import connect
+from nominatim.errors import UsageError
+from nominatim.tools import admin
+
+@pytest.fixture
+def db(temp_db, placex_table):
+ conn = connect('dbname=' + temp_db)
+ yield conn
+ conn.close()
+
+def test_analyse_indexing_no_objects(db):
+ with pytest.raises(UsageError):
+ admin.analyse_indexing(db)
+
+
+@pytest.mark.parametrize("oid", ['1234', 'N123a', 'X123'])
+def test_analyse_indexing_bad_osmid(db, oid):
+ with pytest.raises(UsageError):
+ admin.analyse_indexing(db, osm_id=oid)
+
+
+def test_analyse_indexing_unknown_osmid(db):
+ with pytest.raises(UsageError):
+ admin.analyse_indexing(db, osm_id='W12345674')
+
+
+def test_analyse_indexing_with_place_id(db, temp_db_cursor):
+ temp_db_cursor.execute("INSERT INTO placex (place_id) VALUES(12345)")
+
+ admin.analyse_indexing(db, place_id=12345)
+
+
+def test_analyse_indexing_with_osm_id(db, temp_db_cursor):
+ temp_db_cursor.execute("""INSERT INTO placex (place_id, osm_type, osm_id)
+ VALUES(9988, 'N', 10000)""")
+
+ admin.analyse_indexing(db, osm_id='N10000')
phplib_dir = tmp_phplib_dir
data_dir = Path('data')
project_dir = Path('.')
+ sqllib_dir = Path('lib-sql')
+ config_dir = Path('settings')
module_dir = 'module'
osm2pgsql_path = 'osm2pgsql'
from nominatim.db.connection import connect
from nominatim.tools.refresh import _get_standard_function_sql, _get_partition_function_sql
-SQL_DIR = (Path(__file__) / '..' / '..' / '..' / 'sql').resolve()
+SQL_DIR = (Path(__file__) / '..' / '..' / '..' / 'lib-sql').resolve()
@pytest.fixture
def db(temp_db):
+++ /dev/null
-#!/usr/bin/env python3
-# SPDX-License-Identifier: GPL-2.0-only
-#
-# This file is part of Nominatim.
-# Copyright (C) 2020 Sarah Hoffmann
-
-"""
-Script for analysing the indexing process.
-
-The script enables detailed logging for nested statements and then
-runs the indexing process for teh given object. Detailed 'EXPLAIN ANALYSE'
-information is printed for each executed query in the trigger. The
-transaction is then rolled back, so that no actual changes to the database
-happen. It also disables logging into the system log, so that the
-log files are not cluttered.
-"""
-
-from argparse import ArgumentParser, RawDescriptionHelpFormatter, ArgumentTypeError
-import psycopg2
-import getpass
-import re
-
-class Analyser(object):
-
- def __init__(self, options):
- password = None
- if options.password_prompt:
- password = getpass.getpass("Database password: ")
-
- self.options = options
- self.conn = psycopg2.connect(dbname=options.dbname,
- user=options.user,
- password=password,
- host=options.host,
- port=options.port)
-
-
-
- def run(self):
- c = self.conn.cursor()
-
- if self.options.placeid:
- place_id = self.options.placeid
- else:
- if self.options.rank:
- c.execute(f"""select place_id from placex
- where rank_address = {self.options.rank}
- and linked_place_id is null
- limit 1""")
- objinfo = f"rank {self.options.rank}"
-
- if self.options.osmid:
- osm_type = self.options.osmid[0].upper()
- if osm_type not in ('N', 'W', 'R'):
- raise RuntimeError("OSM ID must be of form <N|W|R><id>")
- try:
- osm_id = int(self.options.osmid[1:])
- except ValueError:
- raise RuntimeError("OSM ID must be of form <N|W|R><id>")
-
- c.execute(f"""SELECT place_id FROM placex
- WHERE osm_type = '{osm_type}' AND osm_id = {osm_id}""")
- objinfo = f"OSM object {self.options.osmid}"
-
-
- if c.rowcount < 1:
- raise RuntimeError(f"Cannot find a place for {objinfo}.")
- place_id = c.fetchone()[0]
-
- c.execute(f"""update placex set indexed_status = 2 where
- place_id = {place_id}""")
-
- c.execute("""SET auto_explain.log_min_duration = '0';
- SET auto_explain.log_analyze = 'true';
- SET auto_explain.log_nested_statements = 'true';
- LOAD 'auto_explain';
- SET client_min_messages = LOG;
- SET log_min_messages = FATAL""");
-
- c.execute(f"""update placex set indexed_status = 0 where
- place_id = {place_id}""")
-
- c.close() # automatic rollback
-
- for l in self.conn.notices:
- print(l)
-
-
-if __name__ == '__main__':
- def h(s):
- return re.sub("\s\s+" , " ", s)
-
- p = ArgumentParser(description=__doc__,
- formatter_class=RawDescriptionHelpFormatter)
-
- group = p.add_mutually_exclusive_group(required=True)
- group.add_argument('--rank', dest='rank', type=int,
- help='Analyse indexing of the given address rank')
- group.add_argument('--osm-id', dest='osmid', type=str,
- help='Analyse indexing of the given OSM object')
- group.add_argument('--place-id', dest='placeid', type=int,
- help='Analyse indexing of the given Nominatim object')
- p.add_argument('-d', '--database',
- dest='dbname', action='store', default='nominatim',
- help='Name of the PostgreSQL database to connect to.')
- p.add_argument('-U', '--username',
- dest='user', action='store',
- help='PostgreSQL user name.')
- p.add_argument('-W', '--password',
- dest='password_prompt', action='store_true',
- help='Force password prompt.')
- p.add_argument('-H', '--host',
- dest='host', action='store',
- help='PostgreSQL server hostname or socket location.')
- p.add_argument('-P', '--port',
- dest='port', action='store',
- help='PostgreSQL server port')
-
- Analyser(p.parse_args()).run()