description: 'Additional options to hand to cmake'
required: false
default: ''
+ lua:
+ description: 'Version of Lua to use'
+ required: false
+ default: '5.3'
runs:
using: "composite"
shell: bash
- name: Install prerequisites
run: |
- sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev
+ sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev liblua${LUA_VERSION}-dev lua${LUA_VERSION}
if [ "x$UBUNTUVER" == "x18" ]; then
pip3 install python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 datrie
else
env:
UBUNTUVER: ${{ inputs.ubuntu }}
CMAKE_ARGS: ${{ inputs.cmake-args }}
+ LUA_VERSION: ${{ inputs.lua }}
- name: Configure
run: mkdir build && cd build && cmake $CMAKE_ARGS ../Nominatim
- name: Remove existing PostgreSQL
run: |
sudo apt-get purge -yq postgresql*
- sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
+ sudo apt install curl ca-certificates gnupg
+ curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/apt.postgresql.org.gpg >/dev/null
+ sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
sudo apt-get update -qq
shell: bash
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
submodules: true
- - uses: actions/cache@v2
+ - uses: actions/cache@v3
with:
path: |
data/country_osm_grid.sql.gz
mv nominatim-src.tar.bz2 Nominatim
- name: 'Upload Artifact'
- uses: actions/upload-artifact@v2
+ uses: actions/upload-artifact@v3
with:
name: full-source
path: nominatim-src.tar.bz2
pytest: py.test-3
php: 7.4
- ubuntu: 22
- postgresql: 14
+ postgresql: 15
postgis: 3
pytest: py.test-3
php: 8.1
runs-on: ubuntu-${{ matrix.ubuntu }}.04
steps:
- - uses: actions/download-artifact@v2
+ - uses: actions/download-artifact@v3
with:
name: full-source
tools: phpunit, phpcs, composer
ini-values: opcache.jit=disable
- - uses: actions/setup-python@v2
+ - uses: actions/setup-python@v4
with:
python-version: 3.6
if: matrix.ubuntu == 18
if: matrix.ubuntu == 22
- name: Install latest pylint/mypy
- run: pip3 install -U pylint mypy types-PyYAML types-jinja2 types-psycopg2 types-psutil typing-extensions
+ run: pip3 install -U pylint mypy types-PyYAML types-jinja2 types-psycopg2 types-psutil types-requests typing-extensions
- name: PHP linting
run: phpcs --report-width=120 .
runs-on: ubuntu-20.04
steps:
- - uses: actions/download-artifact@v2
+ - uses: actions/download-artifact@v3
with:
name: full-source
OS: ${{ matrix.name }}
INSTALL_MODE: ${{ matrix.install_mode }}
- - uses: actions/download-artifact@v2
+ - uses: actions/download-artifact@v3
with:
name: full-source
path: /home/nominatim
# typed Python is enabled. See also https://github.com/PyCQA/pylint/issues/5273
disable=too-few-public-methods,duplicate-code,too-many-ancestors,bad-option-value,no-self-use,not-context-manager
-good-names=i,x,y,fd,db,cc
+good-names=i,x,y,m,fd,db,cc
project(nominatim)
set(NOMINATIM_VERSION_MAJOR 4)
-set(NOMINATIM_VERSION_MINOR 1)
+set(NOMINATIM_VERSION_MINOR 2)
set(NOMINATIM_VERSION_PATCH 0)
set(NOMINATIM_VERSION "${NOMINATIM_VERSION_MAJOR}.${NOMINATIM_VERSION_MINOR}.${NOMINATIM_VERSION_PATCH}")
endif()
set(BUILD_TESTS_SAVED "${BUILD_TESTS}")
set(BUILD_TESTS off)
- set(WITH_LUA off CACHE BOOL "")
add_subdirectory(osm2pgsql)
set(BUILD_TESTS ${BUILD_TESTS_SAVED})
endif()
install(DIRECTORY nominatim
DESTINATION ${NOMINATIM_LIBDIR}/lib-python
FILES_MATCHING PATTERN "*.py"
+ PATTERN "paths.py" EXCLUDE
PATTERN __pycache__ EXCLUDE)
+
+ configure_file(${PROJECT_SOURCE_DIR}/cmake/paths-py.tmpl paths-py.installed)
+ install(FILES ${PROJECT_BINARY_DIR}/paths-py.installed
+ DESTINATION ${NOMINATIM_LIBDIR}/lib-python/nominatim
+ RENAME paths.py)
+
install(DIRECTORY lib-sql DESTINATION ${NOMINATIM_LIBDIR})
install(FILES ${COUNTRY_GRID_FILE}
install(FILES settings/env.defaults
settings/address-levels.json
settings/phrase-settings.json
- settings/import-admin.style
- settings/import-street.style
- settings/import-address.style
- settings/import-full.style
- settings/import-extratags.style
+ settings/import-admin.lua
+ settings/import-street.lua
+ settings/import-address.lua
+ settings/import-full.lua
+ settings/import-extratags.lua
+ settings/flex-base.lua
settings/icu_tokenizer.yaml
settings/country_settings.yaml
DESTINATION ${NOMINATIM_CONFIGDIR})
cd build
make test
```
+
+## Releases
+
+Nominatim follows semantic versioning. Major releases are done for large changes
+that require (or at least strongly recommend) a reimport of the databases.
+Minor releases can usually be applied to existing databases. Patch releases
+contain bug fixes only and are released from a separate branch where the
+relevant changes are cherry-picked from the master branch.
+
+Checklist for releases:
+
+* [ ] increase version in `nominatim/version.py` and CMakeLists.txt
+* [ ] update `ChangeLog` (copy information from patch releases from release branch)
+* [ ] complete `docs/admin/Migration.md`
+* [ ] update EOL dates in `SECURITY.md`
+* [ ] commit and make sure CI tests pass
+* [ ] test migration
+ * download, build and import previous version
+ * migrate using master version
+ * run updates using master version
+* [ ] prepare tarball:
+ * `git clone --recursive https://github.com/osm-search/Nominatim` (switch to right branch!)
+ * `rm -r .git* osm2pgsql/.git*`
+ * copy country data into `data/`
+ * add version to base directory and package
+* [ ] upload tarball to https://nominatim.org
+* [ ] prepare documentation
+ * check out new docs branch
+ * change git checkout instructions to tarball download instructions or adapt version on existing ones
+ * build documentation and copy to https://github.com/osm-search/nominatim-org-site
+ * add new version to history
+* [ ] check release tarball
+ * download tarball as per new documentation instructions
+ * compile and import Nominatim
+ * run `nominatim --version` to confirm correct version
+* [ ] tag new release and add a release on github.com
+4.2.0
+
+ * add experimental support for osm2pgsql flex style
+ * introduce secondary importance value to be retrieved from a raster data file
+ (currently still unused, to replace address importance, thanks to @tareqpi)
+ * add new report tool `nominatim admin --collect-os-info`
+ (thanks @micahcochran, @tareqpi)
+ * reorganise index to improve lookup performance and size
+ * run index creation after import in parallel
+ * run ANALYZE more selectively to speed up continuation of indexing
+ * fix crash on update when addr:interpolation receives an illegal value
+ * fix minimum number of retrieved results to be at least 10
+ * fix search for combinations of special term + name (e.g Hotel Bellevue)
+ * do not return interpolations without a parent street on reverse search
+ * improve invalidation of linked places on updates
+ * fix address parsing for interpolation lines
+ * make sure socket timeouts are respected during replication
+ (working around a bug in some versions of pyosmium)
+ * update bundled osm2pgsql to 1.7.1
+ * add support for PostgreSQL 15
+ * typing fixes to work with latest type annotations from typeshed
+ * smaller improvements to documentation (thanks to @mausch)
+
4.1.0
* switch to ICU tokenizer as default
| Version | End of support for security updates |
| ------- | ----------------------------------- |
+| 4.2.x | 2024-11-24 |
| 4.1.x | 2024-08-05 |
| 4.0.x | 2023-11-02 |
| 3.7.x | 2023-04-05 |
# Install Nominatim in a virtual machine for development and testing
-This document describes how you can install Nominatim inside a Ubuntu 16
+This document describes how you can install Nominatim inside a Ubuntu 22
virtual machine on your desktop/laptop (host machine). The goal is to give
you a development environment to easily edit code and run the test suite
without affecting the rest of your system.
PHP errors are written to `/var/log/apache2/error.log`.
With `echo` and `var_dump()` you write into the output (HTML/XML/JSON) when
-you either add `&debug=1` to the URL (preferred) or set
-`@define('CONST_Debug', true);` in `settings/local.php`.
+you either add `&debug=1` to the URL.
In the Python BDD test you can use `logger.info()` for temporary debug
statements.
Yes, Vagrant and Virtualbox can be installed on MS Windows just fine. You need a 64bit
version of Windows.
+##### Will it run on Apple Silicon?
+
+You might need to replace Virtualbox with [Parallels](https://www.parallels.com/products/desktop/).
+There is no free/open source version of Parallels.
##### Why Monaco, can I use another country?
bug fixes) get added since those usually only get applied to new/changed data.
Also this document skips the optional Wikipedia data import which affects ranking
-of search results. See [Nominatim installation](https://nominatim.org/release-docs/latest/admin/Installation) for details.
+of search results. See [Nominatim installation](https://nominatim.org/release-docs/latest/admin/Installation)
+for details.
##### Why Ubuntu? Can I test CentOS/Fedora/CoreOS/FreeBSD?
-There is a Vagrant script for CentOS available, but the Nominatim directory
+There used to be a Vagrant script for CentOS available, but the Nominatim directory
isn't symlinked/mounted to the host which makes development trickier. We used
it mainly for debugging installation with SELinux.
name, location of files. We chose Ubuntu because that is closest to the
nominatim.openstreetmap.org production environment.
-You can configure/download other Vagrant boxes from [https://app.vagrantup.com/boxes/search](https://app.vagrantup.com/boxes/search).
+You can configure/download other Vagrant boxes from
+[https://app.vagrantup.com/boxes/search](https://app.vagrantup.com/boxes/search).
##### How can I connect to an existing database?
-Let's say you have a Postgres database named `nominatim_it` on server `your-server.com` and port `5432`. The Postgres username is `postgres`. You can edit `settings/local.php` and point Nominatim to it.
+Let's say you have a Postgres database named `nominatim_it` on server `your-server.com`
+and port `5432`. The Postgres username is `postgres`. You can edit the `.env` in your
+project directory and point Nominatim to it.
+
+ NOMINATIM_DATABASE_DSN="pgsql:host=your-server.com;port=5432;user=postgres;dbname=nominatim_it"
- pgsql:host=your-server.com;port=5432;user=postgres;dbname=nominatim_it
-
No data import or restarting necessary.
If the Postgres installation is behind a firewall, you can try
ssh -L 9999:localhost:5432 your-username@your-server.com
inside the virtual machine. It will map the port to `localhost:9999` and then
-you edit `settings/local.php` with
+you edit `.env` file with
- @define('CONST_Database_DSN', 'pgsql:host=localhost;port=9999;user=postgres;dbname=nominatim_it');
+ NOMINATIM_DATABASE_DSN="pgsql:host=localhost;port=9999;user=postgres;dbname=nominatim_it"
-To access postgres directly remember to specify the hostname, e.g. `psql --host localhost --port 9999 nominatim_it`
+To access postgres directly remember to specify the hostname,
+e.g. `psql --host localhost --port 9999 nominatim_it`
##### My computer is slow and the import takes too long. Can I start the virtual machine "in the cloud"?
checkout = "no"
end
+ config.vm.provider "hyperv" do |hv, override|
+ hv.memory = 2048
+ hv.linked_clone = true
+ if ENV['CHECKOUT'] != 'y' then
+ override.vm.synced_folder ".", "/home/vagrant/Nominatim", type: "smb", smb_host: ENV['SMB_HOST'] || ENV['COMPUTERNAME']
+ end
+ end
+
config.vm.provider "virtualbox" do |vb, override|
vb.gui = false
vb.memory = 2048
end
end
- config.vm.define "ubuntu", primary: true do |sub|
+ config.vm.define "ubuntu22", primary: true do |sub|
+ sub.vm.box = "generic/ubuntu2204"
+ sub.vm.provision :shell do |s|
+ s.path = "vagrant/Install-on-Ubuntu-22.sh"
+ s.privileged = false
+ s.args = [checkout]
+ end
+ end
+
+ config.vm.define "ubuntu22-apache" do |sub|
+ sub.vm.box = "generic/ubuntu2204"
+ sub.vm.provision :shell do |s|
+ s.path = "vagrant/Install-on-Ubuntu-22.sh"
+ s.privileged = false
+ s.args = [checkout, "install-apache"]
+ end
+ end
+
+ config.vm.define "ubuntu22-nginx" do |sub|
+ sub.vm.box = "generic/ubuntu2204"
+ sub.vm.provision :shell do |s|
+ s.path = "vagrant/Install-on-Ubuntu-22.sh"
+ s.privileged = false
+ s.args = [checkout, "install-nginx"]
+ end
+ end
+
+ config.vm.define "ubuntu20" do |sub|
sub.vm.box = "generic/ubuntu2004"
sub.vm.provision :shell do |s|
s.path = "vagrant/Install-on-Ubuntu-20.sh"
end
end
- config.vm.define "ubuntu-apache" do |sub|
+ config.vm.define "ubuntu20-apache" do |sub|
sub.vm.box = "generic/ubuntu2004"
sub.vm.provision :shell do |s|
s.path = "vagrant/Install-on-Ubuntu-20.sh"
end
end
- config.vm.define "ubuntu-nginx" do |sub|
+ config.vm.define "ubuntu20-nginx" do |sub|
sub.vm.box = "generic/ubuntu2004"
sub.vm.provision :shell do |s|
s.path = "vagrant/Install-on-Ubuntu-20.sh"
end
end
- config.vm.define "centos7" do |sub|
- sub.vm.box = "centos/7"
- sub.vm.provision :shell do |s|
- s.path = "vagrant/Install-on-Centos-7.sh"
- s.privileged = false
- s.args = [checkout]
- end
- end
-
- config.vm.define "centos" do |sub|
- sub.vm.box = "generic/centos8"
- sub.vm.provision :shell do |s|
- s.path = "vagrant/Install-on-Centos-8.sh"
- s.privileged = false
- s.args = [checkout]
- end
- end
-
-
end
--- /dev/null
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2022 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Path settings for extra data used by Nominatim (installed version).
+"""
+from pathlib import Path
+
+PHPLIB_DIR = (Path('@NOMINATIM_LIBDIR@') / 'lib-php').resolve()
+SQLLIB_DIR = (Path('@NOMINATIM_LIBDIR@') / 'lib-sql').resolve()
+DATA_DIR = Path('@NOMINATIM_DATADIR@').resolve()
+CONFIG_DIR = Path('@NOMINATIM_CONFIGDIR@').resolve()
sys.path.insert(1, '@NOMINATIM_LIBDIR@/lib-python')
-os.environ['NOMINATIM_NOMINATIM_TOOL'] = os.path.abspath(__file__)
-
from nominatim import cli
from nominatim import version
exit(cli.nominatim(module_dir='@NOMINATIM_LIBDIR@/module',
osm2pgsql_path='@NOMINATIM_LIBDIR@/osm2pgsql',
- phplib_dir='@NOMINATIM_LIBDIR@/lib-php',
- sqllib_dir='@NOMINATIM_LIBDIR@/lib-sql',
- data_dir='@NOMINATIM_DATADIR@',
- config_dir='@NOMINATIM_CONFIGDIR@',
phpcgi_path='@PHPCGI_BIN@'))
sys.path.insert(1, '@CMAKE_SOURCE_DIR@')
-os.environ['NOMINATIM_NOMINATIM_TOOL'] = os.path.abspath(__file__)
-
from nominatim import cli
from nominatim import version
exit(cli.nominatim(module_dir='@CMAKE_BINARY_DIR@/module',
osm2pgsql_path='@CMAKE_BINARY_DIR@/osm2pgsql/osm2pgsql',
- phplib_dir='@CMAKE_SOURCE_DIR@/lib-php',
- sqllib_dir='@CMAKE_SOURCE_DIR@/lib-sql',
- data_dir='@CMAKE_SOURCE_DIR@/data',
- config_dir='@CMAKE_SOURCE_DIR@/settings',
phpcgi_path='@PHPCGI_BIN@'))
``` ini
; Replace the tcp listener and add the unix socket
-listen = /var/run/php-fpm.sock
+listen = /var/run/php-fpm-nominatim.sock
; Ensure that the daemon runs as the correct user
listen.owner = www-data
fastcgi_param SCRIPT_FILENAME "$document_root$uri.php";
fastcgi_param PATH_TRANSLATED "$document_root$uri.php";
fastcgi_param QUERY_STRING $args;
- fastcgi_pass unix:/var/run/php-fpm.sock;
+ fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index index.php;
include fastcgi_params;
}
if (!-f $document_root$fastcgi_script_name) {
return 404;
}
- fastcgi_pass unix:/var/run/php-fpm.sock;
+ fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index search.php;
include fastcgi.conf;
}
This data is available as a binary download. Put it into your project directory:
cd $PROJECT_DIR
- wget https://www.nominatim.org/data/wikimedia-importance.sql.gz
+ wget https://nominatim.org/data/wikimedia-importance.sql.gz
The file is about 400MB and adds around 4GB to the Nominatim database.
This data can be optionally downloaded into the project directory:
cd $PROJECT_DIR
- wget https://www.nominatim.org/data/gb_postcodes.csv.gz
- wget https://www.nominatim.org/data/us_postcodes.csv.gz
+ wget https://nominatim.org/data/gb_postcodes.csv.gz
+ wget https://nominatim.org/data/us_postcodes.csv.gz
You can also add your own custom postcode sources, see
[Customization of postcodes](../customize/Postcodes.md).
The development version does not include the country grid. Download it separately:
```
-wget -O Nominatim/data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
+wget -O Nominatim/data/country_osm_grid.sql.gz https://nominatim.org/data/country_grid.sql.gz
```
### Building Nominatim
That said if you installed your own Nominatim instance you can use the
`nominatim export` PHP script as basis to return such lists.
+
+#### 7. My result has a wrong postcode. Where does it come from?
+
+Most places in OSM don't have a postcode, so Nominatim tries to interpolate
+one. It first looks at all the places that make up the address of the place.
+If one of them has a postcode defined, this is the one to be used. When
+none of the address parts has a postcode either, Nominatim interpolates one
+from the surrounding objects. If the postcode for your result is wrong, then
+most of the time there is an OSM object with the wrong postcode nearby.
+
+To find the bad postcode, go to
+[https://nominatim.openstreetmap.org](https://nominatim.openstreetmap.org)
+and search for your place. When you have found it, click on the 'details' link
+under the result to go to the details page. There is a field 'Computed Postcode'
+which should display the bad postcode. Click on the 'how?' link. A small
+explanation text appears. It contains a link to a query for Overpass Turbo.
+Click on that and you get a map with all places in the area that have the bad
+postcode. If none is displayed, zoom the map out a bit and then click on 'Run'.
+
+Now go to [OpenStreetMap](https://openstreetmap.org) and fix the error you
+have just found. It will take at least a day for Nominatim to catch up with
+your data fix. Sometimes longer, depending on how much editing activity is in
+the area.
+
Possible status codes are
- | | message | notes |
- |-----|----------------------|---------------------------------------------------|
- | 700 | "No database" | connection failed |
- | 701 | "Module failed" | database could not load nominatim.so |
- | 702 | "Module call failed" | nominatim.so loaded but calling a function failed |
- | 703 | "Query failed" | test query against a database table failed |
- | 704 | "No value" | test query worked but returned no results |
+| | message | notes |
+| --- | ------------------------------ | ----------------------------------------------------------------- |
+| 700 | "No database" | connection failed |
+| 701 | "Module failed" | database could not load nominatim.so |
+| 702 | "Module call failed" | nominatim.so loaded but calling a function failed |
+| 703 | "Query failed" | test query against a database table failed |
+| 704 | "No value" | test query worked but returned no results |
+| 705 | "Import date is not available" | No import dates were returned (enabling replication can fix this) |
## Configuring the Import
-Which OSM objects are added to the database and which of the tags are used
-can be configured via the import style configuration file. This
-is a JSON file which contains a list of rules which are matched against every
-tag of every object and then assign the tag its specific role.
+In the very first step of a Nominatim import, OSM data is loaded into the
+database. Nominatim uses [osm2pgsql](https://osm2pgsql.org) for this task.
+It comes with a [flex style](https://osm2pgsql.org/doc/manual.html#the-flex-output)
+specifically tailored to filter and convert OSM data into Nominatim's
+internal data representation.
-The style to use is given by the `NOMINATIM_IMPORT_STYLE` configuration
-option. There are a number of default styles, which are explained in detail
-in the [Import section](../admin/Import.md#filtering-imported-data). These
-standard styles may be referenced by their name.
+There are a number of default configurations for the flex style which
+result in geocoding databases of different detail. The
+[Import section](../admin/Import.md#filtering-imported-data) explains
+these default configurations in detail.
You can also create your own custom style. Put the style file into your
project directory and then set `NOMINATIM_IMPORT_STYLE` to the name of the file.
It is always recommended to start with one of the standard styles and customize
-those. You find the standard styles under the name `import-<stylename>.style`
+those. You find the standard styles under the name `import-<stylename>.lua`
in the standard Nominatim configuration path (usually `/etc/nominatim` or
`/usr/local/etc/nominatim`).
-The remainder of the page describes the format of the file.
+The remainder of the page describes how the flex style works and how to
+customize it.
-### Configuration Rules
+### The `flex-base.lua` module
-A single rule looks like this:
+The core of Nominatim's flex import configuration is the `flex-base` module.
+It defines the table layout used by Nominatim and provides standard
+implementations for the import callbacks that make it easy to customize
+how OSM tags are used by Nominatim.
-```json
-{
- "keys" : ["key1", "key2", ...],
- "values" : {
- "value1" : "prop",
- "value2" : "prop1,prop2"
- }
-}
-```
-
-A rule first defines a list of keys to apply the rule to. This is always a list
-of strings. The string may have four forms. An empty string matches against
-any key. A string that ends in an asterisk `*` is a prefix match and accordingly
-matches against any key that starts with the given string (minus the `*`). A
-suffix match can be defined similarly with a string that starts with a `*`. Any
-other string constitutes an exact match.
+Every custom style should include this module to make sure that the correct
+tables are created. Thus start your custom style as follows:
-The second part of the rules defines a list of values and the properties that
-apply to a successful match. Value strings may be either empty, which
-means that they match any value, or describe an exact match. Prefix
-or suffix matching of values is not possible.
+``` lua
+local flex = require('flex-base')
-For a rule to match, it has to find a valid combination of keys and values. The
-resulting property is that of the matched values.
+```
-The rules in a configuration file are processed sequentially and the first
-match for each tag wins.
+The following sections explain how the module can be customized.
+
+
+### Changing the recognized tags
+
+If you just want to change which OSM tags are recognized during import,
+then there are a number of convenience functions to set the tag lists used
+during the processing.
+
+!!! warning
+ There are no built-in defaults for the tag lists, so all the functions
+ need to be called from your style script to fully process the data.
+    Make sure you start from one of the default styles and only modify
+ the data you are interested in. You can also derive your style from an
+ existing style by importing the appropriate module, e.g.
+ `local flex = require('import-street')`.
+
+Many of the following functions take _key match lists_. These lists can
+contain three kinds of strings to match against tag keys:
+A string that ends in an asterisk `*` is a prefix match and accordingly matches
+against any key that starts with the given string (minus the `*`).
+A suffix match can be defined similarly with a string that starts with a `*`.
+Any other string is matched exactly against tag keys.
+
+
+#### `set_main_tags()` - principal tags
+
+If a principal or main tag is found on an OSM object, then the object
+is included in Nominatim's search index. A single object may also have
+multiple main tags. In that case, the object will be included multiple
+times in the index, once for each main tag.
+
+The flex script distinguishes between four types of main tags:
+
+* __always__: a main tag that is used unconditionally
+* __named__: consider this main tag only, if the object has a proper name
+ (a reference is not enough, see below).
+* __named_with_key__: consider this main tag only, when the object has
+ a proper name with a domain prefix. For example, if the main tag is
+ `bridge=yes`, then it will only be added as an extra row, if there is
+ a tag `bridge:name[:XXX]` for the same object. If this property is set,
+ all other names that are not domain-specific are ignored.
+* __fallback__: use this main tag only, if there is no other main tag.
+  Fallback always implies `named`, i.e. fallbacks are only tried for
+ named objects.
+
+The `set_main_tags()` function takes exactly one table parameter which
+defines the keys and key/value combinations to include and the kind of
+main tag. Each lua table key defines an OSM tag key. The value may
+be a string defining the kind of main key as described above. Then the tag will
+be considered a main tag for any possible value. To further restrict
+which values are acceptable, give a table with the permitted values
+and their kind of main tag. If the table contains a simple value without
+key, then this is used as default for values that are not listed.
+
+!!! example
+ ``` lua
+ local flex = require('import-full')
+
+ flex.set_main_tags{
+ boundary = {administrative = 'named'},
+ highway = {'always', street_lamp = 'named'},
+ landuse = 'fallback'
+ }
+ ```
+
+ In this example an object with a `boundary` tag will only be included
+ when it has a value of `administrative`. Objects with `highway` tags are
+ always included. However when the value is `street_lamp` then the object
+ must have a name, too. With any other value, the object is included
+ independently of the name. Finally, if a `landuse` tag is present then
+  it will be used independently of the concrete value if neither boundary
+ nor highway tags were found and the object is named.
+
+
+#### `set_prefilters()` - ignoring tags
+
+Pre-filtering of tags allows ignoring them during any further processing.
+Thus pre-filtering takes precedence over any other tag processing. This is
+useful when some specific key/value combinations need to be excluded from
+processing. When tags are filtered, they may either be deleted completely
+or moved to `extratags`. Extra tags are saved with the object and returned
+to the user when requested, but are not used otherwise.
+
+`set_prefilters()` takes a table with four optional fields:
+
+* __delete_keys__ is a _key match list_ for tags that should be deleted
+* __delete_tags__ contains a table of tag keys pointing to a list of tag
+ values. Tags with matching key/value pairs are deleted.
+* __extra_keys__ is a _key match list_ for tags which should be saved into
+ extratags
+* __extra_tags__ contains a table of tag keys pointing to a list of tag
+  values. Tags with matching key/value pairs are moved to extratags.
+
+Key list may contain three kinds of strings:
+A string that ends in an asterisk `*` is a prefix match and accordingly matches
+against any key that starts with the given string (minus the `*`).
+A suffix match can be defined similarly with a string that starts with a `*`.
+Any other string is matched exactly against tag keys.
+
+!!! example
+ ``` lua
+ local flex = require('import-full')
+
+ flex.set_prefilters{
+ delete_keys = {'source', 'source:*'},
+ extra_tags = {amenity = {'yes', 'no'}}
+ }
+ flex.set_main_tags{
+ amenity = 'always'
+ }
+ ```
-A rule where key and value are the empty string is special. This defines the
-fallback when none of the rules match. The fallback is always used as a last
-resort when nothing else matches, no matter where the rule appears in the file.
-Defining multiple fallback rules is not allowed. What happens in this case,
-is undefined.
+ In this example any tags `source` and tags that begin with `source:` are
+ deleted before any other processing is done. Getting rid of frequent tags
+ this way can speed up the import.
-### Tag Properties
+ Tags with `amenity=yes` or `amenity=no` are moved to extratags. Later
+ all tags with an `amenity` key are made a main tag. This effectively means
+ that Nominatim will use all amenity tags except for those with value
+ yes and no.
-One or more of the following properties may be given for each tag:
+#### `set_name_tags()` - defining names
-* `main`
+The flex script distinguishes between two kinds of names:
- A principal tag. A new row will be added for the object with key and value
- as `class` and `type`.
+* __main__: the primary names make an object fully searchable.
+ Main tags of type _named_ will only cause the object to be included when
+ such a primary name is present. Primary names are usually those found
+ in the `name` tag and its variants.
+* __extra__: extra names are still added to the search index but they are
+ alone not sufficient to make an object named.
-* `with_name`
+`set_name_tags()` takes a table with two optional fields `main` and `extra`.
+They take _key match lists_ for main and extra names respectively.
- When the tag is a principal tag (`main` property set): only really add a new
- row, if there is any name tag found (a reference tag is not sufficient, see
- below).
+!!! example
+ ``` lua
+ local flex = require('flex-base')
-* `with_name_key`
+ flex.set_main_tags{highway = {traffic_light = 'named'}}
+ flex.set_name_tags{main = {'name', 'name:*'},
+ extra = {'ref'}
+ }
+ ```
- When the tag is a principal tag (`main` property set): only really add a new
- row, if there is also a name tag that matches the key of the principal tag.
- For example, if the main tag is `bridge=yes`, then it will only be added as
- an extra row, if there is a tag `bridge:name[:XXX]` for the same object.
- If this property is set, all other names that are not domain-specific are
- ignored.
+ This example creates a search index over traffic lights but will
+ only include those that have a common name and not those which just
+ have some reference ID from the city.
-* `fallback`
+#### `set_address_tags()` - defining address parts
- When the tag is a principal tag (`main` property set): only really add a new
- row, when no other principal tags for this object have been found. Only one
- fallback tag can win for an object.
+Address tags will be used to build up the address of an object.
-* `operator`
+`set_address_tags()` takes a table with arbitrary fields pointing to
+_key match lists_. Two fields have a special meaning:
- When the tag is a principal tag (`main` property set): also include the
- `operator` tag in the list of names. This is a special construct for an
- out-dated tagging practise in OSM. Fuel stations and chain restaurants
- in particular used to have the name of the chain tagged as `operator`.
- These days the chain can be more commonly found in the `brand` tag but
- there is still enough old data around to warrant this special case.
+__main__ defines
+the tags that make a full address object out of the OSM object. This
+is usually the housenumber or variants thereof. If a main address tag
+appears, then the object will always be included, if necessary with a
+fallback of `place=house`. If the key has a prefix of `addr:` or `is_in:`
+this will be stripped.
-* `name`
+__extra__ defines all supplementary tags for addresses, tags like `addr:street`, `addr:city` etc. If the key has a prefix of `addr:` or `is_in:` this will be stripped.
- Add tag to the list of names.
+All other fields will be handled as summary fields. If a key matches the
+key match list, then its value will be added to the address tags with the
+name of the field as key. If multiple tags match, then an arbitrary one
+wins.
-* `ref`
+Country tags are handled slightly special. Only tags with a two-letter code
+are accepted, all other values are discarded.
- Add tag to the list of names as a reference. At the moment this only means
- that the object is not considered to be named for `with_name`.
+!!! example
+ ``` lua
+ local flex = require('import-full')
-* `address`
+ flex.set_address_tags{
+ main = {'addr:housenumber'},
+ extra = {'addr:*'},
+ postcode = {'postal_code', 'postcode', 'addr:postcode'},
+ country = {'country-code', 'ISO3166-1'}
+ }
+ ```
- Add tag to the list of address tags. If the tag starts with `addr:` or
- `is_in:`, then this prefix is cut off before adding it to the list.
+ In this example all tags which begin with `addr:` will be saved in
+ the address tag list. If one of the tags is `addr:housenumber`, the
+ object will fall back to be entered as a `place=house` in the database
+ unless there is another interested main tag to be found.
-* `postcode`
+ Tags with keys `country-code` and `ISO3166-1` are saved with their
+ value under `country` in the address tag list. The same thing happens
+ to postcodes, they will always be saved under the key `postcode` thus
+ normalizing the multitude of keys that are used in the OSM database.
- Add the value as a postcode to the address tags. If multiple tags are
- candidate for postcodes, one wins out and the others are dropped.
-* `country`
+#### `set_unused_handling()` - processing remaining tags
- Add the value as a country code to the address tags. The value must be a
- two letter country code, otherwise it is ignored. If there are multiple
- tags that match, then one wins out and the others are dropped.
+This function defines what to do with tags that remain after all tags
+have been classified using the functions above. There are two ways in
+which the function can be used:
-* `house`
+`set_unused_handling(delete_keys = ..., delete_tags = ...)` deletes all
+keys that match the descriptions in the parameters and moves all remaining
+tags into the extratags list.
+`set_unused_handling(extra_keys = ..., extra_tags = ...)` moves all tags
+matching the parameters into the extratags list and then deletes the remaining
+tags. For the format of the parameters see the description in `set_prefilters()`
+above.
- If no principle tags can be found for the object, still add the object with
- `class`=`place` and `type`=`house`. Use this for address nodes that have no
- other function.
+!!! example
+ ``` lua
+ local flex = require('import-full')
-* `interpolation`
+ flex.set_address_tags{
+ main = {'addr:housenumber'},
+ extra = {'addr:*', 'tiger:county'}
+ }
+ flex.set_unused_handling{delete_keys = {'tiger:*'}}
+ ```
- Add this object as an address interpolation (appears as `class`=`place` and
- `type`=`houses` in the database).
+ In this example all remaining tags except those beginning with `tiger:`
+ are moved to the extratags list. Note that it is not possible to
+ already delete the tiger tags with `set_prefilters()` because that
+ would remove tiger:county before the address tags are processed.
-* `extra`
+### Customizing osm2pgsql callbacks
- Add tag to the list of extra tags.
+osm2pgsql expects the flex style to implement three callbacks, one process
+function per OSM type. If you want to implement special handling for
+certain OSM types, you can override the default implementations provided
+by the flex-base module.
-* `skip`
+#### Changing the relation types to be handled
- Skip the tag completely. Useful when a custom default fallback is defined
- or to define exceptions to rules.
+The default script only allows relations of type `multipolygon`, `boundary`
+and `waterway`. To add other types of relations, set `RELATION_TYPES` for
+the type to the kind of geometry that should be created. The following
+kinds of geometries can be used:
-A rule can define as many of these properties for one match as it likes. For
-example, if the property is `"main,extra"` then the tag will open a new row
-but also have the tag appear in the list of extra tags.
+* __relation_as_multipolygon__ creates a (Multi)Polygon from the ways in
+ the relation. If the ways do not form a valid area, then the object is
+ silently discarded.
+* __relation_as_multiline__ creates a (Multi)LineString from the ways in
+ the relations. Ways are combined as much as possible without any regards
+ to their order in the relation.
+
+!!! Example
+ ``` lua
+ local flex = require('import-full')
+
+ flex.RELATION_TYPES['site'] = flex.relation_as_multipolygon
+ ```
+
+ With this line relations of `type=site` will be included in the index
+ according to main tags found. This only works when the site relation
+ resolves to a valid area. Nodes in the site relation are not part of the
+ geometry.
+
+
+#### Adding additional logic to processing functions
+
+The default processing functions are also exported by the flex-base module
+as `process_node`, `process_way` and `process_relation`. These can be used
+to implement your own processing functions with some additional processing
+logic.
+
+!!! Example
+ ``` lua
+ local flex = require('import-full')
+
+ function osm2pgsql.process_relation(object)
+ if object.tags.boundary ~= 'administrative' or object.tags.admin_level ~= '2' then
+ flex.process_relation(object)
+ end
+ end
+ ```
+
+ This example discards all country-level boundaries and uses standard
+ handling for everything else. This can be useful if you want to use
+ your own custom country boundaries.
+
+
+### Customizing the main processing function
+
+The main processing function of the flex style can be found in the function
+`process_tags`. This function is called for all OSM object kinds and is
+responsible for filtering the tags and writing out the rows into PostgreSQL.
+
+!!! Example
+ ``` lua
+ local flex = require('import-full')
+
+ local original_process_tags = flex.process_tags
+
+ function flex.process_tags(o)
+ if o.object.tags.highway ~= nil and o.object.tags.access == 'no' then
+ return
+ end
+
+ original_process_tags(o)
+ end
+ ```
+
+ This example shows the most simple customization of the process_tags function.
+ It simply adds some additional processing before running the original code.
+ To do that, first save the original function and then overwrite process_tags
+ from the module. In this example all highways which are not accessible
+ by anyone will be ignored.
+
+
+#### The `Place` class
+
+The `process_tags` function receives a Lua object of `Place` type which comes
+with some handy functions to collect the data necessary for geocoding and
+writing it into the place table. Always use this object to fill the table.
+
+The Place class has some attributes which you may access read-only:
+
+* __object__ is the original OSM object data handed in by osm2pgsql
+* __admin_level__ is the content of the admin_level tag, parsed into an
+ integer and normalized to a value between 0 and 15
+* __has_name__ is a boolean indicating if the object has a full name
+* __names__ is a table with the collected list of name tags
+* __address__ is a table with the collected list of address tags
+* __extratags__ is a table with the collected list of additional tags to save
+
+There are a number of functions to fill these fields. All functions expect
+a table parameter with fields as indicated in the description.
+Many of these functions expect match functions which are described in detail
+further below.
+
+* __delete{match=...}__ removes all tags that match the match function given
+ in _match_.
+* __grab_extratags{match=...}__ moves all tags that match the match function
+ given in _match_ into extratags. Returns the number of tags moved.
+* __clean{delete=..., extra=...}__ deletes all tags that match _delete_ and
+ moves the ones that match _extra_ into extratags
+* __grab_address_parts{groups=...}__ moves matching tags into the address table.
+ _groups_ must be a group match function. Tags of the group `main` and
+ `extra` are added to the address table as is but with `addr:` and `is_in:`
+ prefixes removed from the tag key. All other groups are added with the
+ group name as key and the value from the tag. Multiple values of the same
+ group overwrite each other. The function returns the number of tags saved
+ from the main group.
+* __grab_main_parts{groups=...}__ moves matching tags into the name table.
+  _groups_ must be a group match function. If a tag of the group `main` is
+ present, the object will be marked as having a name. Tags of group `house`
+  produce a fallback to `place=house`. This fallback is returned by the function
+ if present.
+
+There are two functions to write a row into the place table. Both functions
+expect the main tag (key and value) for the row and then use the collected
+information from the name, address, extratags etc. fields to complete the row.
+They also have a boolean parameter `save_extra_mains` which defines how any
+unprocessed tags are handled: when True, the tags will be saved as extratags,
+when False, they will be simply discarded.
+
+* __write_row(key, value, save_extra_mains)__ creates a new table row from
+ the current state of the Place object.
+* __write_place(key, value, mtype, save_extra_mains)__ creates a new row
+ conditionally. When value is nil, the function will attempt to look up the
+ value in the object tags. If value is still nil or mtype is nil, the row
+ is ignored. An mtype of `always` will then always write out the row,
+ a mtype of `named` only, when the object has a full name. When mtype
+ is `named_with_key`, the function checks for a domain name, i.e. a name
+ tag prefixed with the name of the main key. Only if at least one is found,
+ the row will be written. The names are replaced with the domain names found.
+
+#### Match functions
+
+The Place functions usually expect either a _match function_ or a
+_group match function_ to find the tags to apply their function to.
+
+The __match function__ is a Lua function which takes two parameters,
+key and value, and returns a boolean to indicate that a tag matches. The
+flex-base module has a convenience function `tag_match()` to create such a
+function. It takes a table with two optional fields: `keys` takes a key match
+list (see above), `tags` takes a table with keys that point to a list of
+possible values, thus defining key/value matches.
+
+The __group match function__ is a Lua function which also takes two parameters,
+key and value, and returns a string indicating to which group or type they
+belong to. The `tag_group()` function can be used to create such a function. It expects
+a table where the group names are the keys and the values are a key match list.
+
+
+
+### Using the gazetteer output of osm2pgsql
+
+Nominatim still allows you to configure the gazetteer output to remain
+backwards compatible with older imports. It will be automatically used
+when the style file name ends in `.style`. For documentation of the
+old import style, please refer to the documentation of older releases
+of Nominatim. Do not use the gazetteer output for new imports. There is no
+guarantee that new versions of Nominatim are fully compatible with the
+gazetteer output.
### Changing the Style of Existing Databases
objects when the area becomes too large.
+#### NOMINATIM_UPDATE_FORWARD_DEPENDENCIES
+
+| Summary | |
+| -------------- | --------------------------------------------------- |
+| **Description:** | Forward geometry changes to dependent objects |
+| **Format:** | bool |
+| **Default:** | no |
+| **Comment:** | EXPERT ONLY. Must not be enabled after import. |
+
+The geometry of OSM ways and relations may change when a node that is part
+of the object is moved around. These changes are not propagated by default.
+The geometry of ways/relations is only updated the next time that the object
+itself is touched. When enabling this option, then dependent objects will
+be marked for update when one of its member objects changes.
+
+Enabling this option may slow down updates significantly.
+
+!!! warning
+ If you want to enable this option, it must be set already on import.
+ Do not enable this option on an existing database that was imported with
+ NOMINATIM_UPDATE_FORWARD_DEPENDENCIES=no.
+ Updates will become unusably slow.
+
#### NOMINATIM_LANGUAGES
| Summary | |
<request time> <execution time in s> <number of results> <type> "<query string>"
Request time is the time when the request was started. The execution time is
-given in ms and corresponds to the time the query took executing in PHP.
+given in seconds and corresponds to the time the query took executing in PHP.
type contains the name of the endpoint used.
Can be used as the same time as NOMINATIM_LOG_DB.
nominatim add-data --tiger-data tiger-nominatim-preprocessed-latest.csv.tar.gz
- 3. Enable use of the Tiger data in your `.env` by adding:
+ 3. Enable use of the Tiger data in your existing `.env` file by adding:
echo NOMINATIM_USE_US_TIGER_DATA=yes >> .env
4. Apply the new settings:
- nominatim refresh --functions
+ nominatim refresh --functions --website
See the [TIGER-data project](https://github.com/osm-search/TIGER-data) for more
rendering:
heading_level: 6
+##### clean-tiger-tags
+
+::: nominatim.tokenizer.sanitizers.clean_tiger_tags
+ selection:
+ members: False
+ rendering:
+ heading_level: 6
+
+
#### Token Analysis
markdown_extensions:
- codehilite
- admonition
+ - pymdownx.superfences
- def_list
- toc:
permalink:
$iCountWords = 0;
$sAddress = $aResult['langaddress'];
foreach ($aRecheckWords as $i => $sWord) {
- if (stripos($sAddress, $sWord)!==false) {
+ if (grapheme_stripos($sAddress, $sWord)!==false) {
$iCountWords++;
if (preg_match('/(^|,)\s*'.preg_quote($sWord, '/').'\s*(,|$)/', $sAddress)) {
$iCountWords += 0.1;
return null;
}
- $aResults = $this->lookup(array($iPlaceID => new Result($iPlaceID)));
+ $aResults = $this->lookup(array($iPlaceID => new Result($iPlaceID)), 0, 30, true);
return empty($aResults) ? null : reset($aResults);
}
- public function lookup($aResults, $iMinRank = 0, $iMaxRank = 30)
+ public function lookup($aResults, $iMinRank = 0, $iMaxRank = 30, $bAllowLinked = false)
{
Debug::newFunction('Place lookup');
if ($this->sAllowedTypesSQLList) {
$sSQL .= 'AND placex.class in '.$this->sAllowedTypesSQLList;
}
- $sSQL .= ' AND linked_place_id is null ';
+ if (!$bAllowLinked) {
+ $sSQL .= ' AND linked_place_id is null ';
+ }
$sSQL .= ' GROUP BY ';
$sSQL .= ' osm_type, ';
$sSQL .= ' osm_id, ';
$sSQL .= ' ST_Distance(linegeo,'.$sPointSQL.') as distance';
$sSQL .= ' FROM location_property_osmline';
$sSQL .= ' WHERE ST_DWithin('.$sPointSQL.', linegeo, '.$fSearchDiam.')';
- $sSQL .= ' and indexed_status = 0 and startnumber is not NULL ';
+ $sSQL .= ' and indexed_status = 0 and startnumber is not NULL ';
+ $sSQL .= ' and parent_place_id != 0';
$sSQL .= ' ORDER BY distance ASC limit 1';
Debug::printSQL($sSQL);
$sSQL .= '(select place_id, parent_place_id, rank_address, rank_search, country_code, geometry';
$sSQL .= ' FROM placex';
$sSQL .= ' WHERE ST_GeometryType(geometry) in (\'ST_Polygon\', \'ST_MultiPolygon\')';
- $sSQL .= ' AND rank_address Between 5 AND ' .$iMaxRank;
+ // Ensure that query planner doesn't use the index on rank_search.
+ $sSQL .= ' AND coalesce(rank_search, 0) between 5 and ' .$iMaxRank;
+ $sSQL .= ' AND rank_address between 4 and 25'; // needed for index selection
$sSQL .= ' AND geometry && '.$sPointSQL;
$sSQL .= ' AND type != \'postcode\' ';
$sSQL .= ' AND name is not null';
$sSQL .= ' AND indexed_status = 0 and linked_place_id is null';
- $sSQL .= ' ORDER BY rank_address DESC LIMIT 50 ) as a';
- $sSQL .= ' WHERE ST_CONTAINS(geometry, '.$sPointSQL.' )';
- $sSQL .= ' ORDER BY rank_address DESC LIMIT 1';
+ $sSQL .= ' ORDER BY rank_search DESC LIMIT 50 ) as a';
+ $sSQL .= ' WHERE ST_Contains(geometry, '.$sPointSQL.' )';
+ $sSQL .= ' ORDER BY rank_search DESC LIMIT 1';
Debug::printSQL($sSQL);
$aPoly = $this->oDB->getRow($sSQL, null, 'Could not determine polygon containing the point.');
$iRankSearch = $aPoly['rank_search'];
$iPlaceID = $aPoly['place_id'];
- if ($iRankAddress != $iMaxRank) {
+ if ($iRankSearch != $iMaxRank) {
$sSQL = 'SELECT place_id FROM ';
$sSQL .= '(SELECT place_id, rank_search, country_code, geometry,';
$sSQL .= ' ST_distance('.$sPointSQL.', geometry) as distance';
*/
public function extendSearch($oSearch, $oPosition)
{
- $iSearchCost = 2;
+ $iSearchCost = 0;
$iOp = $this->iOperator;
if ($iOp == \Nominatim\Operator::NONE) {
- if ($oSearch->hasName() || $oSearch->getContext()->isBoundedSearch()) {
+ if ($oPosition->isFirstToken()
+ || $oSearch->hasName()
+ || $oSearch->getContext()->isBoundedSearch()
+ ) {
$iOp = \Nominatim\Operator::NAME;
+ $iSearchCost += 3;
} else {
$iOp = \Nominatim\Operator::NEAR;
- $iSearchCost += 2;
+ $iSearchCost += 4;
+ if (!$oPosition->isFirstToken()) {
+ $iSearchCost += 3;
+ }
}
- } elseif (!$oPosition->isFirstToken() && !$oPosition->isLastToken()) {
+ } elseif ($oPosition->isFirstToken()) {
$iSearchCost += 2;
+ } elseif ($oPosition->isLastToken()) {
+ $iSearchCost += 4;
+ } else {
+ $iSearchCost += 6;
}
+
if ($oSearch->hasHousenumber()) {
$iSearchCost ++;
}
DECLARE
existing BIGINT[];
BEGIN
- -- Get the existing entry from the interpolation table.
- SELECT array_agg(place_id) INTO existing
- FROM location_property_osmline WHERE osm_id = way_id;
-
- IF existing IS NULL or array_length(existing, 1) = 0 THEN
- INSERT INTO location_property_osmline (osm_id, address, linegeo)
- VALUES (way_id, addr, geom);
+ IF addr is NULL OR NOT addr ? 'interpolation'
+ OR NOT (addr->'interpolation' in ('odd', 'even', 'all')
+ or addr->'interpolation' similar to '[1-9]')
+ THEN
+ -- the new interpolation is illegal, simply remove existing entries
+ DELETE FROM location_property_osmline WHERE osm_id = way_id;
ELSE
- -- Update the interpolation table:
- -- The first entry gets the original data, all other entries
- -- are removed and will be recreated on indexing.
- -- (An interpolation can be split up, if it has more than 2 address nodes)
- UPDATE location_property_osmline
- SET address = addr,
- linegeo = geom,
- startnumber = null,
- indexed_status = 1
- WHERE place_id = existing[1];
- IF array_length(existing, 1) > 1 THEN
- DELETE FROM location_property_osmline
- WHERE place_id = any(existing[2:]);
+ -- Get the existing entry from the interpolation table.
+ SELECT array_agg(place_id) INTO existing
+ FROM location_property_osmline WHERE osm_id = way_id;
+
+ IF existing IS NULL or array_length(existing, 1) = 0 THEN
+ INSERT INTO location_property_osmline (osm_id, address, linegeo)
+ VALUES (way_id, addr, geom);
+ ELSE
+ -- Update the interpolation table:
+ -- The first entry gets the original data, all other entries
+ -- are removed and will be recreated on indexing.
+ -- (An interpolation can be split up, if it has more than 2 address nodes)
+ UPDATE location_property_osmline
+ SET address = addr,
+ linegeo = geom,
+ startnumber = null,
+ indexed_status = 1
+ WHERE place_id = existing[1];
+ IF array_length(existing, 1) > 1 THEN
+ DELETE FROM location_property_osmline
+ WHERE place_id = any(existing[2:]);
+ END IF;
END IF;
END IF;
RETURN null;
END IF;
+ -- Remove the place from the list of places to be deleted
+ DELETE FROM place_to_be_deleted pdel
+ WHERE pdel.osm_type = NEW.osm_type and pdel.osm_id = NEW.osm_id
+ and pdel.class = NEW.class;
+
-- Have we already done this place?
SELECT * INTO existing
FROM place
{% if debug %}RAISE WARNING 'Existing: %',existing.osm_id;{% endif %}
- -- Handle a place changing type by removing the old data.
- -- (This trigger is executed BEFORE INSERT of the NEW tuple.)
IF existing.osm_type IS NULL THEN
DELETE FROM place where osm_type = NEW.osm_type and osm_id = NEW.osm_id and class = NEW.class;
END IF;
END IF;
{% endif %}
- IF existing.osm_type IS NOT NULL THEN
- -- Pathological case caused by the triggerless copy into place during initial import
- -- force delete even for large areas, it will be reinserted later
- UPDATE place SET geometry = ST_SetSRID(ST_Point(0,0), 4326)
- WHERE osm_type = NEW.osm_type and osm_id = NEW.osm_id
- and class = NEW.class and type = NEW.type;
- DELETE FROM place
- WHERE osm_type = NEW.osm_type and osm_id = NEW.osm_id
- and class = NEW.class and type = NEW.type;
+ IF existingplacex.osm_type is not NULL THEN
+ -- Mark any existing place for delete in the placex table
+ UPDATE placex SET indexed_status = 100
+ WHERE placex.osm_type = NEW.osm_type and placex.osm_id = NEW.osm_id
+ and placex.class = NEW.class and placex.type = NEW.type;
END IF;
-- Process it as a new insertion
{% if debug %}RAISE WARNING 'insert done % % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type,NEW.name;{% endif %}
+ IF existing.osm_type is not NULL THEN
+ -- If there is already an entry in place, just update that, if necessary.
+ IF coalesce(existing.name, ''::hstore) != coalesce(NEW.name, ''::hstore)
+ or coalesce(existing.address, ''::hstore) != coalesce(NEW.address, ''::hstore)
+ or coalesce(existing.extratags, ''::hstore) != coalesce(NEW.extratags, ''::hstore)
+ or coalesce(existing.admin_level, 15) != coalesce(NEW.admin_level, 15)
+ or existing.geometry::text != NEW.geometry::text
+ THEN
+ UPDATE place
+ SET name = NEW.name,
+ address = NEW.address,
+ extratags = NEW.extratags,
+ admin_level = NEW.admin_level,
+ geometry = NEW.geometry
+ WHERE osm_type = NEW.osm_type and osm_id = NEW.osm_id
+ and class = NEW.class and type = NEW.type;
+ END IF;
+
+ RETURN NULL;
+ END IF;
+
RETURN NEW;
END IF;
END;
$$ LANGUAGE plpgsql;
-
CREATE OR REPLACE FUNCTION place_delete()
RETURNS TRIGGER
AS $$
DECLARE
- has_rank BOOLEAN;
+ deferred BOOLEAN;
BEGIN
-
- {% if debug %}RAISE WARNING 'delete: % % % %',OLD.osm_type,OLD.osm_id,OLD.class,OLD.type;{% endif %}
-
- -- deleting large polygons can have a massive effect on the system - require manual intervention to let them through
- IF st_area(OLD.geometry) > 2 and st_isvalid(OLD.geometry) THEN
- SELECT bool_or(not (rank_address = 0 or rank_address > 25)) as ranked FROM placex WHERE osm_type = OLD.osm_type and osm_id = OLD.osm_id and class = OLD.class and type = OLD.type INTO has_rank;
- IF has_rank THEN
- insert into import_polygon_delete (osm_type, osm_id, class, type) values (OLD.osm_type,OLD.osm_id,OLD.class,OLD.type);
- RETURN NULL;
- END IF;
+ {% if debug %}RAISE WARNING 'Delete for % % %/%', OLD.osm_type, OLD.osm_id, OLD.class, OLD.type;{% endif %}
+
+ deferred := ST_IsValid(OLD.geometry) and ST_Area(OLD.geometry) > 2;
+ IF deferred THEN
+ SELECT bool_or(not (rank_address = 0 or rank_address > 25)) INTO deferred
+ FROM placex
+ WHERE osm_type = OLD.osm_type and osm_id = OLD.osm_id
+ and class = OLD.class and type = OLD.type;
END IF;
- -- mark for delete
- UPDATE placex set indexed_status = 100 where osm_type = OLD.osm_type and osm_id = OLD.osm_id and class = OLD.class and type = OLD.type;
+ INSERT INTO place_to_be_deleted (osm_type, osm_id, class, type, deferred)
+ VALUES(OLD.osm_type, OLD.osm_id, OLD.class, OLD.type, deferred);
- -- interpolations are special
- IF OLD.osm_type='W' and OLD.class = 'place' and OLD.type = 'houses' THEN
- UPDATE location_property_osmline set indexed_status = 100 where osm_id = OLD.osm_id; -- osm_id = wayid (=old.osm_id)
- END IF;
+ RETURN NULL;
+END;
+$$ LANGUAGE plpgsql;
- RETURN OLD;
+CREATE OR REPLACE FUNCTION flush_deleted_places()
+ RETURNS INTEGER
+ AS $$
+BEGIN
+ -- deleting large polygons can have a massive effect on the system - require manual intervention to let them through
+ INSERT INTO import_polygon_delete (osm_type, osm_id, class, type)
+ SELECT osm_type, osm_id, class, type FROM place_to_be_deleted WHERE deferred;
+
+ -- delete from place table
+ ALTER TABLE place DISABLE TRIGGER place_before_delete;
+ DELETE FROM place USING place_to_be_deleted
+ WHERE place.osm_type = place_to_be_deleted.osm_type
+ and place.osm_id = place_to_be_deleted.osm_id
+ and place.class = place_to_be_deleted.class
+ and place.type = place_to_be_deleted.type
+ and not deferred;
+ ALTER TABLE place ENABLE TRIGGER place_before_delete;
+
+ -- Mark for delete in the placex table
+ UPDATE placex SET indexed_status = 100 FROM place_to_be_deleted
+ WHERE placex.osm_type = 'N' and place_to_be_deleted.osm_type = 'N'
+ and placex.osm_id = place_to_be_deleted.osm_id
+ and placex.class = place_to_be_deleted.class
+ and placex.type = place_to_be_deleted.type
+ and not deferred;
+ UPDATE placex SET indexed_status = 100 FROM place_to_be_deleted
+ WHERE placex.osm_type = 'W' and place_to_be_deleted.osm_type = 'W'
+ and placex.osm_id = place_to_be_deleted.osm_id
+ and placex.class = place_to_be_deleted.class
+ and placex.type = place_to_be_deleted.type
+ and not deferred;
+ UPDATE placex SET indexed_status = 100 FROM place_to_be_deleted
+ WHERE placex.osm_type = 'R' and place_to_be_deleted.osm_type = 'R'
+ and placex.osm_id = place_to_be_deleted.osm_id
+ and placex.class = place_to_be_deleted.class
+ and placex.type = place_to_be_deleted.type
+ and not deferred;
+
+ -- Mark for delete in interpolations
+ UPDATE location_property_osmline SET indexed_status = 100 FROM place_to_be_deleted
+ WHERE place_to_be_deleted.osm_type = 'W'
+ and place_to_be_deleted.class = 'place'
+ and place_to_be_deleted.type = 'houses'
+ and location_property_osmline.osm_id = place_to_be_deleted.osm_id
+ and not deferred;
+
+ -- Clear todo list.
+ TRUNCATE TABLE place_to_be_deleted;
+
+ RETURN NULL;
END;
-$$
-LANGUAGE plpgsql;
+$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION find_associated_street(poi_osm_type CHAR(1),
- poi_osm_id BIGINT)
+ poi_osm_id BIGINT,
+ bbox GEOMETRY)
RETURNS BIGINT
AS $$
DECLARE
location RECORD;
parent RECORD;
+ result BIGINT;
+ distance FLOAT;
+ new_distance FLOAT;
+ waygeom GEOMETRY;
BEGIN
FOR location IN
SELECT members FROM planet_osm_rels
FOR i IN 1..array_upper(location.members, 1) BY 2 LOOP
IF location.members[i+1] = 'street' THEN
FOR parent IN
- SELECT place_id from placex
+ SELECT place_id, geometry
+ FROM placex
WHERE osm_type = upper(substring(location.members[i], 1, 1))::char(1)
and osm_id = substring(location.members[i], 2)::bigint
and name is not null
and rank_search between 26 and 27
LOOP
- RETURN parent.place_id;
+ -- Find the closest 'street' member.
+ -- Avoid distance computation for the frequent case where there is
+ -- only one street member.
+ IF waygeom is null THEN
+ result := parent.place_id;
+ waygeom := parent.geometry;
+ ELSE
+ distance := coalesce(distance, ST_Distance(waygeom, bbox));
+ new_distance := ST_Distance(parent.geometry, bbox);
+ IF new_distance < distance THEN
+ distance := new_distance;
+ result := parent.place_id;
+ waygeom := parent.geometry;
+ END IF;
+ END IF;
END LOOP;
END IF;
END LOOP;
END LOOP;
- RETURN NULL;
+ RETURN result;
END;
$$
LANGUAGE plpgsql STABLE;
{% if debug %}RAISE WARNING 'finding street for % %', poi_osm_type, poi_osm_id;{% endif %}
-- Is this object part of an associatedStreet relation?
- parent_place_id := find_associated_street(poi_osm_type, poi_osm_id);
+ parent_place_id := find_associated_street(poi_osm_type, poi_osm_id, bbox);
IF parent_place_id is null THEN
parent_place_id := find_parent_for_address(token_info, poi_partition, bbox);
RETURN location.place_id;
END IF;
- parent_place_id := find_associated_street('W', location.osm_id);
+ parent_place_id := find_associated_street('W', location.osm_id, bbox);
END LOOP;
END IF;
{% endif %}
END IF;
- IF NEW.postcode is null AND NEW.rank_search > 8 THEN
+ IF NEW.postcode is null AND NEW.rank_search > 8
+ AND (NEW.rank_address > 0
+ OR ST_GeometryType(NEW.geometry) not in ('ST_LineString','ST_MultiLineString')
+ OR ST_Length(NEW.geometry) < 0.02)
+ THEN
NEW.postcode := get_nearest_postcode(NEW.country_code, NEW.geometry);
END IF;
---
CREATE UNIQUE INDEX IF NOT EXISTS idx_place_osm_unique
ON place USING btree(osm_id, osm_type, class, type) {{db.tablespace.address_index}};
+---
+-- Table needed for running updates with osm2pgsql on place.
+ CREATE TABLE IF NOT EXISTS place_to_be_deleted (
+ osm_type CHAR(1),
+ osm_id BIGINT,
+ class TEXT,
+ type TEXT,
+ deferred BOOLEAN
+ );
{% endif %}
-- Indices only needed for search.
INCLUDE (startnumber, endnumber) {{db.tablespace.search_index}}
WHERE startnumber is not null;
{% endif %}
+
{% endif %}
# just use the pgxs makefile
-foreach(suffix ${PostgreSQL_ADDITIONAL_VERSIONS} "14" "13" "12" "11" "10" "9.6")
+foreach(suffix ${PostgreSQL_ADDITIONAL_VERSIONS} "15" "14" "13" "12" "11" "10" "9.6")
list(APPEND PG_CONFIG_HINTS
"/usr/pgsql-${suffix}/bin")
endforeach()
self.parser.print_help()
return 1
- for arg in ('module_dir', 'osm2pgsql_path', 'phplib_dir', 'sqllib_dir',
- 'data_dir', 'config_dir', 'phpcgi_path'):
- setattr(args, arg, Path(kwargs[arg]))
+ args.phpcgi_path = Path(kwargs['phpcgi_path'])
args.project_dir = Path(args.project_dir).resolve()
if 'cli_args' not in kwargs:
datefmt='%Y-%m-%d %H:%M:%S',
level=max(4 - args.verbose, 1) * 10)
- args.config = Configuration(args.project_dir, args.config_dir,
+ args.config = Configuration(args.project_dir,
environ=kwargs.get('environ', os.environ))
- args.config.set_libdirs(module=args.module_dir,
- osm2pgsql=args.osm2pgsql_path,
- php=args.phplib_dir,
- sql=args.sqllib_dir,
- data=args.data_dir)
+ args.config.set_libdirs(module=kwargs['module_dir'],
+ osm2pgsql=kwargs['osm2pgsql_path'])
log = logging.getLogger()
log.warning('Using project directory: %s', str(args.project_dir))
if args.restrict_to_osm_relation:
params.extend(('--restrict-to-osm-relation', args.restrict_to_osm_relation))
- return run_legacy_script('export.php', *params, nominatim_env=args)
+ return run_legacy_script('export.php', *params, config=args.config)
class AdminServe:
params.append('--reverse-only')
if args.target == 'search':
params.append('--search-only')
- return run_legacy_script(*params, nominatim_env=args)
+ return run_legacy_script(*params, config=args.config)
# Basic environment set by root program.
config: Configuration
project_dir: Path
- module_dir: Path
- osm2pgsql_path: Path
- phplib_dir: Path
- sqllib_dir: Path
- data_dir: Path
- config_dir: Path
phpcgi_path: Path
# Global switches
from the command line arguments. The resulting dict can be
further customized and then used in `run_osm2pgsql()`.
"""
- return dict(osm2pgsql=self.config.OSM2PGSQL_BINARY or self.osm2pgsql_path,
+ return dict(osm2pgsql=self.config.OSM2PGSQL_BINARY or self.config.lib_dir.osm2pgsql,
osm2pgsql_cache=self.osm2pgsql_cache or default_cache,
osm2pgsql_style=self.config.get_import_style_file(),
+ osm2pgsql_style_path=self.config.config_dir,
threads=self.threads or default_threads,
dsn=self.config.get_libpq_dsn(),
flatnode_file=str(self.config.get_path('FLATNODE_FILE') or ''),
LOG.warning("Initialising replication updates")
with connect(args.config.get_libpq_dsn()) as conn:
- replication.init_replication(conn, base_url=args.config.REPLICATION_URL)
+ replication.init_replication(conn, base_url=args.config.REPLICATION_URL,
+ socket_timeout=args.socket_timeout)
if args.update_functions:
LOG.warning("Create functions")
refresh.create_functions(conn, args.config, True, False)
from ..tools import replication
with connect(args.config.get_libpq_dsn()) as conn:
- return replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL)
+ return replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL,
+ socket_timeout=args.socket_timeout)
def _report_update(self, batchdate: dt.datetime,
while True:
with connect(args.config.get_libpq_dsn()) as conn:
start = dt.datetime.now(dt.timezone.utc)
- state = replication.update(conn, params)
+ state = replication.update(conn, params, socket_timeout=args.socket_timeout)
if state is not replication.UpdateState.NO_CHANGES:
status.log_status(conn, start, 'import')
batchdate, _, _ = status.get_status(conn)
LOG.warning('Setting up country tables')
country_info.setup_country_tables(args.config.get_libpq_dsn(),
- args.data_dir,
+ args.config.lib_dir.data,
args.no_partitions)
LOG.warning('Importing OSM data file')
from nominatim.typing import StrPath
from nominatim.errors import UsageError
+import nominatim.paths
LOG = logging.getLogger()
CONFIG_CACHE : Dict[str, Any] = {}
avoid conflicts with other environment variables.
"""
- def __init__(self, project_dir: Path, config_dir: Path,
+ def __init__(self, project_dir: Optional[Path],
environ: Optional[Mapping[str, str]] = None) -> None:
self.environ = environ or os.environ
self.project_dir = project_dir
- self.config_dir = config_dir
- self._config = dotenv_values(str((config_dir / 'env.defaults').resolve()))
- if project_dir is not None and (project_dir / '.env').is_file():
- self._config.update(dotenv_values(str((project_dir / '.env').resolve())))
+ self.config_dir = nominatim.paths.CONFIG_DIR
+ self._config = dotenv_values(str(self.config_dir / 'env.defaults'))
+ if self.project_dir is not None and (self.project_dir / '.env').is_file():
+ self.project_dir = self.project_dir.resolve()
+ self._config.update(dotenv_values(str(self.project_dir / '.env')))
class _LibDirs:
module: Path
osm2pgsql: Path
- php: Path
- sql: Path
- data: Path
+ php = nominatim.paths.PHPLIB_DIR
+ sql = nominatim.paths.SQLLIB_DIR
+ data = nominatim.paths.DATA_DIR
self.lib_dir = _LibDirs()
self._private_plugins: Dict[str, object] = {}
""" Set paths to library functions and data.
"""
for key, value in kwargs.items():
- setattr(self.lib_dir, key, Path(value).resolve())
+ setattr(self.lib_dir, key, Path(value))
def __getattr__(self, name: str) -> str:
cfgpath = Path(value)
if not cfgpath.is_absolute():
+ assert self.project_dir is not None
cfgpath = self.project_dir / cfgpath
return cfgpath.resolve()
style = getattr(self, 'IMPORT_STYLE')
if style in ('admin', 'street', 'address', 'full', 'extratags'):
- return self.config_dir / f'import-{style}.style'
+ return self.config_dir / f'import-{style}.lua'
return self.find_config_file('', 'IMPORT_STYLE')
- def get_os_env(self) -> Dict[str, Optional[str]]:
+ def get_os_env(self) -> Dict[str, str]:
""" Return a copy of the OS environment with the Nominatim configuration
merged in.
"""
- env = dict(self._config)
+ env = {k: v for k, v in self._config.items() if v is not None}
env.update(self.environ)
return env
--- /dev/null
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2022 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Path settings for extra data used by Nominatim.
+"""
+from pathlib import Path
+
+PHPLIB_DIR = (Path(__file__) / '..' / '..' / 'lib-php').resolve()
+SQLLIB_DIR = (Path(__file__) / '..' / '..' / 'lib-sql').resolve()
+DATA_DIR = (Path(__file__) / '..' / '..' / 'data').resolve()
+CONFIG_DIR = (Path(__file__) / '..' / '..' / 'settings').resolve()
module_name = config.TOKENIZER
# Create the directory for the tokenizer data
+ assert config.project_dir is not None
basedir = config.project_dir / 'tokenizer'
if not basedir.exists():
basedir.mkdir()
The function looks up the appropriate tokenizer in the database
and initialises it.
"""
+ assert config.project_dir is not None
basedir = config.project_dir / 'tokenizer'
if not basedir.is_dir():
# Directory will be repopulated by tokenizer below.
This copies all necessary data in the project directory to make
sure the tokenizer remains stable even over updates.
"""
+ assert config.project_dir is not None
module_dir = _install_module(config.DATABASE_MODULE_PATH,
config.lib_dir.module,
config.project_dir / 'module')
def init_from_project(self, config: Configuration) -> None:
""" Initialise the tokenizer from the project directory.
"""
+ assert config.project_dir is not None
+
with connect(self.dsn) as conn:
self.normalization = properties.get_property(conn, DBCFG_NORMALIZATION)
def update_sql_functions(self, config: Configuration) -> None:
""" Reimport the SQL functions for this tokenizer.
"""
+ assert config.project_dir is not None
+
with connect(self.dsn) as conn:
max_word_freq = properties.get_property(conn, DBCFG_MAXWORDFREQ)
modulepath = config.DATABASE_MODULE_PATH or \
This is a special migration function for updating existing databases
to new software versions.
"""
+ assert config.project_dir is not None
+
self.normalization = config.TERM_NORMALIZATION
module_dir = _install_module(config.DATABASE_MODULE_PATH,
config.lib_dir.module,
--- /dev/null
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2022 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Sanitizer that preprocesses tags from the TIGER import.
+
+It makes the following changes:
+
+* remove state reference from tiger:county
+"""
+from typing import Callable
+import re
+
+from nominatim.tokenizer.sanitizers.base import ProcessInfo
+from nominatim.tokenizer.sanitizers.config import SanitizerConfig
+
+COUNTY_MATCH = re.compile('(.*), [A-Z][A-Z]')
+
+def _clean_tiger_county(obj: ProcessInfo) -> None:
+ """ Remove the state reference from tiger:county tags.
+
+ This transforms a name like 'Hamilton, AL' into 'Hamilton'.
+ If no state reference is detected at the end, the name is left as is.
+ """
+ if not obj.address:
+ return
+
+ for item in obj.address:
+ if item.kind == 'tiger' and item.suffix == 'county':
+ m = COUNTY_MATCH.fullmatch(item.name)
+ if m:
+ item.name = m[1]
+ # Switch kind and suffix, the split left them reversed.
+ item.kind = 'county'
+ item.suffix = 'tiger'
+
+ return
+
+
+def create(_: SanitizerConfig) -> Callable[[ProcessInfo], None]:
+ """ Create a housenumber processing function.
+ """
+ return _clean_tiger_county
from typing import Any, Union, Optional, Mapping, IO
from pathlib import Path
import logging
+import os
import subprocess
import urllib.request as urlrequest
from urllib.parse import urlencode
+from nominatim.config import Configuration
from nominatim.typing import StrPath
from nominatim.version import version_str
from nominatim.db.connection import get_pg_env
LOG = logging.getLogger()
def run_legacy_script(script: StrPath, *args: Union[int, str],
- nominatim_env: Any,
+ config: Configuration,
throw_on_fail: bool = False) -> int:
""" Run a Nominatim PHP script with the given arguments.
then throw a `CalledProcessError` on a non-zero exit.
"""
cmd = ['/usr/bin/env', 'php', '-Cq',
- str(nominatim_env.phplib_dir / 'admin' / script)]
+ str(config.lib_dir.php / 'admin' / script)]
cmd.extend([str(a) for a in args])
- env = nominatim_env.config.get_os_env()
- env['NOMINATIM_DATADIR'] = str(nominatim_env.data_dir)
- env['NOMINATIM_SQLDIR'] = str(nominatim_env.sqllib_dir)
- env['NOMINATIM_CONFIGDIR'] = str(nominatim_env.config_dir)
- env['NOMINATIM_DATABASE_MODULE_SRC_PATH'] = str(nominatim_env.module_dir)
+ env = config.get_os_env()
+ env['NOMINATIM_DATADIR'] = str(config.lib_dir.data)
+ env['NOMINATIM_SQLDIR'] = str(config.lib_dir.sql)
+ env['NOMINATIM_CONFIGDIR'] = str(config.config_dir)
+ env['NOMINATIM_DATABASE_MODULE_SRC_PATH'] = str(config.lib_dir.module)
if not env['NOMINATIM_OSM2PGSQL_BINARY']:
- env['NOMINATIM_OSM2PGSQL_BINARY'] = str(nominatim_env.osm2pgsql_path)
+ env['NOMINATIM_OSM2PGSQL_BINARY'] = str(config.lib_dir.osm2pgsql)
- proc = subprocess.run(cmd, cwd=str(nominatim_env.project_dir), env=env,
+ proc = subprocess.run(cmd, cwd=str(config.project_dir), env=env,
check=throw_on_fail)
return proc.returncode
env = get_pg_env(options['dsn'])
cmd = [str(options['osm2pgsql']),
'--hstore', '--latlon', '--slim',
- '--with-forward-dependencies', 'false',
'--log-progress', 'true',
'--number-processes', str(options['threads']),
'--cache', str(options['osm2pgsql_cache']),
- '--output', 'gazetteer',
'--style', str(options['osm2pgsql_style'])
]
- if options['append']:
- cmd.append('--append')
+
+ if str(options['osm2pgsql_style']).endswith('.lua'):
+ env['LUA_PATH'] = ';'.join((str(options['osm2pgsql_style_path'] / '?.lua'),
+ os.environ.get('LUAPATH', ';')))
+ cmd.extend(('--output', 'flex'))
else:
- cmd.append('--create')
+ cmd.extend(('--output', 'gazetteer'))
+
+ cmd.append('--append' if options['append'] else '--create')
if options['flatnode_file']:
cmd.extend(('--flat-nodes', options['flatnode_file']))
names = {}
names['countrycode'] = country_code
analyzer.add_country_names(country_code, names)
+
+
+@_migration(4, 1, 99, 0)
+def add_place_deletion_todo_table(conn: Connection, **_: Any) -> None:
+ """ Add helper table for deleting data on updates.
+
+ The table is only necessary when updates are possible, i.e.
+ the database is not in freeze mode.
+ """
+ if conn.table_exists('place'):
+ with conn.cursor() as cur:
+ cur.execute("""CREATE TABLE IF NOT EXISTS place_to_be_deleted (
+ osm_type CHAR(1),
+ osm_id BIGINT,
+ class TEXT,
+ type TEXT,
+ deferred BOOLEAN)""")
+
+
+@_migration(4, 1, 99, 1)
+def split_pending_index(conn: Connection, **_: Any) -> None:
+ """ Reorganise indexes for pending updates.
+ """
+ if conn.table_exists('place'):
+ with conn.cursor() as cur:
+ cur.execute("""CREATE INDEX IF NOT EXISTS idx_placex_rank_address_sector
+ ON placex USING BTREE (rank_address, geometry_sector)
+ WHERE indexed_status > 0""")
+ cur.execute("""CREATE INDEX IF NOT EXISTS idx_placex_rank_boundaries_sector
+ ON placex USING BTREE (rank_search, geometry_sector)
+ WHERE class = 'boundary' and type = 'administrative'
+ and indexed_status > 0""")
+ cur.execute("DROP INDEX IF EXISTS idx_placex_pendingsector")
+
+
+@_migration(4, 2, 99, 0)
+def enable_forward_dependencies(conn: Connection, **_: Any) -> None:
+ """ Create indexes for updates with forward dependency tracking (long-running).
+ """
+ if conn.table_exists('planet_osm_ways'):
+ with conn.cursor() as cur:
+ cur.execute("""SELECT * FROM pg_indexes
+ WHERE tablename = 'planet_osm_ways'
+ and indexdef LIKE '%nodes%'""")
+ if cur.rowcount == 0:
+ cur.execute("""CREATE OR REPLACE FUNCTION public.planet_osm_index_bucket(bigint[])
+ RETURNS bigint[]
+ LANGUAGE sql IMMUTABLE
+ AS $function$
+ SELECT ARRAY(SELECT DISTINCT unnest($1) >> 5)
+ $function$""")
+ cur.execute("""CREATE INDEX planet_osm_ways_nodes_bucket_idx
+ ON planet_osm_ways
+ USING gin (planet_osm_index_bucket(nodes))
+ WITH (fastupdate=off)""")
+ cur.execute("""CREATE INDEX planet_osm_rels_parts_idx
+ ON planet_osm_rels USING gin (parts)
+ WITH (fastupdate=off)""")
+ cur.execute("ANALYZE planet_osm_ways")
LOG.info('Creating website directory.')
basedir.mkdir()
+ assert config.project_dir is not None
template = dedent(f"""\
<?php
"""
Functions for updating a database from a replication source.
"""
-from typing import ContextManager, MutableMapping, Any, Generator, cast
+from typing import ContextManager, MutableMapping, Any, Generator, cast, Iterator
from contextlib import contextmanager
import datetime as dt
from enum import Enum
import logging
import time
+import types
+import urllib.request as urlrequest
+import requests
from nominatim.db import status
from nominatim.db.connection import Connection
from nominatim.tools.exec_utils import run_osm2pgsql
try:
from osmium.replication.server import ReplicationServer
from osmium import WriteHandler
+ from osmium import version as pyo_version
except ImportError as exc:
logging.getLogger().critical("pyosmium not installed. Replication functions not available.\n"
"To install pyosmium via pip: pip3 install osmium")
LOG = logging.getLogger()
-def init_replication(conn: Connection, base_url: str) -> None:
+def init_replication(conn: Connection, base_url: str,
+ socket_timeout: int = 60) -> None:
""" Set up replication for the server at the given base URL.
"""
LOG.info("Using replication source: %s", base_url)
# margin of error to make sure we get all data
date -= dt.timedelta(hours=3)
- repl = ReplicationServer(base_url)
-
- seq = repl.timestamp_to_sequence(date)
+ with _make_replication_server(base_url, socket_timeout) as repl:
+ seq = repl.timestamp_to_sequence(date)
if seq is None:
LOG.fatal("Cannot reach the configured replication service '%s'.\n"
LOG.warning("Updates initialised at sequence %s (%s)", seq, date)
-def check_for_updates(conn: Connection, base_url: str) -> int:
+def check_for_updates(conn: Connection, base_url: str,
+ socket_timeout: int = 60) -> int:
""" Check if new data is available from the replication service at the
given base URL.
"""
"Please run 'nominatim replication --init' first.")
return 254
- state = ReplicationServer(base_url).get_state_info()
+ with _make_replication_server(base_url, socket_timeout) as repl:
+ state = repl.get_state_info()
if state is None:
LOG.error("Cannot get state for URL %s.", base_url)
NO_CHANGES = 3
-def update(conn: Connection, options: MutableMapping[str, Any]) -> UpdateState:
+def update(conn: Connection, options: MutableMapping[str, Any],
+ socket_timeout: int = 60) -> UpdateState:
""" Update database from the next batch of data. Returns the state of
updates according to `UpdateState`.
"""
options['import_file'].unlink()
# Read updates into file.
- with _make_replication_server(options['base_url']) as repl:
+ with _make_replication_server(options['base_url'], socket_timeout) as repl:
outhandler = WriteHandler(str(options['import_file']))
endseq = repl.apply_diffs(outhandler, startseq + 1,
max_size=options['max_diff_size'] * 1024)
if endseq is None:
return UpdateState.NO_CHANGES
- # Consume updates with osm2pgsql.
- options['append'] = True
- options['disable_jit'] = conn.server_version_tuple() >= (11, 0)
- run_osm2pgsql(options)
+ run_osm2pgsql_updates(conn, options)
# Write the current status to the file
endstate = repl.get_state_info(endseq)
return UpdateState.UP_TO_DATE
-def _make_replication_server(url: str) -> ContextManager[ReplicationServer]:
+def run_osm2pgsql_updates(conn: Connection, options: MutableMapping[str, Any]) -> None:
+ """ Run osm2pgsql in append mode.
+ """
+ # Remove any stale deletion marks.
+ with conn.cursor() as cur:
+ cur.execute('TRUNCATE place_to_be_deleted')
+ conn.commit()
+
+ # Consume updates with osm2pgsql.
+ options['append'] = True
+ options['disable_jit'] = conn.server_version_tuple() >= (11, 0)
+ run_osm2pgsql(options)
+
+ # Handle deletions
+ with conn.cursor() as cur:
+ cur.execute('SELECT flush_deleted_places()')
+ conn.commit()
+
+
+def _make_replication_server(url: str, timeout: int) -> ContextManager[ReplicationServer]:
""" Returns a ReplicationServer in form of a context manager.
Creates a light wrapper around older versions of pyosmium that did
not support the context manager interface.
"""
if hasattr(ReplicationServer, '__enter__'):
- return cast(ContextManager[ReplicationServer], ReplicationServer(url))
+ # Patches the open_url function for pyosmium >= 3.2
+ # where the socket timeout is no longer respected.
+ def patched_open_url(self: ReplicationServer, url: urlrequest.Request) -> Any:
+ """ Download a resource from the given URL and return a byte sequence
+ of the content.
+ """
+ headers = {"User-Agent" : f"Nominatim (pyosmium/{pyo_version.pyosmium_release})"}
+
+ if self.session is not None:
+ return self.session.get(url.get_full_url(),
+ headers=headers, timeout=timeout or None,
+ stream=True)
+
+ @contextmanager
+ def _get_url_with_session() -> Iterator[requests.Response]:
+ with requests.Session() as session:
+ request = session.get(url.get_full_url(),
+ headers=headers, timeout=timeout or None,
+ stream=True)
+ yield request
+
+ return _get_url_with_session()
+
+ repl = ReplicationServer(url)
+ setattr(repl, 'open_url', types.MethodType(patched_open_url, repl))
+
+ return cast(ContextManager[ReplicationServer], repl)
@contextmanager
def get_cm() -> Generator[ReplicationServer, None, None]:
# patch level when cherry-picking the commit with the migration.
#
# Released versions always have a database patch level of 0.
-NOMINATIM_VERSION = (4, 1, 0, 0)
+NOMINATIM_VERSION = (4, 2, 99, 0)
POSTGRESQL_REQUIRED_VERSION = (9, 6)
POSTGIS_REQUIRED_VERSION = (2, 2)
-Subproject commit 6a5d2500e9689f55485d186306aadc55560085fd
+Subproject commit 4facd1aea451cea220261c361698b8e5f18a9327
+++ /dev/null
-name:
- default: De Nederlandse Antillen
- af: Nederlandse Antille
- an: Antillas Neerlandesas
- ar: جزر الأنتيل
- be: Нідэрландскія Антылы
- bg: Холандски Антили
- br: Antilhez Nederlandat
- bs: Holandski Antili
- ca: Antilles Neerlandeses
- cs: Nizozemské Antily
- cy: Antilles yr Iseldiroedd
- da: Nederlandske Antiller
- de: Niederländische Antillen
- dv: ނެދަލޭންޑު އެންޓިލޭ
- el: Ολλανδικές Αντίλλες
- en: Netherlands Antilles
- eo: Nederlandaj Antiloj
- es: Antillas Neerlandesas;Antillas Holandesas;Indias Occidentales Holandesas
- et: Hollandi Antillid
- eu: Holandarren Antillak
- fa: آنتیل هلند
- fi: Alankomaiden Antillit
- fo: Niðurlendsku Antillurnar
- fr: Antilles néerlandaises
- fy: Nederlânske Antillen
- ga: Aintillí na hÍsiltíre
- gl: Antillas Neerlandesas
- he: האנטילים ההולנדיים
- hi: नीदरलैंड एंटीलीज़
- hr: Nizozemski Antili
- hu: Holland Antillák
- ia: Antillas Nederlandese
- id: Antillen Belanda
- io: Nederlandana Antili
- is: Hollensku Antillaeyjar
- it: Antille Olandesi
- ja: オランダ領アンティル
- jv: Antillen Walanda
- ka: ნიდერლანდის ანტილები
- kk: Антийлер
- ko: 네덜란드령 안틸레스
- kw: Antillys Iseldiryek
- la: Antillae Nederlandiae
- lb: Hollännesch Antillen
- li: Nederlandse Antille
- ln: Antiya ya Holanda
- lt: Nyderlandų Antilai
- lv: Antiļas
- mn: Нидерландын Антиллийн Арлууд
- mr: नेदरलँड्स अँटिल्स
- ms: Antillen Belanda
- nn: Dei nederlandske Antillane
- "no": De nederlandske Antillene
- pl: Antyle Holenderskie
- pt: Antilhas Holandesas
- ro: Antilele Olandeze
- ru: Нидерландские Антилы
- sh: Nizozemski Antili
- sk: Holandské Antily
- sl: Nizozemski Antili
- sr: Холандски Антили
- sv: Nederländska Antillerna
- sw: Antili za Kiholanzi
- ta: நெதர்லாந்து அண்டிலிசு
- tg: Антил Ҳоланд
- th: เนเธอร์แลนด์แอนทิลลิส
- tr: Hollanda Antilleri
- uk: Нідерландські Антильські острови
- vi: Antille thuộc Hà Lan
- zh: 荷属安的列斯
+++ /dev/null
-name:
- default: Antarctica
+++ /dev/null
-name:
- default: American Samoa
+++ /dev/null
-name:
- default: Aruba
+++ /dev/null
-name:
- default: Aland Islands
+++ /dev/null
-name:
- default: Saint Barthélemy
+++ /dev/null
-name:
- default: "\N"
+++ /dev/null
-name:
- default: Bouvet Island
+++ /dev/null
-name:
- default: Cocos (Keeling) Islands
- af: Cocos (Keeling) Eilande
- ar: جزر كوكوس (كيلينغ)
- be: Какосавыя (Кілінг) астравы
- br: Inizi Kokoz
- ca: Illes Cocos
- da: Cocosøerne
- de: Kokosinseln
- el: Νησιά Κόκος
- en: Cocos (Keeling) Islands
- eo: Kokosinsuloj
- es: Islas Cocos (Keeling)
- et: Kookossaared
- eu: Cocos (Keeling) uharteak
- fa: جزایر کوکوس
- fi: Kookossaaret
- fr: Îles Cocos
- fy: de Kokoseilannen
- he: איי קוקוס (קילינג)
- hr: Kokosovi otoci
- hu: Kókusz (Keeling)-szigetek
- id: Kepulauan Cocos (Keeling)
- is: Kókoseyjar
- it: Isole Cocos e Keeling
- lt: Kokoso (Keelingo) salos
- lv: Kokosu (Kīlinga) salas
- mn: Кокосын (Кийлингийн) Арлууд
- nl: Cocoseilanden
- pl: Wyspy Kokosowe
- ru: Кокосовые острова
- sl: Kokosovi otoki
- sv: Kokosöarna
- tr: Cocos (Keeling) Adaları
- uk: Кокосові острови
- vi: Quần đảo Cocos (Keeling)
- zh: 科科斯(基林)群島
+++ /dev/null
-name:
- default: Curaçao
- en: Curaçao
- es: Curazao
- fr: Curaçao
- ru: Кюрасао
- sv: Curaçao
+++ /dev/null
-name:
- default: Christmas Island
- af: Christmas-eiland
- ar: جزيرة الميلاد
- bg: Рождество
- br: Enez Nedeleg
- bs: Božićno ostrvo
- ca: Illa Christmas
- cs: Vánoční ostrov
- cy: Ynys y Nadolig
- da: Juleøen
- de: Weihnachtsinsel
- el: Νήσος των Χριστουγέννων
- eo: Kristnaskinsulo
- es: Isla de Navidad
- et: Jõulusaar
- eu: Christmas uhartea
- fa: جزیره کریسمس
- fi: Joulusaari
- fr: Île Christmas
- fy: Krysteilân
- ga: Oileán na Nollag
- gl: Illa de Nadal
- he: טריטוריית האי חג המולד
- hi: क्रिसमस आईलैंड
- hr: Božićni otok
- hu: Karácsony-sziget
- id: Pulau Natal
- is: Jólaeyja
- it: Isola del Natale
- ja: クリスマス島
- ka: შობის კუნძული
- kk: Кристмас аралы
- ko: 크리스마스 섬
- kw: Ynys Nadelik
- lb: Chrëschtdagsinsel
- lt: Kalėdų sala
- lv: Ziemsvētku sala
- mn: Зул Сарын Арал
- mr: क्रिसमस द्वीप
- ms: Pulau Krismas
- nl: Christmaseiland
- nn: Christmasøya
- "no": Christmasøya
- pl: Wyspa Bożego Narodzenia
- pt: Ilha Christmas
- ro: Insula Crăciunului
- ru: Остров Рождества
- sh: Božićni otok
- sk: Vianočný ostrov
- sl: Božični otoki
- sr: Божићно Острво
- sv: Julön
- sw: Kisiwa cha Krismasi
- ta: கிறிஸ்துமசு தீவு
- th: เกาะคริสต์มาส
- tr: Christmas Adası
- uk: Острів Різдва
- vi: Đảo Christmas
- wo: Dunu Christmas
- zh: 圣诞岛
+++ /dev/null
-name:
- default: Guyane Française
- af: Frans-Guyana
- ar: غيانا
- br: Gwiana c’hall
- ca: Guaiana Francesa
- cy: Guyane
- da: Fransk Guyana
- de: Französisch-Guayana
- el: Γαλλική Γουιάνα
- en: French Guiana
- eo: Gujano
- es: Guayana Francesa
- et: Prantsuse Guajaana
- fa: گویان فرانسه
- fi: Ranskan Guayana
- fr: Guyane française
- fy: Frânsk Guyana
- ga: Guáin na Fraince
- gd: Guiana Fhrangach
- he: גיאנה הצרפתית
- hr: Francuska Gvajana
- hu: Francia Guyana
- id: Guyana Perancis
- is: Franska Gvæjana
- it: Guyana francese
- la: Guiana Francica
- li: Frans Guyana
- lt: Prancūzijos Gviana
- lv: Franču Gviāna
- mn: Франц Гвиана
- nl: Frans-Guyana
- pl: Gujana Francuska
- ru: Французская Гвиана
- sl: Francoska Gvajana
- sv: Franska Guyana
- th: เฟรนช์เกียนา
- tr: Fransız Guyanası
- uk: Французька Гвіана
- vi: Guyane thuộc Pháp
- zh: 法属圭亚那
+++ /dev/null
-name:
- default: Guadeloupe
- ar: غوادلوب
- be: Гвадэлупа
- br: Gwadeloup
- ca: Illa de Guadalupe
- da: Guadeloupe
- el: Γουαδελούπη
- en: Guadeloupe
- eo: Gvadelupo
- es: Guadalupe
- fa: گوادلوپ
- fi: Guadeloupe
- fr: Guadeloupe
- fy: Guadelûp
- ga: Guadalúip
- he: גוואדלופ
- hr: Gvadalupa
- hu: Guadeloupe
- is: Gvadelúpeyjar
- it: Guadalupa
- la: Guadalupa
- lt: Gvadelupa
- lv: Gvadelupa
- mn: Гуаделупе
- pl: Gwadelupa
- ru: Гваделупа
- sv: Guadeloupe
- th: กวาเดอลูป
- uk: Гваделупа
- zh: 瓜德罗普
+++ /dev/null
-name:
- default: Guam
+++ /dev/null
-name:
- default: Hong Kong
+++ /dev/null
-name:
- default: Heard Island and MaxDonald Islands
+++ /dev/null
-name:
- default: Saint Martin
+++ /dev/null
-name:
- default: Macao
+++ /dev/null
-name:
- default: Northern Mariana Islands
+++ /dev/null
-name:
- default: Martinique
- ar: مارتينيك
- be: Марцініка
- br: Martinik
- ca: Martinica
- da: Martinique
- el: Μαρτινίκα
- en: Martinique
- eo: Martiniko
- es: Martinica
- fa: مارتینیک
- fi: Martinique
- fr: Martinique
- fy: Martinyk
- he: מרטיניק
- hr: Martinik
- hu: Martinique
- id: Martinik
- is: Martinique
- it: Martinica
- la: Martinica
- lt: Martinika
- lv: Martinika
- mn: Мартиник
- pl: Martynika
- ru: Мартиника
- sv: Martinique
- uk: Мартиніка
- zh: 馬提尼克
+++ /dev/null
-name:
- default: Nouvelle-Calédonie
- af: Nieu-Caledonia
- ar: كاليدونيا الجديدة
- be: Новая Каледонія
- br: Kaledonia Nevez
- ca: Nova Caledònia
- cy: Caledonia Newydd
- da: Ny Kaledonien
- de: Neukaledonien
- el: Νέα Καληδονία
- en: New Caledonia
- eo: Nov-Kaledonio
- es: Nueva Caledonia
- fa: کالدونیای جدید
- fi: Uusi-Kaledonia
- fr: Nouvelle-Calédonie
- ga: An Nua-Chaladóin
- he: קלדוניה החדשה
- hr: Nova Kaledonija
- hu: Új-Kaledónia
- id: Kaledonia Baru
- is: Nýja-Kaledónía
- it: Nuova Caledonia
- la: Nova Caledonia
- lt: Naujoji Kaledonija
- lv: Jaunkaledonija
- mn: Шинэ Каледони
- nl: Nieuw-Caledonië
- pl: Nowa Kaledonia
- ru: Новая Каледония
- sl: Nova Kaledonija
- sv: Nya Kaledonien
- th: นิวแคลิโดเนีย
- tr: Yeni Kaledonya
- uk: Нова Каледонія
- zh: 新喀里多尼亚
+++ /dev/null
-name:
- default: Norfolk Island
- af: Norfolkeiland
- ar: جزيرة نورفولك
- be: Норфалк
- br: Enez Norfolk
- ca: Illa Norfolk
- cy: Ynys Norfolk
- da: Norfolk-øen
- de: Norfolkinsel
- en: Norfolk Island
- eo: Norfolkinsulo
- es: Isla Norfolk
- et: Norfolki saar
- fi: Norfolkinsaari
- fr: Île Norfolk
- fy: Norfolk
- ga: Oileán Norfolk
- he: האי נורפוק
- hr: Otok Norfolk
- hu: Norfolk-sziget
- id: Pulau Norfolk
- is: Norfolkeyja
- it: Isola Norfolk
- la: Insula Norfolcia
- lt: Norfolko sala
- lv: Norfolkas sala
- mn: Норфолк Арал
- nl: Norfolk
- pl: Wyspa Norfolk
- ru: Остров Норфолк
- sv: Norfolkön
- tr: Norfolk Adası
- uk: Острів Норфолк
- vi: Đảo Norfolk
- zh: 诺福克岛
+++ /dev/null
-name:
- default: Polynésie française
- af: Franse Polynesië
- an: Polinesia Franzesa
- ar: بولونيزيا الفرنسية
- az: Fransa Polineziyası
- be: Французская Палінезія
- bg: Френска Полинезия
- br: Polinezia Frañs
- bs: Francuska Polinezija
- ca: Polinèsia Francesa
- cs: Francouzská Polynésie
- cy: Polynesia Ffrengig
- da: Fransk Polynesien
- de: Französisch-Polynesien
- dv: ފަރަންސޭސި ޕޮލިނޭޝިއާ
- el: Γαλλική Πολυνησία
- en: French Polynesia
- eo: Franca Polinezio
- es: Polinesia Francesa
- et: Prantsuse Polüneesia
- eu: Frantziar Polinesia
- fa: پلینزی فرانسه
- fi: Ranskan Polynesia
- fr: Polynésie française
- fy: Frânsk Polyneezje
- ga: Polainéis na Fraince
- gd: French Polynesia
- gl: Polinesia francesa
- he: פולינזיה הצרפתית
- hi: फ्रेंच पोलीनेशिया
- hr: Francuska Polinezija
- hu: Francia Polinézia
- id: Polinesia Perancis
- io: Franca Polinezia
- is: Franska Pólýnesía
- it: Polinesia francese
- ja: フランス領ポリネシア
- jv: Polinesia Perancis
- kk: Франция Полинезиясы
- ko: 프랑스령 폴리네시아
- kw: Polynesi Frynkek
- la: Polynesia Francica
- lb: Franséisch-Polynesien
- lt: Prancūzijos Polinezija
- lv: Franču Polinēzija
- mi: Porinīhia Wīwī
- mk: Француска Полинезија
- mn: Францын Полинез
- mr: फ्रेंच पॉलिनेशिया
- ms: Polinesia Perancis
- nl: Frans-Polynesië
- nn: Fransk Polynesia
- "no": Fransk Polynesia
- oc: Polinesia Francesa
- os: Францы Полинези
- pl: Polinezja Francuska
- pt: Polinésia Francesa
- qu: Phransis Pulinisya
- ro: Polinezia Franceză
- ru: Французская Полинезия
- se: Frankriikka Polynesia
- sh: Francuska Polinezija
- sk: Francúzska Polynézia
- sl: Francoska Polinezija
- sr: Француска Полинезија
- sv: Franska Polynesien
- sw: Polynesia ya Kifaransa
- ta: பிரெஞ்சு பொலினீசியா
- th: เฟรนช์โปลินีเซีย
- tr: Fransız Polinezyası
- ty: Pōrīnetia Farāni
- ug: Fransiyige Qarashliq Polinéziye
- uk: Французька Полінезія
- vi: Polynésie thuộc Pháp
- wo: Polineesi gu Faraas
- zh: 法属波利尼西亚
+++ /dev/null
-name:
- default: Saint-Pierre-et-Miquelon
- af: Saint-Pierre et Miquelon
- be: Святы П’ер і Міквелон
- da: Saint Pierre og Miquelon
- de: Saint-Pierre und Miquelon
- en: Saint Pierre and Miquelon
- eo: Sankta-Piero kaj Mikelono
- es: San Pedro y Miguelón
- fi: Saint-Pierre ja Miquelon
- fr: Saint-Pierre-et-Miquelon
- hr: Sveti Petar i Mikelon
- hu: Saint-Pierre és Miquelon
- lt: Sen Pjeras ir Mikelonas
- lv: Senpjēra un Mikelona
- mn: Сент Пьер ба Микелон
- sv: Saint-Pierre och Miquelon
- tr: Saint-Pierre ve Miquelon
- uk: Сен-П'єр і Мікелон
+++ /dev/null
-name:
- default: Puerto Rico
+++ /dev/null
-name:
- default: Réunion
- af: Réunion
- ar: ريونيون
- be: Руньён
- br: Ar Reunion
- ca: Illa de la Reunió
- da: Reunion
- el: Ρεϊνιόν
- eo: Reunio
- es: La Reunión
- fa: رئونیون
- fi: Réunion
- fr: La Réunion
- he: ראוניון
- hu: Réunion
- is: Réunion
- it: Riunione
- la: Reunio
- lt: Reunionas
- lv: Reinjona
- mn: Реюньон
- pl: Reunion
- ru: Реюньон
- sl: Reunion
- sv: Réunion
- th: เรอูนียง
- uk: Реюньйон
- zh: 留尼汪
+++ /dev/null
-name:
- default: Svalbard and Jan Mayen
+++ /dev/null
-name:
- default: Sint Maarten
+++ /dev/null
-name:
- default: Terres australes et antarctiques françaises
- af: Franse Suidelike en Antarktiese Gebiede
- an: Territorios Australs Franzeses
- ar: الأراضي الجنوبية الفرنسية
- be: Французскія Паўднёвыя тэрыторыі
- bg: Френски южни и антарктически територии
- br: Douaroù Aostral hag Antarktikel Frañs
- ca: Terres Australs i Antàrtiques Franceses
- cs: Francouzská jižní a antarktická území
- da: Franske sydlige og Antarktiske territorier
- de: Französische Süd- und Antarktisgebiete
- el: Γαλλικά νότια και ανταρκτικά εδάφη
- en: French Southern Lands
- eo: Francaj Sudaj Teritorioj
- es: Tierras Australes y Antárticas Francesas
- eu: Frantziaren lurralde austral eta antartikoak
- fi: Ranskan eteläiset ja antarktiset alueet
- fr: Terres australes et antarctiques françaises
- fy: Frânske Súdlike en Antarktyske Lannen
- gl: Terras Austrais e Antárticas Francesas
- hr: Francuski južni i antarktički teritoriji
- hu: Francia déli és antarktiszi területek
- id: Daratan Selatan dan Antarktika Perancis
- is: Frönsku suðlægu landsvæðin
- it: Terre Australi e Antartiche Francesi
- ja: フランス領南方・南極地域
- ko: 프랑스령 남부와 남극 지역
- kw: Tiryow Deghow hag Antarktik Frynkek
- lt: Prancūzijos Pietų Sritys
- lv: Francijas Dienvidjūru un Antarktikas Zemes
- nl: Franse Zuidelijke en Antarctische Gebieden
- "no": De franske sørterritorier
- oc: Tèrras Australas e Antarticas Francesas
- pl: Francuskie Terytoria Południowe i Antarktyczne
- pt: Terras Austrais e Antárticas Francesas
- ro: Teritoriile australe şi antarctice franceze
- ru: Французские Южные и Антарктические территории
- sh: Francuske Južne Teritorije
- sk: Francúzske južné a antarktické územia
- sl: Francoske južne in antarktične dežele
- sr: Француске јужне и антарктичке земље
- sv: Franska sydterritorierna
- ta: பிரெஞ்சு தென்னக நிலங்களும் அண்டாடிக் நிலமும்
- tr: Fransız Güney ve Antarktika Toprakları
- uk: Французькі Південні та Антарктичні території
- vi: Vùng đất phía Nam và châu Nam Cực thuộc Pháp
- zh: 法属南部领地
+++ /dev/null
-name:
- default: United States Minor Outlying Islands
+++ /dev/null
-name:
- default: United States Virgin Islands
+++ /dev/null
-name:
- default: Wallis-et-Futuna
- af: Wallis-en-Futuna
- an: Wallis e Futuna
- ar: جزر واليس وفوتونا
- be: Уоліс і Футуна
- bg: Уолис и Футуна
- br: Wallis ha Futuna
- ca: Wallis i Futuna
- cs: Wallis a Futuna
- cy: Wallis a Futuna
- da: Wallis og Futuna
- de: Wallis und Futuna
- dv: ވާލީ އަދި ފުތޫނާ
- el: Ουώλλις και Φουτούνα
- en: Wallis and Futuna Islands
- eo: Valiso kaj Futuno
- es: Wallis y Futuna
- et: Wallis ja Futuna
- eu: Wallis eta Futuna
- fa: والیس و فوتونا
- fi: Wallis- ja Futunasaaret
- fr: Wallis-et-Futuna
- fy: Wallis en Fûtûna
- ga: Vailís agus Futúna
- gl: Wallis e Futuna
- he: ואליס ופוטונה
- hr: Wallis i Futuna
- hu: Wallis és Futuna
- id: Wallis dan Futuna
- io: Wallis e Futuna Insuli
- is: Wallis- og Fútúnaeyjar
- it: Wallis e Futuna
- ja: ウォリス・フツナ
- jv: Wallis lan Futuna
- ko: 왈리스 퓌튀나
- kw: Wallis ha Futuna
- la: Vallis et Futuna
- lb: Wallis a Futuna
- lt: Walliso ir Futuna salos
- lv: Volisa un Futuna
- mn: Уоллис ба Футуна
- mr: वालिस व फुतुना
- ms: Wallis dan Futuna
- nl: Wallis en Futuna
- nn: Wallis- og Futunaøyane
- "no": Wallis- og Futunaøyene
- oc: Wallis e Futuna
- pl: Wallis i Futuna
- pt: Wallis e Futuna
- ro: Wallis şi Futuna
- ru: Уоллис и Футуна
- se: Wallis ja Futuna
- sh: Wallis i Futuna
- sk: Wallis a Futuna
- sl: Wallis in Futuna
- sm: Wallis and Futuna
- sr: Валис и Футуна
- sv: Wallis- och Futunaöarna
- sw: Wallis na Futuna
- ta: வலிசும் புட்டூனாவும்
- th: หมู่เกาะวาลลิสและหมู่เกาะฟุตูนา
- tr: Wallis ve Futuna Adaları
- ug: Wallis we Futuna Taqim Aralliri
- uk: Волліс і Футуна
- vi: Wallis và Futuna
- wo: Wallis ak Futuna
- zh: 瓦利斯和富图纳群岛
+++ /dev/null
-name:
- default: Mayotte
pattern: "dddd"
-# Netherlands Antilles (De Nederlandse Antillen)
-an:
- partition: 58
- languages: nl, en, pap
- names: !include country-names/an.yaml
-
-
# Angola (Angola)
ao:
partition: 85
postcode: no
-# (Antarctica)
-aq:
- partition: 181
- languages: en, es, fr, ru
- names: !include country-names/aq.yaml
- postcode: no
-
-
# Argentina (Argentina)
ar:
partition: 39
pattern: "l?dddd(?:lll)?"
-# (American Samoa)
-as:
- partition: 182
- languages: en, sm
- names: !include country-names/as.yaml
-
-
# Austria (Österreich)
at:
partition: 245
pattern: "dddd"
-# (Aruba)
-aw:
- partition: 183
- languages: nl, pap
- names: !include country-names/aw.yaml
- postcode: no
-
-
-# (Aland Islands)
-ax:
- partition: 184
- languages: sv
- names: !include country-names/ax.yaml
-
-
# Azerbaijan (Azərbaycan)
az:
partition: 119
postcode: no
-# (Saint Barthélemy)
-bl:
- partition: 204
- languages: fr
- names: !include country-names/bl.yaml
-
-
# Bermuda (Bermuda)
bm:
partition: 176
postcode: no
-# Caribbean Netherlands (Caribisch Nederland)
-bq:
- partition: 250
- languages: nl
- names: !include country-names/bq.yaml
-
-
# Brazil (Brasil)
br:
partition: 121
pattern: "ddddd"
-# (Bouvet Island)
-bv:
- partition: 185
- languages: "no"
- names: !include country-names/bv.yaml
-
-
# Botswana (Botswana)
bw:
partition: 122
output: \1 \2
-# Cocos (Keeling) Islands (Cocos (Keeling) Islands)
-cc:
- partition: 118
- languages: en
- names: !include country-names/cc.yaml
-
-
# Democratic Republic of the Congo (République démocratique du Congo)
cd:
partition: 229
pattern: "dddd"
-# Curaçao (Curaçao)
-cw:
- partition: 248
- languages: nl, en
- names: !include country-names/cw.yaml
-
-
-# Christmas Island (Christmas Island)
-cx:
- partition: 177
- languages: en
- names: !include country-names/cx.yaml
-
-
# Cyprus (Κύπρος - Kıbrıs)
cy:
partition: 114
pattern: "dddd"
-# French Guiana (Guyane Française)
-gf:
- partition: 231
- languages: fr
- names: !include country-names/gf.yaml
-
-
# Guernsey (Guernsey)
gg:
partition: 77
pattern: "ddd"
-# Guadeloupe (Guadeloupe)
-gp:
- partition: 232
- languages: fr
- names: !include country-names/gp.yaml
-
-
# Equatorial Guinea (Guinea Ecuatorial)
gq:
partition: 12
pattern: "ddddd"
-# Guam (Guam)
-gu:
- partition: 187
- languages: en, ch
- names: !include country-names/gu.yaml
-
-
# Guinea-Bissau (Guiné-Bissau)
gw:
partition: 8
postcode: no
-# (Hong Kong)
-hk:
- partition: 188
- languages: zh-hant, en
- names: !include country-names/hk.yaml
-
-
-# (Heard Island and MaxDonald Islands)
-hm:
- partition: 189
- languages: en
- names: !include country-names/hm.yaml
-
-
# Honduras (Honduras)
hn:
partition: 56
pattern: "ddddd"
-# Saint Martin (Saint Martin)
-mf:
- partition: 203
- languages: fr
- names: !include country-names/mf.yaml
-
-
# Madagascar (Madagasikara)
mg:
partition: 164
pattern: "ddddd"
-# Macao (Macao)
-mo:
- partition: 191
- languages: zh-hant, pt
- names: !include country-names/mo.yaml
- postcode: no
-
-
-# Northern Mariana Islands (Northern Mariana Islands)
-mp:
- partition: 192
- languages: ch, en
- names: !include country-names/mp.yaml
-
-
-# Martinique (Martinique)
-mq:
- partition: 233
- languages: fr
- names: !include country-names/mq.yaml
-
-
# Mauritania (موريتانيا)
mr:
partition: 149
pattern: "ddddd"
-# New Caledonia (Nouvelle-Calédonie)
-nc:
- partition: 234
- languages: fr
- names: !include country-names/nc.yaml
-
-
# Niger (Niger)
ne:
partition: 226
pattern: "dddd"
-# Norfolk Island (Norfolk Island)
-nf:
- partition: 100
- languages: en, pih
- names: !include country-names/nf.yaml
-
-
# Nigeria (Nigeria)
ng:
partition: 218
pattern: "ddddd"
-# French Polynesia (Polynésie française)
-pf:
- partition: 202
- languages: fr
- names: !include country-names/pf.yaml
-
-
# Papua New Guinea (Papua Niugini)
pg:
partition: 71
output: \1-\2
-# Saint Pierre and Miquelon (Saint-Pierre-et-Miquelon)
-pm:
- partition: 236
- languages: fr
- names: !include country-names/pm.yaml
-
-
# Pitcairn Islands (Pitcairn Islands)
pn:
partition: 113
output: \1 \2
-# Puerto Rico (Puerto Rico)
-pr:
- partition: 193
- languages: es, en
- names: !include country-names/pr.yaml
-
-
# Palestinian Territory (Palestinian Territory)
ps:
partition: 194
postcode: no
-# (Réunion)
-re:
- partition: 235
- languages: fr
- names: !include country-names/re.yaml
-
-
# Romania (România)
ro:
partition: 170
pattern: "dddd"
-# (Svalbard and Jan Mayen)
-sj:
- partition: 197
- languages: "no"
- names: !include country-names/sj.yaml
-
-
# Slovakia (Slovensko)
sk:
partition: 172
pattern: "dddd"
-# (Sint Maarten)
-sx:
- partition: 249
- languages: nl, en
- names: !include country-names/sx.yaml
-
-
# Syria (سوريا)
sy:
partition: 104
postcode: no
-# French Southern Lands (Terres australes et antarctiques françaises)
-tf:
- partition: 132
- languages: fr
- names: !include country-names/tf.yaml
-
-
# Togo (Togo)
tg:
partition: 243
postcode: no
-# (United States Minor Outlying Islands)
-um:
- partition: 198
- languages: en
- names: !include country-names/um.yaml
- postcode:
- pattern: "96898"
-
-
# United States (United States)
us:
partition: 2
output: VG\1
-# (United States Virgin Islands)
-vi:
- partition: 199
- languages: en
- names: !include country-names/vi.yaml
-
-
# Vietnam (Việt Nam)
vn:
partition: 75
postcode: no
-# Wallis and Futuna Islands (Wallis-et-Futuna)
-wf:
- partition: 238
- languages: fr
- names: !include country-names/wf.yaml
-
-
# Samoa (Sāmoa)
ws:
partition: 131
postcode: no
-# Mayotte (Mayotte)
-yt:
- partition: 200
- languages: fr
- names: !include country-names/yt.yaml
-
-
# South Africa (South Africa)
za:
partition: 76
--- /dev/null
+-- Core functions for Nominatim import flex style.
+--
+
+local module = {}
+
+local PRE_DELETE = nil
+local PRE_EXTRAS = nil
+local MAIN_KEYS = nil
+local NAMES = nil
+local ADDRESS_TAGS = nil
+local SAVE_EXTRA_MAINS = false
+local POSTCODE_FALLBACK = true
+
+
+-- The single place table.
+local place_table = osm2pgsql.define_table{
+ name = "place",
+ ids = { type = 'any', id_column = 'osm_id', type_column = 'osm_type' },
+ columns = {
+ { column = 'class', type = 'text', not_null = true },
+ { column = 'type', type = 'text', not_null = true },
+ { column = 'admin_level', type = 'smallint' },
+ { column = 'name', type = 'hstore' },
+ { column = 'address', type = 'hstore' },
+ { column = 'extratags', type = 'hstore' },
+ { column = 'geometry', type = 'geometry', projection = 'WGS84', not_null = true },
+ },
+ indexes = {}
+}
+
+------------ Geometry functions for relations ---------------------
+
+function module.relation_as_multipolygon(o)
+ return o:as_multipolygon()
+end
+
+function module.relation_as_multiline(o)
+ return o:as_multilinestring():line_merge()
+end
+
+
+module.RELATION_TYPES = {
+ multipolygon = module.relation_as_multipolygon,
+ boundary = module.relation_as_multipolygon,
+ waterway = module.relation_as_multiline
+}
+
+------------- Place class ------------------------------------------
+
+local Place = {}
+Place.__index = Place
+
+function Place.new(object, geom_func)
+ local self = setmetatable({}, Place)
+ self.object = object
+ self.geom_func = geom_func
+
+ self.admin_level = tonumber(self.object:grab_tag('admin_level'))
+ if self.admin_level == nil
+ or self.admin_level <= 0 or self.admin_level > 15
+ or math.floor(self.admin_level) ~= self.admin_level then
+ self.admin_level = 15
+ end
+
+ self.num_entries = 0
+ self.has_name = false
+ self.names = {}
+ self.address = {}
+ self.extratags = {}
+
+ return self
+end
+
+function Place:clean(data)
+ for k, v in pairs(self.object.tags) do
+ if data.delete ~= nil and data.delete(k, v) then
+ self.object.tags[k] = nil
+ elseif data.extra ~= nil and data.extra(k, v) then
+ self.extratags[k] = v
+ self.object.tags[k] = nil
+ end
+ end
+end
+
+function Place:delete(data)
+ if data.match ~= nil then
+ for k, v in pairs(self.object.tags) do
+ if data.match(k, v) then
+ self.object.tags[k] = nil
+ end
+ end
+ end
+end
+
+function Place:grab_extratags(data)
+ local count = 0
+
+ if data.match ~= nil then
+ for k, v in pairs(self.object.tags) do
+ if data.match(k, v) then
+ self.object.tags[k] = nil
+ self.extratags[k] = v
+ count = count + 1
+ end
+ end
+ end
+
+ return count
+end
+
+local function strip_address_prefix(k)
+ if k:sub(1, 5) == 'addr:' then
+ return k:sub(6)
+ end
+
+ if k:sub(1, 6) == 'is_in:' then
+ return k:sub(7)
+ end
+
+ return k
+end
+
+
+function Place:grab_address_parts(data)
+ local count = 0
+
+ if data.groups ~= nil then
+ for k, v in pairs(self.object.tags) do
+ local atype = data.groups(k, v)
+
+ if atype ~= nil then
+ if atype == 'main' then
+ self.has_name = true
+ self.address[strip_address_prefix(k)] = v
+ count = count + 1
+ elseif atype == 'extra' then
+ self.address[strip_address_prefix(k)] = v
+ else
+ self.address[atype] = v
+ end
+ self.object.tags[k] = nil
+ end
+ end
+ end
+
+ return count
+end
+
+
+function Place:grab_name_parts(data)
+ local fallback = nil
+
+ if data.groups ~= nil then
+ for k, v in pairs(self.object.tags) do
+ local atype = data.groups(k, v)
+
+ if atype ~= nil then
+ self.names[k] = v
+ self.object.tags[k] = nil
+ if atype == 'main' then
+ self.has_name = true
+ elseif atype == 'house' then
+ self.has_name = true
+ fallback = {'place', 'house', 'always'}
+ end
+ end
+ end
+ end
+
+ return fallback
+end
+
+
+function Place:write_place(k, v, mtype, save_extra_mains)
+ if mtype == nil then
+ return 0
+ end
+
+ v = v or self.object.tags[k]
+ if v == nil then
+ return 0
+ end
+
+ if type(mtype) == 'table' then
+ mtype = mtype[v] or mtype[1]
+ end
+
+ if mtype == 'always' or (self.has_name and mtype == 'named') then
+ return self:write_row(k, v, save_extra_mains)
+ end
+
+ if mtype == 'named_with_key' then
+ local names = {}
+ local prefix = k .. ':name'
+ for namek, namev in pairs(self.object.tags) do
+ if namek:sub(1, #prefix) == prefix
+ and (#namek == #prefix
+ or namek:sub(#prefix + 1, #prefix + 1) == ':') then
+ names[namek:sub(#k + 2)] = namev
+ end
+ end
+
+ if next(names) ~= nil then
+ local saved_names = self.names
+ self.names = names
+
+ local results = self:write_row(k, v, save_extra_mains)
+
+ self.names = saved_names
+
+ return results
+ end
+ end
+
+ return 0
+end
+
+function Place:write_row(k, v, save_extra_mains)
+ if self.geometry == nil then
+ self.geometry = self.geom_func(self.object)
+ end
+ if self.geometry:is_null() then
+ return 0
+ end
+
+ if save_extra_mains then
+ for extra_k, extra_v in pairs(self.object.tags) do
+ if extra_k ~= k then
+ self.extratags[extra_k] = extra_v
+ end
+ end
+ end
+
+ place_table:insert{
+ class = k,
+ type = v,
+ admin_level = self.admin_level,
+ name = next(self.names) and self.names,
+ address = next(self.address) and self.address,
+ extratags = next(self.extratags) and self.extratags,
+ geometry = self.geometry
+ }
+
+ if save_extra_mains then
+ for k, v in pairs(self.object.tags) do
+ self.extratags[k] = nil
+ end
+ end
+
+ self.num_entries = self.num_entries + 1
+
+ return 1
+end
+
+
+function module.tag_match(data)
+ if data == nil or next(data) == nil then
+ return nil
+ end
+
+ local fullmatches = {}
+ local key_prefixes = {}
+ local key_suffixes = {}
+
+ if data.keys ~= nil then
+ for _, key in pairs(data.keys) do
+ if key:sub(1, 1) == '*' then
+ if #key > 1 then
+ if key_suffixes[#key - 1] == nil then
+ key_suffixes[#key - 1] = {}
+ end
+ key_suffixes[#key - 1][key:sub(2)] = true
+ end
+ elseif key:sub(#key, #key) == '*' then
+ if key_prefixes[#key - 1] == nil then
+ key_prefixes[#key - 1] = {}
+ end
+ key_prefixes[#key - 1][key:sub(1, #key - 1)] = true
+ else
+ fullmatches[key] = true
+ end
+ end
+ end
+
+ if data.tags ~= nil then
+ for k, vlist in pairs(data.tags) do
+ if fullmatches[k] == nil then
+ fullmatches[k] = {}
+ for _, v in pairs(vlist) do
+ fullmatches[k][v] = true
+ end
+ end
+ end
+ end
+
+ return function (k, v)
+ if fullmatches[k] ~= nil and (fullmatches[k] == true or fullmatches[k][v] ~= nil) then
+ return true
+ end
+
+ for slen, slist in pairs(key_suffixes) do
+ if #k >= slen and slist[k:sub(-slen)] ~= nil then
+ return true
+ end
+ end
+
+ for slen, slist in pairs(key_prefixes) do
+ if #k >= slen and slist[k:sub(1, slen)] ~= nil then
+ return true
+ end
+ end
+
+ return false
+ end
+end
+
+
+function module.tag_group(data)
+ if data == nil or next(data) == nil then
+ return nil
+ end
+
+ local fullmatches = {}
+ local key_prefixes = {}
+ local key_suffixes = {}
+
+ for group, tags in pairs(data) do
+ for _, key in pairs(tags) do
+ if key:sub(1, 1) == '*' then
+ if #key > 1 then
+ if key_suffixes[#key - 1] == nil then
+ key_suffixes[#key - 1] = {}
+ end
+ key_suffixes[#key - 1][key:sub(2)] = group
+ end
+ elseif key:sub(#key, #key) == '*' then
+ if key_prefixes[#key - 1] == nil then
+ key_prefixes[#key - 1] = {}
+ end
+ key_prefixes[#key - 1][key:sub(1, #key - 1)] = group
+ else
+ fullmatches[key] = group
+ end
+ end
+ end
+
+ return function (k, v)
+ local val = fullmatches[k]
+ if val ~= nil then
+ return val
+ end
+
+ for slen, slist in pairs(key_suffixes) do
+ if #k >= slen then
+ val = slist[k:sub(-slen)]
+ if val ~= nil then
+ return val
+ end
+ end
+ end
+
+ for slen, slist in pairs(key_prefixes) do
+ if #k >= slen then
+ val = slist[k:sub(1, slen)]
+ if val ~= nil then
+ return val
+ end
+ end
+ end
+ end
+end
+
+-- Process functions for all data types
+function module.process_node(object)
+
+ local function geom_func(o)
+ return o:as_point()
+ end
+
+ module.process_tags(Place.new(object, geom_func))
+end
+
+function module.process_way(object)
+
+ local function geom_func(o)
+ local geom = o:as_polygon()
+
+ if geom:is_null() then
+ geom = o:as_linestring()
+ end
+
+ return geom
+ end
+
+ module.process_tags(Place.new(object, geom_func))
+end
+
+function module.process_relation(object)
+ local geom_func = module.RELATION_TYPES[object.tags.type]
+
+ if geom_func ~= nil then
+ module.process_tags(Place.new(object, geom_func))
+ end
+end
+
+-- The process functions are used by default by osm2pgsql.
+osm2pgsql.process_node = module.process_node
+osm2pgsql.process_way = module.process_way
+osm2pgsql.process_relation = module.process_relation
+
+function module.process_tags(o)
+ o:clean{delete = PRE_DELETE, extra = PRE_EXTRAS}
+
+ -- Exception for boundary/place double tagging
+ if o.object.tags.boundary == 'administrative' then
+ o:grab_extratags{match = function (k, v)
+ return k == 'place' and v:sub(1,3) ~= 'isl'
+ end}
+ end
+
+ -- name keys
+ local fallback = o:grab_name_parts{groups=NAMES}
+
+ -- address keys
+ if o:grab_address_parts{groups=ADDRESS_TAGS} > 0 and fallback == nil then
+ fallback = {'place', 'house', 'always'}
+ end
+ if o.address.country ~= nil and #o.address.country ~= 2 then
+ o.address['country'] = nil
+ end
+ if POSTCODE_FALLBACK and fallback == nil and o.address.postcode ~= nil then
+ fallback = {'place', 'postcode', 'always'}
+ end
+
+ if o.address.interpolation ~= nil then
+ o:write_place('place', 'houses', 'always', SAVE_EXTRA_MAINS)
+ return
+ end
+
+ o:clean{delete = POST_DELETE, extra = POST_EXTRAS}
+
+ -- collect main keys
+ for k, v in pairs(o.object.tags) do
+ local ktype = MAIN_KEYS[k]
+ if ktype == 'fallback' then
+ if o.has_name then
+ fallback = {k, v, 'named'}
+ end
+ elseif ktype ~= nil then
+ o:write_place(k, v, MAIN_KEYS[k], SAVE_EXTRA_MAINS)
+ end
+ end
+
+ if fallback ~= nil and o.num_entries == 0 then
+ o:write_place(fallback[1], fallback[2], fallback[3], SAVE_EXTRA_MAINS)
+ end
+end
+
+--------- Convenience functions for simple style configuration -----------------
+
+
+function module.set_prefilters(data)
+ PRE_DELETE = module.tag_match{keys = data.delete_keys, tags = data.delete_tags}
+ PRE_EXTRAS = module.tag_match{keys = data.extra_keys,
+ tags = data.extra_tags}
+end
+
+function module.set_main_tags(data)
+ MAIN_KEYS = data
+end
+
+function module.set_name_tags(data)
+ NAMES = module.tag_group(data)
+end
+
+function module.set_address_tags(data)
+ if data.postcode_fallback ~= nil then
+ POSTCODE_FALLBACK = data.postcode_fallback
+ data.postcode_fallback = nil
+ end
+
+ ADDRESS_TAGS = module.tag_group(data)
+end
+
+function module.set_unused_handling(data)
+ if data.extra_keys == nil and data.extra_tags == nil then
+ POST_DELETE = module.tag_match{keys = data.delete_keys, tags = data.delete_tags}
+ POST_EXTRAS = nil
+ SAVE_EXTRA_MAINS = true
+ elseif data.delete_keys == nil and data.delete_tags == nil then
+ POST_DELETE = nil
+ POST_EXTRAS = module.tag_match{keys = data.extra_keys, tags = data.extra_tags}
+ SAVE_EXTRA_MAINS = false
+ else
+ error("unused handler can have only 'extra_keys' or 'delete_keys' set.")
+ end
+end
+
+-- Replace the table that decides how relation geometries are built.
+-- `data` maps a relation `type` tag value to either 'multipolygon' or
+-- 'multiline'; entries with any other value are silently ignored.
+function module.set_relation_types(data)
+    module.RELATION_TYPES = {}
+    -- BUG FIX: a plain table is not an iterator; `for k, v in data do`
+    -- attempts to call the table and raises a runtime error. Use pairs().
+    for k, v in pairs(data) do
+        if v == 'multipolygon' then
+            module.RELATION_TYPES[k] = module.relation_as_multipolygon
+        elseif v == 'multiline' then
+            module.RELATION_TYPES[k] = module.relation_as_multiline
+        end
+    end
+end
+
+-- BUG FIX: the function was declared global instead of as a module member,
+-- so `flex.set_relation_types` (the exported name, matching every other
+-- set_* function here) did not exist. Keep the old global as an alias for
+-- backward compatibility with any caller that relied on the leak.
+set_relation_types = module.set_relation_types
+
+return module
- ":: lower ()"
- "[^a-z0-9[:Space:]] >"
- ":: NFC ()"
+ - "[:Space:]+ > ' '"
sanitizers:
- step: clean-housenumbers
filter-kind:
- step: clean-postcodes
convert-to-address: yes
default-pattern: "[A-Z0-9- ]{3,12}"
+ - step: clean-tiger-tags
- step: split-name-list
- step: strip-brace-terms
- step: tag-analyzer-by-language
--- /dev/null
+local flex = require('flex-base')
+
+flex.set_main_tags{
+ highway = {'always',
+ street_lamp = 'named',
+ traffic_signals = 'named',
+ service = 'named',
+ cycleway = 'named',
+ path = 'named',
+ footway = 'named',
+ steps = 'named',
+ bridleway = 'named',
+ track = 'named',
+ motorway_link = 'named',
+ trunk_link = 'named',
+ primary_link = 'named',
+ secondary_link = 'named',
+ tertiary_link = 'named'},
+ boundary = {administrative = 'named',
+ postal_code = 'named'},
+ landuse = 'fallback',
+ place = 'always'
+}
+
+-- Prefilter: delete_keys/delete_tags are dropped outright, extra_keys are
+-- moved into extratags before main-tag processing.
+-- FIX: removed duplicated 'source' entry from delete_keys (it appeared
+-- twice; the match set is identical with a single entry).
+flex.set_prefilters{delete_keys = {'building', 'source', '*source', 'type',
+                                   'is_in:postcode', '*:wikidata',
+                                   '*:prefix', '*:suffix', 'name:prefix:*', 'name:suffix:*',
+                                   'name:etymology', 'name:signed', 'name:botanical',
+                                   'addr:street:name', 'addr:street:type'},
+                    delete_tags = {highway = {'no', 'turning_circle', 'mini_roundabout',
+                                              'noexit', 'crossing', 'give_way', 'stop'},
+                                   landuse = {'cemetry', 'no'},
+                                   boundary = {'place'}},
+                    extra_keys = {'wikipedia', 'wikipedia:*', 'wikidata', 'capital', 'area'}
+                    }
+
+flex.set_name_tags{main = {'name', 'name:*',
+ 'int_name', 'int_name:*',
+ 'nat_name', 'nat_name:*',
+ 'reg_name', 'reg_name:*',
+ 'loc_name', 'loc_name:*',
+ 'old_name', 'old_name:*',
+ 'alt_name', 'alt_name:*', 'alt_name_*',
+ 'official_name', 'official_name:*',
+ 'place_name', 'place_name:*',
+ 'short_name', 'short_name:*', 'brand'},
+ extra = {'ref', 'int_ref', 'nat_ref', 'reg_ref',
+ 'loc_ref', 'old_ref',
+ 'iata', 'icao', 'pcode', 'pcode:*', 'ISO3166-2'},
+ house = {'addr:housename'}
+ }
+
+flex.set_address_tags{main = {'addr:housenumber',
+ 'addr:conscriptionnumber',
+ 'addr:streetnumber'},
+ extra = {'addr:*', 'is_in:*', 'tiger:county'},
+ postcode = {'postal_code', 'postcode', 'addr:postcode',
+ 'tiger:zip_left', 'tiger:zip_right'},
+ country = {'country_code', 'ISO3166-1',
+ 'addr:country_code', 'is_in:country_code',
+ 'addr:country', 'is_in:country'},
+ interpolation = {'addr:interpolation'}
+ }
+
+
+flex.set_unused_handling{extra_keys = {'place'}}
+
+return flex
+++ /dev/null
-[
-{
- "keys" : [ "" ],
- "values" : {
- "no" : "skip"
- }
-},
-{ "keys" : ["wikipedia", "wikipedia:*", "wikidata", "area"],
- "values" : {
- "" : "extra"
- }
-},
-{
- "keys" : ["*:prefix", "*:suffix", "name:prefix:*", "name:suffix:*",
- "name:etymology", "name:signed", "name:botanical", "*:wikidata",
- "addr:street:name", "addr:street:type"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : ["ref", "int_ref", "nat_ref", "reg_ref", "loc_ref", "old_ref",
- "iata", "icao", "pcode", "ISO3166-2"],
- "values" : {
- "" : "ref"
- }
-},
-{
- "keys" : ["name", "name:*", "int_name", "int_name:*", "nat_name", "nat_name:*",
- "reg_name", "reg_name:*", "loc_name", "loc_name:*",
- "old_name", "old_name:*", "alt_name", "alt_name:*", "alt_name_*",
- "official_name", "official_name:*", "place_name", "place_name:*",
- "short_name", "short_name:*", "brand"],
- "values" : {
- "" : "name"
- }
-},
-{
- "keys" : ["landuse"],
- "values" : {
- "cemetry" : "skip",
- "" : "fallback,with_name"
- }
-},
-{
- "keys" : ["boundary"],
- "values" : {
- "administrative" : "main",
- "postal_code" : "main"
- }
-},
-{
- "keys" : ["place"],
- "values" : {
- "" : "main"
- }
-},
-{
- "keys" : ["addr:housename"],
- "values" : {
- "" : "name,house"
- }
-},
-{
- "keys" : ["addr:housenumber", "addr:conscriptionnumber", "addr:streetnumber"],
- "values" : {
- "" : "address,house"
- }
-},
-{
- "keys" : ["addr:interpolation"],
- "values" : {
- "" : "interpolation,address"
- }
-},
-{
- "keys" : ["postal_code", "postcode", "addr:postcode",
- "tiger:zip_left", "tiger:zip_right"],
- "values" : {
- "" : "postcode,fallback"
- }
-},
-{
- "keys" : ["country_code", "ISO3166-1", "is_in:country_code", "is_in:country",
- "addr:country", "addr:country_code"],
- "values" : {
- "" : "country"
- }
-},
-{
- "keys" : ["addr:*", "is_in:*", "tiger:county"],
- "values" : {
- "" : "address"
- }
-},
-{
- "keys" : ["highway"],
- "values" : {
- "motorway" : "main",
- "trunk" : "main",
- "primary" : "main",
- "secondary" : "main",
- "tertiary" : "main",
- "unclassified" : "main",
- "residential" : "main",
- "living_street" : "main",
- "pedestrian" : "main",
- "road" : "main",
- "service" : "main,with_name",
- "cycleway" : "main,with_name",
- "path" : "main,with_name",
- "footway" : "main,with_name",
- "steps" : "main,with_name",
- "bridleway" : "main,with_name",
- "track" : "main,with_name",
- "byway": "main,with_name",
- "motorway_link" : "main,with_name",
- "trunk_link" : "main,with_name",
- "primary_link" : "main,with_name",
- "secondary_link" : "main,with_name",
- "tertiary_link" : "main,with_name"
- }
-}
-]
--- /dev/null
+local flex = require('flex-base')
+
+flex.set_main_tags{
+ boundary = {administrative = 'named'},
+ landuse = 'fallback',
+ place = 'always'
+}
+
+-- Prefilter for the admin-boundary style: address and highway detail is
+-- dropped entirely; only boundary/place data survives to the main keys.
+-- FIX: removed duplicated 'source' entry from delete_keys.
+-- NOTE(review): 'cemetry' matches a common mistagged value; verify whether
+-- the correctly spelled 'cemetery' should also be listed.
+flex.set_prefilters{delete_keys = {'building', 'source', 'highway',
+                                   'addr:housenumber', 'addr:street', 'addr:city',
+                                   '*source', 'type',
+                                   'is_in:postcode', '*:wikidata',
+                                   '*:prefix', '*:suffix', 'name:prefix:*', 'name:suffix:*',
+                                   'name:etymology', 'name:signed', 'name:botanical',
+                                   'addr:street:name', 'addr:street:type'},
+                    delete_tags = {landuse = {'cemetry', 'no'},
+                                   boundary = {'place'}},
+                    extra_keys = {'wikipedia', 'wikipedia:*', 'wikidata', 'capital'}
+                    }
+
+flex.set_name_tags{main = {'name', 'name:*',
+ 'int_name', 'int_name:*',
+ 'nat_name', 'nat_name:*',
+ 'reg_name', 'reg_name:*',
+ 'loc_name', 'loc_name:*',
+ 'old_name', 'old_name:*',
+ 'alt_name', 'alt_name:*', 'alt_name_*',
+ 'official_name', 'official_name:*',
+ 'place_name', 'place_name:*',
+ 'short_name', 'short_name:*', 'brand'},
+ extra = {'ref', 'int_ref', 'nat_ref', 'reg_ref',
+ 'loc_ref', 'old_ref',
+ 'iata', 'icao', 'pcode', 'pcode:*', 'ISO3166-2'}
+ }
+
+flex.set_address_tags{extra = {'addr:*', 'is_in:*'},
+ postcode = {'postal_code', 'postcode', 'addr:postcode'},
+ country = {'country_code', 'ISO3166-1',
+ 'addr:country_code', 'is_in:country_code',
+ 'addr:country', 'is_in:country'},
+ postcode_fallback = false
+ }
+
+flex.set_unused_handling{extra_keys = {'place'}}
+
+return flex
+++ /dev/null
-[
-{ "keys" : ["wikipedia", "wikipedia:*", "wikidata"],
- "values" : {
- "" : "extra"
- }
-},
-{
- "keys" : ["*:prefix", "*:suffix", "name:prefix:*", "name:suffix:*",
- "name:etymology", "name:signed", "name:botanical", "*:wikidata",
- "addr:street:name", "addr:street:type"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : ["ref", "int_ref", "nat_ref", "reg_ref", "loc_ref", "old_ref",
- "iata", "icao", "pcode", "ISO3166-2"],
- "values" : {
- "" : "ref"
- }
-},
-{
- "keys" : ["name", "name:*", "int_name", "int_name:*", "nat_name", "nat_name:*",
- "reg_name", "reg_name:*", "loc_name", "loc_name:*",
- "old_name", "old_name:*", "alt_name", "alt_name:*", "alt_name_*",
- "official_name", "official_name:*", "place_name", "place_name:*",
- "short_name", "short_name:*", "brand"],
- "values" : {
- "" : "name"
- }
-},
-{
- "keys" : ["landuse"],
- "values" : {
- "cemetry" : "skip",
- "" : "fallback,with_name"
- }
-},
-{
- "keys" : ["boundary"],
- "values" : {
- "administrative" : "main"
- }
-},
-{
- "keys" : ["place"],
- "values" : {
- "" : "main"
- }
-},
-{
- "keys" : ["country_code", "ISO3166-1", "is_in:country_code", "is_in:country",
- "addr:country", "addr:country_code"],
- "values" : {
- "" : "country"
- }
-},
-{
- "keys" : ["addr:*", "is_in:*", "tiger:county"],
- "values" : {
- "" : "address"
- }
-},
-{
- "keys" : ["postal_code", "postcode", "addr:postcode",
- "tiger:zip_left", "tiger:zip_right"],
- "values" : {
- "" : "postcode"
- }
-},
-{
- "keys" : ["capital"],
- "values" : {
- "" : "extra"
- }
-}
-]
--- /dev/null
+local flex = require('flex-base')
+
+-- Main tags for the full import style. Values: 'always' (import whenever
+-- the tag is present), 'named' (only with a name), 'named_with_key'
+-- (only with a key-specific name, e.g. bridge:name), 'fallback' (only if
+-- no other main tag produced an entry). A table value gives per-tag-value
+-- overrides, with entry [1] as the default.
+-- FIX: removed duplicate 'landuse' key ('named' before 'fallback'); in a
+-- Lua table constructor the later entry silently wins, so the first was
+-- dead code and only 'fallback' ever took effect.
+flex.set_main_tags{
+    building = 'fallback',
+    emergency = 'always',
+    healthcare = 'fallback',
+    historic = 'always',
+    military = 'always',
+    natural = 'named',
+    highway = {'always',
+               street_lamp = 'named',
+               traffic_signals = 'named',
+               service = 'named',
+               cycleway = 'named',
+               path = 'named',
+               footway = 'named',
+               steps = 'named',
+               bridleway = 'named',
+               track = 'named',
+               motorway_link = 'named',
+               trunk_link = 'named',
+               primary_link = 'named',
+               secondary_link = 'named',
+               tertiary_link = 'named'},
+    railway = 'named',
+    man_made = 'always',
+    aerialway = 'always',
+    boundary = {'named',
+                postal_code = 'named'},
+    aeroway = 'always',
+    amenity = 'always',
+    club = 'always',
+    craft = 'always',
+    junction = 'fallback',
+    landuse = 'fallback',
+    leisure = 'always',
+    office = 'always',
+    mountain_pass = 'always',
+    shop = 'always',
+    tourism = 'always',
+    bridge = 'named_with_key',
+    tunnel = 'named_with_key',
+    waterway = 'named',
+    place = 'always'
+}
+
+flex.set_prefilters{delete_keys = {'note', 'note:*', 'source', '*source', 'attribution',
+ 'comment', 'fixme', 'FIXME', 'created_by', 'NHD:*',
+ 'nhd:*', 'gnis:*', 'geobase:*', 'KSJ2:*', 'yh:*',
+ 'osak:*', 'naptan:*', 'CLC:*', 'import', 'it:fvg:*',
+ 'type', 'lacounty:*', 'ref:ruian:*', 'building:ruian:type',
+ 'ref:linz:*', 'is_in:postcode'},
+ delete_tags = {emergency = {'yes', 'no', 'fire_hydrant'},
+ historic = {'yes', 'no'},
+ military = {'yes', 'no'},
+ natural = {'yes', 'no', 'coastline'},
+ highway = {'no', 'turning_circle', 'mini_roundabout',
+ 'noexit', 'crossing', 'give_way', 'stop'},
+ railway = {'level_crossing', 'no', 'rail'},
+ man_made = {'survey_point', 'cutline'},
+ aerialway = {'pylon', 'no'},
+ aeroway = {'no'},
+ amenity = {'no'},
+ club = {'no'},
+ craft = {'no'},
+ leisure = {'no'},
+ office = {'no'},
+ mountain_pass = {'no'},
+ shop = {'no'},
+ tourism = {'yes', 'no'},
+ bridge = {'no'},
+ tunnel = {'no'},
+ waterway = {'riverbank'},
+ building = {'no'},
+ boundary = {'place'}},
+ extra_keys = {'*:prefix', '*:suffix', 'name:prefix:*', 'name:suffix:*',
+ 'name:etymology', 'name:signed', 'name:botanical',
+ 'wikidata', '*:wikidata',
+ 'addr:street:name', 'addr:street:type'}
+ }
+
+flex.set_name_tags{main = {'name', 'name:*',
+ 'int_name', 'int_name:*',
+ 'nat_name', 'nat_name:*',
+ 'reg_name', 'reg_name:*',
+ 'loc_name', 'loc_name:*',
+ 'old_name', 'old_name:*',
+ 'alt_name', 'alt_name:*', 'alt_name_*',
+ 'official_name', 'official_name:*',
+ 'place_name', 'place_name:*',
+ 'short_name', 'short_name:*', 'brand'},
+ extra = {'ref', 'int_ref', 'nat_ref', 'reg_ref',
+ 'loc_ref', 'old_ref',
+ 'iata', 'icao', 'pcode', 'pcode:*', 'ISO3166-2'},
+ house = {'addr:housename'}
+ }
+
+flex.set_address_tags{main = {'addr:housenumber',
+ 'addr:conscriptionnumber',
+ 'addr:streetnumber'},
+ extra = {'addr:*', 'is_in:*', 'tiger:county'},
+ postcode = {'postal_code', 'postcode', 'addr:postcode',
+ 'tiger:zip_left', 'tiger:zip_right'},
+ country = {'country_code', 'ISO3166-1',
+ 'addr:country_code', 'is_in:country_code',
+ 'addr:country', 'is_in:country'},
+ interpolation = {'addr:interpolation'}
+ }
+
+
+flex.set_unused_handling{delete_keys = {'tiger:*'}}
+
+return flex
+++ /dev/null
-[
-{
- "keys" : ["*source"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : ["*:prefix", "*:suffix", "name:prefix:*", "name:suffix:*",
- "name:etymology", "name:signed", "name:botanical", "wikidata", "*:wikidata",
- "addr:street:name", "addr:street:type"],
- "values" : {
- "" : "extra"
- }
-},
-{
- "keys" : ["ref", "int_ref", "nat_ref", "reg_ref", "loc_ref", "old_ref",
- "iata", "icao", "pcode", "pcode:*", "ISO3166-2"],
- "values" : {
- "" : "ref"
- }
-},
-{
- "keys" : ["name", "name:*", "int_name", "int_name:*", "nat_name", "nat_name:*",
- "reg_name", "reg_name:*", "loc_name", "loc_name:*",
- "old_name", "old_name:*", "alt_name", "alt_name:*", "alt_name_*",
- "official_name", "official_name:*", "place_name", "place_name:*",
- "short_name", "short_name:*", "brand"],
- "values" : {
- "" : "name"
- }
-},
-{
- "keys" : ["addr:housename"],
- "values" : {
- "" : "name,house"
- }
-},
-{
- "keys" : ["emergency"],
- "values" : {
- "fire_hydrant" : "skip",
- "yes" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["historic", "military"],
- "values" : {
- "no" : "skip",
- "yes" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["natural"],
- "values" : {
- "yes" : "skip",
- "no" : "skip",
- "coastline" : "skip",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["landuse"],
- "values" : {
- "cemetry" : "main,with_name",
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["highway"],
- "values" : {
- "no" : "skip",
- "turning_circle" : "skip",
- "mini_roundabout" : "skip",
- "noexit" : "skip",
- "crossing" : "skip",
- "give_way" : "skip",
- "stop" : "skip",
- "street_lamp" : "main,with_name",
- "traffic_signals" : "main,with_name",
- "service" : "main,with_name",
- "cycleway" : "main,with_name",
- "path" : "main,with_name",
- "footway" : "main,with_name",
- "steps" : "main,with_name",
- "bridleway" : "main,with_name",
- "track" : "main,with_name",
- "byway": "main,with_name",
- "motorway_link" : "main,with_name",
- "trunk_link" : "main,with_name",
- "primary_link" : "main,with_name",
- "secondary_link" : "main,with_name",
- "tertiary_link" : "main,with_name",
- "" : "main"
- }
-},
-{
- "keys" : ["railway"],
- "values" : {
- "level_crossing" : "skip",
- "no" : "skip",
- "rail" : "extra",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["man_made"],
- "values" : {
- "survey_point" : "skip",
- "cutline" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["aerialway"],
- "values" : {
- "pylon" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["boundary"],
- "values" : {
- "place" : "skip",
- "postal_code" : "main",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["aeroway", "amenity", "club", "craft", "leisure",
- "office", "mountain_pass"],
- "values" : {
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["shop"],
- "values" : {
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["tourism"],
- "values" : {
- "yes" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["bridge", "tunnel"],
- "values" : {
- "" : "main,with_name_key"
- }
-},
-{
- "keys" : ["waterway"],
- "values" : {
- "riverbank" : "skip",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["place"],
- "values" : {
- "" : "main"
- }
-},
-{
- "keys" : ["junction", "healthcare"],
- "values" : {
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["postal_code", "postcode", "addr:postcode",
- "tiger:zip_left", "tiger:zip_right"],
- "values" : {
- "" : "postcode,fallback"
- }
-},
-{
- "keys" : ["country_code", "ISO3166-1", "is_in:country_code", "is_in:country",
- "addr:country", "addr:country_code"],
- "values" : {
- "" : "country"
- }
-},
-{
- "keys" : ["addr:housenumber", "addr:conscriptionnumber", "addr:streetnumber"],
- "values" : {
- "" : "address,house"
- }
-},
-{
- "keys" : ["addr:interpolation"],
- "values" : {
- "" : "interpolation,address"
- }
-},
-{
- "keys" : ["addr:*", "is_in:*", "tiger:county"],
- "values" : {
- "" : "address"
- }
-},
-{
- "keys" : ["building"],
- "values" : {
- "no" : "skip",
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["note", "note:*", "source", "source*", "attribution",
- "comment", "fixme", "FIXME", "created_by", "tiger:*", "NHD:*",
- "nhd:*", "gnis:*", "geobase:*", "KSJ2:*", "yh:*",
- "osak:*", "naptan:*", "CLC:*", "import", "it:fvg:*",
- "type", "lacounty:*", "ref:ruian:*", "building:ruian:type",
- "ref:linz:*"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : [""],
- "values" : {
- "" : "extra"
- }
-}
-]
--- /dev/null
+local flex = require('flex-base')
+
+-- Main tags for the extratags import style (same selection as the full
+-- style; see flex-base for the meaning of 'always'/'named'/'fallback'/
+-- 'named_with_key' and table-valued per-value overrides).
+-- FIX: removed duplicate 'landuse' key ('named' before 'fallback'); the
+-- later constructor entry silently wins in Lua, so only 'fallback' was
+-- ever effective and the first entry was dead.
+flex.set_main_tags{
+    building = 'fallback',
+    emergency = 'always',
+    healthcare = 'fallback',
+    historic = 'always',
+    military = 'always',
+    natural = 'named',
+    highway = {'always',
+               street_lamp = 'named',
+               traffic_signals = 'named',
+               service = 'named',
+               cycleway = 'named',
+               path = 'named',
+               footway = 'named',
+               steps = 'named',
+               bridleway = 'named',
+               track = 'named',
+               motorway_link = 'named',
+               trunk_link = 'named',
+               primary_link = 'named',
+               secondary_link = 'named',
+               tertiary_link = 'named'},
+    railway = 'named',
+    man_made = 'always',
+    aerialway = 'always',
+    boundary = {'named',
+                postal_code = 'named'},
+    aeroway = 'always',
+    amenity = 'always',
+    club = 'always',
+    craft = 'always',
+    junction = 'fallback',
+    landuse = 'fallback',
+    leisure = 'always',
+    office = 'always',
+    mountain_pass = 'always',
+    shop = 'always',
+    tourism = 'always',
+    bridge = 'named_with_key',
+    tunnel = 'named_with_key',
+    waterway = 'named',
+    place = 'always'
+}
+
+flex.set_prefilters{delete_keys = {'note', 'note:*', 'source', '*source', 'attribution',
+ 'comment', 'fixme', 'FIXME', 'created_by', 'NHD:*',
+ 'nhd:*', 'gnis:*', 'geobase:*', 'KSJ2:*', 'yh:*',
+ 'osak:*', 'naptan:*', 'CLC:*', 'import', 'it:fvg:*',
+ 'type', 'lacounty:*', 'ref:ruian:*', 'building:ruian:type',
+ 'ref:linz:*', 'is_in:postcode'},
+ delete_tags = {emergency = {'yes', 'no', 'fire_hydrant'},
+ historic = {'yes', 'no'},
+ military = {'yes', 'no'},
+ natural = {'yes', 'no', 'coastline'},
+ highway = {'no', 'turning_circle', 'mini_roundabout',
+ 'noexit', 'crossing', 'give_way', 'stop'},
+ railway = {'level_crossing', 'no', 'rail'},
+ man_made = {'survey_point', 'cutline'},
+ aerialway = {'pylon', 'no'},
+ aeroway = {'no'},
+ amenity = {'no'},
+ club = {'no'},
+ craft = {'no'},
+ leisure = {'no'},
+ office = {'no'},
+ mountain_pass = {'no'},
+ shop = {'no'},
+ tourism = {'yes', 'no'},
+ bridge = {'no'},
+ tunnel = {'no'},
+ waterway = {'riverbank'},
+ building = {'no'},
+ boundary = {'place'}},
+ extra_keys = {'*:prefix', '*:suffix', 'name:prefix:*', 'name:suffix:*',
+ 'name:etymology', 'name:signed', 'name:botanical',
+ 'wikidata', '*:wikidata',
+ 'addr:street:name', 'addr:street:type'}
+ }
+
+flex.set_name_tags{main = {'name', 'name:*',
+ 'int_name', 'int_name:*',
+ 'nat_name', 'nat_name:*',
+ 'reg_name', 'reg_name:*',
+ 'loc_name', 'loc_name:*',
+ 'old_name', 'old_name:*',
+ 'alt_name', 'alt_name:*', 'alt_name_*',
+ 'official_name', 'official_name:*',
+ 'place_name', 'place_name:*',
+ 'short_name', 'short_name:*', 'brand'},
+ extra = {'ref', 'int_ref', 'nat_ref', 'reg_ref',
+ 'loc_ref', 'old_ref',
+ 'iata', 'icao', 'pcode', 'pcode:*', 'ISO3166-2'},
+ house = {'addr:housename'}
+ }
+
+flex.set_address_tags{main = {'addr:housenumber',
+ 'addr:conscriptionnumber',
+ 'addr:streetnumber'},
+ extra = {'addr:*', 'is_in:*', 'tiger:county'},
+ postcode = {'postal_code', 'postcode', 'addr:postcode',
+ 'tiger:zip_left', 'tiger:zip_right'},
+ country = {'country_code', 'ISO3166-1',
+ 'addr:country_code', 'is_in:country_code',
+ 'addr:country', 'is_in:country'},
+ interpolation = {'addr:interpolation'}
+ }
+
+
+flex.set_unused_handling{extra_keys = {'place'}}
+
+return flex
+++ /dev/null
-[
-{
- "keys" : ["*source"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : ["*:prefix", "*:suffix", "name:prefix:*", "name:suffix:*",
- "name:etymology", "name:signed", "name:botanical", "wikidata", "*:wikidata",
- "addr:street:name", "addr:street:type"],
- "values" : {
- "" : "extra"
- }
-},
-{
- "keys" : ["ref", "int_ref", "nat_ref", "reg_ref", "loc_ref", "old_ref",
- "iata", "icao", "pcode", "pcode:*", "ISO3166-2"],
- "values" : {
- "" : "ref"
- }
-},
-{
- "keys" : ["name", "name:*", "int_name", "int_name:*", "nat_name", "nat_name:*",
- "reg_name", "reg_name:*", "loc_name", "loc_name:*",
- "old_name", "old_name:*", "alt_name", "alt_name:*", "alt_name_*",
- "official_name", "official_name:*", "place_name", "place_name:*",
- "short_name", "short_name:*", "brand"],
- "values" : {
- "" : "name"
- }
-},
-{
- "keys" : ["addr:housename"],
- "values" : {
- "" : "name,house"
- }
-},
-{
- "keys" : ["emergency"],
- "values" : {
- "fire_hydrant" : "skip",
- "yes" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["historic", "military"],
- "values" : {
- "no" : "skip",
- "yes" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["natural"],
- "values" : {
- "yes" : "skip",
- "no" : "skip",
- "coastline" : "skip",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["landuse"],
- "values" : {
- "cemetry" : "main,with_name",
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["highway"],
- "values" : {
- "no" : "skip",
- "turning_circle" : "skip",
- "mini_roundabout" : "skip",
- "noexit" : "skip",
- "crossing" : "skip",
- "give_way" : "skip",
- "stop" : "skip",
- "street_lamp" : "main,with_name",
- "traffic_signals" : "main,with_name",
- "service" : "main,with_name",
- "cycleway" : "main,with_name",
- "path" : "main,with_name",
- "footway" : "main,with_name",
- "steps" : "main,with_name",
- "bridleway" : "main,with_name",
- "track" : "main,with_name",
- "byway": "main,with_name",
- "motorway_link" : "main,with_name",
- "trunk_link" : "main,with_name",
- "primary_link" : "main,with_name",
- "secondary_link" : "main,with_name",
- "tertiary_link" : "main,with_name",
- "" : "main"
- }
-},
-{
- "keys" : ["railway"],
- "values" : {
- "level_crossing" : "skip",
- "no" : "skip",
- "rail" : "skip",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["man_made"],
- "values" : {
- "survey_point" : "skip",
- "cutline" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["aerialway"],
- "values" : {
- "pylon" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["boundary"],
- "values" : {
- "place" : "skip",
- "postal_code" : "main",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["aeroway", "amenity", "club", "craft", "leisure",
- "office", "mountain_pass"],
- "values" : {
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["shop"],
- "values" : {
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["tourism"],
- "values" : {
- "yes" : "skip",
- "no" : "skip",
- "" : "main"
- }
-},
-{
- "keys" : ["bridge", "tunnel"],
- "values" : {
- "" : "main,with_name_key"
- }
-},
-{
- "keys" : ["waterway"],
- "values" : {
- "riverbank" : "skip",
- "" : "main,with_name"
- }
-},
-{
- "keys" : ["place"],
- "values" : {
- "" : "main"
- }
-},
-{
- "keys" : ["junction", "healthcare"],
- "values" : {
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["postal_code", "postcode", "addr:postcode",
- "tiger:zip_left", "tiger:zip_right"],
- "values" : {
- "" : "postcode,fallback"
- }
-},
-{
- "keys" : ["country_code", "ISO3166-1", "is_in:country_code", "is_in:country",
- "addr:country", "addr:country_code"],
- "values" : {
- "" : "country"
- }
-},
-{
- "keys" : ["addr:housenumber", "addr:conscriptionnumber", "addr:streetnumber"],
- "values" : {
- "" : "address,house"
- }
-},
-{
- "keys" : ["addr:interpolation"],
- "values" : {
- "" : "interpolation,address"
- }
-},
-{
- "keys" : ["addr:*", "is_in:*", "tiger:county"],
- "values" : {
- "" : "address"
- }
-},
-{
- "keys" : ["building"],
- "values" : {
- "no" : "skip",
- "" : "main,fallback,with_name"
- }
-},
-{
- "keys" : ["tracktype", "traffic_calming", "service", "cuisine", "capital",
- "dispensing", "religion", "denomination", "sport",
- "internet_access", "lanes", "surface", "smoothness", "width",
- "est_width", "incline", "opening_hours", "collection_times",
- "service_times", "disused", "wheelchair", "sac_scale",
- "trail_visibility", "mtb:scale", "mtb:description", "wood",
- "drive_through", "drive_in", "access", "vehicle", "bicyle",
- "foot", "goods", "hgv", "motor_vehicle", "motor_car", "oneway",
- "date_on", "date_off", "day_on", "day_off", "hour_on", "hour_off",
- "maxweight", "maxheight", "maxspeed", "fee", "toll", "charge",
- "population", "description", "image", "attribution", "fax",
- "email", "url", "website", "phone", "real_ale", "smoking",
- "food", "camera", "brewery", "locality", "wikipedia",
- "wikipedia:*", "access:*", "contact:*", "drink:*", "toll:*",
- "area"],
- "values" : {
- "" : "extra"
- }
-}
-]
--- /dev/null
+local flex = require('flex-base')
+
+flex.set_main_tags{
+ highway = {'always',
+ street_lamp = 'named',
+ traffic_signals = 'named',
+ service = 'named',
+ cycleway = 'named',
+ path = 'named',
+ footway = 'named',
+ steps = 'named',
+ bridleway = 'named',
+ track = 'named',
+ motorway_link = 'named',
+ trunk_link = 'named',
+ primary_link = 'named',
+ secondary_link = 'named',
+ tertiary_link = 'named'},
+ boundary = {administrative = 'named'},
+ landuse = 'fallback',
+ place = 'always'
+}
+
+flex.set_prefilters{delete_keys = {'building', 'source',
+ 'addr:housenumber', 'addr:street',
+ 'source', '*source', 'type',
+ 'is_in:postcode', '*:wikidata',
+ '*:prefix', '*:suffix', 'name:prefix:*', 'name:suffix:*',
+ 'name:etymology', 'name:signed', 'name:botanical',
+ 'addr:street:name', 'addr:street:type'},
+ delete_tags = {highway = {'no', 'turning_circle', 'mini_roundabout',
+ 'noexit', 'crossing', 'give_way', 'stop'},
+ landuse = {'cemetry', 'no'},
+ boundary = {'place'}},
+ extra_keys = {'wikipedia', 'wikipedia:*', 'wikidata', 'capital', 'area'}
+ }
+
+flex.set_name_tags{main = {'name', 'name:*',
+ 'int_name', 'int_name:*',
+ 'nat_name', 'nat_name:*',
+ 'reg_name', 'reg_name:*',
+ 'loc_name', 'loc_name:*',
+ 'old_name', 'old_name:*',
+ 'alt_name', 'alt_name:*', 'alt_name_*',
+ 'official_name', 'official_name:*',
+ 'place_name', 'place_name:*',
+ 'short_name', 'short_name:*', 'brand'},
+ extra = {'ref', 'int_ref', 'nat_ref', 'reg_ref',
+ 'loc_ref', 'old_ref',
+ 'iata', 'icao', 'pcode', 'pcode:*', 'ISO3166-2'}
+ }
+
+flex.set_address_tags{main = {'addr:housenumber',
+ 'addr:conscriptionnumber',
+ 'addr:streetnumber'},
+ extra = {'addr:*', 'is_in:*', 'tiger:county'},
+ postcode = {'postal_code', 'postcode', 'addr:postcode',
+ 'tiger:zip_left', 'tiger:zip_right'},
+ country = {'country_code', 'ISO3166-1',
+ 'addr:country_code', 'is_in:country_code',
+ 'addr:country', 'is_in:country'},
+ interpolation = {'addr:interpolation'},
+ postcode_fallback = false
+ }
+
+flex.set_unused_handling{extra_keys = {'place'}}
+
+return flex
+++ /dev/null
-[
-{ "keys" : ["wikipedia", "wikipedia:*", "wikidata", "area"],
- "values" : {
- "" : "extra"
- }
-},
-{
- "keys" : ["*:prefix", "*:suffix", "name:prefix:*", "name:suffix:*",
- "name:etymology", "name:signed", "name:botanical", "*:wikidata",
- "addr:street:name", "addr:street:type"],
- "values" : {
- "" : "skip"
- }
-},
-{
- "keys" : ["ref", "int_ref", "nat_ref", "reg_ref", "loc_ref", "old_ref",
- "iata", "icao", "pcode", "ISO3166-2"],
- "values" : {
- "" : "ref"
- }
-},
-{
- "keys" : ["name", "name:*", "int_name", "int_name:*", "nat_name", "nat_name:*",
- "reg_name", "reg_name:*", "loc_name", "loc_name:*",
- "old_name", "old_name:*", "alt_name", "alt_name:*", "alt_name_*",
- "official_name", "official_name:*", "place_name", "place_name:*",
- "short_name", "short_name:*", "brand"],
- "values" : {
- "" : "name"
- }
-},
-{
- "keys" : ["landuse"],
- "values" : {
- "cemetry" : "skip",
- "" : "fallback,with_name"
- }
-},
-{
- "keys" : ["boundary"],
- "values" : {
- "administrative" : "main"
- }
-},
-{
- "keys" : ["place"],
- "values" : {
- "" : "main"
- }
-},
-{
- "keys" : ["country_code", "ISO3166-1", "is_in:country_code", "is_in:country",
- "addr:country", "addr:country_code"],
- "values" : {
- "" : "country"
- }
-},
-{
- "keys" : ["addr:*", "is_in:*", "tiger:county"],
- "values" : {
- "" : "address"
- }
-},
-{
- "keys" : ["highway"],
- "values" : {
- "motorway" : "main",
- "trunk" : "main",
- "primary" : "main",
- "secondary" : "main",
- "tertiary" : "main",
- "unclassified" : "main",
- "residential" : "main",
- "living_street" : "main",
- "pedestrian" : "main",
- "road" : "main",
- "service" : "main,with_name",
- "cycleway" : "main,with_name",
- "path" : "main,with_name",
- "footway" : "main,with_name",
- "steps" : "main,with_name",
- "bridleway" : "main,with_name",
- "track" : "main,with_name",
- "byway": "main,with_name",
- "motorway_link" : "main,with_name",
- "trunk_link" : "main,with_name",
- "primary_link" : "main,with_name",
- "secondary_link" : "main,with_name",
- "tertiary_link" : "main,with_name"
- }
-}
-]
| jsonv2 |
| geojson |
| xml |
+
+
+ Scenario: Lookup of a linked place
+ When sending geocodejson lookup query for N1932181216
+ Then exactly 1 result is returned
+ And results contain
+ | name |
+ | Vaduz |
And W10 expands to interpolation
| start | end | parent_place_id |
| 12 | 14 | W2 |
+
+
+ Scenario Outline: Bad interpolation values are ignored
+ Given the grid with origin 1,1
+ | 1 | | 9 | | 2 |
+ Given the places
+ | osm | class | type | housenr |
+ | N1 | place | house | 2 |
+ | N2 | place | house | 6 |
+ And the places
+ | osm | class | type | addr+interpolation | geometry |
+ | W1 | place | houses | <value> | 1,2 |
+ And the ways
+ | id | nodes |
+ | 1 | 1,2 |
+ When importing
+ Then W1 expands to no interpolation
+
+ Examples:
+ | value |
+ | foo |
+ | x |
+ | 12-2 |
| object | parent_place_id |
| N9 | R14 |
+
+ Scenario: Choose closest street in associatedStreet relation
+ Given the grid
+ | 1 | | | | 3 |
+ | 10 | | 11 | | 12 |
+ And the places
+ | osm | class | type | housenr | geometry |
+ | N1 | place | house | 1 | 1 |
+ | N3 | place | house | 3 | 3 |
+ And the named places
+ | osm | class | type | geometry |
+ | W100 | highway | residential | 10,11 |
+ | W101 | highway | residential | 11,12 |
+ And the relations
+ | id | members | tags+type |
+ | 1 | N1:house,N3:house,W100:street,W101:street | associatedStreet |
+ When importing
+ Then placex contains
+ | object | parent_place_id |
+ | N1 | W100 |
+ | N3 | W101 |
+
+
Scenario: POIs in building inherit address
Given the grid
| 10 | | | | | | 11 |
| parent_place_id | start | end |
| W1 | 4 | 6 |
+ Scenario: Legal interpolation type changed to illegal one
+ Given the grid
+ | 1 | | 2 |
+ | 3 | | 4 |
+ And the places
+ | osm | class | type | name | geometry |
+ | W1 | highway | unclassified | Cloud Street | 1, 2 |
+ And the ways
+ | id | nodes |
+ | 2 | 3,4 |
+ And the places
+ | osm | class | type | addr+interpolation | geometry |
+ | W2 | place | houses | even | 3,4 |
+ And the places
+ | osm | class | type | housenr |
+ | N3 | place | house | 2 |
+ | N4 | place | house | 6 |
+ When importing
+ Then W2 expands to interpolation
+ | parent_place_id | start | end |
+ | W1 | 4 | 4 |
+ When updating places
+ | osm | class | type | addr+interpolation | geometry |
+ | W2 | place | houses | 12-2 | 3,4 |
+ Then W2 expands to no interpolation
+
'API_TEST_FILE' : (TEST_BASE_DIR / 'testdb' / 'apidb-test-data.pbf').resolve(),
'SERVER_MODULE_PATH' : None,
'TOKENIZER' : None, # Test with a custom tokenizer
+ 'STYLE' : 'extratags',
'PHPCOV' : False, # set to output directory to enable code coverage
}
--- /dev/null
+@DB
+Feature: Import with custom styles by osm2pgsql
+ Tests for the example customizations given in the documentation.
+
+ Scenario: Custom main tags
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.set_main_tags{
+ boundary = {administrative = 'named'},
+ highway = {'always', street_lamp = 'named'},
+ landuse = 'fallback'
+ }
+ """
+ When loading osm data
+ """
+ n10 Tboundary=administrative x0 y0
+ n11 Tboundary=administrative,name=Foo x0 y0
+ n12 Tboundary=electoral x0 y0
+ n13 Thighway=primary x0 y0
+ n14 Thighway=street_lamp x0 y0
+ n15 Thighway=primary,landuse=street x0 y0
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N11 | boundary | administrative |
+ | N13 | highway | primary |
+ | N15 | highway | primary |
+
+ Scenario: Prefiltering tags
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.set_prefilters{
+ delete_keys = {'source', 'source:*'},
+ extra_tags = {amenity = {'yes', 'no'}}
+ }
+ flex.set_main_tags{
+ amenity = 'always',
+ tourism = 'always'
+ }
+ """
+ When loading osm data
+ """
+ n1 Tamenity=yes x0 y6
+ n2 Tamenity=hospital,source=survey x3 y6
+ n3 Ttourism=hotel,amenity=yes x0 y0
+ n4 Ttourism=hotel,amenity=telephone x0 y0
+ """
+ Then place contains exactly
+ | object | extratags |
+ | N2:amenity | - |
+ | N3:tourism | 'amenity': 'yes' |
+ | N4:tourism | - |
+ | N4:amenity | - |
+
+ Scenario: Name tags
+ Given the lua style file
+ """
+ local flex = require('flex-base')
+
+ flex.set_main_tags{highway = {traffic_light = 'named'}}
+ flex.set_name_tags{main = {'name', 'name:*'},
+ extra = {'ref'}
+ }
+ """
+ When loading osm data
+ """
+ n1 Thighway=stop,name=Something x0 y0
+ n2 Thighway=traffic_light,ref=453-4 x0 y0
+ n3 Thighway=traffic_light,name=Greens x0 y0
+ n4 Thighway=traffic_light,name=Red,ref=45 x0 y0
+ """
+ Then place contains exactly
+ | object | name |
+ | N3:highway | 'name': 'Greens' |
+ | N4:highway | 'name': 'Red', 'ref': '45' |
+
+ Scenario: Address tags
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.set_address_tags{
+ main = {'addr:housenumber'},
+ extra = {'addr:*'},
+ postcode = {'postal_code', 'postcode', 'addr:postcode'},
+ country = {'country-code', 'ISO3166-1'}
+ }
+ """
+ When loading osm data
+ """
+ n1 Ttourism=hotel,addr:street=Foo x0 y0
+ n2 Taddr:housenumber=23,addr:street=Budd,postal_code=5567 x0 y0
+ n3 Taddr:street=None,addr:city=Where x0 y0
+ """
+ Then place contains exactly
+ | object | type | address |
+ | N1:tourism | hotel | 'street': 'Foo' |
+ | N2:place | house | 'housenumber': '23', 'street': 'Budd', 'postcode': '5567' |
+
+ Scenario: Unused handling
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.set_address_tags{
+ main = {'addr:housenumber'},
+ extra = {'addr:*', 'tiger:county'}
+ }
+ flex.set_unused_handling{delete_keys = {'tiger:*'}}
+ """
+ When loading osm data
+ """
+ n1 Ttourism=hotel,tiger:county=Fargo x0 y0
+ n2 Ttourism=hotel,tiger:xxd=56,else=other x0 y0
+ """
+ Then place contains exactly
+ | object | type | address | extratags |
+ | N1:tourism | hotel | 'tiger:county': 'Fargo' | - |
+ | N2:tourism | hotel | - | 'else': 'other' |
+
+ Scenario: Additional relation types
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ flex.RELATION_TYPES['site'] = flex.relation_as_multipolygon
+ """
+ And the grid
+ | 1 | 2 |
+ | 4 | 3 |
+ When loading osm data
+ """
+ n1
+ n2
+ n3
+ n4
+ w1 Nn1,n2,n3,n4,n1
+ r1 Ttype=multipolygon,amenity=school Mw1@
+ r2 Ttype=site,amenity=school Mw1@
+ """
+ Then place contains exactly
+ | object | type |
+ | R1:amenity | school |
+ | R2:amenity | school |
+
+ Scenario: Exclude country relations
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ function osm2pgsql.process_relation(object)
+ if object.tags.boundary ~= 'administrative' or object.tags.admin_level ~= '2' then
+ flex.process_relation(object)
+ end
+ end
+ """
+ And the grid
+ | 1 | 2 |
+ | 4 | 3 |
+ When loading osm data
+ """
+ n1
+ n2
+ n3
+ n4
+ w1 Nn1,n2,n3,n4,n1
+ r1 Ttype=multipolygon,boundary=administrative,admin_level=4,name=Small Mw1@
+ r2 Ttype=multipolygon,boundary=administrative,admin_level=2,name=Big Mw1@
+ """
+ Then place contains exactly
+ | object | type |
+ | R1:boundary | administrative |
+
+ Scenario: Customize processing functions
+ Given the lua style file
+ """
+ local flex = require('import-full')
+
+ local original_process_tags = flex.process_tags
+
+ function flex.process_tags(o)
+ if o.object.tags.highway ~= nil and o.object.tags.access == 'no' then
+ return
+ end
+
+ original_process_tags(o)
+ end
+ """
+ When loading osm data
+ """
+ n1 Thighway=residential x0 y0
+ n2 Thighway=residential,access=no x0 y0
+ """
+ Then place contains exactly
+ | object | type |
+ | N1:highway | residential |
--- /dev/null
+@DB
+Feature: Tag evaluation
+ Tests if tags are correctly imported into the place table
+
+ Scenario: Main tags as fallback
+ When loading osm data
+ """
+ n100 Tjunction=yes,highway=bus_stop
+ n101 Tjunction=yes,name=Bar
+ n200 Tbuilding=yes,amenity=cafe
+ n201 Tbuilding=yes,name=Intersting
+ n202 Tbuilding=yes
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N100 | highway | bus_stop |
+ | N101 | junction | yes |
+ | N200 | amenity | cafe |
+ | N201 | building | yes |
+
+
+ Scenario: Name and reg tags
+ When loading osm data
+ """
+ n2001 Thighway=road,name=Foo,alt_name:de=Bar,ref=45
+ n2002 Thighway=road,name:prefix=Pre,name:suffix=Post,ref:de=55
+ n2003 Thighway=yes,name:%20%de=Foo,name=real1
+ n2004 Thighway=yes,name:%a%de=Foo,name=real2
+ n2005 Thighway=yes,name:%9%de=Foo,name:\\=real3
+ n2006 Thighway=yes,name:%9%de=Foo,name=rea\l3
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N2001 | highway | road | 'name': 'Foo', 'alt_name:de': 'Bar', 'ref': '45' |
+ | N2002 | highway | road | - |
+ | N2003 | highway | yes | 'name: de': 'Foo', 'name': 'real1' |
+ | N2004 | highway | yes | 'name:\nde': 'Foo', 'name': 'real2' |
+ | N2005 | highway | yes | 'name:\tde': 'Foo', 'name:\\\\': 'real3' |
+ | N2006 | highway | yes | 'name:\tde': 'Foo', 'name': 'rea\\l3' |
+
+ And place contains
+ | object | extratags |
+ | N2002 | 'name:prefix': 'Pre', 'name:suffix': 'Post', 'ref:de': '55' |
+
+
+ Scenario: Name when using with_name flag
+ When loading osm data
+ """
+ n3001 Tbridge=yes,bridge:name=GoldenGate
+ n3002 Tbridge=yes,bridge:name:en=Rainbow
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N3001 | bridge | yes | 'name': 'GoldenGate' |
+ | N3002 | bridge | yes | 'name:en': 'Rainbow' |
+
+
+ Scenario: Address tags
+ When loading osm data
+ """
+ n4001 Taddr:housenumber=34,addr:city=Esmarald,addr:county=Land
+ n4002 Taddr:streetnumber=10,is_in:city=Rootoo,is_in=Gold
+ """
+ Then place contains exactly
+ | object | class | address |
+ | N4001 | place | 'housenumber': '34', 'city': 'Esmarald', 'county': 'Land' |
+ | N4002 | place | 'streetnumber': '10', 'city': 'Rootoo' |
+
+
+ Scenario: Country codes
+ When loading osm data
+ """
+ n5001 Tshop=yes,country_code=DE
+ n5002 Tshop=yes,country_code=toolong
+ n5003 Tshop=yes,country_code=x
+ n5004 Tshop=yes,addr:country=us
+ n5005 Tshop=yes,country=be
+ n5006 Tshop=yes,addr:country=France
+ """
+ Then place contains exactly
+ | object | class | address |
+ | N5001 | shop | 'country': 'DE' |
+ | N5002 | shop | - |
+ | N5003 | shop | - |
+ | N5004 | shop | 'country': 'us' |
+ | N5005 | shop | - |
+ | N5006 | shop | - |
+
+
+ Scenario: Postcodes
+ When loading osm data
+ """
+ n6001 Tshop=bank,addr:postcode=12345
+ n6002 Tshop=bank,tiger:zip_left=34343
+ n6003 Tshop=bank,is_in:postcode=9009
+ """
+ Then place contains exactly
+ | object | class | address |
+ | N6001 | shop | 'postcode': '12345' |
+ | N6002 | shop | 'postcode': '34343' |
+ | N6003 | shop | - |
+
+
+ Scenario: Main with extra
+ When loading osm data
+ """
+ n7001 Thighway=primary,bridge=yes,name=1
+ n7002 Thighway=primary,bridge=yes,bridge:name=1
+ """
+ Then place contains exactly
+ | object | class | type | name | extratags+bridge:name |
+ | N7001 | highway | primary | 'name': '1' | - |
+ | N7002:highway | highway | primary | - | 1 |
+ | N7002:bridge | bridge | yes | 'name': '1' | 1 |
+
+
+ Scenario: Global fallback and skipping
+ When loading osm data
+ """
+ n8001 Tshop=shoes,note:de=Nein,xx=yy
+ n8002 Tshop=shoes,building=no,ele=234
+ n8003 Tshop=shoes,name:source=survey
+ """
+ Then place contains exactly
+ | object | class | name | extratags |
+ | N8001 | shop | - | 'xx': 'yy' |
+ | N8002 | shop | - | 'ele': '234' |
+ | N8003 | shop | - | - |
+
+
+ Scenario: Admin levels
+ When loading osm data
+ """
+ n9001 Tplace=city
+ n9002 Tplace=city,admin_level=16
+ n9003 Tplace=city,admin_level=x
+ n9004 Tplace=city,admin_level=1
+ n9005 Tplace=city,admin_level=0
+ n9006 Tplace=city,admin_level=2.5
+ """
+ Then place contains exactly
+ | object | class | admin_level |
+ | N9001 | place | 15 |
+ | N9002 | place | 15 |
+ | N9003 | place | 15 |
+ | N9004 | place | 1 |
+ | N9005 | place | 15 |
+ | N9006 | place | 15 |
+
+
+ Scenario: Administrative boundaries with place tags
+ When loading osm data
+ """
+ n10001 Tboundary=administrative,place=city,name=A
+ n10002 Tboundary=natural,place=city,name=B
+ n10003 Tboundary=administrative,place=island,name=C
+ """
+ Then place contains
+ | object | class | type | extratags |
+ | N10001 | boundary | administrative | 'place': 'city' |
+ And place contains
+ | object | class | type |
+ | N10002:boundary | boundary | natural |
+ | N10002:place | place | city |
+ | N10003:boundary | boundary | administrative |
+ | N10003:place | place | island |
+
+
+ Scenario: Building fallbacks
+ When loading osm data
+ """
+ n12001 Ttourism=hotel,building=yes
+ n12002 Tbuilding=house
+ n12003 Tbuilding=shed,addr:housenumber=1
+ n12004 Tbuilding=yes,name=Das-Haus
+ n12005 Tbuilding=yes,addr:postcode=12345
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N12001 | tourism | hotel |
+ | N12003 | building | shed |
+ | N12004 | building | yes |
+ | N12005 | place | postcode |
+
+
+ Scenario: Address interpolations
+ When loading osm data
+ """
+ n13001 Taddr:interpolation=odd
+ n13002 Taddr:interpolation=even,place=city
+ """
+ Then place contains exactly
+ | object | class | type | address |
+ | N13001 | place | houses | 'interpolation': 'odd' |
+ | N13002 | place | houses | 'interpolation': 'even' |
--- /dev/null
+@DB
+Feature: Updates of address interpolation objects
+ Test that changes to address interpolation objects are correctly
+ propagated.
+
+ Background:
+ Given the grid
+ | 1 | 2 |
+
+
+ Scenario: Adding a new interpolation
+ When loading osm data
+ """
+ n1 Taddr:housenumber=3
+ n2 Taddr:housenumber=17
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+
+ When updating osm data
+ """
+ w99 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:place | houses |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ Then location_property_osmline contains exactly
+ | object |
+ | 99:5 |
+
+
+ Scenario: Delete an existing interpolation
+ When loading osm data
+ """
+ n1 Taddr:housenumber=2
+ n2 Taddr:housenumber=7
+ w99 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:place | houses |
+
+ When updating osm data
+ """
+ w99 v2 dD
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ Then location_property_osmline contains exactly
+ | object | indexed_status |
+
+
+ Scenario: Changing an object to an interpolation
+ When loading osm data
+ """
+ n1 Taddr:housenumber=3
+ n2 Taddr:housenumber=17
+ w99 Thighway=residential Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:highway | residential |
+
+ When updating osm data
+ """
+ w99 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:place | houses |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ And location_property_osmline contains exactly
+ | object |
+ | 99:5 |
+
+
+ Scenario: Changing an interpolation to something else
+ When loading osm data
+ """
+ n1 Taddr:housenumber=3
+ n2 Taddr:housenumber=17
+ w99 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:place | houses |
+
+ When updating osm data
+ """
+ w99 Thighway=residential Nn1,n2
+ """
+ Then place contains
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:highway | residential |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W99:highway | residential |
+ And location_property_osmline contains exactly
+ | object |
+
--- /dev/null
+@DB
+Feature: Update of postcode only objects
+ Tests that changes to objects containing only a postcode are
+ propagated correctly.
+
+
+ Scenario: Adding a postcode-only node
+ When loading osm data
+ """
+ """
+ Then place contains exactly
+ | object |
+
+ When updating osm data
+ """
+ n34 Tpostcode=4456
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:place | postcode |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+
+ Scenario: Deleting a postcode-only node
+ When loading osm data
+ """
+ n34 Tpostcode=4456
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:place | postcode |
+
+ When updating osm data
+ """
+ n34 v2 dD
+ """
+ Then place contains exactly
+ | object |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+
+ Scenario Outline: Converting a regular object into a postcode-only node
+ When loading osm data
+ """
+ n34 T<class>=<type>
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:<class> | <type> |
+
+ When updating osm data
+ """
+ n34 Tpostcode=4456
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:place | postcode |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+ Examples:
+ | class | type |
+ | amenity | restaurant |
+ | place | hamlet |
+
+
+ Scenario Outline: Converting a postcode-only node into a regular object
+ When loading osm data
+ """
+ n34 Tpostcode=4456
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:place | postcode |
+
+ When updating osm data
+ """
+ n34 T<class>=<type>
+ """
+ Then place contains exactly
+ | object | type |
+ | N34:<class> | <type> |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | N34:<class> | <type> |
+
+ Examples:
+ | class | type |
+ | amenity | restaurant |
+ | place | hamlet |
+
+
+    Scenario: Converting an interpolation into a postcode-only node
+ Given the grid
+ | 1 | 2 |
+ When loading osm data
+ """
+ n1 Taddr:housenumber=3
+ n2 Taddr:housenumber=17
+ w34 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W34:place | houses |
+
+ When updating osm data
+ """
+ w34 Tpostcode=4456 Nn1,n2
+ """
+ Then place contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W34:place | postcode |
+ When indexing
+ Then location_property_osmline contains exactly
+ | object |
+ And placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+
+
+ Scenario: Converting a postcode-only node into an interpolation
+ Given the grid
+ | 1 | 2 |
+ When loading osm data
+ """
+ n1 Taddr:housenumber=3
+ n2 Taddr:housenumber=17
+ w34 Tpostcode=4456 Nn1,n2
+ """
+ Then place contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W34:place | postcode |
+
+ When updating osm data
+ """
+ w34 Taddr:interpolation=odd Nn1,n2
+ """
+ Then place contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
+ | W34:place | houses |
+ When indexing
+ Then location_property_osmline contains exactly
+ | object |
+ | 34:5 |
+ And placex contains exactly
+ | object | type |
+ | N1:place | house |
+ | N2:place | house |
Feature: Update of simple objects by osm2pgsql
Testing basic update functions of osm2pgsql.
- Scenario: Import object with two main tags
+ Scenario: Adding a new object
When loading osm data
"""
- n1 Ttourism=hotel,amenity=restaurant,name=foo
- n2 Tplace=locality,name=spotty
+ n1 Tplace=town,name=Middletown
"""
- Then place contains
- | object | type | name+name |
- | N1:tourism | hotel | foo |
- | N1:amenity | restaurant | foo |
- | N2:place | locality | spotty |
- When updating osm data
- """
- n1 dV Ttourism=hotel,name=foo
- n2 dD
- """
- Then place has no entry for N1:amenity
- And place has no entry for N2
- And place contains
- | object | class | type | name |
- | N1:tourism | tourism | hotel | 'name' : 'foo' |
+ Then place contains exactly
+ | object | type | name+name |
+ | N1:place | town | Middletown |
+
+ When updating osm data
+ """
+ n2 Tamenity=hotel,name=Posthotel
+ """
+ Then place contains exactly
+ | object | type | name+name |
+ | N1:place | town | Middletown |
+ | N2:amenity | hotel | Posthotel |
+ And placex contains exactly
+ | object | type | name+name | indexed_status |
+ | N1:place | town | Middletown | 0 |
+ | N2:amenity | hotel | Posthotel | 1 |
- Scenario: Downgrading a highway to one that is dropped without name
- When loading osm data
- """
- n100 x0 y0
- n101 x0.0001 y0.0001
- w1 Thighway=residential Nn100,n101
- """
- Then place contains
- | object |
- | W1:highway |
- When updating osm data
- """
- w1 Thighway=service Nn100,n101
- """
- Then place has no entry for W1
- Scenario: Downgrading a highway when a second tag is present
+ Scenario: Deleting an existing object
When loading osm data
"""
- n100 x0 y0
- n101 x0.0001 y0.0001
- w1 Thighway=residential,tourism=hotel Nn100,n101
- """
- Then place contains
- | object |
- | W1:highway |
- | W1:tourism |
- When updating osm data
+ n1 Tplace=town,name=Middletown
+ n2 Tamenity=hotel,name=Posthotel
"""
- w1 Thighway=service,tourism=hotel Nn100,n101
- """
- Then place has no entry for W1:highway
- And place contains
- | object |
- | W1:tourism |
+ Then place contains exactly
+ | object | type | name+name |
+ | N1:place | town | Middletown |
+ | N2:amenity | hotel | Posthotel |
+
+ When updating osm data
+ """
+ n2 dD
+ """
+ Then place contains exactly
+ | object | type | name+name |
+ | N1:place | town | Middletown |
+ And placex contains exactly
+ | object | type | name+name | indexed_status |
+ | N1:place | town | Middletown | 0 |
+ | N2:amenity | hotel | Posthotel | 100 |
--- /dev/null
+@DB
+Feature: Tag evaluation
+ Tests if tags are correctly updated in the place table
+
+ Background:
+ Given the grid
+ | 1 | 2 | 3 |
+ | 10 | 11 | |
+ | 45 | 46 | |
+
+ Scenario: Main tag deleted
+ When loading osm data
+ """
+ n1 Tamenity=restaurant
+ n2 Thighway=bus_stop,railway=stop,name=X
+ n3 Tamenity=prison
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N1 | amenity | restaurant |
+ | N2:highway | highway | bus_stop |
+ | N2:railway | railway | stop |
+ | N3 | amenity | prison |
+
+ When updating osm data
+ """
+ n1 Tnot_a=restaurant
+ n2 Thighway=bus_stop,name=X
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N2:highway | highway | bus_stop |
+ | N3 | amenity | prison |
+ And placex contains
+ | object | indexed_status |
+ | N3:amenity | 0 |
+ When indexing
+ Then placex contains exactly
+ | object | type | name |
+ | N2:highway | bus_stop | 'name': 'X' |
+ | N3:amenity | prison | - |
+
+
+ Scenario: Main tag added
+ When loading osm data
+ """
+ n1 Tatity=restaurant
+ n2 Thighway=bus_stop,name=X
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N2:highway | highway | bus_stop |
+
+ When updating osm data
+ """
+ n1 Tamenity=restaurant
+ n2 Thighway=bus_stop,railway=stop,name=X
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N1 | amenity | restaurant |
+ | N2:highway | highway | bus_stop |
+ | N2:railway | railway | stop |
+ When indexing
+ Then placex contains exactly
+ | object | type | name |
+ | N1:amenity | restaurant | - |
+ | N2:highway | bus_stop | 'name': 'X' |
+ | N2:railway | stop | 'name': 'X' |
+
+
+ Scenario: Main tag modified
+ When loading osm data
+ """
+ n10 Thighway=footway,name=X
+ n11 Tamenity=atm
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N10 | highway | footway |
+ | N11 | amenity | atm |
+
+ When updating osm data
+ """
+ n10 Thighway=path,name=X
+ n11 Thighway=primary
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N10 | highway | path |
+ | N11 | highway | primary |
+ When indexing
+ Then placex contains exactly
+ | object | type | name |
+ | N10:highway | path | 'name': 'X' |
+ | N11:highway | primary | - |
+
+
+ Scenario: Main tags with name, name added
+ When loading osm data
+ """
+ n45 Tlanduse=cemetry
+ n46 Tbuilding=yes
+ """
+ Then place contains exactly
+ | object | class | type |
+
+ When updating osm data
+ """
+ n45 Tlanduse=cemetry,name=TODO
+ n46 Tbuilding=yes,addr:housenumber=1
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N45 | landuse | cemetry |
+ | N46 | building| yes |
+ When indexing
+ Then placex contains exactly
+ | object | type | name | address |
+ | N45:landuse | cemetry | 'name': 'TODO' | - |
+ | N46:building| yes | - | 'housenumber': '1' |
+
+
+ Scenario: Main tags with name, name removed
+ When loading osm data
+ """
+ n45 Tlanduse=cemetry,name=TODO
+ n46 Tbuilding=yes,addr:housenumber=1
+ """
+ Then place contains exactly
+ | object | class | type |
+ | N45 | landuse | cemetry |
+ | N46 | building| yes |
+
+ When updating osm data
+ """
+ n45 Tlanduse=cemetry
+ n46 Tbuilding=yes
+ """
+ Then place contains exactly
+ | object | class | type |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+ Scenario: Main tags with name, name modified
+ When loading osm data
+ """
+ n45 Tlanduse=cemetry,name=TODO
+ n46 Tbuilding=yes,addr:housenumber=1
+ """
+ Then place contains exactly
+ | object | class | type | name | address |
+ | N45 | landuse | cemetry | 'name' : 'TODO' | - |
+ | N46 | building| yes | - | 'housenumber': '1'|
+
+ When updating osm data
+ """
+ n45 Tlanduse=cemetry,name=DONE
+ n46 Tbuilding=yes,addr:housenumber=10
+ """
+ Then place contains exactly
+ | object | class | type | name | address |
+ | N45 | landuse | cemetry | 'name' : 'DONE' | - |
+ | N46 | building| yes | - | 'housenumber': '10'|
+ When indexing
+ Then placex contains exactly
+ | object | class | type | name | address |
+ | N45 | landuse | cemetry | 'name' : 'DONE' | - |
+ | N46 | building| yes | - | 'housenumber': '10'|
+
+
+ Scenario: Main tag added to address only node
+ When loading osm data
+ """
+ n1 Taddr:housenumber=345
+ """
+ Then place contains exactly
+ | object | class | type | address |
+ | N1 | place | house | 'housenumber': '345'|
+
+ When updating osm data
+ """
+ n1 Taddr:housenumber=345,building=yes
+ """
+ Then place contains exactly
+ | object | class | type | address |
+ | N1 | building | yes | 'housenumber': '345'|
+ When indexing
+ Then placex contains exactly
+ | object | class | type | address |
+ | N1 | building | yes | 'housenumber': '345'|
+
+
+ Scenario: Main tag removed from address only node
+ When loading osm data
+ """
+ n1 Taddr:housenumber=345,building=yes
+ """
+ Then place contains exactly
+ | object | class | type | address |
+ | N1 | building | yes | 'housenumber': '345'|
+
+ When updating osm data
+ """
+ n1 Taddr:housenumber=345
+ """
+ Then place contains exactly
+ | object | class | type | address |
+ | N1 | place | house | 'housenumber': '345'|
+ When indexing
+ Then placex contains exactly
+ | object | class | type | address |
+ | N1 | place | house | 'housenumber': '345'|
+
+
+ Scenario: Main tags with name key, adding key name
+ When loading osm data
+ """
+ n2 Tbridge=yes
+ """
+ Then place contains exactly
+ | object | class | type |
+
+ When updating osm data
+ """
+ n2 Tbridge=yes,bridge:name=high
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name': 'high' |
+ When indexing
+ Then placex contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name': 'high' |
+
+
+ Scenario: Main tags with name key, deleting key name
+ When loading osm data
+ """
+ n2 Tbridge=yes,bridge:name=high
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name': 'high' |
+
+ When updating osm data
+ """
+ n2 Tbridge=yes
+ """
+ Then place contains exactly
+ | object |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+
+ Scenario: Main tags with name key, changing key name
+ When loading osm data
+ """
+ n2 Tbridge=yes,bridge:name=high
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name': 'high' |
+
+ When updating osm data
+ """
+ n2 Tbridge=yes,bridge:name:en=high
+ """
+ Then place contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name:en': 'high' |
+ When indexing
+ Then placex contains exactly
+ | object | class | type | name |
+ | N2 | bridge | yes | 'name:en': 'high' |
+
+
+ Scenario: Downgrading a highway to one that is dropped without name
+ When loading osm data
+ """
+ n100 x0 y0
+ n101 x0.0001 y0.0001
+ w1 Thighway=residential Nn100,n101
+ """
+ Then place contains exactly
+ | object |
+ | W1:highway |
+
+ When updating osm data
+ """
+ w1 Thighway=service Nn100,n101
+ """
+ Then place contains exactly
+ | object |
+ When indexing
+ Then placex contains exactly
+ | object |
+
+
+ Scenario: Upgrading a highway to one that is not dropped without name
+ When loading osm data
+ """
+ n100 x0 y0
+ n101 x0.0001 y0.0001
+ w1 Thighway=service Nn100,n101
+ """
+ Then place contains exactly
+ | object |
+
+ When updating osm data
+ """
+ w1 Thighway=unclassified Nn100,n101
+ """
+ Then place contains exactly
+ | object |
+ | W1:highway |
+ When indexing
+ Then placex contains exactly
+ | object |
+ | W1:highway |
+
+
+ Scenario: Downgrading a highway when a second tag is present
+ When loading osm data
+ """
+ n100 x0 y0
+ n101 x0.0001 y0.0001
+ w1 Thighway=residential,tourism=hotel Nn100,n101
+ """
+ Then place contains exactly
+ | object | type |
+ | W1:highway | residential |
+ | W1:tourism | hotel |
+
+ When updating osm data
+ """
+ w1 Thighway=service,tourism=hotel Nn100,n101
+ """
+ Then place contains exactly
+ | object | type |
+ | W1:tourism | hotel |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | W1:tourism | hotel |
+
+
+ Scenario: Upgrading a highway when a second tag is present
+ When loading osm data
+ """
+ n100 x0 y0
+ n101 x0.0001 y0.0001
+ w1 Thighway=service,tourism=hotel Nn100,n101
+ """
+ Then place contains exactly
+ | object | type |
+ | W1:tourism | hotel |
+
+ When updating osm data
+ """
+ w1 Thighway=residential,tourism=hotel Nn100,n101
+ """
+ Then place contains exactly
+ | object | type |
+ | W1:highway | residential |
+ | W1:tourism | hotel |
+ When indexing
+ Then placex contains exactly
+ | object | type |
+ | W1:highway | residential |
+ | W1:tourism | hotel |
+
+
+ Scenario: Replay on administrative boundary
+ When loading osm data
+ """
+ n10 x34.0 y-4.23
+ n11 x34.1 y-4.23
+ n12 x34.2 y-4.13
+ w10 Tboundary=administrative,waterway=river,name=Border,admin_level=2 Nn12,n11,n10
+ """
+ Then place contains exactly
+ | object | type | admin_level | name |
+ | W10:waterway | river | 2 | 'name': 'Border' |
+ | W10:boundary | administrative | 2 | 'name': 'Border' |
+
+ When updating osm data
+ """
+ w10 Tboundary=administrative,waterway=river,name=Border,admin_level=2 Nn12,n11,n10
+ """
+ Then place contains exactly
+ | object | type | admin_level | name |
+ | W10:waterway | river | 2 | 'name': 'Border' |
+ | W10:boundary | administrative | 2 | 'name': 'Border' |
+ When indexing
+ Then placex contains exactly
+ | object | type | admin_level | name |
+ | W10:waterway | river | 2 | 'name': 'Border' |
+
+
+ Scenario: Change admin_level on administrative boundary
+ Given the grid
+ | 10 | 11 |
+ | 13 | 12 |
+ When loading osm data
+ """
+ n10
+ n11
+ n12
+ n13
+ w10 Nn10,n11,n12,n13,n10
+ r10 Ttype=multipolygon,boundary=administrative,name=Border,admin_level=2 Mw10@
+ """
+ Then place contains exactly
+ | object | admin_level |
+ | R10:boundary | 2 |
+
+ When updating osm data
+ """
+ r10 Ttype=multipolygon,boundary=administrative,name=Border,admin_level=4 Mw10@
+ """
+ Then place contains exactly
+ | object | type | admin_level |
+ | R10:boundary | administrative | 4 |
+ When indexing
+ Then placex contains exactly
+ | object | type | admin_level |
+ | R10:boundary | administrative | 4 |
+
+
+ Scenario: Change boundary to administrative
+ Given the grid
+ | 10 | 11 |
+ | 13 | 12 |
+ When loading osm data
+ """
+ n10
+ n11
+ n12
+ n13
+ w10 Nn10,n11,n12,n13,n10
+ r10 Ttype=multipolygon,boundary=informal,name=Border,admin_level=4 Mw10@
+ """
+ Then place contains exactly
+ | object | type | admin_level |
+ | R10:boundary | informal | 4 |
+
+ When updating osm data
+ """
+ r10 Ttype=multipolygon,boundary=administrative,name=Border,admin_level=4 Mw10@
+ """
+ Then place contains exactly
+ | object | type | admin_level |
+ | R10:boundary | administrative | 4 |
+ When indexing
+ Then placex contains exactly
+ | object | type | admin_level |
+ | R10:boundary | administrative | 4 |
+
+
+ Scenario: Change boundary away from administrative
+ Given the grid
+ | 10 | 11 |
+ | 13 | 12 |
+ When loading osm data
+ """
+ n10
+ n11
+ n12
+ n13
+ w10 Nn10,n11,n12,n13,n10
+ r10 Ttype=multipolygon,boundary=administrative,name=Border,admin_level=4 Mw10@
+ """
+ Then place contains exactly
+ | object | type | admin_level |
+ | R10:boundary | administrative | 4 |
+
+ When updating osm data
+ """
+ r10 Ttype=multipolygon,boundary=informal,name=Border,admin_level=4 Mw10@
+ """
+ Then place contains exactly
+ | object | type | admin_level |
+ | R10:boundary | informal | 4 |
+ When indexing
+ Then placex contains exactly
+ | object | type | admin_level |
+ | R10:boundary | informal | 4 |
self.api_test_db = config['API_TEST_DB']
self.api_test_file = config['API_TEST_FILE']
self.tokenizer = config['TOKENIZER']
+ self.import_style = config['STYLE']
self.server_module_path = config['SERVER_MODULE_PATH']
self.reuse_template = not config['REMOVE_TEMPLATE']
self.keep_scenario_db = config['KEEP_TEST_DB']
self.code_coverage_path = config['PHPCOV']
self.code_coverage_id = 1
- self.default_config = Configuration(None, self.src_dir / 'settings').get_os_env()
+ self.default_config = Configuration(None).get_os_env()
self.test_env = None
self.template_db_done = False
self.api_db_done = False
if self.db_pass:
dsn += ';password=' + self.db_pass
- if self.website_dir is not None \
- and self.test_env is not None \
- and dsn == self.test_env['NOMINATIM_DATABASE_DSN']:
- return # environment already set uo
-
self.test_env = dict(self.default_config)
self.test_env['NOMINATIM_DATABASE_DSN'] = dsn
self.test_env['NOMINATIM_LANGUAGES'] = 'en,de,fr,ja'
self.test_env['NOMINATIM_CONFIGDIR'] = str((self.src_dir / 'settings').resolve())
self.test_env['NOMINATIM_DATABASE_MODULE_SRC_PATH'] = str((self.build_dir / 'module').resolve())
self.test_env['NOMINATIM_OSM2PGSQL_BINARY'] = str((self.build_dir / 'osm2pgsql' / 'osm2pgsql').resolve())
- self.test_env['NOMINATIM_NOMINATIM_TOOL'] = str((self.build_dir / 'nominatim').resolve())
if self.tokenizer is not None:
self.test_env['NOMINATIM_TOKENIZER'] = self.tokenizer
+ if self.import_style is not None:
+ self.test_env['NOMINATIM_IMPORT_STYLE'] = self.import_style
if self.server_module_path:
self.test_env['NOMINATIM_DATABASE_MODULE_PATH'] = self.server_module_path
def get_test_config(self):
- cfg = Configuration(Path(self.website_dir.name), self.src_dir / 'settings',
- environ=self.test_env)
+ cfg = Configuration(Path(self.website_dir.name), environ=self.test_env)
cfg.set_libdirs(module=self.build_dir / 'module',
- osm2pgsql=self.build_dir / 'osm2pgsql' / 'osm2pgsql',
- php=self.src_dir / 'lib-php',
- sql=self.src_dir / 'lib-sql',
- data=self.src_dir / 'data')
+ osm2pgsql=self.build_dir / 'osm2pgsql' / 'osm2pgsql')
return cfg
def get_libpq_dsn(self):
cli.nominatim(module_dir='',
osm2pgsql_path=str(self.build_dir / 'osm2pgsql' / 'osm2pgsql'),
- phplib_dir=str(self.src_dir / 'lib-php'),
- sqllib_dir=str(self.src_dir / 'lib-sql'),
- data_dir=str(self.src_dir / 'data'),
- config_dir=str(self.src_dir / 'settings'),
cli_args=cmdline,
phpcgi_path='',
environ=self.test_env)
else:
self.columns[column] = {key: value}
+    def db_delete(self, cursor):
+        """ Issue a delete for the given OSM object.
+
+            Removes all rows from the 'place' table whose osm_type and
+            osm_id match the values collected in self.columns. 'cursor'
+            must be an open database cursor.
+        """
+        # Parameterised query - osm_type/osm_id come straight from the
+        # collected column values for this row.
+        cursor.execute('DELETE FROM place WHERE osm_type = %s and osm_id = %s',
+                       (self.columns['osm_type'] , self.columns['osm_id']))
+
def db_insert(self, cursor):
""" Insert the collected data into the database.
"""
context.nominatim.run_nominatim('refresh', '--functions')
with context.db.cursor() as cur:
for row in context.table:
- PlaceColumn(context).add_row(row, False).db_insert(cur)
+ col = PlaceColumn(context).add_row(row, False)
+ col.db_delete(cur)
+ col.db_insert(cur)
+ cur.execute('SELECT flush_deleted_places()')
context.nominatim.reindex_placex(context.db)
check_database_integrity(context)
"""
context.nominatim.run_nominatim('refresh', '--functions')
with context.db.cursor() as cur:
+ cur.execute('TRUNCATE place_to_be_deleted')
for oid in oids.split(','):
NominatimID(oid).query_osm_id(cur, 'DELETE FROM place WHERE {}')
+ cur.execute('SELECT flush_deleted_places()')
context.nominatim.reindex_placex(context.db)
if exact:
cur.execute('SELECT osm_type, osm_id, class from {}'.format(table))
- assert expected_content == set([(r[0], r[1], r[2]) for r in cur])
+ actual = set([(r[0], r[1], r[2]) for r in cur])
+ assert expected_content == actual, \
+ f"Missing entries: {expected_content - actual}\n" \
+ f"Not expected in table: {actual - expected_content}"
@then("(?P<table>placex|place) has no entry for (?P<oid>.*)")
assert not todo, f"Unmatched lines in table: {list(context.table[i] for i in todo)}"
+@then("location_property_osmline contains(?P<exact> exactly)?")
+def check_place_contents(context, exact):
+    """ Check contents of the interpolation table. Each row of the expected
+        table represents a database row and all listed data must match.
+        Columns not present in the expected table may contain arbitrary
+        values. Rows are identified via the 'object' column, which must
+        have an identifier of the form '<osm id>[:<startnumber>]'. When
+        multiple database rows match (for example because 'startnumber'
+        was left out and there are multiple entries for the given OSM
+        object) then all of them must match. Every expected row must be
+        matched by at least one database row. When 'exactly' is given,
+        there must not be additional rows in the database.
+    """
+    with context.db.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
+        expected_content = set()
+        for row in context.table:
+            # Split the object id into the OSM id and an optional start number.
+            # NOTE(review): 'start' is computed here but never used below -
+            # the query parameters re-parse row['object'] instead. Confirm
+            # whether 'start' can be dropped or should feed the query.
+            if ':' in row['object']:
+                nid, start = row['object'].split(':', 2)
+                start = int(start)
+            else:
+                nid, start = row['object'], None
+
+            # Fetch all matching interpolation lines, including a textual
+            # form of the geometry so tests can compare it.
+            query = """SELECT *, ST_AsText(linegeo) as geomtxt,
+                       ST_GeometryType(linegeo) as geometrytype
+                       FROM location_property_osmline WHERE osm_id=%s"""
+
+            if ':' in row['object']:
+                query += ' and startnumber = %s'
+                params = [int(val) for val in row['object'].split(':', 2)]
+            else:
+                params = (int(row['object']), )
+
+            cur.execute(query, params)
+            assert cur.rowcount > 0, "No rows found for " + row['object']
+
+            for res in cur:
+                if exact:
+                    # Remember which database rows were covered by the
+                    # expected table for the exactness check below.
+                    expected_content.add((res['osm_id'], res['startnumber']))
+
+                DBRow(nid, res, context).assert_row(row, ['object'])
+
+        if exact:
+            # With 'exactly', the table must not contain any rows beyond
+            # those listed in the expected table.
+            cur.execute('SELECT osm_id, startnumber from location_property_osmline')
+            actual = set([(r[0], r[1]) for r in cur])
+            assert expected_content == actual, \
+                f"Missing entries: {expected_content - actual}\n" \
+                f"Not expected in table: {actual - expected_content}"
from pathlib import Path
from nominatim.tools.exec_utils import run_osm2pgsql
+from nominatim.tools.replication import run_osm2pgsql_updates
from geometry_alias import ALIASES
return dict(import_file=fname,
osm2pgsql=str(nominatim_env.build_dir / 'osm2pgsql' / 'osm2pgsql'),
osm2pgsql_cache=50,
- osm2pgsql_style=str(nominatim_env.src_dir / 'settings' / 'import-extratags.style'),
+ osm2pgsql_style=str(nominatim_env.get_test_config().get_import_style_file()),
+ osm2pgsql_style_path=nominatim_env.get_test_config().config_dir,
threads=1,
dsn=nominatim_env.get_libpq_dsn(),
flatnode_file='',
return fd.name
+@given('the lua style file')
+def lua_style_file(context):
+    """ Define a custom style file to use for the import.
+
+        The scenario's text block is written verbatim to 'custom.lua'
+        inside the test website directory and NOMINATIM_IMPORT_STYLE is
+        pointed at that file for subsequent import steps.
+    """
+    style = Path(context.nominatim.website_dir.name) / 'custom.lua'
+    style.write_text(context.text)
+    context.nominatim.test_env['NOMINATIM_IMPORT_STYLE'] = str(style)
+
+
@given(u'the ([0-9.]+ )?grid(?: with origin (?P<origin>.*))?')
def define_node_grid(context, grid_step, origin):
"""
# create an OSM file and import it
fname = write_opl_file(context.text, context.osm)
try:
- run_osm2pgsql(get_osm2pgsql_options(context.nominatim, fname, append=True))
+ run_osm2pgsql_updates(context.db,
+ get_osm2pgsql_options(context.nominatim, fname, append=True))
finally:
os.remove(fname)
+
+@when('indexing')
+def index_database(context):
+    """
+    Run the Nominatim indexing step. This will process data previously
+    loaded with 'updating osm data'.
+    """
+    context.nominatim.run_nominatim('index')
def _call_nominatim(*args):
return nominatim.cli.nominatim(module_dir='MODULE NOT AVAILABLE',
osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
- phplib_dir=str(src_dir / 'lib-php'),
- data_dir=str(src_dir / 'data'),
phpcgi_path='/usr/bin/php-cgi',
- sqllib_dir=str(src_dir / 'lib-sql'),
- config_dir=str(src_dir / 'settings'),
cli_args=args)
return _call_nominatim
('restrict-to-osm-way', '727'),
('restrict-to-osm-relation', '197532')
])
-def test_export_parameters(src_dir, tmp_path, param, value):
+def test_export_parameters(src_dir, tmp_path, param, value, monkeypatch):
(tmp_path / 'admin').mkdir()
(tmp_path / 'admin' / 'export.php').write_text(f"""<?php
exit(strpos(implode(' ', $_SERVER['argv']), '--{param} {value}') >= 0 ? 0 : 10);
""")
+ monkeypatch.setattr(nominatim.paths, 'PHPLIB_DIR', tmp_path)
+
assert nominatim.cli.nominatim(module_dir='MODULE NOT AVAILABLE',
osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
- phplib_dir=str(tmp_path),
- data_dir=str(src_dir / 'data'),
phpcgi_path='/usr/bin/php-cgi',
- sqllib_dir=str(src_dir / 'lib-sql'),
- config_dir=str(src_dir / 'settings'),
cli_args=['export', '--' + param, value]) == 0
@pytest.mark.parametrize("endpoint", (('search', 'reverse', 'lookup', 'details', 'status')))
-def test_no_api_without_phpcgi(src_dir, endpoint):
+def test_no_api_without_phpcgi(endpoint):
assert nominatim.cli.nominatim(module_dir='MODULE NOT AVAILABLE',
osm2pgsql_path='OSM2PGSQL NOT AVAILABLE',
- phplib_dir=str(src_dir / 'lib-php'),
- data_dir=str(src_dir / 'data'),
phpcgi_path=None,
- sqllib_dir=str(src_dir / 'lib-sql'),
- config_dir=str(src_dir / 'settings'),
cli_args=[endpoint]) == 1
class TestCliApiCall:
@pytest.fixture(autouse=True)
- def setup_cli_call(self, cli_call):
- self.call_nominatim = cli_call
+ def setup_cli_call(self, params, cli_call, mock_func_factory, tmp_path):
+ self.mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script')
- def test_api_commands_simple(self, mock_func_factory, params, tmp_path):
+ def _run():
+ return cli_call(*params, '--project-dir', str(tmp_path))
+
+ self.run_nominatim = _run
+
+
+ def test_api_commands_simple(self, tmp_path, params):
(tmp_path / 'website').mkdir()
(tmp_path / 'website' / (params[0] + '.php')).write_text('')
- mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script')
- assert self.call_nominatim(*params, '--project-dir', str(tmp_path)) == 0
+ assert self.run_nominatim() == 0
- assert mock_run_api.called == 1
- assert mock_run_api.last_args[0] == params[0]
+ assert self.mock_run_api.called == 1
+ assert self.mock_run_api.last_args[0] == params[0]
- def test_bad_project_idr(self, mock_func_factory, params):
- mock_run_api = mock_func_factory(nominatim.clicmd.api, 'run_api_script')
+ def test_bad_project_dir(self):
+ assert self.run_nominatim() == 1
- assert self.call_nominatim(*params) == 1
QUERY_PARAMS = {
'search': ('--query', 'somewhere'),
import nominatim.cli
import nominatim.indexer.indexer
import nominatim.tools.replication
+import nominatim.tools.refresh
from nominatim.db import status
@pytest.fixture
def test_replication_update_once_no_index(self, update_mock):
assert self.call_nominatim('--once', '--no-index') == 0
- assert str(update_mock.last_args[1]['osm2pgsql']) == 'OSM2PGSQL NOT AVAILABLE'
+ assert str(update_mock.last_args[1]['osm2pgsql']).endswith('OSM2PGSQL NOT AVAILABLE')
def test_replication_update_custom_osm2pgsql(self, monkeypatch, update_mock):
from nominatim.errors import UsageError
@pytest.fixture
-def make_config(src_dir):
+def make_config():
""" Create a configuration object from the given project directory.
"""
def _mk_config(project_dir=None):
- return Configuration(project_dir, src_dir / 'settings')
+ return Configuration(project_dir)
return _mk_config
@pytest.fixture
-def make_config_path(src_dir, tmp_path):
+def make_config_path(tmp_path):
""" Create a configuration object with project and config directories
in a temporary directory.
"""
def _mk_config():
(tmp_path / 'project').mkdir()
(tmp_path / 'config').mkdir()
- conf = Configuration(tmp_path / 'project', src_dir / 'settings')
+ conf = Configuration(tmp_path / 'project')
conf.config_dir = tmp_path / 'config'
return conf
monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', 'street')
- expected = src_dir / 'settings' / 'import-street.style'
+ expected = src_dir / 'settings' / 'import-street.lua'
assert config.get_import_style_file() == expected
"""
(tmp_path / 'project').mkdir()
(tmp_path / 'config').mkdir()
- conf = Configuration(tmp_path / 'project', src_dir / 'settings')
+ conf = Configuration(tmp_path / 'project')
conf.config_dir = tmp_path / 'config'
return conf
@pytest.fixture
-def def_config(src_dir):
- cfg = Configuration(None, src_dir / 'settings')
- cfg.set_libdirs(module='.', osm2pgsql='.',
- php=src_dir / 'lib-php',
- sql=src_dir / 'lib-sql',
- data=src_dir / 'data')
+def def_config():
+ cfg = Configuration(None)
+ cfg.set_libdirs(module='.', osm2pgsql='.')
return cfg
@pytest.fixture
-def project_env(src_dir, tmp_path):
+def project_env(tmp_path):
projdir = tmp_path / 'project'
projdir.mkdir()
- cfg = Configuration(projdir, src_dir / 'settings')
- cfg.set_libdirs(module='.', osm2pgsql='.',
- php=src_dir / 'lib-php',
- sql=src_dir / 'lib-sql',
- data=src_dir / 'data')
+ cfg = Configuration(projdir)
+ cfg.set_libdirs(module='.', osm2pgsql='.')
return cfg
@pytest.fixture
def sql_preprocessor_cfg(tmp_path, table_factory, temp_db_with_extensions):
table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
- cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
- cfg.set_libdirs(module='.', osm2pgsql='.', php=SRC_DIR / 'lib-php',
- sql=tmp_path, data=SRC_DIR / 'data')
+ cfg = Configuration(None)
+ cfg.set_libdirs(module='.', osm2pgsql='.', sql=tmp_path)
return cfg
--- /dev/null
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2022 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Tests for sanitizer that clean up TIGER tags.
+"""
+import pytest
+
+from nominatim.tokenizer.place_sanitizer import PlaceSanitizer
+from nominatim.data.place_info import PlaceInfo
+
+class TestCleanTigerTags:
+    """ Tests for the 'clean-tiger-tags' place sanitizer step.
+
+        Each test feeds an address dictionary through the sanitizer and
+        inspects the resulting (name, kind, suffix) triples.
+    """
+
+    @pytest.fixture(autouse=True)
+    def setup_country(self, def_config):
+        # Keep the default configuration around for the sanitizer calls.
+        # NOTE(review): fixture name says 'country' but it only stores the
+        # config - possibly copied from another sanitizer test; confirm.
+        self.config = def_config
+
+
+    def run_sanitizer_on(self, addr):
+        # Wrap the raw address dict in a PlaceInfo, run the single
+        # 'clean-tiger-tags' step and return the address output sorted
+        # into comparable (name, kind, suffix) tuples.
+        place = PlaceInfo({'address': addr})
+        _, outaddr = PlaceSanitizer([{'step': 'clean-tiger-tags'}], self.config).process_names(place)
+
+        return sorted([(p.name, p.kind, p.suffix) for p in outaddr])
+
+    # Well-formed 'tiger:county' values have the trailing state code removed.
+    @pytest.mark.parametrize('inname,outname', [('Hamilton, AL', 'Hamilton'),
+                                                ('Little, Borough, CA', 'Little, Borough')])
+    def test_well_formatted(self, inname, outname):
+        assert self.run_sanitizer_on({'tiger:county': inname})\
+            == [(outname, 'county', 'tiger')]
+
+
+    # Values without a recognisable state suffix are passed through unchanged.
+    @pytest.mark.parametrize('name', ('Hamilton', 'Big, Road', ''))
+    def test_badly_formatted(self, name):
+        assert self.run_sanitizer_on({'tiger:county': name})\
+            == [(name, 'county', 'tiger')]
+
+
+    # Tags other than 'tiger:county' are left untouched by the sanitizer.
+    def test_unmatched(self):
+        assert self.run_sanitizer_on({'tiger:country': 'US'})\
+            == [('US', 'tiger', 'country')]
import pytest
+from nominatim.config import Configuration
import nominatim.tools.exec_utils as exec_utils
+import nominatim.paths
class TestRunLegacyScript:
@pytest.fixture(autouse=True)
- def setup_nominatim_env(self, tmp_path, def_config):
+ def setup_nominatim_env(self, tmp_path, monkeypatch):
tmp_phplib_dir = tmp_path / 'phplib'
tmp_phplib_dir.mkdir()
(tmp_phplib_dir / 'admin').mkdir()
- class _NominatimEnv:
- config = def_config
- phplib_dir = tmp_phplib_dir
- data_dir = Path('data')
- project_dir = Path('.')
- sqllib_dir = Path('lib-sql')
- config_dir = Path('settings')
- module_dir = 'module'
- osm2pgsql_path = 'osm2pgsql'
+ monkeypatch.setattr(nominatim.paths, 'PHPLIB_DIR', tmp_phplib_dir)
- self.testenv = _NominatimEnv
+ self.phplib_dir = tmp_phplib_dir
+ self.config = Configuration(tmp_path)
+ self.config.set_libdirs(module='.', osm2pgsql='default_osm2pgsql',
+ php=tmp_phplib_dir)
def mk_script(self, code):
- codefile = self.testenv.phplib_dir / 'admin' / 't.php'
+ codefile = self.phplib_dir / 'admin' / 't.php'
codefile.write_text('<?php\n' + code + '\n')
return 't.php'
def test_run_legacy_return_exit_code(self, return_code):
fname = self.mk_script('exit({});'.format(return_code))
assert return_code == \
- exec_utils.run_legacy_script(fname, nominatim_env=self.testenv)
+ exec_utils.run_legacy_script(fname, config=self.config)
def test_run_legacy_return_throw_on_fail(self):
fname = self.mk_script('exit(11);')
with pytest.raises(subprocess.CalledProcessError):
- exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
+ exec_utils.run_legacy_script(fname, config=self.config,
throw_on_fail=True)
def test_run_legacy_return_dont_throw_on_success(self):
fname = self.mk_script('exit(0);')
- assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv,
+ assert exec_utils.run_legacy_script(fname, config=self.config,
throw_on_fail=True) == 0
def test_run_legacy_use_given_module_path(self):
fname = self.mk_script("exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == '' ? 0 : 23);")
- assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
+ assert exec_utils.run_legacy_script(fname, config=self.config) == 0
def test_run_legacy_do_not_overwrite_module_path(self, monkeypatch):
fname = self.mk_script(
"exit($_SERVER['NOMINATIM_DATABASE_MODULE_PATH'] == 'other' ? 0 : 1);")
- assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
+ assert exec_utils.run_legacy_script(fname, config=self.config) == 0
def test_run_legacy_default_osm2pgsql_binary(self, monkeypatch):
- fname = self.mk_script("exit($_SERVER['NOMINATIM_OSM2PGSQL_BINARY'] == 'osm2pgsql' ? 0 : 23);")
+ fname = self.mk_script("exit($_SERVER['NOMINATIM_OSM2PGSQL_BINARY'] == 'default_osm2pgsql' ? 0 : 23);")
- assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
+ assert exec_utils.run_legacy_script(fname, config=self.config) == 0
def test_run_legacy_override_osm2pgsql_binary(self, monkeypatch):
fname = self.mk_script("exit($_SERVER['NOMINATIM_OSM2PGSQL_BINARY'] == 'somethingelse' ? 0 : 23);")
- assert exec_utils.run_legacy_script(fname, nominatim_env=self.testenv) == 0
+ assert exec_utils.run_legacy_script(fname, config=self.config) == 0
class TestRunApiScript:
sudo apt install -y php-cgi
sudo apt install -y build-essential cmake g++ libboost-dev libboost-system-dev \
libboost-filesystem-dev libexpat1-dev zlib1g-dev\
- libbz2-dev libpq-dev \
+ libbz2-dev libpq-dev liblua5.3-dev lua5.3 \
postgresql-10-postgis-2.4 \
postgresql-contrib-10 postgresql-10-postgis-scripts \
php-cli php-pgsql php-intl libicu-dev python3-pip \
# download the country grid:
if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
- wget -O data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
+ wget -O data/country_osm_grid.sql.gz https://nominatim.org/data/country_grid.sql.gz
fi #DOCS:
# The code must be built in a separate directory. Create this directory,
sudo tee /etc/php/7.2/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
[www]
; Replace the tcp listener and add the unix socket
-listen = /var/run/php7.2-fpm.sock
+listen = /var/run/php-fpm-nominatim.sock
; Ensure that the daemon runs as the correct user
listen.owner = www-data
fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
fastcgi_param QUERY_STRING \$args;
- fastcgi_pass unix:/var/run/php7.2-fpm.sock;
+ fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index index.php;
include fastcgi_params;
}
if (!-f \$document_root\$fastcgi_script_name) {
return 404;
}
- fastcgi_pass unix:/var/run/php7.2-fpm.sock;
+ fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index search.php;
include fastcgi.conf;
}
sudo apt install -y php-cgi
sudo apt install -y build-essential cmake g++ libboost-dev libboost-system-dev \
libboost-filesystem-dev libexpat1-dev zlib1g-dev \
- libbz2-dev libpq-dev \
+ libbz2-dev libpq-dev liblua5.3-dev lua5.3 \
postgresql-12-postgis-3 \
postgresql-contrib-12 postgresql-12-postgis-3-scripts \
php-cli php-pgsql php-intl libicu-dev python3-dotenv \
# download the country grid:
if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
- wget -O data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
+ wget -O data/country_osm_grid.sql.gz https://nominatim.org/data/country_grid.sql.gz
fi #DOCS:
# The code must be built in a separate directory. Create this directory,
sudo tee /etc/php/7.4/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
[www]
; Replace the tcp listener and add the unix socket
-listen = /var/run/php7.4-fpm.sock
+listen = /var/run/php-fpm-nominatim.sock
; Ensure that the daemon runs as the correct user
listen.owner = www-data
fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
fastcgi_param QUERY_STRING \$args;
- fastcgi_pass unix:/var/run/php7.4-fpm.sock;
+ fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index index.php;
include fastcgi_params;
}
if (!-f \$document_root\$fastcgi_script_name) {
return 404;
}
- fastcgi_pass unix:/var/run/php7.4-fpm.sock;
+ fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index search.php;
include fastcgi.conf;
}
EOF_NGINX_CONF
#DOCS:```
-# If you have some errors, make sure that php7.4-fpm.sock is well under
+# If you have some errors, make sure that php-fpm-nominatim.sock is well under
# /var/run/ and not under /var/run/php. Otherwise change the Nginx configuration
-# to /var/run/php/php7.4-fpm.sock.
+# to /var/run/php/php-fpm-nominatim.sock.
#
# Enable the configuration and restart Nginx
#
sudo apt install -y php-cgi
sudo apt install -y build-essential cmake g++ libboost-dev libboost-system-dev \
libboost-filesystem-dev libexpat1-dev zlib1g-dev \
- libbz2-dev libpq-dev \
+ libbz2-dev libpq-dev liblua5.3-dev lua5.3 \
postgresql-server-dev-14 postgresql-14-postgis-3 \
postgresql-contrib-14 postgresql-14-postgis-3-scripts \
php-cli php-pgsql php-intl libicu-dev python3-dotenv \
# download the country grid:
if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
- wget -O data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
+ wget -O data/country_osm_grid.sql.gz https://nominatim.org/data/country_grid.sql.gz
fi #DOCS:
# The code must be built in a separate directory. Create this directory,
sudo tee /etc/php/8.1/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
[www]
; Replace the tcp listener and add the unix socket
-listen = /var/run/php8.1-fpm.sock
+listen = /var/run/php-fpm-nominatim.sock
; Ensure that the daemon runs as the correct user
listen.owner = www-data
fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
fastcgi_param QUERY_STRING \$args;
- fastcgi_pass unix:/var/run/php8.1-fpm.sock;
+ fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index index.php;
include fastcgi_params;
}
if (!-f \$document_root\$fastcgi_script_name) {
return 404;
}
- fastcgi_pass unix:/var/run/php7.4-fpm.sock;
+ fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index search.php;
include fastcgi.conf;
}
EOF_NGINX_CONF
#DOCS:```
-# If you have some errors, make sure that php8.1-fpm.sock is well under
+# If you have some errors, make sure that php-fpm-nominatim.sock is well under
# /var/run/ and not under /var/run/php. Otherwise change the Nginx configuration
-# to /var/run/php/php8.1-fpm.sock.
+# to /var/run/php/php-fpm-nominatim.sock.
#
# Enable the configuration and restart Nginx
#