ACLOCAL_AMFLAGS = -I osm2pgsql/m4
+AUTOMAKE_OPTIONS = -Wno-portability
SUBDIRS = osm2pgsql module nominatim
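+# The server URL and database name default to the values from the local
+# settings (lib/init.php); override them on the command line if needed,
+# e.g. `make test NOMINATIM_SERVER=http://your.nominatim.instance/`.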
+NOMINATIM_SERVER ?= $(shell echo a | php -F lib/init.php -E 'echo CONST_Website_BaseURL."\n";')
+NOMINATIM_DATABASE ?= $(shell echo a | php -F lib/init.php -E 'echo DB::parseDSN(CONST_Database_DSN)["database"];')
+
install:
@echo Nominatim needs to be executed directly from this directory. No install necessary.
+
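+# Functional test targets (run with lettuce). All of them skip scenarios
+# tagged @Fail and @poldi-only; the *-fast variants additionally reuse the
+# template database between runs (NOMINATIM_REUSE_TEMPLATE).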
+test:
+ cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_DIR=.. lettuce -t -Fail -t -poldi-only
+
+test-fast:
+ cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_REUSE_TEMPLATE=1 NOMINATIM_DIR=.. lettuce -t -Fail -t -poldi-only
+
+test-db:
+ cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_DIR=.. lettuce -t -Fail -t -poldi-only -t DB
+
+test-db-fast:
+ cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_REUSE_TEMPLATE=1 NOMINATIM_DIR=.. lettuce -t -Fail -t -poldi-only -t DB
+
+test-api:
+ cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_DIR=.. lettuce -t -Fail -t -poldi-only features/api
+
+.PHONY: test test-fast test-db test-db-fast test-api
--- /dev/null
+This directory contains functional tests for the Nominatim API,
+for the import/update from osm files and for indexing.
+
+The tests use the lettuce framework (http://lettuce.it/) and
+nose (https://nose.readthedocs.org). API tests are meant to be run
+against a Nominatim installation with a complete planet-wide
+setup based on a fairly recent planet. If you only have an
+excerpt, some of the API tests may fail. Database tests create their
+own test databases and therefore do not require an existing Nominatim
+database.
+
+Prerequisites
+=============
+
+ * lettuce framework (http://lettuce.it/)
+ * nose (https://nose.readthedocs.org)
+ * pytidylib (http://countergram.com/open-source/pytidylib)
+ * haversine (https://github.com/mapado/haversine)
+
+Usage
+=====
+
+ * get prerequisites
+
+ [sudo] pip install lettuce nose pytidylib haversine
+
+ * run the tests
+
+ NOMINATIM_SERVER=http://your.nominatim.instance/ lettuce features
+
+The tests can be configured with a set of environment variables (see the
+example invocation below):
+
+ * `NOMINATIM_SERVER` - URL of the nominatim instance (API tests)
+ * `NOMINATIM_DIR` - source directory of Nominatim (import tests)
+ * `TEMPLATE_DB` - name of template database used as a skeleton for
+ the test databases (db tests)
+ * `TEST_DB` - name of test database (db tests)
+ * `NOMINATIM_SETTINGS` - file to write temporary Nominatim settings to (db tests)
+ * `NOMINATIM_REUSE_TEMPLATE` - if defined, the template database will not be
+                                deleted after the test run and will be reused
+                                by the next run. This speeds up tests
+                                considerably but may lead to spurious errors
+                                if the database layout has changed since the
+                                template was created.
+ * `LOGLEVEL` - set to 'debug' to get more verbose output (only works properly
+ when output to a logfile is configured)
+ * `LOGFILE` - sends debug output to the given file
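+
+For example, a database test run equivalent to the `test-db` make target can
+be started from the `tests` directory with:
+
+    NOMINATIM_SERVER=http://your.nominatim.instance/ NOMINATIM_DIR=.. \
+        lettuce -t -Fail -t -poldi-only -t DB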
+
+Writing Tests
+=============
+
+The following explanation assumes that the reader is familiar with the lettuce
+notation of features, scenarios and steps.
+
+All possible steps can be found in the `steps` directory and should ideally
+be documented.
+
+
+API Tests (`features/api`)
+--------------------------
+
+These tests are meant to test the different API calls and their parameters.
+
+There are two kinds of steps defined for these tests:
+request setup steps (see `steps/api_setup.py`)
+and steps for checking results (see `steps/api_result.py`).
+
+Each scenario follows this simple sequence of steps (see the example below):
+
+ 1. One or more steps to define parameters and HTTP headers of the request.
+ These are cumulative, so you can use multiple steps.
+ 2. A single step to call the API. This sends an HTTP request to the configured
+    server and collects the response. The cached parameters are then deleted
+    to ensure that the setup works properly with scenario outlines.
+ 3. As many result checks as necessary. The result remains cached, so that
+ multiple tests can be added here.
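+
+For example, one of the scenarios from the localization tests follows exactly
+this pattern:
+
+    Scenario: Search - accept-language first
+        Given the request parameters
+          | accept-language
+          | en,de
+        When sending json search query "Deutschland"
+        Then results contain
+          | ID | display_name
+          | 0  | Germany.*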
+
+Indexing Tests (`features/db`)
+------------------------------
+
+These tests check the import and update of the Nominatim database. They do not
+test the correctness of osm2pgsql. Each test will write some data into the `place`
+table (and optionally the `planet_osm_*` tables if required) and then run
+Nominatim's processing functions on that (see the example below).
+
+These tests need to create their own test databases. By default they will be
+called `test_template_nominatim` and `test_nominatim`. Names can be changed with
+the environment variables `TEMPLATE_DB` and `TEST_DB`. The user running the tests
+needs PostgreSQL superuser rights.
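+
+For example, one of the placex import scenarios reads:
+
+    Scenario: admin level is copied over
+        Given the place nodes
+          | osm_id | class | type  | admin_level | name
+          | 1      | place | state | 3           | 'name' : 'foo'
+        When importing
+        Then table placex contains
+          | object | admin_level |
+          | N1     | 3           |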
+
+
+Import Tests (`features/osm2pgsql`)
+-----------------------------------
+
+These tests check that data is imported correctly into the place table. They
+use the same template database as the Indexing tests, so the same remarks apply.
--- /dev/null
+Feature: Object details
+ Check details page for correctness
+
+ Scenario Outline: Details via OSM id
+ When looking up details for <object>
+ Then the result is valid
+
+ Examples:
+ | object
+ | 1758375
+ | N158845944
+ | W72493656
+ | R62422
+
--- /dev/null
+Feature: Localization of search results
+
+ Scenario: Search - default language
+ When sending json search query "Germany"
+ Then results contain
+ | ID | display_name
+ | 0 | Deutschland.*
+
+ Scenario: Search - accept-language first
+ Given the request parameters
+ | accept-language
+ | en,de
+ When sending json search query "Deutschland"
+ Then results contain
+ | ID | display_name
+ | 0 | Germany.*
+
+ Scenario: Search - accept-language missing
+ Given the request parameters
+ | accept-language
+ | xx,fr,en,de
+ When sending json search query "Deutschland"
+ Then results contain
+ | ID | display_name
+ | 0 | Allemagne.*
+
+ Scenario: Search - http accept language header first
+ Given the HTTP header
+ | accept-language
+ | fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
+ When sending json search query "Deutschland"
+ Then results contain
+ | ID | display_name
+ | 0 | Allemagne.*
+
+ Scenario: Search - http accept language header and accept-language
+ Given the request parameters
+ | accept-language
+ | de,en
+ Given the HTTP header
+ | accept-language
+ | fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
+ When sending json search query "Deutschland"
+ Then results contain
+ | ID | display_name
+ | 0 | Deutschland.*
+
+ Scenario: Search - http accept language header fallback
+ Given the HTTP header
+ | accept-language
+ | fr-ca,en-ca;q=0.5
+ When sending json search query "Deutschland"
+ Then results contain
+ | ID | display_name
+ | 0 | Allemagne.*
+
+ Scenario: Search - http accept language header fallback (upper case)
+ Given the HTTP header
+ | accept-language
+ | fr-FR;q=0.8,en-ca;q=0.5
+ When sending json search query "Deutschland"
+ Then results contain
+ | ID | display_name
+ | 0 | Allemagne.*
+
+ Scenario: Reverse - default language
+ When looking up coordinates 48.13921,11.57328
+ Then result addresses contain
+ | ID | city
+ | 0 | München
+
+ Scenario: Reverse - accept-language parameter
+ Given the request parameters
+ | accept-language
+ | en,fr
+ When looking up coordinates 48.13921,11.57328
+ Then result addresses contain
+ | ID | city
+ | 0 | Munich
+
+ Scenario: Reverse - HTTP accept language header
+ Given the HTTP header
+ | accept-language
+ | fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
+ When looking up coordinates 48.13921,11.57328
+ Then result addresses contain
+ | ID | city
+ | 0 | Munich
+
+ Scenario: Reverse - accept-language parameter and HTTP header
+ Given the request parameters
+ | accept-language
+ | it
+ Given the HTTP header
+ | accept-language
+ | fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
+ When looking up coordinates 48.13921,11.57328
+ Then result addresses contain
+ | ID | city
+ | 0 | Monaco di Baviera
--- /dev/null
+Feature: API regression tests
+ Tests error cases reported in tickets.
+
+ @poldi-only
+ Scenario Outline: github #36
+ When sending json search query "<query>" with address
+ Then result addresses contain
+ | ID | road | city
+ | 0 | Seegasse | Gemeinde Wieselburg-Land
+
+ Examples:
+ | query
+ | Seegasse, Gemeinde Wieselburg-Land
+ | Seegasse, Wieselburg-Land
+ | Seegasse, Wieselburg
+
+ Scenario: trac #2430
+ When sending json search query "89 River Avenue, Hoddesdon, Hertfordshire, EN11 0JT"
+ Then at least 1 result is returned
+
+ Scenario: trac #2440
+ When sending json search query "East Harvard Avenue, Denver"
+ Then more than 2 results are returned
+
+ Scenario: trac #2456
+ When sending xml search query "Borlänge Kommun"
+ Then results contain
+ | ID | place_rank
+ | 0 | 19
+
+ Scenario: trac #2530
+ When sending json search query "Lange Straße, Bamberg" with address
+ Then result addresses contain
+ | ID | town
+ | 0 | Bamberg
+
+ Scenario: trac #2541
+ When sending json search query "pad, germany"
+ Then results contain
+ | ID | class | display_name
+ | 0 | aeroway | Paderborn/Lippstadt,.*
+
+ Scenario: trac #2579
+ When sending json search query "Johnsons Close, hackbridge" with address
+ Then result addresses contain
+ | ID | postcode
+ | 0 | SM5 2LU
+
+ @Fail
+ Scenario Outline: trac #2586
+ When sending json search query "<query>" with address
+ Then result addresses contain
+ | ID | country_code
+ | 0 | uk
+
+ Examples:
+ | query
+ | DL7 0SN
+ | DL70SN
+
+ Scenario: trac #2628 (1)
+ When sending json search query "Adam Kraft Str" with address
+ Then result addresses contain
+ | ID | road
+ | 0 | Adam-Kraft-Straße
+
+ Scenario: trac #2628 (2)
+ When sending json search query "Maxfeldstr. 5, Nürnberg" with address
+ Then result addresses contain
+ | ID | house_number | road | city
+ | 0 | 5 | Maxfeldstraße | Nürnberg
+
+ Scenario: trac #2638
+ When sending json search query "Nöthnitzer Str. 40, 01187 Dresden" with address
+ Then result addresses contain
+ | ID | house_number | road | city
+ | 0 | 40 | Nöthnitzer Straße | Dresden
+
+ Scenario Outline: trac #2667
+ When sending json search query "<query>" with address
+ Then result addresses contain
+ | ID | house_number
+ | 0 | <number>
+
+ Examples:
+ | number | query
+ | 16 | 16 Woodpecker Way, Cambourne
+ | 14906 | 14906, 114 Street Northwest, Edmonton, Alberta, Canada
+ | 14904 | 14904, 114 Street Northwest, Edmonton, Alberta, Canada
+ | 15022 | 15022, 114 Street Northwest, Edmonton, Alberta, Canada
+ | 15024 | 15024, 114 Street Northwest, Edmonton, Alberta, Canada
+
+ Scenario: trac #2681
+ When sending json search query "kirchstraße troisdorf Germany"
+ Then results contain
+ | ID | display_name
+ | 0 | .*, Troisdorf, .*
+
+ Scenario: trac #2758
+ When sending json search query "6а, полуботка, чернигов" with address
+ Then result addresses contain
+ | ID | house_number
+ | 0 | 6а
+
+ Scenario: trac #2790
+ When looking up coordinates 49.0942079697809,8.27565898861822
+ Then result addresses contain
+ | ID | road | village | country
+ | 0 | Daimlerstraße | Jockgrim | Deutschland
+
+ Scenario: trac #2794
+ When sending json search query "4008"
+ Then results contain
+ | ID | class | type
+ | 0 | place | postcode
+
+ Scenario: trac #2797
+ When sending json search query "Philippstr.4, 52349 Düren" with address
+ Then result addresses contain
+ | ID | road | town
+ | 0 | Philippstraße | Düren
+
+    Scenario: trac #2830 (1)
+ When sending json search query "528, Merkley Drive, K4A 1N5,CA" with address
+ Then result addresses contain
+ | ID | house_number | road | postcode | country
+ | 0 | 528 | Merkley Drive | K4A 1N5 | Canada
+
+    Scenario: trac #2830 (2)
+ When sending json search query "K4A 1N5,CA"
+ Then results contain
+ | ID | class | type | display_name
+ | 0 | place | postcode | .*, Canada
+
+ Scenario: trac #2845
+ When sending json search query "Leliestraat 31, Zwolle" with address
+ Then result addresses contain
+ | ID | city
+ | 0 | Zwolle
+
+ Scenario: trac #2852
+ When sending json search query "berlinerstrasse, leipzig" with address
+ Then result addresses contain
+ | ID | road
+ | 0 | Berliner Straße
+
+ Scenario: trac #2871
+ When looking up coordinates -33.906895553,150.99609375
+ Then result addresses contain
+ | ID | city | postcode | country
+ | 0 | [^0-9]* | 2197 | Australia
+
+ Scenario: trac #2974
+ When sending json search query "Azadi Square, Faruj" with address
+ Then result addresses contain
+ | ID | road | city
+ | 0 | ميدان آزادي | فاروج
+ And results contain
+ | ID | latlon
+ | 0 | 37.2323,58.2193 +-1km
+
+ Scenario: trac #2981
+ When sending json search query "Ohmstraße 7, Berlin" with address
+ Then at least 2 results are returned
+ And result addresses contain
+ | house_number | road | state
+ | 7 | Ohmstraße | Berlin
+
+ Scenario: trac #3049
+ When sending json search query "Soccer City"
+ Then results contain
+ | ID | class | type | latlon
+ | 0 | leisure | stadium | -26.2347261,27.982645 +-50m
+
+ Scenario: trac #3130
+ When sending json search query "Old Way, Frinton"
+ Then results contain
+ | ID | class | latlon
+ | 0 | highway | 51.8324206,1.2447352 +-100m
--- /dev/null
+Feature: Reverse geocoding
+ Testing the reverse function
+
+ # Make sure country is not overwritten by the postcode
+ Scenario: Country is returned
+ Given the request parameters
+ | accept-language
+ | de
+ When looking up coordinates 53.9788769,13.0830313
+ Then result addresses contain
+ | ID | country
+ | 0 | Deutschland
+
--- /dev/null
+Feature: Simple Reverse Tests
+ Simple tests for internal server errors and response format.
+ These tests should pass on any Nominatim installation.
+
+ Scenario Outline: Simple reverse-geocoding
+ When looking up xml coordinates <lat>,<lon>
+ Then the result is valid xml
+ When looking up json coordinates <lat>,<lon>
+ Then the result is valid json
+ When looking up jsonv2 coordinates <lat>,<lon>
+ Then the result is valid json
+
+ Examples:
+ | lat | lon
+ | 0.0 | 0.0
+ | 45.3 | 3.5
+ | -79.34 | 23.5
+ | 0.23 | -178.555
+
+ Scenario Outline: Wrapping of legal jsonp requests
+ Given the request parameters
+ | json_callback
+ | foo
+ When looking up <format> coordinates 67.3245,0.456
+ Then the result is valid json
+
+ Examples:
+ | format
+ | json
+ | jsonv2
+
+ Scenario: Reverse-geocoding without address
+ Given the request parameters
+ | addressdetails
+ | 0
+ When looking up xml coordinates 36.791966,127.171726
+ Then the result is valid xml
+ When looking up json coordinates 36.791966,127.171726
+ Then the result is valid json
+ When looking up jsonv2 coordinates 36.791966,127.171726
+ Then the result is valid json
+
+ Scenario: Reverse-geocoding with zoom
+ Given the request parameters
+ | zoom
+ | 10
+ When looking up xml coordinates 36.791966,127.171726
+ Then the result is valid xml
+ When looking up json coordinates 36.791966,127.171726
+ Then the result is valid json
+ When looking up jsonv2 coordinates 36.791966,127.171726
+ Then the result is valid json
--- /dev/null
+Feature: Search queries
+ Testing correctness of results
+
+ Scenario: UK House number search
+ When sending json search query "27 Thoresby Road, Broxtowe" with address
+ Then address of result 0 contains
+ | type | value
+ | house_number | 27
+ | road | Thoresby Road
+ | city | Broxtowe
+ | state | England
+ | country | United Kingdom
+ | country_code | gb
+
+
+ Scenario: House number search for non-street address
+ Given the request parameters
+ | accept-language
+ | en
+ When sending json search query "4 Pomocnia, Poland" with address
+ Then address of result 0 is
+ | type | value
+ | house_number | 4
+ | suburb | Pomocnia
+ | county | gmina Pokrzywnica
+ | state | Masovian Voivodeship
+ | postcode | 06-121
+ | country | Poland
+ | country_code | pl
+
+ Scenario: House number interpolation even
+ Given the request parameters
+ | accept-language
+ | en
+ When sending json search query "140 rue Don Bosco, Saguenay" with address
+ Then address of result 0 contains
+ | type | value
+ | house_number | 140
+ | road | rue Don Bosco
+ | city | Saguenay
+ | state | Quebec
+ | country | Canada
+ | country_code | ca
+
+ Scenario: House number interpolation odd
+ Given the request parameters
+ | accept-language
+ | en
+ When sending json search query "141 rue Don Bosco, Saguenay" with address
+ Then address of result 0 contains
+ | type | value
+ | house_number | 141
+ | road | rue Don Bosco
+ | city | Saguenay
+ | state | Quebec
+ | country | Canada
+ | country_code | ca
+
+ Scenario: TIGER house number
+ When sending json search query "3 West Victory Way, Craig"
+ Then result 0 has not attributes osm_id,osm_type
+
+ Scenario: TIGER house number (road fallback)
+ When sending json search query "3030 West Victory Way, Craig"
+ Then result 0 has attributes osm_id,osm_type
+
+ Scenario: Expansion of Illinois
+ Given the request parameters
+ | accept-language
+ | en
+ When sending json search query "il, us"
+ Then results contain
+ | ID | display_name
+ | 0 | Illinois.*
--- /dev/null
+Feature: Result order for Geocoding
+ Testing that importance ordering returns sensible results
+
+ Scenario Outline: city order in street search
+ When sending json search query "<street>, <city>" with address
+ Then address of result 0 contains
+ | type | value
+ | <type> | <city>
+
+ Examples:
+ | type | city | street
+ | city | Zürich | Rigistr
+ | city | Karlsruhe | Sophienstr
+ | city | München | Karlstr
+ | city | Praha | Dlouhá
+
+ Scenario Outline: use more important city in street search
+ When sending json search query "<street>, <city>" with address
+ Then result addresses contain
+ | ID | country_code
+ | 0 | <country>
+
+ Examples:
+ | country | city | street
+ | gb | London | Main St
+ | gb | Manchester | Central Street
+
+ # https://trac.openstreetmap.org/ticket/5094
+ Scenario: housenumbers are ordered by complete match first
+ When sending json search query "4 Докукина Москва" with address
+ Then result addresses contain
+ | ID | house_number
+ | 0 | 4
--- /dev/null
+Feature: Search queries
+ Testing different queries and parameters
+
+ Scenario: Simple XML search
+ When sending xml search query "Schaan"
+ Then result 0 has attributes place_id,osm_type,osm_id
+ And result 0 has attributes place_rank,boundingbox
+ And result 0 has attributes lat,lon,display_name
+ And result 0 has attributes class,type,importance,icon
+ And result 0 has not attributes address
+
+ Scenario: Simple JSON search
+ When sending json search query "Vaduz"
+        Then result 0 has attributes place_id,licence,icon,class,type
+ And result 0 has attributes osm_type,osm_id,boundingbox
+ And result 0 has attributes lat,lon,display_name,importance
+ And result 0 has not attributes address
+
+ Scenario: JSON search with addressdetails
+ When sending json search query "Montevideo" with address
+ Then address of result 0 is
+ | type | value
+ | city | Montevideo
+ | state | Montevideo
+ | country | Uruguay
+ | country_code | uy
+
+ Scenario: XML search with addressdetails
+ When sending xml search query "Inuvik" with address
+ Then address of result 0 is
+ | type | value
+ | town | Inuvik
+ | state | Northwest Territories
+ | country | Canada
+ | country_code | ca
+
+ Scenario: Address details with unknown class types
+ When sending json search query "foobar, Essen" with address
+ Then results contain
+ | ID | class | type
+ | 0 | leisure | hackerspace
+ And result addresses contain
+ | ID | address29
+ | 0 | foobar
+ And address of result 0 does not contain leisure,hackerspace
+
+ Scenario: Disabling deduplication
+ When sending json search query "Oxford Street, London"
+ Then there are no duplicates
+ Given the request parameters
+ | dedupe
+ | 0
+ When sending json search query "Oxford Street, London"
+ Then there are duplicates
+
+ Scenario: Search with bounded viewbox in right area
+ Given the request parameters
+ | bounded | viewbox
+ | 1 | -87.7,41.9,-87.57,41.85
+ When sending json search query "restaurant" with address
+ Then result addresses contain
+ | ID | city
+ | 0 | Chicago
+
+ Scenario: Search with bounded viewboxlbrt in right area
+ Given the request parameters
+ | bounded | viewboxlbrt
+ | 1 | -87.7,41.85,-87.57,41.9
+ When sending json search query "restaurant" with address
+ Then result addresses contain
+ | ID | city
+ | 0 | Chicago
+
+ Scenario: No POI search with unbounded viewbox
+ Given the request parameters
+ | viewbox
+ | -87.7,41.9,-87.57,41.85
+ When sending json search query "restaurant"
+ Then results contain
+ | display_name
+ | [^,]*(?i)restaurant.*
+
+ Scenario: bounded search remains within viewbox, even with no results
+ Given the request parameters
+ | bounded | viewbox
+ | 1 | -5.662003,43.54285,-5.6563282,43.5403125
+ When sending json search query "restaurant"
+ Then less than 1 result is returned
+
+ Scenario: bounded search remains within viewbox with results
+ Given the request parameters
+ | bounded | viewbox
+ | 1 | -5.662003,43.55,-5.6563282,43.5403125
+        When sending json search query "restaurant"
+        Then results contain
+ | lon | lat
+ | >= -5.662003 | >= 43.5403125
+ | <= -5.6563282| <= 43.55
+
+ Scenario: Prefer results within viewbox
+ Given the request parameters
+ | accept-language
+ | en
+ When sending json search query "royan" with address
+ Then result addresses contain
+ | ID | country
+ | 0 | France
+ Given the request parameters
+ | accept-language | viewbox
+ | en | 51.94,36.59,51.99,36.56
+ When sending json search query "royan" with address
+ Then result addresses contain
+ | ID | country
+ | 0 | Iran
+
+ Scenario: Overly large limit number for search results
+ Given the request parameters
+ | limit
+ | 1000
+ When sending json search query "Neustadt"
+ Then at most 50 results are returned
+
+ Scenario: Limit number of search results
+ Given the request parameters
+ | limit
+ | 4
+ When sending json search query "Neustadt"
+ Then exactly 4 results are returned
+
+ Scenario: Restrict to feature type country
+ Given the request parameters
+ | featureType
+ | country
+ When sending xml search query "Monaco"
+ Then results contain
+ | place_rank
+ | 4
+
+ Scenario: Restrict to feature type state
+ When sending xml search query "Berlin"
+ Then results contain
+ | ID | place_rank
+ | 0 | 1[56]
+ Given the request parameters
+ | featureType
+ | state
+ When sending xml search query "Berlin"
+ Then results contain
+ | place_rank
+ | [78]
+
+ Scenario: Restrict to feature type city
+ Given the request parameters
+ | featureType
+ | city
+ When sending xml search query "Monaco"
+ Then results contain
+ | place_rank
+ | 1[56789]
+
+
+ Scenario: Restrict to feature type settlement
+ When sending json search query "Everest"
+ Then results contain
+ | ID | display_name
+ | 0 | Mount Everest.*
+ Given the request parameters
+ | featureType
+ | settlement
+ When sending json search query "Everest"
+ Then results contain
+ | ID | display_name
+ | 0 | Everest.*
--- /dev/null
+Feature: Simple Tests
+ Simple tests for internal server errors and response format.
+ These tests should pass on any Nominatim installation.
+
+ Scenario Outline: Testing different parameters
+ Given the request parameters
+ | <parameter>
+ | <value>
+ When sending search query "Manchester"
+ Then the result is valid html
+ Given the request parameters
+ | <parameter>
+ | <value>
+ When sending html search query "Manchester"
+ Then the result is valid html
+ Given the request parameters
+ | <parameter>
+ | <value>
+ When sending xml search query "Manchester"
+ Then the result is valid xml
+ Given the request parameters
+ | <parameter>
+ | <value>
+ When sending json search query "Manchester"
+ Then the result is valid json
+ Given the request parameters
+ | <parameter>
+ | <value>
+ When sending jsonv2 search query "Manchester"
+ Then the result is valid json
+
+ Examples:
+ | parameter | value
+ | addressdetails | 1
+ | addressdetails | 0
+ | polygon | 1
+ | polygon | 0
+ | polygon_text | 1
+ | polygon_text | 0
+ | polygon_kml | 1
+ | polygon_kml | 0
+ | polygon_geojson | 1
+ | polygon_geojson | 0
+ | polygon_svg | 1
+ | polygon_svg | 0
+ | accept-language | de,en
+ | countrycodes | uk,ir
+ | bounded | 1
+ | bounded | 0
+ | exclude_place_ids| 385252,1234515
+ | limit | 1000
+ | dedupe | 1
+ | dedupe | 0
+
+ Scenario: Search with invalid output format
+ Given the request parameters
+ | format
+ | fd$#
+ When sending search query "Berlin"
+ Then the result is valid html
+
+ Scenario Outline: Simple Searches
+ When sending search query "<query>"
+ Then the result is valid html
+ When sending html search query "<query>"
+ Then the result is valid html
+ When sending xml search query "<query>"
+ Then the result is valid xml
+ When sending json search query "<query>"
+ Then the result is valid json
+ When sending jsonv2 search query "<query>"
+ Then the result is valid json
+
+ Examples:
+ | query
+ | New York, New York
+ | France
+ | 12, Main Street, Houston
+ | München
+ | 東京都
+ | hotels in nantes
+ | xywxkrf
+ | gh; foo()
+ | %#$@*&l;der#$!
+ | 234
+ | 47.4,8.3
+
+ Scenario: Empty XML search
+ When sending xml search query "xnznxvcx"
+ Then result header contains
+ | attr | value
+ | querystring | xnznxvcx
+ | polygon | false
+ | more_url | .*format=xml.*q=xnznxvcx.*
+
+ Scenario: Empty XML search with special XML characters
+ When sending xml search query "xfdghn&zxn"xvbyx<vxx>cssdex"
+ Then result header contains
+ | attr | value
+ | querystring | xfdghn&zxn"xvbyx<vxx>cssdex
+ | polygon | false
+ | more_url | .*format=xml.*q=xfdghn&zxn"xvbyx<vxx>cssdex.*
+
+ Scenario: Empty XML search with viewbox
+ Given the request parameters
+ | viewbox
+ | 12,45.13,77,33
+ When sending xml search query "xnznxvcx"
+ Then result header contains
+ | attr | value
+ | querystring | xnznxvcx
+ | polygon | false
+ | viewbox | 12,45.13,77,33
+
+ Scenario: Empty XML search with viewboxlbrt
+ Given the request parameters
+ | viewboxlbrt
+ | 12,34.13,77,45
+ When sending xml search query "xnznxvcx"
+ Then result header contains
+ | attr | value
+ | querystring | xnznxvcx
+ | polygon | false
+ | viewbox | 12,45.13,77,33
+
+ Scenario: Empty XML search with viewboxlbrt and viewbox
+ Given the request parameters
+          | viewbox        | viewboxlbrt
+ | 12,45.13,77,33 | 1,2,3,4
+ When sending xml search query "pub"
+ Then result header contains
+ | attr | value
+ | querystring | pub
+ | polygon | false
+ | viewbox | 12,45.13,77,33
+
+
+ Scenario Outline: Empty XML search with polygon values
+ Given the request parameters
+ | polygon
+ | <polyval>
+ When sending xml search query "xnznxvcx"
+ Then result header contains
+ | attr | value
+ | polygon | <result>
+
+ Examples:
+ | result | polyval
+ | false | 0
+ | true | 1
+ | true | True
+ | true | true
+ | true | false
+ | true | FALSE
+ | true | yes
+ | true | no
+ | true | '; delete from foobar; select '
+
+
+    Scenario: Empty XML search with excluded place ids
+ Given the request parameters
+ | exclude_place_ids
+ | 123,76,342565
+ When sending xml search query "jghrleoxsbwjer"
+ Then result header contains
+ | attr | value
+ | exclude_place_ids | 123,76,342565
+
+ Scenario Outline: Wrapping of legal jsonp search requests
+ Given the request parameters
+ | json_callback
+ | <data>
+ When sending json search query "Tokyo"
+ Then there is a json wrapper "<data>"
+
+ Examples:
+ | data
+ | foo
+ | FOO
+ | __world
+ | $me
+ | m1[4]
+ | d_r[$d]
+
+ Scenario Outline: Wrapping of illegal jsonp search requests
+ Given the request parameters
+ | json_callback
+ | <data>
+ When sending json search query "Tokyo"
+ Then a HTTP 400 is returned
+
+ Examples:
+ | data
+ | 1asd
+ | bar(foo)
+ | XXX['bad']
+ | foo; evil
+
+    Scenario: Ignore jsonp parameter for anything but json
+ Given the request parameters
+ | json_callback
+ | 234
+ When sending json search query "Malibu"
+ Then a HTTP 400 is returned
+ Given the request parameters
+ | json_callback
+ | 234
+ When sending xml search query "Malibu"
+ Then the result is valid xml
+ Given the request parameters
+ | json_callback
+ | 234
+ When sending html search query "Malibu"
+ Then the result is valid html
+
+ Scenario: Empty JSON search
+ When sending json search query "YHlERzzx"
+ Then exactly 0 results are returned
+
+ Scenario: Empty JSONv2 search
+ When sending jsonv2 search query "Flubb XdfESSaZx"
+ Then exactly 0 results are returned
+
+ Scenario: Search for non-existing coordinates
+ When sending json search query "-21.0,-33.0"
+ Then exactly 0 results are returned
+
--- /dev/null
+Feature: Structured search queries
+ Testing correctness of results with
+ structured queries
+
+ Scenario: Country only
+ When sending json structured query with address
+ | country
+ | Canada
+ Then address of result 0 is
+ | type | value
+ | country | Canada
+ | country_code | ca
+
+ Scenario: Postcode only
+ When sending json structured query with address
+ | postalcode
+ | 22547
+ Then at least 1 result is returned
+ And results contain
+ | type
+ | post(al_)?code
+ And result addresses contain
+ | postcode
+ | 22547
+
+
+ Scenario: Street, postcode and country
+ When sending xml structured query with address
+ | street | postalcode | country
+ | Old Palace Road | GU2 7UP | United Kingdom
+ Then at least 1 result is returned
+ Then result header contains
+ | attr | value
+ | querystring | Old Palace Road, GU2 7UP, United Kingdom
+
+
+    Scenario: github #176
+ When sending json structured query with address
+ | city
+ | Washington
+ Then at least 1 result is returned
--- /dev/null
+@DB
+Feature: Linking of places
+ Tests for correctly determining linked places
+
+ Scenario: Waterways are linked when in waterway relations
+ Given the scene split-road
+ And the place ways
+ | osm_type | osm_id | class | type | name | geometry
+ | W | 1 | waterway | river | Rhein | :w-2
+ | W | 2 | waterway | river | Rhein | :w-3
+ | R | 13 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
+ | R | 23 | waterway | river | Limmat| :w-4a
+ And the relations
+ | id | members | tags
+ | 13 | R23:tributary,W1,W2:main_stream | 'type' : 'waterway'
+ When importing
+ Then table placex contains
+ | object | linked_place_id
+ | W1 | R13
+ | W2 | R13
+ | R13 | None
+ | R23 | None
+ When sending query "rhein"
+ Then results contain
+ | osm_type
+ | R
+
+ Scenario: Relations are not linked when in waterway relations
+ Given the scene split-road
+ And the place ways
+ | osm_type | osm_id | class | type | name | geometry
+ | W | 1 | waterway | river | Rhein | :w-2
+ | W | 2 | waterway | river | Rhein | :w-3
+ | R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
+ | R | 2 | waterway | river | Limmat| :w-4a
+ And the relations
+ | id | members | tags
+ | 1 | R2 | 'type' : 'waterway'
+ When importing
+ Then table placex contains
+ | object | linked_place_id
+ | W1 | None
+ | W2 | None
+ | R1 | None
+ | R2 | None
+
+ Scenario: Empty waterway relations are handled correctly
+ Given the scene split-road
+ And the place ways
+ | osm_type | osm_id | class | type | name | geometry
+ | R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
+ And the relations
+ | id | members | tags
+ | 1 | | 'type' : 'waterway'
+ When importing
+ Then table placex contains
+ | object | linked_place_id
+ | R1 | None
+
+ Scenario: Waterways are not linked when waterway types don't match
+ Given the scene split-road
+ And the place ways
+ | osm_type | osm_id | class | type | name | geometry
+ | W | 1 | waterway | drain | Rhein | :w-2
+ | R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
+ And the relations
+ | id | members | tags
+ | 1 | N23,N34,W1,R45 | 'type' : 'multipolygon'
+ When importing
+ Then table placex contains
+ | object | linked_place_id
+ | W1 | None
+ | R1 | None
+ When sending query "rhein"
+ Then results contain
+ | ID | osm_type
+ | 0 | R
+ | 1 | W
+
+ Scenario: Side streams are linked only when they have the same name
+ Given the scene split-road
+ And the place ways
+ | osm_type | osm_id | class | type | name | geometry
+ | W | 1 | waterway | river | Rhein2 | :w-2
+ | W | 2 | waterway | river | Rhein | :w-3
+ | R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
+ And the relations
+ | id | members | tags
+ | 1 | W1:side_stream,W2:side_stream | 'type' : 'waterway'
+ When importing
+ Then table placex contains
+ | object | linked_place_id
+ | W1 | None
+ | W2 | R1
+ When sending query "rhein2"
+ Then results contain
+ | osm_type
+ | W
--- /dev/null
+@DB
+Feature: Import and search of names
+ Tests all naming related issues: normalisation,
+ abbreviations, internationalisation, etc.
+
+
+ Scenario: Case-insensitivity of search
+ Given the place nodes
+ | osm_id | class | type | name
+ | 1 | place | locality | 'name' : 'FooBar'
+ When importing
+ Then table placex contains
+ | object | class | type | name
+ | N1 | place | locality | 'name' : 'FooBar'
+ When sending query "FooBar"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query "foobar"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query "fOObar"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query "FOOBAR"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+
+ Scenario: Multiple spaces in name
+ Given the place nodes
+ | osm_id | class | type | name
+ | 1 | place | locality | 'name' : 'one two three'
+ When importing
+ When sending query "one two three"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query "one two three"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query "one two three"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query " one two three"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+
+ Scenario: Special characters in name
+ Given the place nodes
+ | osm_id | class | type | name
+ | 1 | place | locality | 'name' : 'Jim-Knopf-Str'
+ | 2 | place | locality | 'name' : 'Smith/Weston'
+ | 3 | place | locality | 'name' : 'space mountain'
+ | 4 | place | locality | 'name' : 'space'
+ | 5 | place | locality | 'name' : 'mountain'
+ When importing
+ When sending query "Jim-Knopf-Str"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query "Jim Knopf-Str"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query "Jim Knopf Str"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query "Jim/Knopf-Str"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query "Jim-Knopfstr"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+ When sending query "Smith/Weston"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 2
+ When sending query "Smith Weston"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 2
+ When sending query "Smith-Weston"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 2
+ When sending query "space mountain"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 3
+ When sending query "space-mountain"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 3
+ When sending query "space/mountain"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 3
+ When sending query "space\mountain"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 3
+ When sending query "space(mountain)"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 3
+
+ Scenario: No copying name tag if only one name
+ Given the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | place | locality | 'name' : 'german' | country:de
+ When importing
+ Then table placex contains
+ | object | calculated_country_code |
+ | N1 | de
+ And table placex contains as names for N1
+ | object | k | v
+ | N1 | name | german
+
+ Scenario: Copying name tag to default language if it does not exist
+ Given the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | place | locality | 'name' : 'german', 'name:fi' : 'finnish' | country:de
+ When importing
+ Then table placex contains
+ | object | calculated_country_code |
+ | N1 | de
+ And table placex contains as names for N1
+ | k | v
+ | name | german
+ | name:fi | finnish
+ | name:de | german
+
+ Scenario: Copying default language name tag to name if it does not exist
+ Given the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | place | locality | 'name:de' : 'german', 'name:fi' : 'finnish' | country:de
+ When importing
+ Then table placex contains
+ | object | calculated_country_code |
+ | N1 | de
+ And table placex contains as names for N1
+ | k | v
+ | name | german
+ | name:fi | finnish
+ | name:de | german
+
+ Scenario: Do not overwrite default language with name tag
+ Given the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | place | locality | 'name' : 'german', 'name:fi' : 'finnish', 'name:de' : 'local' | country:de
+ When importing
+ Then table placex contains
+ | object | calculated_country_code |
+ | N1 | de
+ And table placex contains as names for N1
+ | k | v
+ | name | german
+ | name:fi | finnish
+ | name:de | local
+
+    Scenario: Landuse areas without a name are ignored
+ Given the place areas
+ | osm_type | osm_id | class | type | geometry
+ | R | 1 | natural | meadow | (0 0, 1 0, 1 1, 0 1, 0 0)
+ | R | 2 | landuse | industrial | (0 0, -1 0, -1 -1, 0 -1, 0 0)
+ When importing
+ Then table placex has no entry for R1
+ And table placex has no entry for R2
+
+    Scenario: Landuse areas with a name are found
+ Given the place areas
+ | osm_type | osm_id | class | type | name | geometry
+ | R | 1 | natural | meadow | 'name' : 'landuse1' | (0 0, 1 0, 1 1, 0 1, 0 0)
+ | R | 2 | landuse | industrial | 'name' : 'landuse2' | (0 0, -1 0, -1 -1, 0 -1, 0 0)
+ When importing
+ When sending query "landuse1"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | R | 1
+ When sending query "landuse2"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | R | 2
+
+ Scenario: Postcode boundaries without ref
+ Given the place areas
+ | osm_type | osm_id | class | type | postcode | geometry
+ | R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0)
+ When importing
+ When sending query "12345"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | R | 1
--- /dev/null
+@DB
+Feature: Parenting of objects
+    Tests that the correct parent is chosen
+
+ Scenario: Address inherits postcode from its street unless it has a postcode
+ Given the scene roads-with-pois
+ And the place nodes
+ | osm_id | class | type | housenumber | geometry
+ | 1 | place | house | 4 | :p-N1
+ And the place nodes
+ | osm_id | class | type | housenumber | postcode | geometry
+ | 2 | place | house | 5 | 99999 | :p-N1
+ And the place ways
+ | osm_id | class | type | name | postcode | geometry
+ | 1 | highway | residential | galoo | 12345 | :w-north
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W1
+ | N2 | W1
+ When sending query "4 galoo"
+ Then results contain
+ | ID | osm_type | osm_id | langaddress
+ | 0 | N | 1 | 4, galoo, 12345
+ When sending query "5 galoo"
+ Then results contain
+ | ID | osm_type | osm_id | langaddress
+ | 0 | N | 2 | 5, galoo, 99999
+
+
+ Scenario: Address without tags, closest street
+ Given the scene roads-with-pois
+ And the place nodes
+ | osm_id | class | type | geometry
+ | 1 | place | house | :p-N1
+ | 2 | place | house | :p-N2
+ | 3 | place | house | :p-S1
+ | 4 | place | house | :p-S2
+ And the named place ways
+ | osm_id | class | type | geometry
+ | 1 | highway | residential | :w-north
+ | 2 | highway | residential | :w-south
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W1
+ | N2 | W1
+ | N3 | W2
+ | N4 | W2
+
+ Scenario: Address without tags avoids unnamed streets
+ Given the scene roads-with-pois
+ And the place nodes
+ | osm_id | class | type | geometry
+ | 1 | place | house | :p-N1
+ | 2 | place | house | :p-N2
+ | 3 | place | house | :p-S1
+ | 4 | place | house | :p-S2
+ And the place ways
+ | osm_id | class | type | geometry
+ | 1 | highway | residential | :w-north
+ And the named place ways
+ | osm_id | class | type | geometry
+ | 2 | highway | residential | :w-south
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W2
+ | N2 | W2
+ | N3 | W2
+ | N4 | W2
+
+ Scenario: addr:street tag parents to appropriately named street
+ Given the scene roads-with-pois
+ And the place nodes
+ | osm_id | class | type | street| geometry
+ | 1 | place | house | south | :p-N1
+ | 2 | place | house | north | :p-N2
+ | 3 | place | house | south | :p-S1
+ | 4 | place | house | north | :p-S2
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 1 | highway | residential | north | :w-north
+ | 2 | highway | residential | south | :w-south
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W2
+ | N2 | W1
+ | N3 | W2
+ | N4 | W1
+
+ Scenario: addr:street tag parents to next named street
+ Given the scene roads-with-pois
+ And the place nodes
+ | osm_id | class | type | street | geometry
+ | 1 | place | house | abcdef | :p-N1
+ | 2 | place | house | abcdef | :p-N2
+ | 3 | place | house | abcdef | :p-S1
+ | 4 | place | house | abcdef | :p-S2
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 1 | highway | residential | abcdef | :w-north
+ | 2 | highway | residential | abcdef | :w-south
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W1
+ | N2 | W1
+ | N3 | W2
+ | N4 | W2
+
+ Scenario: addr:street tag without appropriately named street
+ Given the scene roads-with-pois
+ And the place nodes
+ | osm_id | class | type | street | geometry
+ | 1 | place | house | abcdef | :p-N1
+ | 2 | place | house | abcdef | :p-N2
+ | 3 | place | house | abcdef | :p-S1
+ | 4 | place | house | abcdef | :p-S2
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 1 | highway | residential | abcde | :w-north
+ | 2 | highway | residential | abcde | :w-south
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W1
+ | N2 | W1
+ | N3 | W2
+ | N4 | W2
+
+ Scenario: addr:place address
+ Given the scene road-with-alley
+ And the place nodes
+ | osm_id | class | type | addr_place | geometry
+ | 1 | place | house | myhamlet | :n-alley
+ And the place nodes
+ | osm_id | class | type | name | geometry
+ | 2 | place | hamlet | myhamlet | :n-main-west
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 1 | highway | residential | myhamlet | :w-main
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | N2
+
+ Scenario: addr:street is preferred over addr:place
+ Given the scene road-with-alley
+ And the place nodes
+ | osm_id | class | type | addr_place | street | geometry
+ | 1 | place | house | myhamlet | mystreet| :n-alley
+ And the place nodes
+ | osm_id | class | type | name | geometry
+ | 2 | place | hamlet | myhamlet | :n-main-west
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 1 | highway | residential | mystreet | :w-main
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W1
+
+ Scenario: Untagged address in simple associated street relation
+ Given the scene road-with-alley
+ And the place nodes
+ | osm_id | class | type | geometry
+ | 1 | place | house | :n-alley
+ | 2 | place | house | :n-corner
+ | 3 | place | house | :n-main-west
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 1 | highway | residential | foo | :w-main
+ | 2 | highway | service | bar | :w-alley
+ And the relations
+ | id | members | tags
+ | 1 | W1:street,N1,N2,N3 | 'type' : 'associatedStreet'
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W1
+ | N2 | W1
+ | N3 | W1
+
+ Scenario: Avoid unnamed streets in simple associated street relation
+ Given the scene road-with-alley
+ And the place nodes
+ | osm_id | class | type | geometry
+ | 1 | place | house | :n-alley
+ | 2 | place | house | :n-corner
+ | 3 | place | house | :n-main-west
+ And the named place ways
+ | osm_id | class | type | geometry
+ | 1 | highway | residential | :w-main
+ And the place ways
+ | osm_id | class | type | geometry
+ | 2 | highway | residential | :w-alley
+ And the relations
+ | id | members | tags
+ | 1 | N1,N2,N3,W2:street,W1:street | 'type' : 'associatedStreet'
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W1
+ | N2 | W1
+ | N3 | W1
+
+ ### Scenario 10
+ Scenario: Associated street relation overrides addr:street
+ Given the scene road-with-alley
+ And the place nodes
+ | osm_id | class | type | street | geometry
+ | 1 | place | house | bar | :n-alley
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 1 | highway | residential | foo | :w-main
+ | 2 | highway | residential | bar | :w-alley
+ And the relations
+ | id | members | tags
+ | 1 | W1:street,N1,N2,N3 | 'type' : 'associatedStreet'
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W1
+
+ Scenario: Building without tags, closest street from center point
+ Given the scene building-on-street-corner
+ And the named place ways
+ | osm_id | class | type | geometry
+ | 1 | building | yes | :w-building
+ | 2 | highway | primary | :w-WE
+ | 3 | highway | residential | :w-NS
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | W1 | W3
+
+ Scenario: Building with addr:street tags
+ Given the scene building-on-street-corner
+ And the named place ways
+ | osm_id | class | type | street | geometry
+ | 1 | building | yes | bar | :w-building
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 2 | highway | primary | bar | :w-WE
+ | 3 | highway | residential | foo | :w-NS
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | W1 | W2
+
+ Scenario: Building with addr:place tags
+ Given the scene building-on-street-corner
+ And the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | place | village | bar | :n-outer
+ And the named place ways
+ | osm_id | class | type | addr_place | geometry
+ | 1 | building | yes | bar | :w-building
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 2 | highway | primary | bar | :w-WE
+ | 3 | highway | residential | foo | :w-NS
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | W1 | N1
+
+ Scenario: Building in associated street relation
+ Given the scene building-on-street-corner
+ And the named place ways
+ | osm_id | class | type | geometry
+ | 1 | building | yes | :w-building
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 2 | highway | primary | bar | :w-WE
+ | 3 | highway | residential | foo | :w-NS
+ And the relations
+ | id | members | tags
+ | 1 | W1:house,W2:street | 'type' : 'associatedStreet'
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | W1 | W2
+
+ Scenario: Building in associated street relation overrides addr:street
+ Given the scene building-on-street-corner
+ And the named place ways
+ | osm_id | class | type | street | geometry
+ | 1 | building | yes | foo | :w-building
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 2 | highway | primary | bar | :w-WE
+ | 3 | highway | residential | foo | :w-NS
+ And the relations
+ | id | members | tags
+ | 1 | W1:house,W2:street | 'type' : 'associatedStreet'
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | W1 | W2
+
+ Scenario: Wrong member in associated street relation is ignored
+ Given the scene building-on-street-corner
+ And the named place nodes
+ | osm_id | class | type | geometry
+ | 1 | place | house | :n-outer
+ And the named place ways
+ | osm_id | class | type | street | geometry
+ | 1 | building | yes | foo | :w-building
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 2 | highway | primary | bar | :w-WE
+ | 3 | highway | residential | foo | :w-NS
+ And the relations
+ | id | members | tags
+ | 1 | N1:house,W1:street,W3:street | 'type' : 'associatedStreet'
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W3
+
+ Scenario: POIs in building inherit address
+ Given the scene building-on-street-corner
+ And the named place nodes
+ | osm_id | class | type | geometry
+ | 1 | amenity | bank | :n-inner
+ | 2 | shop | bakery | :n-edge-NS
+ | 3 | shop | supermarket| :n-edge-WE
+ And the place ways
+ | osm_id | class | type | street | addr_place | housenumber | geometry
+ | 1 | building | yes | foo | nowhere | 3 | :w-building
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 2 | highway | primary | bar | :w-WE
+ | 3 | highway | residential | foo | :w-NS
+ When importing
+ Then table placex contains
+ | object | parent_place_id | street | addr_place | housenumber
+ | W1 | W3 | foo | nowhere | 3
+ | N1 | W3 | foo | nowhere | 3
+ | N2 | W3 | foo | nowhere | 3
+ | N3 | W3 | foo | nowhere | 3
+
+ Scenario: POIs don't inherit from streets
+ Given the scene building-on-street-corner
+ And the named place nodes
+ | osm_id | class | type | geometry
+ | 1 | amenity | bank | :n-inner
+ And the place ways
+ | osm_id | class | type | street | addr_place | housenumber | geometry
+ | 1 | highway | path | foo | nowhere | 3 | :w-building
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 3 | highway | residential | foo | :w-NS
+ When importing
+ Then table placex contains
+ | object | parent_place_id | street | addr_place | housenumber
+ | N1 | W3 | None | None | None
+
+ Scenario: POIs with own address do not inherit building address
+ Given the scene building-on-street-corner
+ And the named place nodes
+ | osm_id | class | type | street | geometry
+ | 1 | amenity | bank | bar | :n-inner
+ And the named place nodes
+ | osm_id | class | type | housenumber | geometry
+ | 2 | shop | bakery | 4 | :n-edge-NS
+ And the named place nodes
+ | osm_id | class | type | addr_place | geometry
+ | 3 | shop | supermarket| nowhere | :n-edge-WE
+ And the place nodes
+ | osm_id | class | type | name | geometry
+ | 4 | place | isolated_dwelling | theplace | :n-outer
+ And the place ways
+ | osm_id | class | type | addr_place | housenumber | geometry
+ | 1 | building | yes | theplace | 3 | :w-building
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 2 | highway | primary | bar | :w-WE
+ | 3 | highway | residential | foo | :w-NS
+ When importing
+ Then table placex contains
+ | object | parent_place_id | street | addr_place | housenumber
+ | W1 | N4 | None | theplace | 3
+ | N1 | W2 | bar | None | None
+ | N2 | W3 | None | None | 4
+ | N3 | W2 | None | nowhere | None
+
+ ### Scenario 20
+ Scenario: POIs parent a road if and only if they are attached to it
+ Given the scene points-on-roads
+ And the named place nodes
+ | osm_id | class | type | street | geometry
+ | 1 | highway | bus_stop | North St | :n-SE
+ | 2 | highway | bus_stop | South St | :n-NW
+ | 3 | highway | bus_stop | North St | :n-S-unglued
+ | 4 | highway | bus_stop | South St | :n-N-unglued
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 1 | highway | secondary | North St | :w-north
+ | 2 | highway | unclassified | South St | :w-south
+ And the ways
+ | id | nodes
+ | 1 | 100,101,2,103,104
+ | 2 | 200,201,1,202,203
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W2
+ | N2 | W1
+ | N3 | W1
+ | N4 | W2
+
+ Scenario: POIs do not parent non-roads they are attached to
+ Given the scene points-on-roads
+ And the named place nodes
+ | osm_id | class | type | street | geometry
+ | 1 | highway | bus_stop | North St | :n-SE
+ | 2 | highway | bus_stop | South St | :n-NW
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 1 | landuse | residential | North St | :w-north
+ | 2 | waterway| river | South St | :w-south
+ And the ways
+ | id | nodes
+ | 1 | 100,101,2,103,104
+ | 2 | 200,201,1,202,203
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | 0
+ | N2 | 0
+
+ Scenario: POIs on building outlines inherit associated street relation
+ Given the scene building-on-street-corner
+ And the named place nodes
+ | osm_id | class | type | geometry
+ | 1 | place | house | :n-edge-NS
+ And the named place ways
+ | osm_id | class | type | geometry
+ | 1 | building | yes | :w-building
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 2 | highway | primary | bar | :w-WE
+ | 3 | highway | residential | foo | :w-NS
+ And the relations
+ | id | members | tags
+ | 1 | W1:house,W2:street | 'type' : 'associatedStreet'
+ And the ways
+ | id | nodes
+ | 1 | 100,1,101,102,100
+ When importing
+ Then table placex contains
+ | object | parent_place_id
+ | N1 | W2
+
--- /dev/null
+@DB
+Feature: Import into placex
+ Tests that data in placex is completed correctly.
+
+ Scenario: No country code tag is available
+ Given the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | highway | primary | 'name' : 'A1' | country:us
+ When importing
+ Then table placex contains
+ | object | country_code | calculated_country_code |
+ | N1 | None | us |
+
+ Scenario: Location overwrites country code tag
+ Given the scene country
+ And the place nodes
+ | osm_id | class | type | name | country_code | geometry
+ | 1 | highway | primary | 'name' : 'A1' | de | :us
+ When importing
+ Then table placex contains
+ | object | country_code | calculated_country_code |
+ | N1 | de | us |
+
+ Scenario: Country code tag overwrites location for countries
+ Given the place areas
+ | osm_type | osm_id | class | type | admin_level | name | country_code | geometry
+ | R | 1 | boundary | administrative | 2 | 'name' : 'foo' | de | (-100 40, -101 40, -101 41, -100 41, -100 40)
+ When importing
+ Then table placex contains
+ | object | country_code | calculated_country_code |
+ | R1 | de | de |
+
+ Scenario: Illegal country code tag for countries is ignored
+        Given the place areas
+ | osm_type | osm_id | class | type | admin_level | name | country_code | geometry
+ | R | 1 | boundary | administrative | 2 | 'name' : 'foo' | xx | (-100 40, -101 40, -101 41, -100 41, -100 40)
+ When importing
+ Then table placex contains
+ | object | country_code | calculated_country_code |
+ | R1 | xx | us |
+
+ Scenario: admin level is copied over
+ Given the place nodes
+ | osm_id | class | type | admin_level | name
+ | 1 | place | state | 3 | 'name' : 'foo'
+ When importing
+ Then table placex contains
+ | object | admin_level |
+ | N1 | 3 |
+
+ Scenario: admin level is default 15
+ Given the place nodes
+ | osm_id | class | type | name
+ | 1 | amenity | prison | 'name' : 'foo'
+ When importing
+ Then table placex contains
+ | object | admin_level |
+ | N1 | 15 |
+
+ Scenario: admin level is never larger than 15
+ Given the place nodes
+ | osm_id | class | type | name | admin_level
+ | 1 | amenity | prison | 'name' : 'foo' | 16
+ When importing
+ Then table placex contains
+ | object | admin_level |
+ | N1 | 15 |
+
+
+ Scenario: postcode node without postcode is dropped
+ Given the place nodes
+ | osm_id | class | type
+ | 1 | place | postcode
+ When importing
+ Then table placex has no entry for N1
+
+ Scenario: postcode boundary without postcode is dropped
+ Given the place areas
+ | osm_type | osm_id | class | type | geometry
+ | R | 1 | boundary | postal_code | poly-area:0.1
+ When importing
+ Then table placex has no entry for R1
+
+    Scenario: search and address ranks for GB postcodes are correctly assigned
+ Given the place nodes
+ | osm_id | class | type | postcode | geometry
+ | 1 | place | postcode | E45 2CD | country:gb
+ | 2 | place | postcode | E45 2 | country:gb
+ | 3 | place | postcode | Y45 | country:gb
+ When importing
+ Then table placex contains
+ | object | postcode | calculated_country_code | rank_search | rank_address
+ | N1 | E45 2CD | gb | 25 | 5
+ | N2 | E45 2 | gb | 23 | 5
+ | N3 | Y45 | gb | 21 | 5
+
+ Scenario: wrongly formatted GB postcodes are down-ranked
+ Given the place nodes
+ | osm_id | class | type | postcode | geometry
+ | 1 | place | postcode | EA452CD | country:gb
+ | 2 | place | postcode | E45 23 | country:gb
+ | 3 | place | postcode | y45 | country:gb
+ When importing
+ Then table placex contains
+ | object | calculated_country_code | rank_search | rank_address
+ | N1 | gb | 30 | 30
+ | N2 | gb | 30 | 30
+ | N3 | gb | 30 | 30
+
+    Scenario: search and address ranks for DE postcodes are correctly assigned
+ Given the place nodes
+ | osm_id | class | type | postcode | geometry
+ | 1 | place | postcode | 56427 | country:de
+ | 2 | place | postcode | 5642 | country:de
+ | 3 | place | postcode | 5642A | country:de
+ | 4 | place | postcode | 564276 | country:de
+ When importing
+ Then table placex contains
+ | object | calculated_country_code | rank_search | rank_address
+ | N1 | de | 21 | 11
+ | N2 | de | 30 | 30
+ | N3 | de | 30 | 30
+ | N4 | de | 30 | 30
+
+    Scenario: search and address ranks for other postcodes are correctly assigned
+ Given the place nodes
+ | osm_id | class | type | postcode | geometry
+ | 1 | place | postcode | 1 | country:ca
+ | 2 | place | postcode | X3 | country:ca
+ | 3 | place | postcode | 543 | country:ca
+ | 4 | place | postcode | 54dc | country:ca
+ | 5 | place | postcode | 12345 | country:ca
+ | 6 | place | postcode | 55TT667 | country:ca
+ | 7 | place | postcode | 123-65 | country:ca
+ | 8 | place | postcode | 12 445 4 | country:ca
+ | 9 | place | postcode | A1:bc10 | country:ca
+ When importing
+ Then table placex contains
+ | object | calculated_country_code | rank_search | rank_address
+ | N1 | ca | 21 | 11
+ | N2 | ca | 21 | 11
+ | N3 | ca | 21 | 11
+ | N4 | ca | 21 | 11
+ | N5 | ca | 21 | 11
+ | N6 | ca | 21 | 11
+ | N7 | ca | 25 | 11
+ | N8 | ca | 25 | 11
+ | N9 | ca | 25 | 11
+
+
+ Scenario: search and address ranks for places are correctly assigned
+ Given the named place nodes
+ | osm_id | class | type |
+ | 1 | foo | bar |
+ | 11 | place | Continent |
+ | 12 | place | continent |
+ | 13 | place | sea |
+ | 14 | place | country |
+ | 15 | place | state |
+ | 16 | place | region |
+ | 17 | place | county |
+ | 18 | place | city |
+ | 19 | place | island |
+ | 20 | place | town |
+ | 21 | place | village |
+ | 22 | place | hamlet |
+ | 23 | place | municipality |
+ | 24 | place | district |
+ | 25 | place | unincorporated_area |
+ | 26 | place | borough |
+ | 27 | place | suburb |
+ | 28 | place | croft |
+ | 29 | place | subdivision |
+ | 30 | place | isolated_dwelling |
+ | 31 | place | farm |
+ | 32 | place | locality |
+ | 33 | place | islet |
+ | 34 | place | mountain_pass |
+ | 35 | place | neighbourhood |
+ | 36 | place | house |
+ | 37 | place | building |
+ | 38 | place | houses |
+ And the named place nodes
+ | osm_id | class | type | extratags
+ | 100 | place | locality | 'locality' : 'townland'
+ | 101 | place | city | 'capital' : 'yes'
+ When importing
+ Then table placex contains
+ | object | rank_search | rank_address |
+ | N1 | 30 | 30 |
+ | N11 | 30 | 30 |
+ | N12 | 2 | 2 |
+ | N13 | 2 | 0 |
+ | N14 | 4 | 4 |
+ | N15 | 8 | 8 |
+ | N16 | 18 | 0 |
+ | N17 | 12 | 12 |
+ | N18 | 16 | 16 |
+ | N19 | 17 | 0 |
+ | N20 | 18 | 16 |
+ | N21 | 19 | 16 |
+ | N22 | 19 | 16 |
+ | N23 | 19 | 16 |
+ | N24 | 19 | 16 |
+ | N25 | 19 | 16 |
+ | N26 | 19 | 16 |
+ | N27 | 20 | 20 |
+ | N28 | 20 | 20 |
+ | N29 | 20 | 20 |
+ | N30 | 20 | 20 |
+ | N31 | 20 | 0 |
+ | N32 | 20 | 0 |
+ | N33 | 20 | 0 |
+ | N34 | 20 | 0 |
+ | N100 | 20 | 20 |
+ | N101 | 15 | 16 |
+ | N35 | 22 | 22 |
+ | N36 | 30 | 30 |
+ | N37 | 30 | 30 |
+ | N38 | 28 | 0 |
+
+ Scenario: search and address ranks for boundaries are correctly assigned
+ Given the named place nodes
+ | osm_id | class | type
+ | 1 | boundary | administrative
+ And the named place ways
+ | osm_id | class | type | geometry
+ | 10 | boundary | administrative | 10 10, 11 11
+ And the named place areas
+ | osm_type | osm_id | class | type | admin_level | geometry
+ | R | 20 | boundary | administrative | 2 | (1 1, 2 2, 1 2, 1 1)
+ | R | 21 | boundary | administrative | 32 | (3 3, 4 4, 3 4, 3 3)
+ | R | 22 | boundary | nature_park | 6 | (0 0, 1 0, 0 1, 0 0)
+ | R | 23 | boundary | natural_reserve| 10 | (0 0, 1 1, 1 0, 0 0)
+ When importing
+ Then table placex has no entry for N1
+ And table placex has no entry for W10
+ And table placex contains
+ | object | rank_search | rank_address
+ | R20 | 4 | 4
+ | R21 | 30 | 30
+ | R22 | 12 | 0
+ | R23 | 20 | 0
+
+ Scenario Outline: minor highways dropped without name, included with name
+ Given the scene roads-with-pois
+ And a wiped database
+ And the place ways
+ | osm_id | class | type | geometry
+ | 1 | highway | <type> | :w-south
+ And the named place ways
+ | osm_id | class | type | geometry
+ | 2 | highway | <type> | :w-north
+ When importing
+ Then table placex has no entry for W1
+ And table placex contains
+ | object | rank_search | rank_address
+ | W2 | <rank> | <rank>
+
+ Examples:
+ | type | rank
+ | service | 27
+ | cycleway | 27
+ | path | 27
+ | footway | 27
+ | steps | 27
+ | bridleway | 27
+ | track | 26
+ | byway | 26
+ | motorway_link | 27
+ | primary_link | 27
+ | trunk_link | 27
+ | secondary_link| 27
+ | tertiary_link | 27
+
+ Scenario: search and address ranks for highways correctly assigned
+ Given the scene roads-with-pois
+ And the place nodes
+ | osm_id | class | type
+ | 1 | highway | bus_stop
+ And the place ways
+ | osm_id | class | type | geometry
+ | 1 | highway | primary | :w-south
+ | 2 | highway | secondary | :w-south
+ | 3 | highway | tertiary | :w-south
+ | 4 | highway | residential | :w-north
+ | 5 | highway | unclassified | :w-north
+ | 6 | highway | something | :w-north
+ When importing
+ Then table placex contains
+ | object | rank_search | rank_address
+ | N1 | 30 | 30
+ | W1 | 26 | 26
+ | W2 | 26 | 26
+ | W3 | 26 | 26
+ | W4 | 26 | 26
+ | W5 | 26 | 26
+ | W6 | 26 | 26
+
+ Scenario: rank and inclusion of landuses
+ Given the place nodes
+ | osm_id | class | type
+ | 1 | landuse | residential
+ And the named place nodes
+ | osm_id | class | type
+ | 2 | landuse | residential
+ And the place ways
+ | osm_id | class | type | geometry
+ | 1 | landuse | residential | 0 0, 0 1
+ And the named place ways
+ | osm_id | class | type | geometry
+ | 2 | landuse | residential | 1 1, 1 1.1
+ And the place areas
+ | osm_type | osm_id | class | type | geometry
+ | W | 3 | landuse | residential | poly-area:0.1
+ | R | 1 | landuse | residential | poly-area:0.01
+ | R | 10 | landuse | residential | poly-area:0.5
+ And the named place areas
+ | osm_type | osm_id | class | type | geometry
+ | W | 4 | landuse | residential | poly-area:0.1
+ | R | 2 | landuse | residential | poly-area:0.05
+ When importing
+ Then table placex has no entry for N1
+ And table placex has no entry for W1
+ And table placex has no entry for W3
+ And table placex has no entry for R1
+ And table placex has no entry for R10
+ And table placex contains
+ | object | rank_search | rank_address
+ | N2 | 30 | 30
+ | W2 | 30 | 30
+ | W4 | 22 | 22
+ | R2 | 22 | 22
+
+ Scenario: rank and inclusion of naturals
+ Given the place nodes
+ | osm_id | class | type
+ | 1 | natural | peak
+ | 3 | natural | volcano
+ And the named place nodes
+ | osm_id | class | type
+ | 2 | natural | peak
+ | 4 | natural | volcano
+ | 5 | natural | foobar
+ And the place ways
+ | osm_id | class | type | geometry
+ | 1 | natural | mountain_range | 10 10,11 11
+ And the named place ways
+ | osm_id | class | type | geometry
+ | 2 | natural | mountain_range | 12 12,11 11
+ | 3 | natural | foobar | 13 13,13.1 13
+ | 4 | natural | coastline | 14 14,14.1 14
+ And the place areas
+ | osm_type | osm_id | class | type | geometry
+ | R | 1 | natural | volcano | poly-area:0.1
+ | R | 2 | natural | volcano | poly-area:1.0
+ And the named place areas
+ | osm_type | osm_id | class | type | geometry
+ | R | 3 | natural | volcano | poly-area:0.1
+ | R | 4 | natural | foobar | poly-area:0.5
+ | R | 5 | natural | sea | poly-area:5.0
+ | R | 6 | natural | sea | poly-area:0.01
+ | R | 7 | natural | coastline | poly-area:1.0
+ When importing
+ Then table placex has no entry for N1
+ And table placex has no entry for N3
+ And table placex has no entry for W1
+ And table placex has no entry for R1
+ And table placex has no entry for R2
+ And table placex has no entry for R7
+ And table placex has no entry for W4
+ And table placex contains
+ | object | rank_search | rank_address
+ | N2 | 18 | 0
+ | N4 | 18 | 0
+ | N5 | 30 | 30
+ | W2 | 18 | 0
+ | R3 | 18 | 0
+ | R4 | 22 | 22
+ | R5 | 4 | 4
+ | R6 | 4 | 4
+ | W3 | 30 | 30
+
--- /dev/null
+@DB
+Feature: Creation of search terms
+ Tests that search_name table is filled correctly
+
+ Scenario: POIs without a name have no search entry
+ Given the scene roads-with-pois
+ And the place nodes
+ | osm_id | class | type | geometry
+ | 1 | place | house | :p-N1
+ And the place ways
+ | osm_id | class | type | geometry
+ | 1 | highway | residential | :w-north
+ When importing
+ Then table search_name has no entry for N1
+
+
+ Scenario: Named POIs inherit address from parent
+ Given the scene roads-with-pois
+ And the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | place | house | foo | :p-N1
+ And the place ways
+ | osm_id | class | type | name | geometry
+ | 1 | highway | residential | the road | :w-north
+ When importing
+ Then search_name table contains
+ | place_id | name_vector | nameaddress_vector
+ | N1 | foo | the road
--- /dev/null
+@DB
+Feature: Import of simple objects
+ Tests basic import of simple, single objects
+
+ Scenario: Import place node
+ Given the place nodes:
+ | osm_id | class | type | name | geometry
+ | 1 | place | village | 'name' : 'Foo' | 10.0 -10.0
+ When importing
+ Then table placex contains
+ | object | class | type | name | centroid
+ | N1 | place | village | 'name' : 'Foo' | 10.0,-10.0 +- 1m
+ When sending query "Foo"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | N | 1
+
--- /dev/null
+@DB
+Feature: Updates of linked places
+ Tests that linked places are correctly added and deleted.
+
+
+ Scenario: Add linked place when linking relation is renamed
+ Given the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | place | city | foo | 0 0
+ And the place areas
+ | osm_type | osm_id | class | type | name | admin_level | geometry
+ | R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
+ When importing
+ And sending query "foo" with dups
+ Then results contain
+ | osm_type
+ | R
+ When updating place areas
+ | osm_type | osm_id | class | type | name | admin_level | geometry
+ | R | 1 | boundary | administrative | foobar | 8 | poly-area:0.1
+ Then table placex contains
+ | object | linked_place_id
+ | N1 | None
+ When sending query "foo" with dups
+ Then results contain
+ | osm_type
+ | N
+
+ Scenario: Add linked place when linking relation is removed
+ Given the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | place | city | foo | 0 0
+ And the place areas
+ | osm_type | osm_id | class | type | name | admin_level | geometry
+ | R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
+ When importing
+ And sending query "foo" with dups
+ Then results contain
+ | osm_type
+ | R
+ When marking for delete R1
+ Then table placex contains
+ | object | linked_place_id
+ | N1 | None
+ And sending query "foo" with dups
+ Then results contain
+ | osm_type
+ | N
+
+ Scenario: Remove linked place when linking relation is added
+ Given the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | place | city | foo | 0 0
+ When importing
+ And sending query "foo" with dups
+ Then results contain
+ | osm_type
+ | N
+ When updating place areas
+ | osm_type | osm_id | class | type | name | admin_level | geometry
+ | R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
+ Then table placex contains
+ | object | linked_place_id
+ | N1 | R1
+ When sending query "foo" with dups
+ Then results contain
+ | osm_type
+ | R
+
+ Scenario: Remove linked place when linking relation is renamed
+ Given the place nodes
+ | osm_id | class | type | name | geometry
+ | 1 | place | city | foo | 0 0
+ And the place areas
+ | osm_type | osm_id | class | type | name | admin_level | geometry
+ | R | 1 | boundary | administrative | foobar | 8 | poly-area:0.1
+ When importing
+ And sending query "foo" with dups
+ Then results contain
+ | osm_type
+ | N
+ When updating place areas
+ | osm_type | osm_id | class | type | name | admin_level | geometry
+ | R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
+ Then table placex contains
+ | object | linked_place_id
+ | N1 | R1
+ When sending query "foo" with dups
+ Then results contain
+ | osm_type
+ | R
+
--- /dev/null
+@DB
+Feature: Update of names in place objects
+ Tests all naming-related issues in updates
+
+
+ Scenario: Updating postcode in postcode boundaries without ref
+ Given the place areas
+ | osm_type | osm_id | class | type | postcode | geometry
+ | R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0)
+ When importing
+ And sending query "12345"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | R | 1
+ When updating place areas
+ | osm_type | osm_id | class | type | postcode | geometry
+ | R | 1 | boundary | postal_code | 54321 | (0 0, 1 0, 1 1, 0 1, 0 0)
+ And sending query "12345"
+ Then exactly 0 results are returned
+ When sending query "54321"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | R | 1
+
+
+ Scenario: Delete postcode from postcode boundaries without ref
+ Given the place areas
+ | osm_type | osm_id | class | type | postcode | geometry
+ | R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0)
+ When importing
+ And sending query "12345"
+ Then results contain
+ | ID | osm_type | osm_id
+ | 0 | R | 1
+ When updating place areas
+ | osm_type | osm_id | class | type | geometry
+ | R | 1 | boundary | postal_code | (0 0, 1 0, 1 1, 0 1, 0 0)
+ Then table placex has no entry for R1
+
--- /dev/null
+@DB
+Feature: Update of simple objects
+ Tests basic updates of simple, single objects
+
+ Scenario: Remove name from a landuse object
+ Given the place nodes
+ | osm_id | class | type | name
+ | 1 | landuse | wood | 'name' : 'Foo'
+ When importing
+ Then table placex contains
+ | object | class | type | name
+ | N1 | landuse| wood | 'name' : 'Foo'
+ When updating place nodes
+ | osm_id | class | type
+ | 1 | landuse | wood
+ Then table placex has no entry for N1
+
+
+ Scenario: Do delete small boundary features
+ Given the place areas
+ | osm_type | osm_id | class | type | admin_level | geometry
+ | R | 1 | boundary | administrative | 3 | (0 0, 1 0, 1 1, 0 1, 0 0)
+ When importing
+ Then table placex contains
+ | object | rank_search
+ | R1 | 6
+ When marking for delete R1
+ Then table placex has no entry for R1
+
+ Scenario: Do not delete large boundary features
+ Given the place areas
+ | osm_type | osm_id | class | type | admin_level | geometry
+ | R | 1 | boundary | administrative | 3 | (0 0, 2 0, 2 2.1, 0 2, 0 0)
+ When importing
+ Then table placex contains
+ | object | rank_search
+ | R1 | 6
+ When marking for delete R1
+ Then table placex contains
+ | object | rank_search
+ | R1 | 6
+
+ Scenario: Do delete large features of low rank
+ Given the named place areas
+ | osm_type | osm_id | class | type | geometry
+ | W | 1 | place | house | (0 0, 2 0, 2 2.1, 0 2, 0 0)
+ | R | 1 | boundary | national_park | (0 0, 2 0, 2 2.1, 0 2, 0 0)
+ When importing
+ Then table placex contains
+ | object | rank_address
+ | R1 | 0
+ | W1 | 30
+ When marking for delete R1,W1
+ Then table placex has no entry for W1
+ Then table placex has no entry for R1
--- /dev/null
+@DB
+Feature: Import of relations by osm2pgsql
+ Testing specific relation problems related to members.
+
+ Scenario: Don't import empty waterways
+ Given the osm nodes:
+ | id | tags
+ | 1 | 'amenity' : 'prison', 'name' : 'foo'
+ And the osm relations:
+ | id | tags | members
+ | 1 | 'type' : 'waterway', 'waterway' : 'river', 'name' : 'XZ' | N1
+ When loading osm data
+ Then table place has no entry for R1
--- /dev/null
+@DB
+Feature: Import of simple objects by osm2pgsql
+ Testing basic functions of osm2pgsql.
+
+ Scenario: Import simple objects
+ Given the osm nodes:
+ | id | tags
+ | 1 | 'amenity' : 'prison', 'name' : 'foo'
+ Given the osm nodes:
+ | id | geometry
+ | 100 | 0 0
+ | 101 | 0 0.1
+ | 102 | 0.1 0.2
+ | 200 | 0 0
+ | 201 | 0 1
+ | 202 | 1 1
+ | 203 | 1 0
+ Given the osm ways:
+ | id | tags | nodes
+ | 1 | 'shop' : 'toys', 'name' : 'tata' | 100 101 102
+ | 2 | 'ref' : '45' | 200 201 202 203 200
+ Given the osm relations:
+ | id | tags | members
+ | 1 | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | N1,W2
+ When loading osm data
+ Then table place contains
+ | object | class | type | name
+ | N1 | amenity | prison | 'name' : 'foo'
+ | W1 | shop | toys | 'name' : 'tata'
+ | R1 | tourism | hotel | 'name' : 'XZ'
+
+ Scenario: Import object with two main tags
+ Given the osm nodes:
+ | id | tags
+ | 1 | 'tourism' : 'hotel', 'amenity' : 'restaurant', 'name' : 'foo'
+ When loading osm data
+ Then table place contains
+ | object | class | type | name
+ | N1:tourism | tourism | hotel | 'name' : 'foo'
+ | N1:amenity | amenity | restaurant | 'name' : 'foo'
+
+ Scenario: Import stand-alone house number with postcode
+ Given the osm nodes:
+ | id | tags
+ | 1 | 'addr:housenumber' : '4', 'addr:postcode' : '3345'
+ When loading osm data
+ Then table place contains
+ | object | class | type
+ | N1 | place | house
--- /dev/null
+@DB
+Feature: Update of relations by osm2pgsql
+ Testing relation update by osm2pgsql.
+
+ Scenario: Remove all members of a relation
+ Given the osm nodes:
+ | id | tags
+ | 1 | 'amenity' : 'prison', 'name' : 'foo'
+ Given the osm nodes:
+ | id | geometry
+ | 200 | 0 0
+ | 201 | 0 0.0001
+ | 202 | 0.0001 0.0001
+ | 203 | 0.0001 0
+ Given the osm ways:
+ | id | tags | nodes
+ | 2 | 'ref' : '45' | 200 201 202 203 200
+ Given the osm relations:
+ | id | tags | members
+ | 1 | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | W2
+ When loading osm data
+ Then table place contains
+ | object | class | type | name
+ | R1 | tourism | hotel | 'name' : 'XZ'
+ Given the osm relations:
+ | action | id | tags | members
+ | M | 1 | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | N1
+ When updating osm data
+ Then table place has no entry for R1
+
--- /dev/null
+@DB
+Feature: Update of simple objects by osm2pgsql
+ Testing basic update functions of osm2pgsql.
+
+ Scenario: Import object with two main tags
+ Given the osm nodes:
+ | id | tags
+ | 1 | 'tourism' : 'hotel', 'amenity' : 'restaurant', 'name' : 'foo'
+ When loading osm data
+ Then table place contains
+ | object | class | type | name
+ | N1:tourism | tourism | hotel | 'name' : 'foo'
+ | N1:amenity | amenity | restaurant | 'name' : 'foo'
+ Given the osm nodes:
+ | action | id | tags
+ | M | 1 | 'tourism' : 'hotel', 'name' : 'foo'
+ When updating osm data
+ Then table place has no entry for N1:amenity
+ And table place contains
+ | object | class | type | name
+ | N1:tourism | tourism | hotel | 'name' : 'foo'
+
--- /dev/null
+#!/bin/bash -e
+#
+# Regenerates wkts for scenarios.
+#
+
+datadir="$( cd "$( dirname "$0" )" && cd ../data && pwd )"
+
+if [ ! -d "$datadir" ]; then
+ echo "Cannot find data dir."
+ exit 1
+fi
+
+echo "Using datadir $datadir"
+pushd "$datadir"
+
+# remove old wkts
+rm "$datadir"/*.wkt
+
+# create wkts from SQL scripts
+for fl in *.sql; do
+ echo "Processing $fl.."
+ psql -d nominatim -t -o "${fl/.sql/.wkt}" < "$fl"
+done
+
+# create wkts from .osm files
+for fl in *.osm; do
+ echo "Processing $fl.."
+ ../bin/osm2wkt $fl
+done
+
+popd
--- /dev/null
+
+// The code in this file is released into the Public Domain.
+
+#include <iostream>
+#include <fstream>
+#include <string>
+#include <unordered_map>
+
+#include <osmium/area/assembler.hpp>
+#include <osmium/area/collector.hpp>
+#include <osmium/area/problem_reporter_exception.hpp>
+#include <osmium/geom/wkt.hpp>
+#include <osmium/handler.hpp>
+#include <osmium/handler/node_locations_for_ways.hpp>
+#include <osmium/io/any_input.hpp>
+#include <osmium/visitor.hpp>
+#include <osmium/index/map/stl_map.hpp>
+
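+// In-memory index type mapping OSM node ids to locations; used to build way geometries.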
+typedef osmium::index::map::StlMap<osmium::unsigned_object_id_type, osmium::Location> index_type;
+
+typedef osmium::handler::NodeLocationsForWays<index_type, index_type> location_handler_type;
+
+
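+// Osmium handler that writes the WKT geometry of every object tagged
+// with test:section and test:id into one <section>.wkt file per section.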
+class ExportToWKTHandler : public osmium::handler::Handler {
+
+ osmium::geom::WKTFactory m_factory;
+ std::unordered_map<std::string, std::ofstream> m_files;
+
+public:
+
+ void node(const osmium::Node& node) {
+ print_geometry(node.tags(), m_factory.create_point(node));
+ }
+
+ void way(const osmium::Way& way) {
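+ // Export ways as linestrings unless they are closed and tagged area=yes (those are exported as areas below).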
+ if (!way.is_closed() || !way.tags().get_value_by_key("area"))
+ print_geometry(way.tags(), m_factory.create_linestring(way));
+ }
+
+ void area(const osmium::Area& area) {
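+ // Export areas assembled from relations always; areas from closed ways only when explicitly tagged area=yes.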
+ if (!area.from_way() || area.tags().get_value_by_key("area"))
+ print_geometry(area.tags(), m_factory.create_multipolygon(area));
+ }
+
+ void close() {
+ for (auto& fd : m_files)
+ fd.second.close();
+ }
+
+private:
+
+ void print_geometry(const osmium::TagList& tags, const std::string& wkt) {
+ const char* scenario = tags.get_value_by_key("test:section");
+ const char* id = tags.get_value_by_key("test:id");
+ if (scenario && id) {
+ auto& fd = m_files[std::string(scenario)];
+ if (!fd.is_open())
+ fd.open(std::string(scenario) + ".wkt");
+ fd << id << " | " << wkt << "\n";
+ }
+ }
+
+}; // class ExportToWKTHandler
+
+int main(int argc, char* argv[]) {
+ if (argc != 2) {
+ std::cerr << "Usage: " << argv[0] << " OSMFILE\n";
+ exit(1);
+ }
+
+ std::string input_filename {argv[1]};
+
+ typedef osmium::area::Assembler area_assembler_type;
+ osmium::area::ProblemReporterException problem_reporter;
+ area_assembler_type assembler(&problem_reporter);
+ osmium::area::Collector<area_assembler_type> collector(assembler);
+
+ std::cerr << "Pass 1...\n";
+ osmium::io::Reader reader1(input_filename);
+ collector.read_relations(reader1);
+ std::cerr << "Pass 1 done\n";
+
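+ // The location handler caches node coordinates so that way and area geometries can be assembled in pass 2.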
+ index_type index_pos;
+ index_type index_neg;
+ location_handler_type location_handler(index_pos, index_neg);
+
+ std::cerr << "Pass 2...\n";
+ ExportToWKTHandler export_handler;
+ osmium::io::Reader reader2(input_filename);
+ osmium::apply(reader2, location_handler, export_handler, collector.handler());
+ reader2.close();
+ osmium::apply(collector, export_handler);
+ export_handler.close();
+ std::cerr << "Pass 2 done\n";
+
+
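+ // Free global state allocated by the protobuf library, which backs the PBF input format.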
+ google::protobuf::ShutdownProtobufLibrary();
+
+}
+
+
--- /dev/null
+n-edge-NS | POINT(1.0040019 2.000324)
+n-inner | POINT(1.0039385 2.0003548)
+n-outer | POINT(1.0039478 2.0004676)
+n-edge-WE | POINT(1.0039599 2.0002345)
+w-WE | LINESTRING(1.0031759 2.0002316,1.0040361 2.0002211,1.0042735 2.0002264)
+w-NS | LINESTRING(1.0040414 2.0001051,1.0040361 2.0002211,1.0040364 2.0006377)
+w-building | MULTIPOLYGON(((1.0040019 2.000324,1.0040016 2.0002344,1.0039599 2.0002345,1.0039037 2.0002347,1.0039043 2.0004389,1.0040023 2.0004386,1.0040019 2.000324)))
--- /dev/null
+select country_code, st_astext(st_pointonsurface(st_collect(geometry))) from country_osm_grid group by country_code order by country_code
--- /dev/null
+ ad | POINT(1.58972361752509 42.54241545)
+ ae | POINT(54.6158905029297 24.8243131637573)
+ af | POINT(65.9026412963867 34.8470859527588)
+ ag | POINT(-61.7243069800293 17.069)
+ ai | POINT(-63.1057155298182 18.2546197)
+ al | POINT(19.8494176864624 40.2123275624912)
+ am | POINT(44.6422958374023 40.3782157897949)
+ ao | POINT(16.2192406654358 -12.7701482772827)
+ aq | POINT(44.999999975 -75.6569557189941)
+ ar | POINT(-61.1075973510742 -34.3761558532715)
+ as | POINT(-170.684700024275 -14.2930755)
+ at | POINT(14.2574706077576 47.3654232025146)
+ au | POINT(138.231559753418 -23.7206888198853)
+ aw | POINT(-69.98255055 12.555)
+ ax | POINT(19.9183956313477 59.81682435)
+ az | POINT(48.385555267334 40.6163997650146)
+ ba | POINT(17.1851491928101 44.2558269500732)
+ bb | POINT(-59.53342165 13.19)
+ bd | POINT(89.759895324707 24.3420524597168)
+ be | POINT(4.90078139305115 50.3468225048828)
+ bf | POINT(-0.567435041069984 11.9047117233276)
+ bg | POINT(24.8061628341675 43.0985908508301)
+ bh | POINT(50.5203291219829 25.94685735)
+ bi | POINT(29.5456137866089 -2.99057915)
+ bj | POINT(2.70062518119812 10.0279288291931)
+ bl | POINT(-62.7934947763772 17.907)
+ bm | POINT(-64.7740692745195 32.30199165)
+ bn | POINT(114.521968608887 4.2863885)
+ bo | POINT(-62.0247344970703 -17.7772369384766)
+ bq | POINT(-63.1432235610045 17.566)
+ br | POINT(-45.7706508636475 -9.5868501663208)
+ bs | POINT(-77.6091675884277 23.8745)
+ bt | POINT(90.0135078430176 27.281379699707)
+ bv | POINT(3.35744155625 -54.4215)
+ bw | POINT(23.5150556564331 -23.4839134216309)
+ by | POINT(26.7725925445557 53.1588516235352)
+ bz | POINT(-88.6348991394043 16.3395160487277)
+ ca | POINT(-107.74817276001 67.1261215209961)
+ cc | POINT(96.8442066294247 -12.0173443)
+ cd | POINT(24.0954418182373 -1.67713665962219)
+ cf | POINT(22.5870132446289 5.98438787460327)
+ cg | POINT(15.7887516021729 0.403886616230011)
+ ch | POINT(7.65705513954163 46.5744686126709)
+ ci | POINT(-6.31190967559814 6.6278383731842)
+ ck | POINT(-159.778351359569 -21.23349585)
+ cl | POINT(-70.4179039001465 -53.7718944549561)
+ cm | POINT(13.260226726532 5.94519567489624)
+ cn | POINT(96.4428558349609 38.0426063537598)
+ co | POINT(-72.5295104980469 2.45174860954285)
+ cr | POINT(-83.8331413269043 9.935142993927)
+ cu | POINT(-80.8167381286621 21.8885278701782)
+ cv | POINT(-24.508106575 14.929)
+ cw | POINT(-68.9640918594077 12.1845)
+ cx | POINT(105.624119513558 -10.48417)
+ cy | POINT(32.959223486499 35.37010195)
+ cz | POINT(16.3209805488586 49.5069274902344)
+ de | POINT(9.30716800689697 50.2128944396973)
+ dj | POINT(42.969040422876 11.41542855)
+ dk | POINT(9.18490123748779 55.5634002685547)
+ dm | POINT(-61.0035801928854 15.6547055)
+ do | POINT(-69.6285591125488 18.5884169089722)
+ dz | POINT(4.24749487638474 25.797215461731)
+ ec | POINT(-77.4583168029785 -0.982844322919846)
+ ee | POINT(23.9428863525391 58.439525604248)
+ eg | POINT(28.952935218811 28.1771860122681)
+ eh | POINT(-13.6903142929077 25.0124177932739)
+ er | POINT(39.0122375488281 14.960337638855)
+ es | POINT(-2.59110307693481 38.7935485839844)
+ et | POINT(38.6169757843018 7.71399855613708)
+ fi | POINT(26.8979873657227 63.5619449615479)
+ fj | POINT(177.918533325195 -17.7423753738403)
+ fk | POINT(-60.0855102539062 -51.6555919647217)
+ fm | POINT(151.9535889125 8.5045)
+ fo | POINT(-6.60483694084778 62.10000995)
+ fr | POINT(0.284105718135834 47.5104522705078)
+ ga | POINT(10.8107047080994 -0.0742915570735931)
+ gb | POINT(-0.928231082856655 52.0161876678467)
+ gd | POINT(-61.6452430375 12.191)
+ ge | POINT(44.1666488647461 42.0038585662842)
+ gf | POINT(-53.4652481079102 3.56188893318176)
+ gg | POINT(-2.50580395030125 49.5854381)
+ gh | POINT(-0.463488027453423 7.16051578521729)
+ gi | POINT(-5.32053155848457 36.1106663)
+ gl | POINT(-33.8551120758057 74.6635551452637)
+ gm | POINT(-16.4096023535368 13.25)
+ gn | POINT(-13.839409828186 10.9629158973694)
+ gp | POINT(-61.6871265247053 16.23049055)
+ gq | POINT(10.2397356033325 1.43119311332703)
+ gr | POINT(23.1785039901733 39.0620670318604)
+ gs | POINT(-36.4943086948773 -54.4306784)
+ gt | POINT(-90.7436828613281 15.2042865753174)
+ gu | POINT(144.733626445767 13.444138)
+ gw | POINT(-14.8352527618408 11.9248690605164)
+ gy | POINT(-58.4516773223877 5.73698806762695)
+ hk | POINT(114.18577775 22.3492361)
+ hm | POINT(73.6823082266602 -53.22105985)
+ hn | POINT(-86.9541435241699 15.2382001876831)
+ hr | POINT(17.499662399292 45.5268955230713)
+ ht | POINT(-73.5192565917969 18.3249206691162)
+ hu | POINT(20.3536291122437 47.5172100067139)
+ id | POINT(123.345050811768 -0.837919592857361)
+ ie | POINT(-9.00520038604736 52.8772506713867)
+ il | POINT(35.4631499949707 32.86165655)
+ im | POINT(-4.86740773691101 54.023)
+ in | POINT(88.6762087020508 27.86155515)
+ io | POINT(71.4274391359073 -6.14349685)
+ iq | POINT(42.5810985565186 34.2610359191895)
+ ir | POINT(51.268892288208 34.1931705474854)
+ is | POINT(-17.5178508758545 64.7168769836426)
+ it | POINT(10.4263944625854 44.8790493011475)
+ je | POINT(-2.19261599848299 49.1245833)
+ jm | POINT(-76.8402003547852 18.3935)
+ jo | POINT(36.5555210113525 30.7574186325073)
+ jp | POINT(138.725311279297 35.9209995269775)
+ ke | POINT(36.9060287475586 1.08512867614627)
+ kg | POINT(76.1557197570801 41.6649742126465)
+ kh | POINT(104.319019317627 12.9555516242981)
+ ki | POINT(173.633537933333 0.139)
+ km | POINT(44.3147485207764 -12.241)
+ kn | POINT(-62.6937987175 17.2555)
+ kp | POINT(126.655757904053 39.6457576751709)
+ kr | POINT(127.277404785156 36.4138870239258)
+ kw | POINT(47.3068407840576 29.6918055)
+ ky | POINT(-81.0745526670982 19.2994923579778)
+ kz | POINT(72.008113861084 49.8885555267334)
+ la | POINT(102.443916320801 19.8160953521729)
+ lb | POINT(35.4846443715483 33.4176673878926)
+ lc | POINT(-60.978944125 13.891)
+ li | POINT(9.54693948514429 47.15934115)
+ lk | POINT(80.3852043151855 8.41649961471558)
+ lr | POINT(-11.169605255127 4.04122126102448)
+ ls | POINT(28.6698419546997 -29.9453849)
+ lt | POINT(24.5173501968384 55.4929389953613)
+ lu | POINT(6.08649672997471 49.81533445)
+ lv | POINT(23.5103368759155 56.6714401245117)
+ ly | POINT(15.3684158325195 28.1217727661133)
+ ma | POINT(-4.0306156873703 33.2169628143311)
+ mc | POINT(7.47743150426578 43.62917385)
+ md | POINT(29.6172503477783 46.6651745)
+ me | POINT(19.7229134314941 43.02441345)
+ mf | POINT(-63.0666651534257 18.0810209)
+ mg | POINT(45.8637886047363 -20.5024528503418)
+ mh | POINT(171.949820566667 5.983)
+ mk | POINT(21.421085357666 41.0898007597656)
+ ml | POINT(-1.93310506641865 16.4699301719666)
+ mm | POINT(95.5462455749512 21.0962018966675)
+ mn | POINT(99.8113822937012 48.1861572265625)
+ mo | POINT(113.564416766761 22.16209625)
+ mp | POINT(145.213452483189 14.1490205)
+ mq | POINT(-60.8112834227783 14.43706925)
+ mr | POINT(-9.42324566841125 22.5925149917603)
+ ms | POINT(-62.1945521583333 16.745)
+ mt | POINT(14.3836306158583 35.9446731)
+ mu | POINT(57.551211475 -20.41)
+ mv | POINT(73.3929214477539 4.19375014305115)
+ mw | POINT(33.9572296142578 -12.2821822166443)
+ mx | POINT(-105.892219543457 25.8682699203491)
+ my | POINT(112.711540222168 2.10098683834076)
+ mz | POINT(37.5868968963623 -15.5801844596863)
+ na | POINT(16.6856970787048 -21.4657220840454)
+ nc | POINT(164.953224182129 -20.3888988494873)
+ ne | POINT(10.060417175293 19.0827360153198)
+ nf | POINT(167.95718166875 -29.0645)
+ ng | POINT(10.1778125762939 10.1780409812927)
+ ni | POINT(-85.8797492980957 13.2171587944031)
+ nl | POINT(-68.5706209441406 12.041)
+ no | POINT(23.1155624389648 70.0993499755859)
+ np | POINT(83.3625984191895 28.1310758590698)
+ nr | POINT(166.934792270833 -0.5275)
+ nu | POINT(-169.848737911905 -19.05305275)
+ nz | POINT(167.972099304199 -45.1305675506592)
+ om | POINT(56.8605518341064 20.4741315841675)
+ pa | POINT(-79.4016036987305 8.80656003952026)
+ pe | POINT(-78.6654052734375 -7.54711985588074)
+ pf | POINT(-145.057191213086 -16.7086236)
+ pg | POINT(146.646003723145 -7.37427568435669)
+ ph | POINT(121.483592987061 15.0996527671814)
+ pk | POINT(72.1134796142578 31.1462965011597)
+ pl | POINT(17.8813629150391 52.771821975708)
+ pm | POINT(-56.1951589074841 46.7832469)
+ pn | POINT(-130.106425528029 -25.0695595)
+ pr | POINT(-65.8875553967285 18.3716905)
+ ps | POINT(35.3980153741943 32.24773475)
+ pt | POINT(-8.45743942260742 40.1115436553955)
+ pw | POINT(134.496454875 7.3245)
+ py | POINT(-59.5178718566895 -22.4128150939941)
+ qa | POINT(51.4990362304443 24.9981677)
+ re | POINT(55.7734550547607 -21.3638828)
+ ro | POINT(26.3763284683228 45.3612003326416)
+ rs | POINT(20.4037199020386 44.5641384124756)
+ ru | POINT(116.440608978271 59.0678024291992)
+ rw | POINT(29.5788261333252 -1.6240443)
+ sa | POINT(47.7316932678223 22.4379062652588)
+ sb | POINT(164.638946533203 -10.2360653877258)
+ sc | POINT(46.3656697 -9.454)
+ sd | POINT(28.1472072601318 14.5642309188843)
+ se | POINT(15.6866798400879 60.3556804656982)
+ sg | POINT(103.84187219299 1.304)
+ sh | POINT(-12.2815573611979 -37.11546755)
+ si | POINT(14.0473856628607 46.390855)
+ sj | POINT(15.2755260467529 79.2336540222168)
+ sk | POINT(20.416033744812 48.869701385498)
+ sl | POINT(-11.4777312278748 8.78156280517578)
+ sm | POINT(12.4606268797657 43.9427969)
+ sn | POINT(-15.3711128234863 14.9947791099548)
+ so | POINT(46.9338359832764 9.34094429016113)
+ sr | POINT(-56.4855213165283 4.5773549079895)
+ ss | POINT(28.1357345581055 8.50933408737183)
+ st | POINT(6.61025854583333 0.2215)
+ sv | POINT(-89.3666543301004 13.4307287)
+ sx | POINT(-63.1539330807882 17.9345)
+ sy | POINT(38.1551322937012 35.3422107696533)
+ sz | POINT(31.782634398523 -26.14244365)
+ tc | POINT(-71.325541342334 21.35)
+ td | POINT(17.4209251403809 13.4622311592102)
+ tf | POINT(137.5 -67.5)
+ tg | POINT(1.0698350071907 7.87677597999573)
+ th | POINT(102.008777618408 16.4231028556824)
+ tj | POINT(71.9134941101074 39.0152739312988)
+ tk | POINT(-171.826039878679 -9.209903)
+ tl | POINT(126.225208282471 -8.72636747360229)
+ tm | POINT(57.7160358428955 39.9253444671631)
+ tn | POINT(9.04958724975586 34.8419933319092)
+ to | POINT(-176.993202209473 -23.1110429763794)
+ tr | POINT(32.8200283050537 39.8635063171387)
+ tt | POINT(-60.70793924375 11.1385)
+ tv | POINT(178.774993896484 -9.41685771942139)
+ tw | POINT(120.300746917725 23.1700229644775)
+ tz | POINT(33.5389289855957 -5.01840615272522)
+ ua | POINT(33.4433536529541 49.3061904907227)
+ ug | POINT(32.9652328491211 2.08584922552109)
+ um | POINT(-169.509930872296 16.74605815)
+ us | POINT(-116.395355224609 40.7137908935547)
+ uy | POINT(-56.4650554656982 -33.6265888214111)
+ uz | POINT(61.3552989959717 42.9610729217529)
+ va | POINT(12.3319785703086 42.0493197)
+ vc | POINT(-61.0990541737305 13.316)
+ ve | POINT(-64.8832321166992 7.69849991798401)
+ vg | POINT(-64.6247911940199 18.419)
+ vi | POINT(-64.8895090795187 18.3226325)
+ vn | POINT(104.201791331787 10.27644235)
+ vu | POINT(167.319198608398 -15.8868751525879)
+ wf | POINT(-176.207816222208 -13.28535775)
+ ws | POINT(-172.109667323427 -13.850938)
+ ye | POINT(45.945629119873 16.1633830070496)
+ yt | POINT(44.9377459760742 -12.6088246)
+ za | POINT(23.1948881149292 -30.4327602386475)
+ zm | POINT(26.3861808776855 -14.3996663093567)
+ zw | POINT(30.1241998672485 -19.8690795898438)
+
--- /dev/null
+n-N-unglued | POINT(1.004922 2.0005155)
+n-S-unglued | POINT(1.0046259 2.0002949)
+n-NE | POINT(1.0050661 2.0006118)
+n-SE | POINT(1.0051339 2.0003349)
+n-NW | POINT(1.0047583 2.0004087)
+n-SW | POINT(1.0047275 2.0003564)
+w-north | LINESTRING(1.0044996 2.0004302,1.0046259 2.0003841,1.0047583 2.0004087,1.004922 2.0005155,1.0050661 2.0006118,1.0053155 2.0006241)
+w-south | LINESTRING(1.0045243 2.0002241,1.0046259 2.0002949,1.0047275 2.0003564,1.004826 2.0002918,1.0049368 2.0002641,1.0051339 2.0003349,1.0053278 2.0003687)
--- /dev/null
+0.0001 | MULTIPOLYGON(((0.001 0,0 0,0 0.1,0.001 0.1,0.001 0)))
+0.0005 | MULTIPOLYGON(((0.005 0,0 0,0 0.1,0.005 0.1,0.005 0)))
+0.001 | MULTIPOLYGON(((0.01 0,0 0,0 0.1,0.01 0.1,0.01 0)))
+0.005 | MULTIPOLYGON(((0.05 0,0 0,0 0.1,0.05 0.1,0.05 0)))
+0.01 | MULTIPOLYGON(((0.1 0,0 0,0 0.1,0.1 0.1,0.1 0)))
+0.05 | MULTIPOLYGON(((0.5 0,0 0,0 0.1,0.5 0.1,0.5 0)))
+0.1 | MULTIPOLYGON(((0.1 0,0 0,0 1,0.1 1,0.1 0)))
+0.5 | MULTIPOLYGON(((0.5 0,0 0,0 1,0.5 1,0.5 0)))
+1.0 | MULTIPOLYGON(((1 0,0 0,0 1,1 1,1 0)))
+2.0 | MULTIPOLYGON(((2 0,0 0,0 1,2 1,2 0)))
+5.0 | MULTIPOLYGON(((5 0,0 0,0 1,5 1,5 0)))
--- /dev/null
+<?xml version='1.0' encoding='UTF-8'?>
+<osm version='0.6'>
+ <node id="100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="101" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
+ <node id="102" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.001" />
+ <node id="103" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.001" />
+ <way id="100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="100" />
+ <nd ref="101" />
+ <nd ref="102" />
+ <nd ref="103" />
+ <nd ref="100" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="0.0001"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+ <node id="200" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="201" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
+ <node id="202" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.005" />
+ <node id="203" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.005" />
+ <way id="200" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="200" />
+ <nd ref="201" />
+ <nd ref="202" />
+ <nd ref="203" />
+ <nd ref="200" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="0.0005"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+ <node id="300" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="301" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
+ <node id="302" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.01" />
+ <node id="303" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.01" />
+ <way id="300" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="300" />
+ <nd ref="301" />
+ <nd ref="302" />
+ <nd ref="303" />
+ <nd ref="300" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="0.001"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+ <node id="400" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="401" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
+ <node id="402" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.05" />
+ <node id="403" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.05" />
+ <way id="400" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="400" />
+ <nd ref="401" />
+ <nd ref="402" />
+ <nd ref="403" />
+ <nd ref="400" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="0.005"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+ <node id="500" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="501" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
+ <node id="502" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.1" />
+ <node id="503" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.1" />
+ <way id="500" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="500" />
+ <nd ref="501" />
+ <nd ref="502" />
+ <nd ref="503" />
+ <nd ref="500" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="0.01"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+ <node id="600" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="601" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
+ <node id="602" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.5" />
+ <node id="603" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.5" />
+ <way id="600" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="600" />
+ <nd ref="601" />
+ <nd ref="602" />
+ <nd ref="603" />
+ <nd ref="600" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="0.05"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+ <node id="700" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="701" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
+ <node id="702" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.1" />
+ <node id="703" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.1" />
+ <way id="700" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="700" />
+ <nd ref="701" />
+ <nd ref="702" />
+ <nd ref="703" />
+ <nd ref="700" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="0.1"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+ <node id="800" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="801" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
+ <node id="802" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.5" />
+ <node id="803" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.5" />
+ <way id="800" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="800" />
+ <nd ref="801" />
+ <nd ref="802" />
+ <nd ref="803" />
+ <nd ref="800" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="0.5"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+ <node id="900" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="901" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
+ <node id="902" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="1.0" />
+ <node id="903" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="1.0" />
+ <way id="900" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="900" />
+ <nd ref="901" />
+ <nd ref="902" />
+ <nd ref="903" />
+ <nd ref="900" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="1.0"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+ <node id="1000" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="1001" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
+ <node id="1002" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="2.0" />
+ <node id="1003" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="2.0" />
+ <way id="1000" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="1000" />
+ <nd ref="1001" />
+ <nd ref="1002" />
+ <nd ref="1003" />
+ <nd ref="1000" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="2.0"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+ <node id="1100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
+ <node id="1101" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
+ <node id="1102" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="5.0" />
+ <node id="1103" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="5.0" />
+ <way id="1100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
+ <nd ref="1100" />
+ <nd ref="1101" />
+ <nd ref="1102" />
+ <nd ref="1103" />
+ <nd ref="1100" />
+ <tag k="test:section" v="poly-area"/>
+ <tag k="test:id" v="5.0"/>
+ <tag k="area" v="yes"/>
+ </way>
+
+</osm>
--- /dev/null
+n-main-east | POINT(1.0024481 2.0003542)
+n-main-west | POINT(1.001552 2.0002662)
+n-alley | POINT(1.0019235 2.0005463)
+n-corner | POINT(1.0019235 2.0003542)
+w-alley | LINESTRING(1.0019594 2.0003086,1.0019594 2.0005756)
+w-main | LINESTRING(1.0013435 2.0003118,1.0016759 2.0003053,1.0019594 2.0003086,1.0021255 2.0003151,1.0023699 2.0003118,1.0026078 2.0002988)
--- /dev/null
+p-N2 | POINT(1.0003904 2.0003399)
+p-S1 | POINT(1.0008104 2.0002927)
+p-N1 | POINT(1.0005321 2.0005288)
+p-S2 | POINT(1.0006398 2.0001064)
+w-north | LINESTRING(1.0001174 2.0004055,1.0004298 2.0003976,1.0006608 2.0004579,1.0010624 2.0005419)
+w-south | LINESTRING(1.0001384 2.0001903,1.0007212 2.0001982,1.0010677 2.0002192)
--- /dev/null
+<?xml version='1.0' encoding='UTF-8'?>
+<osm version='0.6' upload='false' generator='JOSM'>
+ <node id='-204' action='modify' visible='true' lat='2.000651586300547' lon='1.005746444902722'>
+ <tag k='name' v='split-road' />
+ </node>
+ <node id='-189' action='modify' visible='true' lat='2.0005350827634585' lon='1.0057490943252971' />
+ <node id='-187' action='modify' visible='true' lat='2.0005430261867043' lon='1.0058974619895207' />
+ <node id='-185' action='modify' visible='true' lat='2.0005430261867043' lon='1.0060511284988949' />
+ <node id='-183' action='modify' visible='true' lat='2.0005324349557014' lon='1.0061306111761574' />
+ <node id='-174' action='modify' visible='true' lat='2.0005244915324045' lon='1.00551064629351' />
+ <node id='-172' action='modify' visible='true' lat='2.0005668564562127' lon='1.0056086749288005' />
+ <node id='-170' action='modify' visible='true' lat='2.000561560840796' lon='1.0056855081834875' />
+ <node id='-168' action='modify' visible='true' lat='2.0005006612622807' lon='1.0057358472124205' />
+ <node id='-166' action='modify' visible='true' lat='2.000505956877894' lon='1.0059107091023978' />
+ <node id='-164' action='modify' visible='true' lat='2.0005112524934896' lon='1.0060537779214704' />
+ <node id='-162' action='modify' visible='true' lat='2.0004953656466506' lon='1.006135910021308' />
+ <node id='-160' action='modify' visible='true' lat='2.000513900301281' lon='1.0062842776855314' />
+ <node id='-158' action='modify' visible='true' lat='2.000471535376104' lon='1.0063717086305204' />
+ <node id='-156' action='modify' visible='true' lat='2.000296780048186' lon='1.0063584615176433' />
+ <node id='-154' action='modify' visible='true' lat='2.000262358542008' lon='1.006281628262956' />
+ <node id='-152' action='modify' visible='true' lat='2.000339144977876' lon='1.0061994961631182' />
+ <node id='-150' action='modify' visible='true' lat='2.0003576796342712' lon='1.0061041169504032' />
+ <node id='-148' action='modify' visible='true' lat='2.0003497362101275' lon='1.0060034388925374' />
+ <node id='-146' action='modify' visible='true' lat='2.000325905937466' lon='1.0058868642992191' />
+ <node id='-144' action='modify' visible='true' lat='2.000280893199271' lon='1.0057941345090795' />
+ <node id='-142' action='modify' visible='true' lat='2.0002782453911037' lon='1.0056351691545544' />
+ <node id='-141' action='modify' visible='true' lat='2.0003603274423103' lon='1.005470904954879' />
+ <node id='-139' action='modify' visible='true' lat='2.0007265421773432' lon='1.0047054313390744'>
+ <tag k='name' v='points-on-road' />
+ </node>
+ <node id='-112' action='modify' visible='true' lat='2.0005155253763816' lon='1.0049220138358423' />
+ <node id='-110' action='modify' visible='true' lat='2.0005155253763816' lon='1.0049220138358423'>
+ <tag k='test:id' v='n-N-unglued' />
+ <tag k='test:section' v='points-on-roads' />
+ </node>
+ <node id='-108' action='modify' visible='true' lat='2.0002948553437463' lon='1.0046258759080025'>
+ <tag k='test:id' v='n-S-unglued' />
+ <tag k='test:section' v='points-on-roads' />
+ </node>
+ <node id='-106' action='modify' visible='true' lat='2.0002948553437463' lon='1.0046258759080025' />
+ <node id='-104' action='modify' visible='true' lat='2.0006117740392657' lon='1.0050661381425037'>
+ <tag k='test:id' v='n-NE' />
+ <tag k='test:section' v='points-on-roads' />
+ </node>
+ <node id='-102' action='modify' visible='true' lat='2.000334854794143' lon='1.0051338707939657'>
+ <tag k='test:id' v='n-SE' />
+ <tag k='test:section' v='points-on-roads' />
+ </node>
+ <node id='-100' action='modify' visible='true' lat='2.0006240815601832' lon='1.0053155174501582' />
+ <node id='-98' action='modify' visible='true' lat='2.0004086999307416' lon='1.0047582624540412'>
+ <tag k='test:id' v='n-NW' />
+ <tag k='test:section' v='points-on-roads' />
+ </node>
+ <node id='-96' action='modify' visible='true' lat='2.0003840848855767' lon='1.0046258759080025' />
+ <node id='-94' action='modify' visible='true' lat='2.0004302380949586' lon='1.004499646875733' />
+ <node id='-92' action='modify' visible='true' lat='2.0003687004821606' lon='1.0053278324776966' />
+ <node id='-90' action='modify' visible='true' lat='2.0002640865351053' lon='1.0049368303533495' />
+ <node id='-88' action='modify' visible='true' lat='2.000291778462916' lon='1.004825995105503' />
+ <node id='-86' action='modify' visible='true' lat='2.0003563929593238' lon='1.004727474885195'>
+ <tag k='test:id' v='n-SW' />
+ <tag k='test:section' v='points-on-roads' />
+ </node>
+ <node id='-84' action='modify' visible='true' lat='2.0002240870829975' lon='1.00452427693081' />
+ <node id='-82' action='modify' visible='true' lat='2.000715618411992' lon='1.0018322734314236'>
+ <tag k='name' v='road-with-alley' />
+ </node>
+ <node id='-80' action='modify' visible='true' lat='2.0007286441385155' lon='1.0004669962874884'>
+ <tag k='name' v='roads-with-pois' />
+ </node>
+ <node id='-78' action='modify' visible='true' lat='2.000354154459697' lon='1.002448114434296'>
+ <tag k='test:id' v='n-main-east' />
+ <tag k='test:section' v='road-with-alley' />
+ </node>
+ <node id='-76' action='modify' visible='true' lat='2.000266230783582' lon='1.0015520494830263'>
+ <tag k='test:id' v='n-main-west' />
+ <tag k='test:section' v='road-with-alley' />
+ </node>
+ <node id='-74' action='modify' visible='true' lat='2.000546283957771' lon='1.0019235091355527'>
+ <tag k='test:id' v='n-alley' />
+ <tag k='test:section' v='road-with-alley' />
+ </node>
+ <node id='-72' action='modify' visible='true' lat='2.000354154459697' lon='1.0019235091355527'>
+ <tag k='test:id' v='n-corner' />
+ <tag k='test:section' v='road-with-alley' />
+ </node>
+ <node id='-70' action='modify' visible='true' lat='2.0005755918453296' lon='1.0019593517336036' />
+ <node id='-68' action='modify' visible='true' lat='2.000298795108618' lon='1.0026077769165225' />
+ <node id='-66' action='modify' visible='true' lat='2.000311820838452' lon='1.0023699124021854' />
+ <node id='-64' action='modify' visible='true' lat='2.0003150772708946' lon='1.0021255310518389' />
+ <node id='-62' action='modify' visible='true' lat='2.0003085644060037' lon='1.0019593517336036' />
+ <node id='-60' action='modify' visible='true' lat='2.000305307973548' lon='1.0016758693672019' />
+ <node id='-58' action='modify' visible='true' lat='2.000311820838452' lon='1.0013435107307307' />
+ <node id='-56' action='modify' visible='true' lat='2.0004054696330322' lon='1.0001173628501097'>
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-54' action='modify' visible='true' lat='2.000397598928471' lon='1.0004297578433892'>
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-52' action='modify' visible='true' lat='2.0004579409958114' lon='1.0006607726283354'>
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-50' action='modify' visible='true' lat='2.0005418951727663' lon='1.0010624233339804'>
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-48' action='modify' visible='true' lat='2.000190337028091' lon='1.0001383641941959'>
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-46' action='modify' visible='true' lat='2.000198207733647' lon='1.000721151492583'>
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-44' action='modify' visible='true' lat='2.000219196281612' lon='1.0010676736700022'>
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-42' action='modify' visible='true' lat='2.000339880427198' lon='1.000390380323228'>
+ <tag k='test:id' v='p-N2' />
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-40' action='modify' visible='true' lat='2.000292656197374' lon='1.0008104072049482'>
+ <tag k='test:id' v='p-S1' />
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-38' action='modify' visible='true' lat='2.0005287773329004' lon='1.0005321393958087'>
+ <tag k='test:id' v='p-N1' />
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-36' action='modify' visible='true' lat='2.000106382833144' lon='1.0006397712842492'>
+ <tag k='test:id' v='p-S2' />
+ <tag k='test:section' v='roads-with-pois' />
+ </node>
+ <node id='-34' action='modify' visible='true' lat='2.0007211692424525' lon='1.0035828658185688'>
+ <tag k='name' v='building-on-street-corner' />
+ </node>
+ <node id='-32' action='modify' visible='true' lat='2.000231635335803' lon='1.0031759205058477' />
+ <node id='-30' action='modify' visible='true' lat='2.000221087674047' lon='1.0040360790429201' />
+ <node id='-28' action='modify' visible='true' lat='2.0002263615049336' lon='1.0042735461237067' />
+ <node id='-26' action='modify' visible='true' lat='2.000105063390253' lon='1.0040413560891597' />
+ <node id='-24' action='modify' visible='true' lat='2.0006377202618473' lon='1.0040363991494512' />
+ <node id='-22' action='modify' visible='true' lat='2.0004388569487612' lon='1.0039042871025967' />
+ <node id='-20' action='modify' visible='true' lat='2.0004385547672516' lon='1.0040022536164286' />
+ <node id='-18' action='modify' visible='true' lat='2.0002343878022306' lon='1.0040016230872442' />
+ <node id='-16' action='modify' visible='true' lat='2.000234689983778' lon='1.0039036565734125' />
+ <node id='-14' action='modify' visible='true' lat='2.0003240388594246' lon='1.0040018999567464'>
+ <tag k='test:id' v='n-edge-NS' />
+ <tag k='test:section' v='building-on-street-corner' />
+ </node>
+ <node id='-12' action='modify' visible='true' lat='2.000354798021768' lon='1.0039384858598128'>
+ <tag k='test:id' v='n-inner' />
+ <tag k='test:section' v='building-on-street-corner' />
+ </node>
+ <node id='-10' action='modify' visible='true' lat='2.0004676051467527' lon='1.0039478144477645'>
+ <tag k='test:id' v='n-outer' />
+ <tag k='test:section' v='building-on-street-corner' />
+ </node>
+ <node id='-8' action='modify' visible='true' lat='2.000234516370527' lon='1.0039599415620857'>
+ <tag k='test:id' v='n-edge-WE' />
+ <tag k='test:section' v='building-on-street-corner' />
+ </node>
+ <node id='100000' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='2.0' lon='1.0' />
+ <node id='100001' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='3.0' lon='1.0' />
+ <node id='100002' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='3.0' lon='2.0' />
+ <node id='100003' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='2.0' lon='2.0' />
+ <way id='-195' action='modify' visible='true'>
+ <nd ref='-170' />
+ <nd ref='-172' />
+ <nd ref='-174' />
+ <tag k='test:id' v='w-5' />
+ <tag k='test:section' v='split-road' />
+ </way>
+ <way id='-194' action='modify' visible='true'>
+ <nd ref='-160' />
+ <nd ref='-162' />
+ <nd ref='-164' />
+ <nd ref='-166' />
+ <nd ref='-168' />
+ <nd ref='-170' />
+ <tag k='test:id' v='w-4a' />
+ <tag k='test:section' v='split-road' />
+ </way>
+ <way id='-193' action='modify' visible='true'>
+ <nd ref='-152' />
+ <nd ref='-154' />
+ <nd ref='-156' />
+ <nd ref='-158' />
+ <nd ref='-160' />
+ <tag k='test:id' v='w-3' />
+ <tag k='test:section' v='split-road' />
+ </way>
+ <way id='-192' action='modify' visible='true'>
+ <nd ref='-144' />
+ <nd ref='-146' />
+ <nd ref='-148' />
+ <nd ref='-150' />
+ <nd ref='-152' />
+ <tag k='test:id' v='w-2' />
+ <tag k='test:section' v='split-road' />
+ </way>
+ <way id='-184' action='modify' visible='true'>
+ <nd ref='-160' />
+ <nd ref='-183' />
+ <nd ref='-185' />
+ <nd ref='-187' />
+ <nd ref='-189' />
+ <nd ref='-170' />
+ <tag k='test:id' v='w-4b' />
+ <tag k='test:section' v='split-road' />
+ </way>
+ <way id='-143' action='modify' visible='true'>
+ <nd ref='-141' />
+ <nd ref='-142' />
+ <nd ref='-144' />
+ <tag k='test:id' v='w-1' />
+ <tag k='test:section' v='split-road' />
+ </way>
+ <way id='-134' action='modify' visible='true'>
+ <nd ref='-94' />
+ <nd ref='-96' />
+ <nd ref='-98' />
+ <nd ref='-112' />
+ <nd ref='-104' />
+ <nd ref='-100' />
+ <tag k='test:id' v='w-north' />
+ <tag k='test:section' v='points-on-roads' />
+ </way>
+ <way id='-132' action='modify' visible='true'>
+ <nd ref='-84' />
+ <nd ref='-106' />
+ <nd ref='-86' />
+ <nd ref='-88' />
+ <nd ref='-90' />
+ <nd ref='-102' />
+ <nd ref='-92' />
+ <tag k='test:id' v='w-south' />
+ <tag k='test:section' v='points-on-roads' />
+ </way>
+ <way id='-130' action='modify' visible='true'>
+ <nd ref='-62' />
+ <nd ref='-70' />
+ <tag k='test:id' v='w-alley' />
+ <tag k='test:section' v='road-with-alley' />
+ </way>
+ <way id='-128' action='modify' visible='true'>
+ <nd ref='-58' />
+ <nd ref='-60' />
+ <nd ref='-62' />
+ <nd ref='-64' />
+ <nd ref='-66' />
+ <nd ref='-68' />
+ <tag k='test:id' v='w-main' />
+ <tag k='test:section' v='road-with-alley' />
+ </way>
+ <way id='-126' action='modify' visible='true'>
+ <nd ref='-56' />
+ <nd ref='-54' />
+ <nd ref='-52' />
+ <nd ref='-50' />
+ <tag k='test:id' v='w-north' />
+ <tag k='test:section' v='roads-with-pois' />
+ </way>
+ <way id='-124' action='modify' visible='true'>
+ <nd ref='-48' />
+ <nd ref='-46' />
+ <nd ref='-44' />
+ <tag k='test:id' v='w-south' />
+ <tag k='test:section' v='roads-with-pois' />
+ </way>
+ <way id='-122' action='modify' visible='true'>
+ <nd ref='-32' />
+ <nd ref='-30' />
+ <nd ref='-28' />
+ <tag k='test:id' v='w-WE' />
+ <tag k='test:section' v='building-on-street-corner' />
+ </way>
+ <way id='-120' action='modify' visible='true'>
+ <nd ref='-26' />
+ <nd ref='-30' />
+ <nd ref='-24' />
+ <tag k='test:id' v='w-NS' />
+ <tag k='test:section' v='building-on-street-corner' />
+ </way>
+ <way id='-118' action='modify' visible='true'>
+ <nd ref='-22' />
+ <nd ref='-20' />
+ <nd ref='-14' />
+ <nd ref='-18' />
+ <nd ref='-8' />
+ <nd ref='-16' />
+ <nd ref='-22' />
+ <tag k='area' v='yes' />
+ <tag k='test:id' v='w-building' />
+ <tag k='test:section' v='building-on-street-corner' />
+ </way>
+ <way id='100000' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1'>
+ <nd ref='100000' />
+ <nd ref='100001' />
+ <nd ref='100002' />
+ <nd ref='100003' />
+ <nd ref='100000' />
+ <tag k='note' v='test area, do not leave' />
+ </way>
+</osm>
--- /dev/null
+w-5 | LINESTRING(1.0056855 2.0005616,1.0056087 2.0005669,1.0055106 2.0005245)
+w-4a | LINESTRING(1.0062843 2.0005139,1.0061359 2.0004954,1.0060538 2.0005113,1.0059107 2.000506,1.0057358 2.0005007,1.0056855 2.0005616)
+w-3 | LINESTRING(1.0061995 2.0003391,1.0062816 2.0002624,1.0063585 2.0002968,1.0063717 2.0004715,1.0062843 2.0005139)
+w-2 | LINESTRING(1.0057941 2.0002809,1.0058869 2.0003259,1.0060034 2.0003497,1.0061041 2.0003577,1.0061995 2.0003391)
+w-4b | LINESTRING(1.0062843 2.0005139,1.0061306 2.0005324,1.0060511 2.000543,1.0058975 2.000543,1.0057491 2.0005351,1.0056855 2.0005616)
+w-1 | LINESTRING(1.0054709 2.0003603,1.0056352 2.0002782,1.0057941 2.0002809)
--- /dev/null
+""" Steps for checking the results of queries.
+"""
+
+from nose.tools import *
+from lettuce import *
+from tidylib import tidy_document
+from collections import OrderedDict
+import json
+import logging
+import re
+from xml.dom.minidom import parseString
+
+logger = logging.getLogger(__name__)
+
+def _parse_xml():
+ """ Puts the DOM structure into more convenient python
+ with a similar structure as the json document, so
+ that the same the semantics can be used. It does not
+ check if the content is valid (or at least not more than
+ necessary to transform it into a dict structure).
+ """
+ page = parseString(world.page).documentElement
+
+ # header info
+ world.result_header = OrderedDict(page.attributes.items())
+ logger.debug('Result header: %r' % (world.result_header))
+ world.results = []
+
+ # results
+ if page.nodeName == 'searchresults':
+ for node in page.childNodes:
+ if node.nodeName != "#text":
+ assert_equals(node.nodeName, 'place', msg="Unexpected element '%s'" % node.nodeName)
+ newresult = OrderedDict(node.attributes.items())
+ assert_not_in('address', newresult)
+ assert_not_in('geokml', newresult)
+ address = OrderedDict()
+ for sub in node.childNodes:
+ if sub.nodeName == 'geokml':
+ newresult['geokml'] = sub.childNodes[0].toxml()
+ elif sub.nodeName == '#text':
+ pass
+ else:
+ address[sub.nodeName] = sub.firstChild.nodeValue.strip()
+ if address:
+ newresult['address'] = address
+ world.results.append(newresult)
+ elif page.nodeName == 'reversegeocode':
+ haserror = False
+ address = {}
+ for node in page.childNodes:
+ if node.nodeName == 'result':
+ assert_equals(len(world.results), 0)
+ assert (not haserror)
+ world.results.append(OrderedDict(node.attributes.items()))
+ assert_not_in('display_name', world.results[0])
+ assert_not_in('address', world.results[0])
+ world.results[0]['display_name'] = node.firstChild.nodeValue.strip()
+ elif node.nodeName == 'error':
+ assert_equals(len(world.results), 0)
+ haserror = True
+ elif node.nodeName == 'addressparts':
+ assert (not haserror)
+ address = OrderedDict()
+ for sub in node.childNodes:
+ address[sub.nodeName] = sub.firstChild.nodeValue.strip()
+ world.results[0]['address'] = address
+ elif node.nodeName == "#text":
+ pass
+ else:
+ assert False, "Unknown content '%s' in XML" % node.nodeName
+ else:
+ assert False, "Unknown document node name %s in XML" % page.nodeName
+
+ logger.debug("The following was parsed out of XML:")
+ logger.debug(world.results)
+
+@step(u'a HTTP (\d+) is returned')
+def api_result_http_error(step, error):
+ assert_equals(world.returncode, int(error))
+
+@step(u'the result is valid( \w+)?')
+def api_result_is_valid(step, fmt):
+ assert_equals(world.returncode, 200)
+
+ if world.response_format == 'html':
+ document, errors = tidy_document(world.page,
+ options={'char-encoding' : 'utf8'})
+ assert(len(errors) == 0), "Errors found in HTML document:\n%s" % errors
+ world.results = document
+ elif world.response_format == 'xml':
+ _parse_xml()
+ elif world.response_format == 'json':
+ world.results = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(world.page)
+ else:
+ assert False, "Unknown page format: %s" % (world.response_format)
+
+ if fmt:
+ assert_equals (fmt.strip(), world.response_format)
+
+
+def compare(operator, op1, op2):
+ if operator == 'less than':
+ return op1 < op2
+ elif operator == 'more than':
+ return op1 > op2
+ elif operator == 'exactly':
+ return op1 == op2
+ elif operator == 'at least':
+ return op1 >= op2
+ elif operator == 'at most':
+ return op1 <= op2
+ else:
+ raise Exception("unknown operator '%s'" % operator)
+
+@step(u'(less than|more than|exactly|at least|at most) (\d+) results? (?:is|are) returned')
+def validate_result_number(step, operator, number):
+ step.given('the result is valid')
+ numres = len(world.results)
+ assert compare(operator, numres, int(number)), \
+ "Bad number of results: expected %s %s, got %d." % (operator, number, numres)
+
+@step(u'result (\d+) has( not)? attributes (\S+)')
+def search_check_for_result_attribute(step, num, invalid, attrs):
+ num = int(num)
+ step.given('at least %d results are returned' % (num + 1))
+ res = world.results[num]
+ for attr in attrs.split(','):
+ if invalid:
+ assert_not_in(attr.strip(), res)
+ else:
+ assert_in(attr.strip(),res)
+
+@step(u'there is a json wrapper "([^"]*)"')
+def api_result_check_json_wrapper(step, wrapper):
+ step.given('the result is valid json')
+ assert_equals(world.json_callback, wrapper)
+
+@step(u'result header contains')
+def api_result_header_contains(step):
+ step.given('the result is valid')
+ for line in step.hashes:
+ assert_in(line['attr'], world.result_header)
+ m = re.match("%s$" % (line['value'],), world.result_header[line['attr']])
+
+@step(u'results contain$')
+def api_result_contains(step):
+ step.given('at least 1 result is returned')
+ for line in step.hashes:
+ if 'ID' in line:
+ reslist = (world.results[int(line['ID'])],)
+ else:
+ reslist = world.results
+ for k,v in line.iteritems():
+ if k == 'latlon':
+ for curres in reslist:
+ world.match_geometry((float(curres['lat']), float(curres['lon'])), v)
+ elif k != 'ID':
+ for curres in reslist:
+ assert_in(k, curres)
+ if v[0] in '<>=':
+ # mathematical operation
+ evalexp = '%s %s' % (curres[k], v)
+ res = eval(evalexp)
+ logger.debug('Evaluating: %s = %s' % (res, evalexp))
+ assert_true(res, "Evaluation failed: %s" % (evalexp, ))
+ else:
+ # regex match
+ m = re.match("%s$" % (v,), curres[k])
+ assert_is_not_none(m, msg="field %s does not match: %s$ != %s." % (k, v, curres[k]))
+
+
+@step(u'result addresses contain$')
+def api_result_address_contains(step):
+ step.given('the result is valid')
+ for line in step.hashes:
+ if 'ID' in line:
+ reslist = (world.results[int(line['ID'])],)
+ else:
+ reslist = world.results
+ for k,v in line.iteritems():
+ if k != 'ID':
+ for res in reslist:
+ curres = res['address']
+ assert_in(k, curres)
+ m = re.match("%s$" % (v,), curres[k])
+ assert_is_not_none(m, msg="field %s does not match: %s$ != %s." % (k, v, curres[k]))
+
+
+@step(u'address of result (\d+) contains')
+def api_result_address_details(step, resid):
+ resid = int(resid)
+ step.given('at least %d results are returned' % (resid + 1))
+ addr = world.results[resid]['address']
+ for line in step.hashes:
+ assert_in(line['type'], addr)
+ assert_equals(line['value'], addr[line['type']])
+
+@step(u'address of result (\d+) does not contain (.*)')
+def api_result_address_details_missing(step, resid, types):
+ resid = int(resid)
+ step.given('at least %d results are returned' % (resid + 1))
+ addr = world.results[resid]['address']
+ for t in types.split(','):
+ assert_not_in(t.strip(), addr)
+
+
+@step(u'address of result (\d+) is')
+def api_result_address_exact(step, resid):
+ resid = int(resid)
+ step.given('at least %d results are returned' % (resid + 1))
+ result = world.results[resid]
+ linenr = 0
+ assert_equals(len(step.hashes), len(result['address']))
+ for k,v in result['address'].iteritems():
+ assert_equals(step.hashes[linenr]['type'], k)
+ assert_equals(step.hashes[linenr]['value'], v)
+ linenr += 1
+
+
+@step('there are( no)? duplicates')
+def api_result_check_for_duplicates(step, nodups=None):
+ step.given('at least 1 result is returned')
+ resarr = []
+ for res in world.results:
+ resarr.append((res['osm_type'], res['class'],
+ res['type'], res['display_name']))
+
+ if nodups is None:
+ assert len(resarr) > len(set(resarr))
+ else:
+ assert_equal(len(resarr), len(set(resarr)))
--- /dev/null
+""" Steps for setting up and sending API requests.
+"""
+
+from nose.tools import *
+from lettuce import *
+import urllib
+import urllib2
+import logging
+
+logger = logging.getLogger(__name__)
+
+def api_call(requesttype):
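+ """ Send a request of the given type to the Nominatim instance
+ configured in world.config.base_url, using the previously
+ collected world.params and world.header. Stores the raw
+ response in world.page, the HTTP status in world.returncode
+ and derives world.response_format from the content type.
+ """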
+ world.json_callback = None
+ data = urllib.urlencode(world.params)
+ url = "%s/%s?%s" % (world.config.base_url, requesttype, data)
+ req = urllib2.Request(url=url, headers=world.header)
+ try:
+ fd = urllib2.urlopen(req)
+ world.page = fd.read()
+ world.returncode = 200
+ except urllib2.HTTPError, ex:
+ world.returncode = ex.code
+ world.page = None
+ return
+
+ pageinfo = fd.info()
+ assert_equal('utf-8', pageinfo.getparam('charset').lower())
+ pagetype = pageinfo.gettype()
+
+ fmt = world.params.get('format')
+ if fmt == 'html':
+ assert_equals('text/html', pagetype)
+ world.response_format = fmt
+ elif fmt == 'xml':
+ assert_equals('text/xml', pagetype)
+ world.response_format = fmt
+ elif fmt in ('json', 'jsonv2'):
+ if 'json_callback' in world.params:
+ world.json_callback = world.params['json_callback']
+ assert world.page.startswith(world.json_callback + '(')
+ assert world.page.endswith(')')
+ world.page = world.page[(len(world.json_callback)+1):-1]
+ assert_equals('application/javascript', pagetype)
+ else:
+ assert_equals('application/json', pagetype)
+ world.response_format = 'json'
+ else:
+ if requesttype == 'reverse':
+ assert_equals('text/xml', pagetype)
+ world.response_format = 'xml'
+ else:
+ assert_equals('text/html', pagetype)
+ world.response_format = 'html'
+ logger.debug("Page received (%s):" % world.response_format)
+ logger.debug(world.page)
+
+ api_setup_prepare_params(None)
+
+@before.each_scenario
+def api_setup_prepare_params(scenario):
+ world.results = []
+ world.params = {}
+ world.header = {}
+
+@step(u'the request parameters$')
+def api_setup_parameters(step):
+ """Define the parameters of the request as a hash.
+ Resets parameter list.
+ """
+ world.params = step.hashes[0]
+
+@step(u'the HTTP header$')
+def api_setup_header(step):
+ """Define additional HTTP header fields as a hash.
+ Resets the header list.
+ """
+ world.header = step.hashes[0]
+
+
+@step(u'sending( \w+)? search query "([^"]*)"( with address)?')
+def api_setup_search(step, fmt, query, doaddr):
+ world.params['q'] = query.encode('utf8')
+ if doaddr:
+ world.params['addressdetails'] = 1
+ if fmt:
+ world.params['format'] = fmt.strip()
+ api_call('search')
+
+@step(u'sending( \w+)? structured query( with address)?$')
+def api_setup_structured_search(step, fmt, doaddr):
+ world.params.update(step.hashes[0])
+ if doaddr:
+ world.params['addressdetails'] = 1
+ if fmt:
+ world.params['format'] = fmt.strip()
+ api_call('search')
+
+@step(u'looking up (\w+ )?coordinates ([-\d.]+),([-\d.]+)')
+def api_setup_reverse(step, fmt, lat, lon):
+ world.params['lat'] = lat
+ world.params['lon'] = lon
+ if fmt and fmt.strip():
+ world.params['format'] = fmt.strip()
+ api_call('reverse')
+
+@step(u'looking up details for ([NRW]?\d+)')
+def api_setup_details(step, obj):
+ if obj[0] in ('N', 'R', 'W'):
+ # an osm id
+ world.params['osmtype'] = obj[0]
+ world.params['osmid'] = obj[1:]
+ else:
+ world.params['place_id'] = obj
+ api_call('details')
--- /dev/null
+""" Steps for checking the DB after import and update tests.
+
+ There are two groups of test here. The first group tests
+ the contents of db tables directly, the second checks
+ query results by using the command line query tool.
+"""
+
+from nose.tools import *
+from lettuce import *
+import psycopg2
+import psycopg2.extensions
+import psycopg2.extras
+import os
+import subprocess
+import random
+import json
+import re
+import logging
+from collections import OrderedDict
+
+logger = logging.getLogger(__name__)
+
+@step(u'table placex contains as names for (N|R|W)(\d+)')
+def check_placex_names(step, osmtyp, osmid):
+ """ Check for the exact content of the name hstaore in placex.
+ """
+ cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
+ cur.execute('SELECT name FROM placex where osm_type = %s and osm_id =%s', (osmtyp, int(osmid)))
+ for line in cur:
+ names = dict(line['name'])
+ for name in step.hashes:
+ assert_in(name['k'], names)
+ assert_equals(names[name['k']], name['v'])
+ del names[name['k']]
+ assert_equals(len(names), 0)
+
+
+
+@step(u'table ([a-z_]+) contains$')
+def check_placex_content(step, tablename):
+ """ check that the given lines are in the given table
+ Entries are searched by osm_type/osm_id and then all
+ given columns are tested. If there is more than one
+ line for an OSM object, they must match in these columns.
+ """
+ cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
+ for line in step.hashes:
+ osmtype, osmid, cls = world.split_id(line['object'])
+ q = 'SELECT *'
+ if tablename == 'placex':
+ q = q + ", ST_X(centroid) as clat, ST_Y(centroid) as clon"
+ q = q + ' FROM %s where osm_type = %%s and osm_id = %%s' % (tablename,)
+ if cls is None:
+ params = (osmtype, osmid)
+ else:
+ q = q + ' and class = %s'
+ params = (osmtype, osmid, cls)
+ cur.execute(q, params)
+ assert(cur.rowcount > 0)
+ for res in cur:
+ for k,v in line.iteritems():
+ if not k == 'object':
+ assert_in(k, res)
+ if type(res[k]) is dict:
+ val = world.make_hash(v)
+ assert_equals(res[k], val)
+ elif k in ('parent_place_id', 'linked_place_id'):
+ pid = world.get_placeid(v)
+ assert_equals(pid, res[k], "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['object'], k, pid, res[k]))
+ elif k == 'centroid':
+ world.match_geometry((res['clat'], res['clon']), v)
+ else:
+ assert_equals(str(res[k]), v, "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['object'], k, str(res[k]), v))
+
+@step(u'table (placex?) has no entry for (N|R|W)(\d+)(:\w+)?')
+def check_placex_missing(step, tablename, osmtyp, osmid, placeclass):
+ cur = world.conn.cursor()
+ q = 'SELECT count(*) FROM %s where osm_type = %%s and osm_id = %%s' % (tablename, )
+ args = [osmtyp, int(osmid)]
+ if placeclass is not None:
+ q = q + ' and class = %s'
+ args.append(placeclass[1:])
+ cur.execute(q, args)
+ numres = cur.fetchone()[0]
+ assert_equals (numres, 0)
+
+@step(u'search_name table contains$')
+def check_search_name_content(step):
+ cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
+ for line in step.hashes:
+ placeid = world.get_placeid(line['place_id'])
+ cur.execute('SELECT * FROM search_name WHERE place_id = %s', (placeid,))
+ assert(cur.rowcount > 0)
+ for res in cur:
+ for k,v in line.iteritems():
+ if k in ('search_rank', 'address_rank'):
+ assert_equals(int(v), res[k], "Results for '%s'/'%s' differ: '%s' != '%d'" % (line['place_id'], k, v, res[k]))
+ elif k in ('importance', ):
+ assert_equals(float(v), res[k], "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['place_id'], k, v, res[k]))
+ elif k in ('name_vector', 'nameaddress_vector'):
+ terms = [x.strip().replace('#', ' ') for x in v.split(',')]
+ cur.execute('SELECT word_id, word_token FROM word, (SELECT unnest(%s) as term) t WHERE word_token = make_standard_name(t.term)', (terms,))
+ assert cur.rowcount >= len(terms)
+ for wid in cur:
+ assert_in(wid['word_id'], res[k], "Missing term for %s/%s: %s" % (line['place_id'], k, wid['word_token']))
+ elif k in ('country_code', ):
+ assert_equals(v, res[k], "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['place_id'], k, v, res[k]))
+ elif k == 'place_id':
+ pass
+ else:
+ raise Exception("Cannot handle field %s in search_name table" % (k, ))
+
+
+@step(u'table search_name has no entry for (.*)')
+def check_search_name_missing(step, osmid):
+ """ Checks if there is an entry in the search index for the
+ given place object.
+ """
+ cur = world.conn.cursor()
+ placeid = world.get_placeid(osmid)
+ cur.execute('SELECT count(*) FROM search_name WHERE place_id =%s', (placeid,))
+ numres = cur.fetchone()[0]
+ assert_equals (numres, 0)
+
--- /dev/null
+""" Steps for setting up a test database with imports and updates.
+
+ There are two ways to state geometries for test data: with coordinates
+ and via scenes.
+
+ Coordinates should be given as a wkt without the enclosing type name.
+
+ Scenes are prepared geometries which can be found in the scenes/data/
+ directory. Each scene is saved in a .wkt file with its name, which
+ contains a list of id/wkt pairs. A scene can be set globally
+ for a feature by using the step `the scene <scene name>`. Then each
+ object should be referred to as `:<object id>`. A geometry can also
+ be referred to without loading the scene by explicitly stating the
+ scene: `<scene name>:<object id>`.
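+
+ Illustrative example using both forms (scene and object names
+ are made up):
+
+ Given the scene admin-areas
+ And the place areas
+ | osm_id | class | type | geometry |
+ | 1 | boundary | administrative | :city-outline |
+ | 2 | highway | pedestrian | streets:main-square |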
+"""
+
+from nose.tools import *
+from lettuce import *
+import psycopg2
+import psycopg2.extensions
+import psycopg2.extras
+import os
+import subprocess
+import random
+import base64
+
+psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
+
+@before.each_scenario
+def setup_test_database(scenario):
+ """ Creates a new test database from the template database
+ that was set up earlier in terrain.py. Will be done only
+ for scenarios whose feature is tagged with 'DB'.
+ """
+ if scenario.feature.tags is not None and 'DB' in scenario.feature.tags:
+ world.db_template_setup()
+ world.write_nominatim_config(world.config.test_db)
+ conn = psycopg2.connect(database=world.config.template_db)
+ conn.set_isolation_level(0)
+ cur = conn.cursor()
+ cur.execute('DROP DATABASE IF EXISTS %s' % (world.config.test_db, ))
+ cur.execute('CREATE DATABASE %s TEMPLATE = %s' % (world.config.test_db, world.config.template_db))
+ conn.close()
+ world.conn = psycopg2.connect(database=world.config.test_db)
+ psycopg2.extras.register_hstore(world.conn, globally=False, unicode=True)
+
+@step('a wiped database')
+def db_setup_wipe_db(step):
+ """Explicit DB scenario setup only needed
+ to work around a bug where scenario outlines don't call
+ before_each_scenario correctly.
+ """
+ if hasattr(world, 'conn'):
+ world.conn.close()
+ conn = psycopg2.connect(database=world.config.template_db)
+ conn.set_isolation_level(0)
+ cur = conn.cursor()
+ cur.execute('DROP DATABASE IF EXISTS %s' % (world.config.test_db, ))
+ cur.execute('CREATE DATABASE %s TEMPLATE = %s' % (world.config.test_db, world.config.template_db))
+ conn.close()
+ world.conn = psycopg2.connect(database=world.config.test_db)
+ psycopg2.extras.register_hstore(world.conn, globally=False, unicode=True)
+
+
+@after.each_scenario
+def tear_down_test_database(scenario):
+ """ Drops any previously created test database.
+ """
+ if hasattr(world, 'conn'):
+ world.conn.close()
+ if scenario.feature.tags is not None and 'DB' in scenario.feature.tags and not world.config.keep_scenario_db:
+ conn = psycopg2.connect(database=world.config.template_db)
+ conn.set_isolation_level(0)
+ cur = conn.cursor()
+ cur.execute('DROP DATABASE %s' % (world.config.test_db,))
+ conn.close()
+
+
+def _format_placex_cols(cols, geomtype, force_name):
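+ """ Normalize the columns of a place row before insertion:
+ 'name' and 'extratags' are converted into hstore values (a
+ random name is generated when force_name is set and no name
+ is given), 'admin_level' defaults to 100 and 'geometry' is
+ turned into a PostGIS expression, resolving scene references
+ where necessary.
+ """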
+ if 'name' in cols:
+ if cols['name'].startswith("'"):
+ cols['name'] = world.make_hash(cols['name'])
+ else:
+ cols['name'] = { 'name' : cols['name'] }
+ elif force_name:
+ cols['name'] = { 'name' : base64.urlsafe_b64encode(os.urandom(int(random.random()*30))) }
+ if 'extratags' in cols:
+ cols['extratags'] = world.make_hash(cols['extratags'])
+ if 'admin_level' not in cols:
+ cols['admin_level'] = 100
+ if 'geometry' in cols:
+ coords = world.get_scene_geometry(cols['geometry'])
+ if coords is None:
+ coords = "'%s(%s)'::geometry" % (geomtype, cols['geometry'])
+ else:
+ coords = "'%s'::geometry" % coords.wkt
+ cols['geometry'] = coords
+
+
+def _insert_place_table_nodes(places, force_name):
+ cur = world.conn.cursor()
+ for line in places:
+ cols = dict(line)
+ cols['osm_type'] = 'N'
+ _format_placex_cols(cols, 'POINT', force_name)
+ if 'geometry' in cols:
+ coords = cols.pop('geometry')
+ else:
+ coords = "ST_Point(%f, %f)" % (random.random()*360 - 180, random.random()*180 - 90)
+
+ query = 'INSERT INTO place (%s,geometry) values(%s, ST_SetSRID(%s, 4326))' % (
+ ','.join(cols.iterkeys()),
+ ','.join(['%s' for x in range(len(cols))]),
+ coords
+ )
+ cur.execute(query, cols.values())
+ world.conn.commit()
+
+
+def _insert_place_table_objects(places, geomtype, force_name):
+ cur = world.conn.cursor()
+ for line in places:
+ cols = dict(line)
+ if 'osm_type' not in cols:
+ cols['osm_type'] = 'W'
+ _format_placex_cols(cols, geomtype, force_name)
+ coords = cols.pop('geometry')
+
+ query = 'INSERT INTO place (%s, geometry) values(%s, ST_SetSRID(%s, 4326))' % (
+ ','.join(cols.iterkeys()),
+ ','.join(['%s' for x in range(len(cols))]),
+ coords
+ )
+ cur.execute(query, cols.values())
+ world.conn.commit()
+
+@step(u'the scene (.*)')
+def import_set_scene(step, scene):
+ world.load_scene(scene)
+
+@step(u'the (named )?place (node|way|area)s')
+def import_place_table_nodes(step, named, osmtype):
+ """Insert a list of nodes into the placex table.
+ Expects a table where columns are named in the same way as placex.
+ """
+ cur = world.conn.cursor()
+ cur.execute('ALTER TABLE place DISABLE TRIGGER place_before_insert')
+ if osmtype == 'node':
+ _insert_place_table_nodes(step.hashes, named is not None)
+ elif osmtype == 'way' :
+ _insert_place_table_objects(step.hashes, 'LINESTRING', named is not None)
+ elif osmtype == 'area' :
+ _insert_place_table_objects(step.hashes, 'POLYGON', named is not None)
+ cur.execute('ALTER TABLE place ENABLE TRIGGER place_before_insert')
+ cur.close()
+ world.conn.commit()
+
+
+@step(u'the relations')
+def import_fill_planet_osm_rels(step):
+ """Adds a raw relation to the osm2pgsql table.
+ Three columns need to be suplied: id, tags, members.
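+ Members are given as a comma-separated list of
+ <type letter><osm id>[:<role>] entries, e.g. 'n1,w45:inner,r2'.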
+ """
+ cur = world.conn.cursor()
+ for line in step.hashes:
+ members = []
+ parts = { 'n' : [], 'w' : [], 'r' : [] }
+ if line['members'].strip():
+ for mem in line['members'].split(','):
+ memparts = mem.strip().split(':', 2)
+ memid = memparts[0].lower()
+ parts[memid[0]].append(int(memid[1:]))
+ members.append(memid)
+ if len(memparts) == 2:
+ members.append(memparts[1])
+ else:
+ members.append('')
+ tags = []
+ for k,v in world.make_hash(line['tags']).iteritems():
+ tags.extend((k,v))
+ if not members:
+ members = None
+
+ cur.execute("""INSERT INTO planet_osm_rels
+ (id, way_off, rel_off, parts, members, tags, pending)
+ VALUES (%s, %s, %s, %s, %s, %s, false)""",
+ (line['id'], len(parts['n']), len(parts['n']) + len(parts['w']),
+ parts['n'] + parts['w'] + parts['r'], members, tags))
+ world.conn.commit()
+
+
+@step(u'the ways')
+def import_fill_planet_osm_ways(step):
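+ """ Adds raw ways to the osm2pgsql table.
+ Expects a table with the columns id, nodes and (optionally) tags,
+ where nodes is a comma-separated list of node ids.
+ """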
+ cur = world.conn.cursor()
+ for line in step.hashes:
+ if 'tags' in line:
+ tags = world.make_hash(line['tags'])
+ else:
+ tags = None
+ nodes = [int(x.strip()) for x in line['nodes'].split(',')]
+
+ cur.execute("""INSERT INTO planet_osm_ways
+ (id, nodes, tags, pending)
+ VALUES (%s, %s, %s, false)""",
+ (line['id'], nodes, tags))
+ world.conn.commit()
+
+############### import and update steps #######################################
+
+@step(u'importing')
+def import_database(step):
+ """ Runs the actual indexing. """
+ world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions')
+ cur = world.conn.cursor()
+ cur.execute("""insert into placex (osm_type, osm_id, class, type, name, admin_level,
+ housenumber, street, addr_place, isin, postcode, country_code, extratags,
+ geometry) select * from place""")
+ world.conn.commit()
+ world.run_nominatim_script('setup', 'index', 'index-noanalyse')
+ #world.db_dump_table('placex')
+
+
+@step(u'updating place (node|way|area)s')
+def update_place_table_nodes(step, osmtype):
+ """ Replace a geometry in place by reinsertion and reindex database.
+ """
+ world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
+ if osmtype == 'node':
+ _insert_place_table_nodes(step.hashes, False)
+ elif osmtype == 'way':
+ _insert_place_table_objects(step.hashes, 'LINESTRING', False)
+ elif osmtype == 'area':
+ _insert_place_table_objects(step.hashes, 'POLYGON', False)
+ world.run_nominatim_script('update', 'index')
+
+@step(u'marking for delete (.*)')
+def update_delete_places(step, places):
+ """ Remove an entry from place and reindex database.
+ """
+ world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
+ cur = world.conn.cursor()
+ for place in places.split(','):
+ osmtype, osmid, cls = world.split_id(place)
+ if cls is None:
+ q = "delete from place where osm_type = %s and osm_id = %s"
+ params = (osmtype, osmid)
+ else:
+ q = "delete from place where osm_type = %s and osm_id = %s and class = %s"
+ params = (osmtype, osmid, cls)
+ cur.execute(q, params)
+ world.conn.commit()
+ #world.db_dump_table('placex')
+ world.run_nominatim_script('update', 'index')
+
+
+
+@step(u'sending query "(.*)"( with dups)?$')
+def query_cmd(step, query, with_dups):
+ """ Results in standard query output. The same tests as for API queries
+ can be used.
+ """
+ cmd = [os.path.join(world.config.source_dir, 'utils', 'query.php'),
+ '--search', query]
+ if with_dups is not None:
+ cmd.append('--nodedupe')
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (outp, err) = proc.communicate()
+ assert (proc.returncode == 0), "query.php failed with message: %s" % err
+ world.page = outp
+ world.response_format = 'json'
+ world.returncode = 200
+
--- /dev/null
+""" Steps for setting up a test database for osm2pgsql import.
+
+ Note that osm2pgsql features need a database and therefore need
+ to be tagged with @DB.
+"""
+
+from nose.tools import *
+from lettuce import *
+
+import logging
+import random
+import tempfile
+import os
+import subprocess
+
+logger = logging.getLogger(__name__)
+
+@before.each_scenario
+def osm2pgsql_setup_test(scenario):
+ world.osm2pgsql = []
+
+@step(u'the osm nodes:')
+def osm2pgsql_import_nodes(step):
+ """ Define a list of OSM nodes to be imported, given as a table.
+ Each line describes one node with all its attributes.
+ 'id' is mandatory, all other fields are filled with random values
+ when not given. If 'tags' is missing an empty tag list is assumed.
+ For updates, a mandatory 'action' column needs to contain 'C' (create),
+ 'M' (modify) or 'D' (delete).
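+
+ Illustrative example (all values are made up):
+
+ | action | id | geometry | tags |
+ | C | 1001 | 43.734 7.42 | 'amenity' : 'prison' |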
+ """
+ for line in step.hashes:
+ node = { 'type' : 'N', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
+ 'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
+ }
+ node.update(line)
+ node['id'] = int(node['id'])
+ if 'geometry' in node:
+ lat, lon = node['geometry'].split(' ')
+ node['lat'] = float(lat)
+ node['lon'] = float(lon)
+ else:
+ node['lon'] = random.random()*360 - 180
+ node['lat'] = random.random()*180 - 90
+ if 'tags' in node:
+ node['tags'] = world.make_hash(line['tags'])
+ else:
+ node['tags'] = {}
+
+ world.osm2pgsql.append(node)
+
+
+@step(u'the osm ways:')
+def osm2pgsql_import_ways(step):
+ """ Define a list of OSM ways to be imported.
+ """
+ for line in step.hashes:
+ way = { 'type' : 'W', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
+ 'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
+ }
+ way.update(line)
+
+ way['id'] = int(way['id'])
+ if 'tags' in way:
+ way['tags'] = world.make_hash(line['tags'])
+ else:
+ way['tags'] = None
+ way['nodes'] = way['nodes'].strip().split()
+
+ world.osm2pgsql.append(way)
+
+membertype = { 'N' : 'node', 'W' : 'way', 'R' : 'relation' }
+
+@step(u'the osm relations:')
+def osm2pgsql_import_rels(step):
+ """ Define a list of OSM relation to be imported.
+ """
+ for line in step.hashes:
+ rel = { 'type' : 'R', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
+ 'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
+ }
+ rel.update(line)
+
+ rel['id'] = int(rel['id'])
+ if 'tags' in rel:
+ rel['tags'] = world.make_hash(line['tags'])
+ else:
+ rel['tags'] = {}
+ members = []
+ if rel['members'].strip():
+ for mem in line['members'].split(','):
+ memparts = mem.strip().split(':', 2)
+ memid = memparts[0].upper()
+ members.append((membertype[memid[0]],
+ memid[1:],
+ memparts[1] if len(memparts) == 2 else ''
+ ))
+ rel['members'] = members
+
+ world.osm2pgsql.append(rel)
+
+
+
+def _sort_xml_entries(x, y):
+ if x['type'] == y['type']:
+ return cmp(x['id'], y['id'])
+ else:
+ return cmp('NWR'.find(x['type']), 'NWR'.find(y['type']))
+
+def write_osm_obj(fd, obj):
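+ """ Write a single previously collected node, way or relation
+ as OSM XML to the given file descriptor.
+ """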
+ if obj['type'] == 'N':
+ fd.write('<node id="%(id)d" lat="%(lat).8f" lon="%(lon).8f" version="%(version)s" timestamp="%(timestamp)s" changeset="%(changeset)s" uid="%(uid)s" user="%(user)s"'% obj)
+ if obj['tags'] is None:
+ fd.write('/>\n')
+ else:
+ fd.write('>\n')
+ for k,v in obj['tags'].iteritems():
+ fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
+ fd.write('</node>\n')
+ elif obj['type'] == 'W':
+ fd.write('<way id="%(id)d" version="%(version)s" changeset="%(changeset)s" timestamp="%(timestamp)s" user="%(user)s" uid="%(uid)s">\n' % obj)
+ for nd in obj['nodes']:
+ fd.write('<nd ref="%s" />\n' % (nd,))
+ for k,v in obj['tags'].iteritems():
+ fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
+ fd.write('</way>\n')
+ elif obj['type'] == 'R':
+ fd.write('<relation id="%(id)d" version="%(version)s" changeset="%(changeset)s" timestamp="%(timestamp)s" user="%(user)s" uid="%(uid)s">\n' % obj)
+ for mem in obj['members']:
+ fd.write(' <member type="%s" ref="%s" role="%s"/>\n' % mem)
+ for k,v in obj['tags'].iteritems():
+ fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
+ fd.write('</relation>\n')
+
+@step(u'loading osm data')
+def osm2pgsql_load_place(step):
+ """Imports the previously defined OSM data into a fresh copy of a
+ Nominatim test database.
+ """
+
+ world.osm2pgsql.sort(cmp=_sort_xml_entries)
+
+ # create an OSM file in /tmp
+ with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as fd:
+ fname = fd.name
+ fd.write("<?xml version='1.0' encoding='UTF-8'?>\n")
+ fd.write('<osm version="0.6" generator="test-nominatim" timestamp="2014-08-26T20:22:02Z">\n')
+ fd.write('\t<bounds minlat="43.72335" minlon="7.409205" maxlat="43.75169" maxlon="7.448637"/>\n')
+
+ for obj in world.osm2pgsql:
+ write_osm_obj(fd, obj)
+
+ fd.write('</osm>\n')
+
+ logger.debug( "Filename: %s" % fname)
+
+ cmd = [os.path.join(world.config.source_dir, 'utils', 'setup.php')]
+ cmd.extend(['--osm-file', fname, '--import-data'])
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (outp, outerr) = proc.communicate()
+ assert (proc.returncode == 0), "OSM data import failed:\n%s\n%s\n" % (outp, outerr)
+
+ ### reintroduce the triggers/indexes we've lost by having osm2pgsql set up place again
+ cur = world.conn.cursor()
+ cur.execute("""CREATE TRIGGER place_before_delete BEFORE DELETE ON place
+ FOR EACH ROW EXECUTE PROCEDURE place_delete()""")
+ cur.execute("""CREATE TRIGGER place_before_insert BEFORE INSERT ON place
+ FOR EACH ROW EXECUTE PROCEDURE place_insert()""")
+ cur.execute("""CREATE UNIQUE INDEX idx_place_osm_unique on place using btree(osm_id,osm_type,class,type)""")
+ world.conn.commit()
+
+
+ os.remove(fname)
+ world.osm2pgsql = []
+
+actiontypes = { 'C' : 'create', 'M' : 'modify', 'D' : 'delete' }
+
+@step(u'updating osm data')
+def osm2pgsql_update_place(step):
+ """Creates an osc file from the previously defined data and imports it
+ into the database.
+ """
+ world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions')
+ cur = world.conn.cursor()
+ cur.execute("""insert into placex (osm_type, osm_id, class, type, name, admin_level,
+ housenumber, street, addr_place, isin, postcode, country_code, extratags,
+ geometry) select * from place""")
+ world.conn.commit()
+ world.run_nominatim_script('setup', 'index', 'index-noanalyse')
+ world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
+
+ with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as fd:
+ fname = fd.name
+ fd.write("<?xml version='1.0' encoding='UTF-8'?>\n")
+ fd.write('<osmChange version="0.6" generator="Osmosis 0.43.1">\n')
+
+ for obj in world.osm2pgsql:
+ fd.write('<%s>\n' % (actiontypes[obj['action']], ))
+ write_osm_obj(fd, obj)
+ fd.write('</%s>\n' % (actiontypes[obj['action']], ))
+
+ fd.write('</osmChange>\n')
+
+ logger.debug( "Filename: %s" % fname)
+
+ cmd = [os.path.join(world.config.source_dir, 'utils', 'update.php')]
+ cmd.extend(['--import-diff', fname])
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (outp, outerr) = proc.communicate()
+ assert (proc.returncode == 0), "OSM data update failed:\n%s\n%s\n" % (outp, outerr)
+
+ os.remove(fname)
+ world.osm2pgsql = []
--- /dev/null
+from lettuce import *
+from nose.tools import *
+import logging
+import os
+import subprocess
+import psycopg2
+import psycopg2.extras
+import re
+from haversine import haversine
+from shapely.wkt import loads as wkt_load
+from shapely.ops import linemerge
+
+logger = logging.getLogger(__name__)
+
+class NominatimConfig:
+
+ def __init__(self):
+ # logging setup
+ loglevel = getattr(logging, os.environ.get('LOGLEVEL','info').upper())
+ if 'LOGFILE' in os.environ:
+ logging.basicConfig(filename=os.environ.get('LOGFILE','run.log'),
+ level=loglevel)
+ else:
+ logging.basicConfig(level=loglevel)
+ # Nominatim test setup
+ self.base_url = os.environ.get('NOMINATIM_SERVER', 'http://localhost/nominatim')
+ self.source_dir = os.path.abspath(os.environ.get('NOMINATIM_DIR', '../Nominatim'))
+ self.template_db = os.environ.get('TEMPLATE_DB', 'test_template_nominatim')
+ self.test_db = os.environ.get('TEST_DB', 'test_nominatim')
+ self.local_settings_file = os.environ.get('NOMINATIM_SETTINGS', '/tmp/nominatim_settings.php')
+ self.reuse_template = 'NOMINATIM_REUSE_TEMPLATE' in os.environ
+ self.keep_scenario_db = 'NOMINATIM_KEEP_SCENARIO_DB' in os.environ
+ os.environ['NOMINATIM_SETTINGS'] = self.local_settings_file
+
+ scriptpath = os.path.dirname(os.path.abspath(__file__))
+ self.scene_path = os.environ.get('SCENE_PATH',
+ os.path.join(scriptpath, '..', 'scenes', 'data'))
+
+
+ def __str__(self):
+ return 'Server URL: %s\nSource dir: %s\n' % (self.base_url, self.source_dir)
+
+world.config = NominatimConfig()
+
+@world.absorb
+def write_nominatim_config(dbname):
+ f = open(world.config.local_settings_file, 'w')
+ f.write("<?php\n @define('CONST_Database_DSN', 'pgsql://@/%s');\n" % dbname)
+ f.close()
+
+
+@world.absorb
+def run_nominatim_script(script, *args):
+ cmd = [os.path.join(world.config.source_dir, 'utils', '%s.php' % script)]
+ cmd.extend(['--%s' % x for x in args])
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (outp, outerr) = proc.communicate()
+ assert (proc.returncode == 0), "Script '%s' failed:\n%s\n%s\n" % (script, outp, outerr)
+
+@world.absorb
+def make_hash(inp):
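+ """ Convert a string of the form "'key1' : 'value1', 'key2' : 'value2'"
+ into a dict by evaluating it as a Python dict literal.
+ """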
+ return eval('{' + inp + '}')
+
+@world.absorb
+def split_id(oid):
+ """ Splits a unique identifier for places into its components.
+ As place_ids cannot be used for testing, we use a unique
+ identifier instead that is of the form <osmtype><osmid>[:class].
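+ For example, 'W3' yields ('W', 3, None), 'N300:amenity' yields
+ ('N', 300, 'amenity') and the string 'None' yields (None, None, None).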
+ """
+ oid = oid.strip()
+ if oid == 'None':
+ return None, None, None
+ osmtype = oid[0]
+ assert_in(osmtype, ('R','N','W'))
+ if ':' in oid:
+ osmid, cls = oid[1:].split(':')
+ return (osmtype, int(osmid), cls)
+ else:
+ return (osmtype, int(oid[1:]), None)
+
+@world.absorb
+def get_placeid(oid):
+ """ Tries to retrive the place_id for a unique identifier. """
+ if oid[0].isdigit():
+ return int(oid)
+
+ osmtype, osmid, cls = world.split_id(oid)
+ if osmtype is None:
+ return None
+ cur = world.conn.cursor()
+ if cls is None:
+ q = 'SELECT place_id FROM placex where osm_type = %s and osm_id = %s'
+ params = (osmtype, osmid)
+ else:
+ q = 'SELECT place_id FROM placex where osm_type = %s and osm_id = %s and class = %s'
+ params = (osmtype, osmid, cls)
+ cur.execute(q, params)
+ assert_equals (cur.rowcount, 1)
+ return cur.fetchone()[0]
+
+
+@world.absorb
+def match_geometry(coord, matchstring):
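+ """ Check that the given (lat, lon) coordinate is close enough to
+ the position described by matchstring. The match string has the
+ form '<lat>,<lon>', optionally followed by '+-<distance>' with a
+ unit of 'm' or 'km' (default km). Distances are compared using
+ the haversine formula.
+ """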
+ m = re.match(r'([-0-9.]+),\s*([-0-9.]+)\s*(?:\+-([0-9.]+)([a-z]+)?)?', matchstring)
+ assert_is_not_none(m, "Invalid match string")
+
+ logger.debug("Distmatch: %s/%s %s %s" % (m.group(1), m.group(2), m.group(3), m.group(4) ))
+ dist = haversine(coord, (float(m.group(1)), float(m.group(2))))
+
+ if m.group(3) is not None:
+ expdist = float(m.group(3))
+ if m.group(4) is not None:
+ if m.group(4) == 'm':
+ expdist = expdist/1000
+ elif m.group(4) == 'km':
+ pass
+ else:
+ raise Exception("Unknown unit '%s' in geometry match" % (m.group(4), ))
+ else:
+ expdist = 0
+
+ logger.debug("Distances expected: %f, got: %f" % (expdist, dist))
+ assert dist <= expdist, "Geometry too far away, expected: %f, got: %f" % (expdist, dist)
+
+
+
+@world.absorb
+def db_dump_table(table):
+ cur = world.conn.cursor()
+ cur.execute('SELECT * FROM %s' % table)
+ print '<<<<<<< BEGIN OF TABLE DUMP %s' % table
+ for res in cur:
+ print res
+ print '<<<<<<< END OF TABLE DUMP %s' % table
+
+@world.absorb
+def db_drop_database(name):
+ conn = psycopg2.connect(database='postgres')
+ conn.set_isolation_level(0)
+ cur = conn.cursor()
+ cur.execute('DROP DATABASE IF EXISTS %s' % (name, ))
+ conn.close()
+
+
+world.is_template_set_up = False
+
+@world.absorb
+def db_template_setup():
+ """ Set up a template database, containing all tables
+ but not yet any functions.
+ """
+ if world.is_template_set_up:
+ return
+
+ world.is_template_set_up = True
+ world.write_nominatim_config(world.config.template_db)
+ if world.config.reuse_template:
+ # check that the template is there
+ conn = psycopg2.connect(database='postgres')
+ cur = conn.cursor()
+ cur.execute('select count(*) from pg_database where datname = %s',
+ (world.config.template_db,))
+ if cur.fetchone()[0] == 1:
+ return
+ else:
+ # just in case... make sure a previous table has been dropped
+ world.db_drop_database(world.config.template_db)
+ # call the first part of database setup
+ world.run_nominatim_script('setup', 'create-db', 'setup-db')
+ # remove external data to speed up indexing for tests
+ conn = psycopg2.connect(database=world.config.template_db)
+ psycopg2.extras.register_hstore(conn, globally=False, unicode=True)
+ cur = conn.cursor()
+ for table in ('gb_postcode', 'us_postcode', 'us_state', 'us_statecounty'):
+ cur.execute('TRUNCATE TABLE %s' % (table,))
+ conn.commit()
+ conn.close()
+ # execute osm2pgsql on an empty file to get the right tables
+ osm2pgsql = os.path.join(world.config.source_dir, 'osm2pgsql', 'osm2pgsql')
+ proc = subprocess.Popen([osm2pgsql, '-lsc', '-O', 'gazetteer', '-d', world.config.template_db, '-'],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ [outstr, errstr] = proc.communicate(input='<osm version="0.6"></osm>')
+ world.run_nominatim_script('setup', 'create-functions', 'create-tables', 'create-partition-tables', 'create-partition-functions', 'load-data', 'create-search-indices')
+
+
+# Leave the table around so it can be reused again after a non-reuse test round.
+#@after.all
+def db_template_teardown(total):
+ """ Set up a template database, containing all tables
+ but not yet any functions.
+ """
+ if world.is_template_set_up:
+ # remove template DB
+ if not world.config.reuse_template:
+ world.db_drop_database(world.config.template_db)
+ try:
+ os.remove(world.config.local_settings_file)
+ except OSError:
+ pass # ignore missing file
+
+
+##########################################################################
+#
+# Data scene handling
+#
+
+world.scenes = {}
+world.current_scene = None
+
+@world.absorb
+def load_scene(name):
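+ """ Load the scene with the given name from the scene data
+ directory (unless it has been cached already) and make it
+ the current scene.
+ """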
+ if name in world.scenes:
+ world.current_scene = world.scenes[name]
+ else:
+ with open(os.path.join(world.config.scene_path, "%s.wkt" % name), 'r') as fd:
+ scene = {}
+ for line in fd:
+ if line.strip():
+ obj, wkt = line.split('|', 2)
+ wkt = wkt.strip()
+ scene[obj.strip()] = wkt_load(wkt)
+ world.scenes[name] = scene
+ world.current_scene = scene
+
+@world.absorb
+def get_scene_geometry(name):
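+ """ Resolve a scene description of the form
+ [<scene name>]:<object id>[+<further objects>] into a geometry.
+ Multiple objects joined with '+' are merged into a single line
+ geometry. Returns None if the name is not a scene reference.
+ """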
+ if ':' not in name:
+ # Not a scene description
+ return None
+
+ geoms = []
+ for obj in name.split('+'):
+ oname = obj.strip()
+ if oname.startswith(':'):
+ geoms.append(world.current_scene[oname[1:]])
+ else:
+ scene, obj = oname.split(':', 2)
+ oldscene = world.current_scene
+ world.load_scene(scene)
+ wkt = world.current_scene[obj]
+ world.current_scene = oldscene
+ geoms.append(wkt)
+
+ if len(geoms) == 1:
+ return geoms[0]
+ else:
+ return linemerge(geoms)