X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/81922fc057b5a89fb41d05d3a31dab629ff0558a..61e3a053723a6a1131e234aa8ad1a37f18bbec56:/test/bdd/steps/queries.py?ds=sidebyside
diff --git a/test/bdd/steps/queries.py b/test/bdd/steps/queries.py
index d0cda774..d3b1203b 100644
--- a/test/bdd/steps/queries.py
+++ b/test/bdd/steps/queries.py
@@ -8,6 +8,7 @@ import json
import os
import io
import re
+import logging
from tidylib import tidy_document
import xml.etree.ElementTree as ET
import subprocess
@@ -15,11 +16,12 @@ from urllib.parse import urlencode
from collections import OrderedDict
from nose.tools import * # for assert functions
+logger = logging.getLogger(__name__)
+
BASE_SERVER_ENV = {
'HTTP_HOST' : 'localhost',
'HTTP_USER_AGENT' : 'Mozilla/5.0 (X11; Linux x86_64; rv:51.0) Gecko/20100101 Firefox/51.0',
'HTTP_ACCEPT' : 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
- 'HTTP_ACCEPT_LANGUAGE' : 'en,de;q=0.5',
'HTTP_ACCEPT_ENCODING' : 'gzip, deflate',
'HTTP_CONNECTION' : 'keep-alive',
'SERVER_SIGNATURE' : '<address>Nominatim BDD Tests</address>',
@@ -54,8 +56,40 @@ def compare(operator, op1, op2):
else:
raise Exception("unknown operator '%s'" % operator)
+class GenericResponse(object):
+
+ def match_row(self, row):
+ if 'ID' in row.headings:
+ todo = [int(row['ID'])]
+ else:
+ todo = range(len(self.result))
-class SearchResponse(object):
+ for i in todo:
+ res = self.result[i]
+ for h in row.headings:
+ if h == 'ID':
+ pass
+ elif h == 'osm':
+ assert_equal(res['osm_type'], row[h][0])
+ assert_equal(res['osm_id'], int(row[h][1:]))
+ elif h == 'centroid':
+ x, y = row[h].split(' ')
+ assert_almost_equal(float(y), float(res['lat']))
+ assert_almost_equal(float(x), float(res['lon']))
+ elif row[h].startswith("^"):
+ assert_in(h, res)
+ assert_is_not_none(re.fullmatch(row[h], res[h]),
+ "attribute '%s': expected: '%s', got '%s'"
+ % (h, row[h], res[h]))
+ else:
+ assert_in(h, res)
+ assert_equal(str(res[h]), str(row[h]))
+
+ def property_list(self, prop):
+ return [ x[prop] for x in self.result ]
+
+
+class SearchResponse(GenericResponse):
def __init__(self, page, fmt='json', errorcode=200):
self.page = page
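
For illustration, a minimal sketch of how GenericResponse.match_row consumes an
expected-result table row. FakeRow is a hypothetical stand-in for behave's Row object
(which exposes `headings` and item access); it is not part of this patch, and the sketch
assumes SearchResponse dispatches to parse_json as before and that the module's nose
assert helpers are imported as above.

    class FakeRow:
        def __init__(self, data):
            self.headings = list(data.keys())
            self._data = data

        def __getitem__(self, key):
            return self._data[key]

    # 'osm' is split into a type letter and a numeric id, 'centroid' is compared
    # as "lon lat" against the result's lat/lon fields.
    response = SearchResponse(
        '[{"osm_type": "N", "osm_id": 23, "lat": "52.5", "lon": "13.4"}]', 'json')
    response.match_row(FakeRow({'ID': '0', 'osm': 'N23', 'centroid': '13.4 52.5'}))
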
@@ -76,18 +110,31 @@ class SearchResponse(object):
self.header['json_func'] = m.group(1)
self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(code)
+ def parse_geojson(self):
+ self.parse_json()
+ self.result = geojson_results_to_json_results(self.result)
+
+ def parse_geocodejson(self):
+ self.parse_geojson()
+ if self.result is not None:
+ self.result = [r['geocoding'] for r in self.result]
+
def parse_html(self):
content, errors = tidy_document(self.page,
options={'char-encoding' : 'utf8'})
#eq_(len(errors), 0 , "Errors found in HTML document:\n%s" % errors)
+ self.result = []
b = content.find('nominatim_results =')
e = content.find('</script>')
- content = content[b:e]
- b = content.find('[')
- e = content.rfind(']')
+ if b >= 0 and e >= 0:
+ content = content[b:e]
- self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(content[b:e+1])
+ b = content.find('[')
+ e = content.rfind(']')
+ if b >= 0 and e >= 0:
+ self.result = json.JSONDecoder(object_pairs_hook=OrderedDict)\
+ .decode(content[b:e+1])
def parse_xml(self):
et = ET.fromstring(self.page)
@@ -117,43 +164,145 @@ class SearchResponse(object):
self.result[-1]['address'] = address
- def match_row(self, row):
- if 'ID' in row.headings:
- todo = [int(row['ID'])]
+class ReverseResponse(GenericResponse):
+
+ def __init__(self, page, fmt='json', errorcode=200):
+ self.page = page
+ self.format = fmt
+ self.errorcode = errorcode
+ self.result = []
+ self.header = dict()
+
+ if errorcode == 200:
+ getattr(self, 'parse_' + fmt)()
+
+ def parse_html(self):
+ content, errors = tidy_document(self.page,
+ options={'char-encoding' : 'utf8'})
+ #eq_(len(errors), 0 , "Errors found in HTML document:\n%s" % errors)
+
+ b = content.find('nominatim_results =')
+ e = content.find('</script>')
+ content = content[b:e]
+ b = content.find('[')
+ e = content.rfind(']')
+
+ self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(content[b:e+1])
+
+ def parse_json(self):
+ m = re.fullmatch(r'([\w$][^(]*)\((.*)\)', self.page)
+ if m is None:
+ code = self.page
else:
- todo = range(len(self.result))
+ code = m.group(2)
+ self.header['json_func'] = m.group(1)
+ self.result = [json.JSONDecoder(object_pairs_hook=OrderedDict).decode(code)]
- for i in todo:
- res = self.result[i]
- for h in row.headings:
- if h == 'ID':
- pass
- elif h == 'osm':
- assert_equal(res['osm_type'], row[h][0])
- assert_equal(res['osm_id'], row[h][1:])
- elif h == 'centroid':
- x, y = row[h].split(' ')
- assert_almost_equal(float(y), float(res['lat']))
- assert_almost_equal(float(x), float(res['lon']))
- elif row[h].startswith("^"):
- assert_in(h, res)
- assert_is_not_none(re.fullmatch(row[h], res[h]),
- "attribute '%s': expected: '%s', got '%s'"
- % (h, row[h], res[h]))
- else:
- assert_in(h, res)
- assert_equal(str(res[h]), str(row[h]))
+ def parse_geojson(self):
+ self.parse_json()
+ if 'error' in self.result:
+ return
+ self.result = geojson_results_to_json_results(self.result[0])
- def property_list(self, prop):
- return [ x[prop] for x in self.result ]
+ def parse_geocodejson(self):
+ self.parse_geojson()
+ if self.result is not None:
+ self.result = [r['geocoding'] for r in self.result]
+
+ def parse_xml(self):
+ et = ET.fromstring(self.page)
+
+ self.header = dict(et.attrib)
+ self.result = []
+
+ for child in et:
+ if child.tag == 'result':
+ eq_(0, len(self.result), "More than one result in reverse result")
+ self.result.append(dict(child.attrib))
+ elif child.tag == 'addressparts':
+ address = {}
+ for sub in child:
+ address[sub.tag] = sub.text
+ self.result[0]['address'] = address
+ elif child.tag == 'extratags':
+ self.result[0]['extratags'] = {}
+ for tag in child:
+ self.result[0]['extratags'][tag.attrib['key']] = tag.attrib['value']
+ elif child.tag == 'namedetails':
+ self.result[0]['namedetails'] = {}
+ for tag in child:
+ self.result[0]['namedetails'][tag.attrib['desc']] = tag.text
+ elif child.tag == 'geokml':
+ self.result[0][child.tag] = True
+ else:
+ assert child.tag == 'error', \
+ "Unknown XML tag %s on page: %s" % (child.tag, self.page)
+
+
+class DetailsResponse(GenericResponse):
+
+ def __init__(self, page, fmt='json', errorcode=200):
+ self.page = page
+ self.format = fmt
+ self.errorcode = errorcode
+ self.result = []
+ self.header = dict()
+
+ if errorcode == 200:
+ getattr(self, 'parse_' + fmt)()
+
+ def parse_html(self):
+ content, errors = tidy_document(self.page,
+ options={'char-encoding' : 'utf8'})
+ self.result = {}
+
+ def parse_json(self):
+ self.result = [json.JSONDecoder(object_pairs_hook=OrderedDict).decode(self.page)]
+
+
+class StatusResponse(GenericResponse):
+
+ def __init__(self, page, fmt='text', errorcode=200):
+ self.page = page
+ self.format = fmt
+ self.errorcode = errorcode
+
+ if errorcode == 200 and fmt != 'text':
+ getattr(self, 'parse_' + fmt)()
+
+ def parse_json(self):
+ self.result = [json.JSONDecoder(object_pairs_hook=OrderedDict).decode(self.page)]
+
+
+def geojson_result_to_json_result(geojson_result):
+ result = geojson_result['properties']
+ result['geojson'] = geojson_result['geometry']
+ if 'bbox' in geojson_result:
+ # bbox is minlon, minlat, maxlon, maxlat
+ # boundingbox is minlat, maxlat, minlon, maxlon
+ result['boundingbox'] = [
+ geojson_result['bbox'][1],
+ geojson_result['bbox'][3],
+ geojson_result['bbox'][0],
+ geojson_result['bbox'][2]
+ ]
+ return result
+
+
+def geojson_results_to_json_results(geojson_results):
+ if 'error' in geojson_results:
+ return
+ return list(map(geojson_result_to_json_result, geojson_results['features']))
@when(u'searching for "(?P<query>.*)"(?P<dups> with dups)?')
def query_cmd(context, query, dups):
""" Query directly via PHP script.
"""
- cmd = [os.path.join(context.nominatim.build_dir, 'utils', 'query.php'),
- '--search', query]
+ cmd = ['/usr/bin/env', 'php']
+ cmd.append(os.path.join(context.nominatim.build_dir, 'utils', 'query.php'))
+ if query:
+ cmd.extend(['--search', query])
# add more parameters in table form
if context.table:
for h in context.table.headings:
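
The helpers above flatten a GeoJSON FeatureCollection into the same shape as the plain
JSON results, so the table-matching code needs no separate code path. A small sketch of
the conversion, assuming the functions from this patch are in scope:

    features = {
        'type': 'FeatureCollection',
        'features': [{
            'type': 'Feature',
            'properties': {'osm_type': 'N', 'osm_id': 23, 'lat': '52.5', 'lon': '13.4'},
            'geometry': {'type': 'Point', 'coordinates': [13.4, 52.5]},
            'bbox': [13.3, 52.4, 13.5, 52.6]
        }]
    }

    results = geojson_results_to_json_results(features)
    # bbox [minlon, minlat, maxlon, maxlat] becomes boundingbox [minlat, maxlat, minlon, maxlon]
    assert results[0]['boundingbox'] == [52.4, 52.6, 13.3, 13.5]
    assert results[0]['geojson']['type'] == 'Point'
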
@@ -172,16 +321,9 @@ def query_cmd(context, query, dups):
context.response = SearchResponse(outp.decode('utf-8'), 'json')
-
-@when(u'sending (?P<fmt>\S+ )?search query "(?P<query>.*)"(?P<addr> with address)?')
-def website_search_request(context, fmt, query, addr):
- env = BASE_SERVER_ENV
-
- params = { 'q' : query }
+def send_api_query(endpoint, params, fmt, context):
if fmt is not None:
params['format'] = fmt.strip()
- if addr is not None:
- params['addressdetails'] = '1'
if context.table:
if context.table.headings[0] == 'param':
for line in context.table:
@@ -189,15 +331,34 @@ def website_search_request(context, fmt, query, addr):
else:
for h in context.table.headings:
params[h] = context.table[0][h]
+
+ env = dict(BASE_SERVER_ENV)
env['QUERY_STRING'] = urlencode(params)
- env['REQUEST_URI'] = '/search.php?' + env['QUERY_STRING']
- env['SCRIPT_NAME'] = '/search.php'
+ env['SCRIPT_NAME'] = '/%s.php' % endpoint
+ env['REQUEST_URI'] = '%s?%s' % (env['SCRIPT_NAME'], env['QUERY_STRING'])
env['CONTEXT_DOCUMENT_ROOT'] = os.path.join(context.nominatim.build_dir, 'website')
- env['SCRIPT_FILENAME'] = os.path.join(context.nominatim.build_dir, 'website', 'search.php')
+ env['SCRIPT_FILENAME'] = os.path.join(env['CONTEXT_DOCUMENT_ROOT'],
+ '%s.php' % endpoint)
env['NOMINATIM_SETTINGS'] = context.nominatim.local_settings_file
- cmd = [ '/usr/bin/php-cgi', env['SCRIPT_FILENAME']]
+ logger.debug("Environment:" + json.dumps(env, sort_keys=True, indent=2))
+
+ if hasattr(context, 'http_headers'):
+ env.update(context.http_headers)
+
+ cmd = ['/usr/bin/env', 'php-cgi', '-f']
+ if context.nominatim.code_coverage_path:
+ env['COV_SCRIPT_FILENAME'] = env['SCRIPT_FILENAME']
+ env['COV_PHP_DIR'] = os.path.join(context.nominatim.src_dir, "lib")
+ env['COV_TEST_NAME'] = '%s:%s' % (context.scenario.filename, context.scenario.line)
+ env['SCRIPT_FILENAME'] = \
+ os.path.join(os.path.split(__file__)[0], 'cgi-with-coverage.php')
+ cmd.append(env['SCRIPT_FILENAME'])
+ env['PHP_CODE_COVERAGE_FILE'] = context.nominatim.next_code_coverage_file()
+ else:
+ cmd.append(env['SCRIPT_FILENAME'])
+
for k,v in params.items():
cmd.append("%s=%s" % (k, v))
@@ -205,21 +366,47 @@ def website_search_request(context, fmt, query, addr):
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(outp, err) = proc.communicate()
+ outp = outp.decode('utf-8')
+ err = err.decode("utf-8")
+
+ logger.debug("Result: \n===============================\n"
+ + outp + "\n===============================\n")
assert_equals(0, proc.returncode,
- "query.php failed with message: %s\noutput: %s" % (err, outp))
+ "%s failed with message: %s" % (
+ os.path.basename(env['SCRIPT_FILENAME']),
+ err))
assert_equals(0, len(err), "Unexpected PHP error: %s" % (err))
- outp = outp.decode('utf-8')
-
if outp.startswith('Status: '):
status = int(outp[8:11])
else:
status = 200
content_start = outp.find('\r\n\r\n')
- assert_less(11, content_start)
+
+ return outp[content_start + 4:], status
+
+@given(u'the HTTP header')
+def add_http_header(context):
+ if not hasattr(context, 'http_headers'):
+ context.http_headers = {}
+
+ for h in context.table.headings:
+ envvar = 'HTTP_' + h.upper().replace('-', '_')
+ context.http_headers[envvar] = context.table[0][h]
+
+
+@when(u'sending (?P<fmt>\S+ )?search query "(?P<query>.*)"(?P<addr> with address)?')
+def website_search_request(context, fmt, query, addr):
+ params = {}
+ if query:
+ params['q'] = query
+ if addr is not None:
+ params['addressdetails'] = '1'
+
+ outp, status = send_api_query('search', params, fmt, context)
if fmt is None:
outfmt = 'html'
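
The "Given the HTTP header" step above stores headers in CGI form so that send_api_query
can merge them into the request environment. A one-line sketch of the name mapping it
applies:

    header_name = 'Accept-Language'                           # example header from a table
    envvar = 'HTTP_' + header_name.upper().replace('-', '_')  # -> 'HTTP_ACCEPT_LANGUAGE'

This is presumably also why the fixed HTTP_ACCEPT_LANGUAGE default is dropped from
BASE_SERVER_ENV: scenarios that depend on a language can now set the header explicitly.
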
@@ -228,8 +415,73 @@ def website_search_request(context, fmt, query, addr):
else:
outfmt = fmt.strip()
- context.response = SearchResponse(outp[content_start + 4:], outfmt, status)
+ context.response = SearchResponse(outp, outfmt, status)
+
+@when(u'sending (?P<fmt>\S+ )?reverse coordinates (?P<lat>.+)?,(?P<lon>.+)?')
+def website_reverse_request(context, fmt, lat, lon):
+ params = {}
+ if lat is not None:
+ params['lat'] = lat
+ if lon is not None:
+ params['lon'] = lon
+
+ outp, status = send_api_query('reverse', params, fmt, context)
+
+ if fmt is None:
+ outfmt = 'xml'
+ elif fmt == 'jsonv2 ':
+ outfmt = 'json'
+ else:
+ outfmt = fmt.strip()
+
+ context.response = ReverseResponse(outp, outfmt, status)
+
+@when(u'sending (?P<fmt>\S+ )?details query for (?P<query>.*)')
+def website_details_request(context, fmt, query):
+ params = {}
+ if query[0] in 'NWR':
+ params['osmtype'] = query[0]
+ params['osmid'] = query[1:]
+ else:
+ params['place_id'] = query
+ outp, status = send_api_query('details', params, fmt, context)
+
+ if fmt is None:
+ outfmt = 'html'
+ else:
+ outfmt = fmt.strip()
+
+ context.response = DetailsResponse(outp, outfmt, status)
+
+@when(u'sending (?P<fmt>\S+ )?lookup query for (?P<query>.*)')
+def website_lookup_request(context, fmt, query):
+ params = { 'osm_ids' : query }
+ outp, status = send_api_query('lookup', params, fmt, context)
+ if fmt == 'json ':
+ outfmt = 'json'
+ elif fmt == 'jsonv2 ':
+ outfmt = 'json'
+ elif fmt == 'geojson ':
+ outfmt = 'geojson'
+ elif fmt == 'geocodejson ':
+ outfmt = 'geocodejson'
+ else:
+ outfmt = 'xml'
+
+ context.response = SearchResponse(outp, outfmt, status)
+
+@when(u'sending (?P<fmt>\S+ )?status query')
+def website_status_request(context, fmt):
+ params = {}
+ outp, status = send_api_query('status', params, fmt, context)
+
+ if fmt is None:
+ outfmt = 'text'
+ else:
+ outfmt = fmt.strip()
+
+ context.response = StatusResponse(outp, outfmt, status)
@step(u'(?P<operator>less than|more than|exactly|at least|at most) (?P<number>\d+) results? (?:is|are) returned')
def validate_result_number(context, operator, number):
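
All of the request steps capture the output format with (?P<fmt>\S+ )?, i.e. including a
trailing space, which is why the handlers compare against literals such as 'jsonv2 ' and
call fmt.strip() before using the value. A short sketch of that behavior:

    import re

    m = re.match(r'sending (?P<fmt>\S+ )?search query "(?P<query>.*)"',
                 'sending jsonv2 search query "Berlin"')
    assert m.group('fmt') == 'jsonv2 '        # trailing space is part of the match
    assert m.group('fmt').strip() == 'jsonv2'
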
@@ -242,10 +494,27 @@ def validate_result_number(context, operator, number):
def check_http_return_status(context, status):
eq_(context.response.errorcode, int(status))
+@then(u'the page contents equals "(?P<text>.+)"')
+def check_page_content_equals(context, text):
+ eq_(context.response.page, text)
+
@then(u'the result is valid (?P<fmt>\w+)')
def step_impl(context, fmt):
+ context.execute_steps("Then a HTTP 200 is returned")
eq_(context.response.format, fmt)
+@then(u'a (?P<fmt>\w+) user error is returned')
+def check_page_error(context, fmt):
+ context.execute_steps("Then a HTTP 400 is returned")
+ eq_(context.response.format, fmt)
+
+ if fmt == 'html':
+ assert_is_not_none(re.search(r'<html( |>).+</html>', context.response.page, re.DOTALL))
+ elif fmt == 'xml':
+ assert_is_not_none(re.search(r'<error>.+</error>', context.response.page, re.DOTALL))
+ else:
+ assert_is_not_none(re.search(r'({"error":)', context.response.page, re.DOTALL))
+
@then(u'result header contains')
def check_header_attr(context):
for line in context.table:
@@ -349,6 +618,27 @@ def step_impl(context, lid, coords):
assert_greater_equal(bbox[2], coord[2])
assert_less_equal(bbox[3], coord[3])
+@then(u'result (?P<lid>\d+ )?has centroid in (?P<coords>[\d,.-]+)')
+def step_impl(context, lid, coords):
+ if lid is None:
+ context.execute_steps("then at least 1 result is returned")
+ bboxes = zip(context.response.property_list('lat'),
+ context.response.property_list('lon'))
+ else:
+ context.execute_steps("then more than %sresults are returned" % lid)
+ res = context.response.result[int(lid)]
+ bboxes = [ (res['lat'], res['lon']) ]
+
+ coord = [ float(x) for x in coords.split(',') ]
+
+ for lat, lon in bboxes:
+ lat = float(lat)
+ lon = float(lon)
+ assert_greater_equal(lat, coord[0])
+ assert_less_equal(lat, coord[1])
+ assert_greater_equal(lon, coord[2])
+ assert_less_equal(lon, coord[3])
+
@then(u'there are(?P<neg> no)? duplicates')
def check_for_duplicates(context, neg):
context.execute_steps("then at least 1 result is returned")