X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/b2c1d086b5f60f6163e9994fc64662ecec9da045..56fd1bc1b5fde69ec2258aa94c4c9cb493554e41:/test/bdd/steps/queries.py?ds=sidebyside

diff --git a/test/bdd/steps/queries.py b/test/bdd/steps/queries.py
index 7d3dec69..d6473dfa 100644
--- a/test/bdd/steps/queries.py
+++ b/test/bdd/steps/queries.py
@@ -8,6 +8,7 @@ import json
 import os
 import io
 import re
+import logging
 from tidylib import tidy_document
 import xml.etree.ElementTree as ET
 import subprocess
@@ -15,6 +16,8 @@ from urllib.parse import urlencode
 from collections import OrderedDict
 from nose.tools import * # for assert functions
 
+logger = logging.getLogger(__name__)
+
 BASE_SERVER_ENV = {
     'HTTP_HOST' : 'localhost',
     'HTTP_USER_AGENT' : 'Mozilla/5.0 (X11; Linux x86_64; rv:51.0) Gecko/20100101 Firefox/51.0',
@@ -68,7 +71,7 @@ class GenericResponse(object):
                     pass
                 elif h == 'osm':
                     assert_equal(res['osm_type'], row[h][0])
-                    assert_equal(res['osm_id'], row[h][1:])
+                    assert_equal(res['osm_id'], int(row[h][1:]))
                 elif h == 'centroid':
                     x, y = row[h].split(' ')
                     assert_almost_equal(float(y), float(res['lat']))
@@ -107,18 +110,31 @@ class SearchResponse(GenericResponse):
             self.header['json_func'] = m.group(1)
         self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(code)
 
+    def parse_geojson(self):
+        self.parse_json()
+        self.result = geojson_results_to_json_results(self.result)
+
+    def parse_geocodejson(self):
+        self.parse_geojson()
+        if self.result is not None:
+            self.result = [r['geocoding'] for r in self.result]
+
     def parse_html(self):
         content, errors = tidy_document(self.page,
                                         options={'char-encoding' : 'utf8'})
         #eq_(len(errors), 0 , "Errors found in HTML document:\n%s" % errors)
 
+        self.result = []
         b = content.find('nominatim_results =')
         e = content.find('</script>')
-        content = content[b:e]
-        b = content.find('[')
-        e = content.rfind(']')
+        if b >= 0 and e >= 0:
+            content = content[b:e]
 
-        self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(content[b:e+1])
+            b = content.find('[')
+            e = content.rfind(']')
+            if b >= 0 and e >= 0:
+                self.result = json.JSONDecoder(object_pairs_hook=OrderedDict)\
+                                  .decode(content[b:e+1])
 
     def parse_xml(self):
         et = ET.fromstring(self.page)
@@ -182,6 +198,17 @@ class ReverseResponse(GenericResponse):
             self.header['json_func'] = m.group(1)
         self.result = [json.JSONDecoder(object_pairs_hook=OrderedDict).decode(code)]
 
+    def parse_geojson(self):
+        self.parse_json()
+        if 'error' in self.result:
+            return
+        self.result = geojson_results_to_json_results(self.result[0])
+
+    def parse_geocodejson(self):
+        self.parse_geojson()
+        if self.result is not None:
+            self.result = [r['geocoding'] for r in self.result]
+
     def parse_xml(self):
         et = ET.fromstring(self.page)
 
@@ -229,12 +256,53 @@ class DetailsResponse(GenericResponse):
                                         options={'char-encoding' : 'utf8'})
         self.result = {}
 
+    def parse_json(self):
+        self.result = [json.JSONDecoder(object_pairs_hook=OrderedDict).decode(self.page)]
+
+
+class StatusResponse(GenericResponse):
+
+    def __init__(self, page, fmt='text', errorcode=200):
+        self.page = page
+        self.format = fmt
+        self.errorcode = errorcode
+
+        if errorcode == 200 and fmt != 'text':
+            getattr(self, 'parse_' + fmt)()
+
+    def parse_json(self):
+        self.result = [json.JSONDecoder(object_pairs_hook=OrderedDict).decode(self.page)]
+
+
+def geojson_result_to_json_result(geojson_result):
+    result = geojson_result['properties']
+    result['geojson'] = geojson_result['geometry']
+    if 'bbox' in geojson_result:
+        # bbox is minlon, minlat, maxlon, maxlat
+        # boundingbox is minlat, maxlat, minlon, maxlon
+        result['boundingbox'] = [
+            geojson_result['bbox'][1],
+            geojson_result['bbox'][3],
+            geojson_result['bbox'][0],
+            geojson_result['bbox'][2]
+        ]
+    return result
+
+
+def geojson_results_to_json_results(geojson_results):
+    if 'error' in geojson_results:
+        return
+    return list(map(geojson_result_to_json_result, geojson_results['features']))
+
+
 @when(u'searching for "(?P<query>.*)"(?P<dups> with dups)?')
 def query_cmd(context, query, dups):
     """ Query directly via PHP script.
     """
-    cmd = [os.path.join(context.nominatim.build_dir, 'utils', 'query.php'),
-           '--search', query]
+    cmd = ['/usr/bin/env', 'php']
+    cmd.append(os.path.join(context.nominatim.build_dir, 'utils', 'query.php'))
+    if query:
+        cmd.extend(['--search', query])
     # add more parameters in table form
     if context.table:
         for h in context.table.headings:
@@ -264,7 +332,7 @@ def send_api_query(endpoint, params, fmt, context):
         for h in context.table.headings:
             params[h] = context.table[0][h]
 
-    env = BASE_SERVER_ENV
+    env = dict(BASE_SERVER_ENV)
     env['QUERY_STRING'] = urlencode(params)
 
     env['SCRIPT_NAME'] = '/%s.php' % endpoint
@@ -274,10 +342,23 @@ def send_api_query(endpoint, params, fmt, context):
                                           '%s.php' % endpoint)
     env['NOMINATIM_SETTINGS'] = context.nominatim.local_settings_file
 
+    logger.debug("Environment:" + json.dumps(env, sort_keys=True, indent=2))
+
     if hasattr(context, 'http_headers'):
         env.update(context.http_headers)
 
-    cmd = ['/usr/bin/php-cgi', env['SCRIPT_FILENAME']]
+    cmd = ['/usr/bin/env', 'php-cgi', '-f']
+    if context.nominatim.code_coverage_path:
+        env['COV_SCRIPT_FILENAME'] = env['SCRIPT_FILENAME']
+        env['COV_PHP_DIR'] = os.path.join(context.nominatim.src_dir, "lib")
+        env['COV_TEST_NAME'] = '%s:%s' % (context.scenario.filename, context.scenario.line)
+        env['SCRIPT_FILENAME'] = \
+                os.path.join(os.path.split(__file__)[0], 'cgi-with-coverage.php')
+        cmd.append(env['SCRIPT_FILENAME'])
+        env['PHP_CODE_COVERAGE_FILE'] = context.nominatim.next_code_coverage_file()
+    else:
+        cmd.append(env['SCRIPT_FILENAME'])
+
     for k,v in params.items():
         cmd.append("%s=%s" % (k, v))
 
@@ -285,14 +366,19 @@ def send_api_query(endpoint, params, fmt, context):
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     (outp, err) = proc.communicate()
 
+    outp = outp.decode('utf-8')
+    err = err.decode("utf-8")
+
+    logger.debug("Result: \n===============================\n"
+                 + outp + "\n===============================\n")
+
     assert_equals(0, proc.returncode,
-                  "query.php failed with message: %s\noutput: %s" % (err, outp))
+                  "%s failed with message: %s" % (
+                      os.path.basename(env['SCRIPT_FILENAME']),
+                      err))
 
     assert_equals(0, len(err), "Unexpected PHP error: %s" % (err))
 
-    outp = outp.decode('utf-8')
-
     if outp.startswith('Status: '):
         status = int(outp[8:11])
     else:
@@ -331,7 +417,7 @@ def website_search_request(context, fmt, query, addr):
 
     context.response = SearchResponse(outp, outfmt, status)
 
-@when(u'sending (?P<fmt>\S+ )?reverse coordinates (?P<lat>[0-9.-]+)?,(?P<lon>[0-9.-]+)?')
+@when(u'sending (?P<fmt>\S+ )?reverse coordinates (?P<lat>.+)?,(?P<lon>.+)?')
 def website_reverse_request(context, fmt, lat, lon):
     params = {}
     if lat is not None:
@@ -360,7 +446,12 @@ def website_details_request(context, fmt, query):
         params['place_id'] = query
     outp, status = send_api_query('details', params, fmt, context)
 
-    context.response = DetailsResponse(outp, 'html', status)
+    if fmt is None:
+        outfmt = 'html'
+    else:
+        outfmt = fmt.strip()
+
+    context.response = DetailsResponse(outp, outfmt, status)
 
 @when(u'sending (?P<fmt>\S+ )?lookup query for (?P<query>.*)')
 def website_lookup_request(context, fmt, query):
@@ -369,11 +460,28 @@ def website_lookup_request(context, fmt, query):
 
     if fmt == 'json ':
         outfmt = 'json'
+    elif fmt == 'jsonv2 ':
+        outfmt = 'json'
+    elif fmt == 'geojson ':
+        outfmt = 'geojson'
+    elif fmt == 'geocodejson ':
+        outfmt = 'geocodejson'
     else:
         outfmt = 'xml'
 
     context.response = SearchResponse(outp, outfmt, status)
 
+@when(u'sending (?P<fmt>\S+ )?status query')
+def website_status_request(context, fmt):
+    params = {}
+    outp, status = send_api_query('status', params, fmt, context)
+
+    if fmt is None:
+        outfmt = 'text'
+    else:
+        outfmt = fmt.strip()
+
+    context.response = StatusResponse(outp, outfmt, status)
 
 @step(u'(?P<operator>less than|more than|exactly|at least|at most) (?P<number>\d+) results? (?:is|are) returned')
 def validate_result_number(context, operator, number):
@@ -386,11 +494,27 @@ def validate_result_number(context, operator, number):
 def check_http_return_status(context, status):
     eq_(context.response.errorcode, int(status))
 
+@then(u'the page contents equals "(?P<text>.+)"')
+def check_page_content_equals(context, text):
+    eq_(context.response.page, text)
+
 @then(u'the result is valid (?P<fmt>\w+)')
 def step_impl(context, fmt):
     context.execute_steps("Then a HTTP 200 is returned")
     eq_(context.response.format, fmt)
 
+@then(u'a (?P<fmt>\w+) user error is returned')
+def check_page_error(context, fmt):
+    context.execute_steps("Then a HTTP 400 is returned")
+    eq_(context.response.format, fmt)
+
+    if fmt == 'html':
+        assert_is_not_none(re.search(r'<html( |>).+</html>', context.response.page, re.DOTALL))
+    elif fmt == 'xml':
+        assert_is_not_none(re.search(r'<error>.+</error>', context.response.page, re.DOTALL))
+    else:
+        assert_is_not_none(re.search(r'({"error":)', context.response.page, re.DOTALL))
+
 @then(u'result header contains')
 def check_header_attr(context):
     for line in context.table:
@@ -459,8 +583,8 @@ def check_address(context, lid, neg, attrs):
         else:
             assert_in(attr, addr_parts)
 
-@then(u'address of result (?P<lid>\d+) is')
-def check_address(context, lid):
+@then(u'address of result (?P<lid>\d+) (?P<complete>is|contains)')
+def check_address(context, lid, complete):
     context.execute_steps("then more than %s results are returned" % lid)
 
     addr_parts = dict(context.response.result[int(lid)]['address'])
@@ -471,7 +595,8 @@ def check_address(context, lid):
             "Bad address value for %s" % line['type'])
         del addr_parts[line['type']]
 
-    eq_(0, len(addr_parts), "Additional address parts found: %s" % str(addr_parts))
+    if complete == 'is':
+        eq_(0, len(addr_parts), "Additional address parts found: %s" % str(addr_parts))
 
 @then(u'result (?P<lid>\d+ )?has bounding box in (?P<coords>[\d,.-]+)')
 def step_impl(context, lid, coords):
@@ -494,6 +619,27 @@ def step_impl(context, lid, coords):
         assert_greater_equal(bbox[2], coord[2])
         assert_less_equal(bbox[3], coord[3])
 
+@then(u'result (?P<lid>\d+ )?has centroid in (?P<coords>[\d,.-]+)')
+def step_impl(context, lid, coords):
+    if lid is None:
+        context.execute_steps("then at least 1 result is returned")
+        bboxes = zip(context.response.property_list('lat'),
+                     context.response.property_list('lon'))
+    else:
+        context.execute_steps("then more than %sresults are returned" % lid)
+        res = context.response.result[int(lid)]
+        bboxes = [ (res['lat'], res['lon']) ]
+
+    coord = [ float(x) for x in coords.split(',') ]
+
+    for lat, lon in bboxes:
+        lat = float(lat)
+        lon = float(lon)
+        assert_greater_equal(lat, coord[0])
+        assert_less_equal(lat, coord[1])
+        assert_greater_equal(lon, coord[2])
+        assert_less_equal(lon, coord[3])
+
 @then(u'there are(?P<neg> no)? duplicates')
 def check_for_duplicates(context, neg):
     context.execute_steps("then at least 1 result is returned")
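
Note on the GeoJSON handling introduced above: geojson_result_to_json_result copies the feature properties, attaches the geometry under 'geojson', and reorders the GeoJSON bbox (minlon, minlat, maxlon, maxlat) into Nominatim's boundingbox order (minlat, maxlat, minlon, maxlon). The sketch below shows only that coordinate mapping outside the test harness; the function name geojson_feature_to_result and the sample feature are invented for illustration and are not part of the patch.

    def geojson_feature_to_result(feature):
        # Same conversion idea as the helper in the diff, written with tuple
        # unpacking instead of explicit index accesses; properties are copied
        # here rather than modified in place.
        result = dict(feature['properties'])
        result['geojson'] = feature['geometry']
        if 'bbox' in feature:
            minlon, minlat, maxlon, maxlat = feature['bbox']
            result['boundingbox'] = [minlat, maxlat, minlon, maxlon]
        return result

    # Hypothetical feature, values chosen only to make the reordering visible.
    feature = {
        'properties': {'place_id': 1, 'display_name': 'Sample'},
        'geometry': {'type': 'Point', 'coordinates': [8.0, 47.0]},
        'bbox': [7.9, 46.9, 8.1, 47.1],
    }
    assert geojson_feature_to_result(feature)['boundingbox'] == [46.9, 47.1, 7.9, 8.1]

The helper in the diff produces the same ordering via the index accesses bbox[1], bbox[3], bbox[0], bbox[2].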