diff --git a/test/bdd/steps/queries.py b/test/bdd/steps/queries.py
index 5779c763..c7fdd6db 100644
--- a/test/bdd/steps/queries.py
+++ b/test/bdd/steps/queries.py
@@ -9,12 +9,12 @@ import os
 import io
 import re
 import logging
-from tidylib import tidy_document
 import xml.etree.ElementTree as ET
-import subprocess
 from urllib.parse import urlencode
 from collections import OrderedDict
-from nose.tools import * # for assert functions
+
+from check_functions import Almost
+from utils import run_script
 
 logger = logging.getLogger(__name__)
 
@@ -70,20 +70,19 @@ class GenericResponse(object):
                 if h == 'ID':
                     pass
                 elif h == 'osm':
-                    assert_equal(res['osm_type'], row[h][0])
-                    assert_equal(res['osm_id'], row[h][1:])
+                    assert res['osm_type'] == row[h][0]
+                    assert res['osm_id'] == int(row[h][1:])
                 elif h == 'centroid':
                     x, y = row[h].split(' ')
-                    assert_almost_equal(float(y), float(res['lat']))
-                    assert_almost_equal(float(x), float(res['lon']))
+                    assert Almost(float(y)) == float(res['lat'])
+                    assert Almost(float(x)) == float(res['lon'])
                 elif row[h].startswith("^"):
-                    assert_in(h, res)
-                    assert_is_not_none(re.fullmatch(row[h], res[h]),
-                                       "attribute '%s': expected: '%s', got '%s'"
-                                       % (h, row[h], res[h]))
+                    assert h in res
+                    assert re.fullmatch(row[h], res[h]) is not None, \
+                           "attribute '%s': expected: '%s', got '%s'" % (h, row[h], res[h])
                 else:
-                    assert_in(h, res)
-                    assert_equal(str(res[h]), str(row[h]))
+                    assert h in res
+                    assert str(res[h]) == str(row[h])
 
     def property_list(self, prop):
         return [ x[prop] for x in self.result ]
@@ -110,18 +109,14 @@ class SearchResponse(GenericResponse):
             self.header['json_func'] = m.group(1)
         self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(code)
 
-    def parse_html(self):
-        content, errors = tidy_document(self.page,
-                                        options={'char-encoding' : 'utf8'})
-        #eq_(len(errors), 0 , "Errors found in HTML document:\n%s" % errors)
-
-        b = content.find('nominatim_results =')
-        e = content.find('</script>')
-        content = content[b:e]
-        b = content.find('[')
-        e = content.rfind(']')
+    def parse_geojson(self):
+        self.parse_json()
+        self.result = geojson_results_to_json_results(self.result)
 
-        self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(content[b:e+1])
+    def parse_geocodejson(self):
+        self.parse_geojson()
+        if self.result is not None:
+            self.result = [r['geocoding'] for r in self.result]
 
     def parse_xml(self):
         et = ET.fromstring(self.page)
@@ -129,7 +124,7 @@ class SearchResponse(GenericResponse):
         self.header = dict(et.attrib)
 
         for child in et:
-            assert_equal(child.tag, "place")
+            assert child.tag == "place"
             self.result.append(dict(child.attrib))
 
             address = {}
@@ -163,19 +158,6 @@ class ReverseResponse(GenericResponse):
         if errorcode == 200:
             getattr(self, 'parse_' + fmt)()
 
-    def parse_html(self):
-        content, errors = tidy_document(self.page,
-                                        options={'char-encoding' : 'utf8'})
-        #eq_(len(errors), 0 , "Errors found in HTML document:\n%s" % errors)
-
-        b = content.find('nominatim_results =')
-        e = content.find('</script>')
-        content = content[b:e]
-        b = content.find('[')
-        e = content.rfind(']')
-
-        self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(content[b:e+1])
-
     def parse_json(self):
         m = re.fullmatch(r'([\w$][^(]*)\((.*)\)', self.page)
         if m is None:
@@ -185,6 +167,17 @@ class ReverseResponse(GenericResponse):
             self.header['json_func'] = m.group(1)
         self.result = [json.JSONDecoder(object_pairs_hook=OrderedDict).decode(code)]
 
+    def parse_geojson(self):
+        self.parse_json()
+        if 'error' in self.result:
+            return
+        self.result = geojson_results_to_json_results(self.result[0])
+
+    def parse_geocodejson(self):
+        self.parse_geojson()
+        if self.result is not None:
+            self.result = [r['geocoding'] for r in self.result]
+
     def parse_xml(self):
         et = ET.fromstring(self.page)
 
@@ -193,7 +186,7 @@ class ReverseResponse(GenericResponse):
 
         for child in et:
             if child.tag == 'result':
-                eq_(0, len(self.result), "More than one result in reverse result")
+                assert len(self.result) == 0, "More than one result in reverse result"
                 self.result.append(dict(child.attrib))
             elif child.tag == 'addressparts':
                 address = {}
@@ -227,22 +220,53 @@ class DetailsResponse(GenericResponse):
         if errorcode == 200:
             getattr(self, 'parse_' + fmt)()
 
-    def parse_html(self):
-        content, errors = tidy_document(self.page,
-                                        options={'char-encoding' : 'utf8'})
-        self.result = {}
+    def parse_json(self):
+        self.result = [json.JSONDecoder(object_pairs_hook=OrderedDict).decode(self.page)]
+
+
+class StatusResponse(GenericResponse):
+
+    def __init__(self, page, fmt='text', errorcode=200):
+        self.page = page
+        self.format = fmt
+        self.errorcode = errorcode
+
+        if errorcode == 200 and fmt != 'text':
+            getattr(self, 'parse_' + fmt)()
 
     def parse_json(self):
         self.result = [json.JSONDecoder(object_pairs_hook=OrderedDict).decode(self.page)]
 
+
+def geojson_result_to_json_result(geojson_result):
+    result = geojson_result['properties']
+    result['geojson'] = geojson_result['geometry']
+    if 'bbox' in geojson_result:
+        # bbox is minlon, minlat, maxlon, maxlat
+        # boundingbox is minlat, maxlat, minlon, maxlon
+        result['boundingbox'] = [
+            geojson_result['bbox'][1],
+            geojson_result['bbox'][3],
+            geojson_result['bbox'][0],
+            geojson_result['bbox'][2]
+        ]
+    return result
+
+
+def geojson_results_to_json_results(geojson_results):
+    if 'error' in geojson_results:
+        return
+    return list(map(geojson_result_to_json_result, geojson_results['features']))
+
 
 @when(u'searching for "(?P<query>.*)"(?P<dups> with dups)?')
 def query_cmd(context, query, dups):
     """ Query directly via PHP script.
""" cmd = ['/usr/bin/env', 'php'] cmd.append(os.path.join(context.nominatim.build_dir, 'utils', 'query.php')) - cmd.extend(['--search', query]) + if query: + cmd.extend(['--search', query]) # add more parameters in table form if context.table: for h in context.table.headings: @@ -253,13 +277,9 @@ def query_cmd(context, query, dups): if dups: cmd.extend(('--dedupe', '0')) - proc = subprocess.Popen(cmd, cwd=context.nominatim.build_dir, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - (outp, err) = proc.communicate() - - assert_equals (0, proc.returncode, "query.php failed with message: %s\noutput: %s" % (err, outp)) + outp, err = run_script(cmd, cwd=context.nominatim.build_dir) - context.response = SearchResponse(outp.decode('utf-8'), 'json') + context.response = SearchResponse(outp, 'json') def send_api_query(endpoint, params, fmt, context): if fmt is not None: @@ -277,10 +297,9 @@ def send_api_query(endpoint, params, fmt, context): env['SCRIPT_NAME'] = '/%s.php' % endpoint env['REQUEST_URI'] = '%s?%s' % (env['SCRIPT_NAME'], env['QUERY_STRING']) - env['CONTEXT_DOCUMENT_ROOT'] = os.path.join(context.nominatim.build_dir, 'website') + env['CONTEXT_DOCUMENT_ROOT'] = os.path.join(context.nominatim.website_dir.name, 'website') env['SCRIPT_FILENAME'] = os.path.join(env['CONTEXT_DOCUMENT_ROOT'], '%s.php' % endpoint) - env['NOMINATIM_SETTINGS'] = context.nominatim.local_settings_file logger.debug("Environment:" + json.dumps(env, sort_keys=True, indent=2)) @@ -302,19 +321,9 @@ def send_api_query(endpoint, params, fmt, context): for k,v in params.items(): cmd.append("%s=%s" % (k, v)) - proc = subprocess.Popen(cmd, cwd=context.nominatim.build_dir, env=env, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - - (outp, err) = proc.communicate() - outp = outp.decode('utf-8') - - logger.debug("Result: \n===============================\n" - + outp + "\n===============================\n") - - assert_equals(0, proc.returncode, - "%s failed with message: %s\noutput: %s" % (env['SCRIPT_FILENAME'], err, outp)) + outp, err = run_script(cmd, cwd=context.nominatim.website_dir.name, env=env) - assert_equals(0, len(err), "Unexpected PHP error: %s" % (err)) + assert len(err) == 0, "Unexpected PHP error: %s" % (err) if outp.startswith('Status: '): status = int(outp[8:11]) @@ -345,9 +354,7 @@ def website_search_request(context, fmt, query, addr): outp, status = send_api_query('search', params, fmt, context) - if fmt is None: - outfmt = 'html' - elif fmt == 'jsonv2 ': + if fmt is None or fmt == 'jsonv2 ': outfmt = 'json' else: outfmt = fmt.strip() @@ -384,7 +391,7 @@ def website_details_request(context, fmt, query): outp, status = send_api_query('details', params, fmt, context) if fmt is None: - outfmt = 'html' + outfmt = 'json' else: outfmt = fmt.strip() @@ -397,42 +404,74 @@ def website_lookup_request(context, fmt, query): if fmt == 'json ': outfmt = 'json' + elif fmt == 'jsonv2 ': + outfmt = 'json' + elif fmt == 'geojson ': + outfmt = 'geojson' + elif fmt == 'geocodejson ': + outfmt = 'geocodejson' else: outfmt = 'xml' context.response = SearchResponse(outp, outfmt, status) +@when(u'sending (?P\S+ )?status query') +def website_status_request(context, fmt): + params = {} + outp, status = send_api_query('status', params, fmt, context) + + if fmt is None: + outfmt = 'text' + else: + outfmt = fmt.strip() + + context.response = StatusResponse(outp, outfmt, status) + @step(u'(?Pless than|more than|exactly|at least|at most) (?P\d+) results? 
 def validate_result_number(context, operator, number):
-    eq_(context.response.errorcode, 200)
+    assert context.response.errorcode == 200
 
     numres = len(context.response.result)
-    ok_(compare(operator, numres, int(number)),
-        "Bad number of results: expected %s %s, got %d." % (operator, number, numres))
+    assert compare(operator, numres, int(number)), \
+           "Bad number of results: expected %s %s, got %d." % (operator, number, numres)
 
 @then(u'a HTTP (?P<status>\d+) is returned')
 def check_http_return_status(context, status):
-    eq_(context.response.errorcode, int(status))
+    assert context.response.errorcode == int(status)
+
+@then(u'the page contents equals "(?P<text>.+)"')
+def check_page_content_equals(context, text):
+    assert context.response.page == text
 
 @then(u'the result is valid (?P<fmt>\w+)')
 def step_impl(context, fmt):
     context.execute_steps("Then a HTTP 200 is returned")
-    eq_(context.response.format, fmt)
+    assert context.response.format == fmt
+
+@then(u'a (?P<fmt>\w+) user error is returned')
+def check_page_error(context, fmt):
+    context.execute_steps("Then a HTTP 400 is returned")
+    assert context.response.format == fmt
+
+    if fmt == 'xml':
+        assert re.search(r'<error>.+</error>', context.response.page, re.DOTALL) is not None
+    else:
+        assert re.search(r'({"error":)', context.response.page, re.DOTALL) is not None
 
 @then(u'result header contains')
 def check_header_attr(context):
     for line in context.table:
-        assert_is_not_none(re.fullmatch(line['value'], context.response.header[line['attr']]),
-                           "attribute '%s': expected: '%s', got '%s'"
-                           % (line['attr'], line['value'],
-                              context.response.header[line['attr']]))
+        assert re.fullmatch(line['value'], context.response.header[line['attr']]) is not None, \
+               "attribute '%s': expected: '%s', got '%s'" % (
+                   line['attr'], line['value'],
+                   context.response.header[line['attr']])
 
 @then(u'result header has (?P<neg>not )?attributes (?P<attrs>.*)')
 def check_header_no_attr(context, neg, attrs):
     for attr in attrs.split(','):
         if neg:
-            assert_not_in(attr, context.response.header)
+            assert attr not in context.response.header
         else:
-            assert_in(attr, context.response.header)
+            assert attr in context.response.header
 
 @then(u'results contain')
 def step_impl(context):
@@ -453,9 +492,9 @@ def validate_attributes(context, lid, neg, attrs):
     for i in idx:
         for attr in attrs.split(','):
             if neg:
-                assert_not_in(attr, context.response.result[i])
+                assert attr not in context.response.result[i]
            else:
-                assert_in(attr, context.response.result[i])
+                assert attr in context.response.result[i]
 
 @then(u'result addresses contain')
 def step_impl(context):
@@ -471,8 +510,8 @@ def step_impl(context):
     for h in context.table.headings:
         if h != 'ID':
             for p in addr_parts:
-                assert_in(h, p)
-                assert_equal(p[h], line[h], "Bad address value for %s" % h)
+                assert h in p
+                assert p[h] == line[h], "Bad address value for %s" % h
 
 @then(u'address of result (?P<lid>\d+) has(?P<neg> no)? types (?P<attrs>.*)')
 def check_address(context, lid, neg, attrs):
@@ -482,23 +521,24 @@ def check_address(context, lid, neg, attrs):
 
     for attr in attrs.split(','):
         if neg:
-            assert_not_in(attr, addr_parts)
+            assert attr not in addr_parts
         else:
-            assert_in(attr, addr_parts)
+            assert attr in addr_parts
 
-@then(u'address of result (?P<lid>\d+) is')
-def check_address(context, lid):
+@then(u'address of result (?P<lid>\d+) (?P<complete>is|contains)')
+def check_address(context, lid, complete):
     context.execute_steps("then more than %s results are returned" % lid)
 
     addr_parts = dict(context.response.result[int(lid)]['address'])
 
     for line in context.table:
-        assert_in(line['type'], addr_parts)
-        assert_equal(addr_parts[line['type']], line['value'],
-                     "Bad address value for %s" % line['type'])
+        assert line['type'] in addr_parts
+        assert addr_parts[line['type']] == line['value'], \
+               "Bad address value for %s" % line['type']
         del addr_parts[line['type']]
 
-    eq_(0, len(addr_parts), "Additional address parts found: %s" % str(addr_parts))
+    if complete == 'is':
+        assert len(addr_parts) == 0, "Additional address parts found: %s" % str(addr_parts)
 
 @then(u'result (?P<lid>\d+ )?has bounding box in (?P<coords>[\d,.-]+)')
 def step_impl(context, lid, coords):
@@ -516,10 +556,31 @@ def step_impl(context, lid, coords):
             bbox = bbox.split(',')
         bbox = [ float(x) for x in bbox ]
 
-        assert_greater_equal(bbox[0], coord[0])
-        assert_less_equal(bbox[1], coord[1])
-        assert_greater_equal(bbox[2], coord[2])
-        assert_less_equal(bbox[3], coord[3])
+        assert bbox[0] >= coord[0]
+        assert bbox[1] <= coord[1]
+        assert bbox[2] >= coord[2]
+        assert bbox[3] <= coord[3]
+
+@then(u'result (?P<lid>\d+ )?has centroid in (?P<coords>[\d,.-]+)')
+def step_impl(context, lid, coords):
+    if lid is None:
+        context.execute_steps("then at least 1 result is returned")
+        bboxes = zip(context.response.property_list('lat'),
+                     context.response.property_list('lon'))
+    else:
+        context.execute_steps("then more than %sresults are returned" % lid)
+        res = context.response.result[int(lid)]
+        bboxes = [ (res['lat'], res['lon']) ]
+
+    coord = [ float(x) for x in coords.split(',') ]
+
+    for lat, lon in bboxes:
+        lat = float(lat)
+        lon = float(lon)
+        assert lat >= coord[0]
+        assert lat <= coord[1]
+        assert lon >= coord[2]
+        assert lon <= coord[3]
 
 @then(u'there are(?P<neg> no)? duplicates')
 def check_for_duplicates(context, neg):
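
Note on the bbox conversion added by this patch: geojson_result_to_json_result() flattens a GeoJSON feature back into the plain JSON result shape the shared assertions expect, attaching the geometry under 'geojson' and reordering bbox (minlon, minlat, maxlon, maxlat) into Nominatim's boundingbox (minlat, maxlat, minlon, maxlon). The standalone sketch below exercises that reordering on a made-up feature; the feature contents and coordinate values are illustrative only and are not taken from the test suite.

from collections import OrderedDict

# Hypothetical GeoJSON feature, shaped like one entry of a
# format=geojson search response (all values are made up).
feature = {
    'type': 'Feature',
    'properties': OrderedDict([('place_id', 100),
                               ('osm_type', 'W'),
                               ('osm_id', 296699379),
                               ('lat', '51.23'),
                               ('lon', '10.52')]),
    'bbox': [10.52, 51.23, 10.53, 51.24],  # minlon, minlat, maxlon, maxlat
    'geometry': {'type': 'Point', 'coordinates': [10.52, 51.23]},
}

def geojson_result_to_json_result(geojson_result):
    # Same conversion as the helper introduced in the patch: keep the
    # properties dict, attach the geometry as 'geojson', and reorder the
    # bbox into boundingbox (minlat, maxlat, minlon, maxlon).
    result = geojson_result['properties']
    result['geojson'] = geojson_result['geometry']
    if 'bbox' in geojson_result:
        result['boundingbox'] = [
            geojson_result['bbox'][1],
            geojson_result['bbox'][3],
            geojson_result['bbox'][0],
            geojson_result['bbox'][2]
        ]
    return result

print(geojson_result_to_json_result(feature)['boundingbox'])
# prints [51.23, 51.24, 10.52, 10.53]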