options={'char-encoding' : 'utf8'})
#eq_(len(errors), 0 , "Errors found in HTML document:\n%s" % errors)
+ self.result = []
b = content.find('nominatim_results =')
e = content.find('</script>')
- content = content[b:e]
- b = content.find('[')
- e = content.rfind(']')
+ if b >= 0 and e >= 0:
+ content = content[b:e]
- self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(content[b:e+1])
+ b = content.find('[')
+ e = content.rfind(']')
+ if b >= 0 and e >= 0:
+ self.result = json.JSONDecoder(object_pairs_hook=OrderedDict)\
+ .decode(content[b:e+1])
def parse_xml(self):
et = ET.fromstring(self.page)
assert_greater_equal(bbox[2], coord[2])
assert_less_equal(bbox[3], coord[3])
+@then(u'result (?P<lid>\d+ )?has centroid in (?P<coords>[\d,.-]+)')
+def step_impl(context, lid, coords):
+    # Assert that result centroid(s) lie inside a given lat/lon box.
+    # `lid` optionally selects a single result by index; NOTE the regex
+    # deliberately captures a trailing space ("\d+ "), so int(lid) still
+    # parses and "%sresults" below renders as e.g. "2 results".
+    # `coords` is "lat_min,lat_max,lon_min,lon_max".
+    if lid is None:
+        # No index given: check the centroid of every returned result.
+        context.execute_steps("then at least 1 result is returned")
+        bboxes = zip(context.response.property_list('lat'),
+                     context.response.property_list('lon'))
+    else:
+        # Index given: need at least lid+1 results so result[int(lid)]
+        # is a valid index ("more than <lid>results" — lid keeps its
+        # trailing space from the capture group).
+        context.execute_steps("then more than %sresults are returned" % lid)
+        res = context.response.result[int(lid)]
+        bboxes = [ (res['lat'], res['lon']) ]
+
+    coord = [ float(x) for x in coords.split(',') ]
+
+    for lat, lon in bboxes:
+        lat = float(lat)
+        lon = float(lon)
+        # lat must fall in [lat_min, lat_max], lon in [lon_min, lon_max].
+        assert_greater_equal(lat, coord[0])
+        assert_less_equal(lat, coord[1])
+        assert_greater_equal(lon, coord[2])
+        assert_less_equal(lon, coord[3])
+
@then(u'there are(?P<neg> no)? duplicates')
def check_for_duplicates(context, neg):
context.execute_steps("then at least 1 result is returned")