shell: bash
- name: Install ${{ matrix.flavour }} prerequisites
run: |
- sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev liblua${LUA_VERSION}-dev lua${LUA_VERSION} lua-dkjson nlohmann-json3-dev
+ sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev liblua${LUA_VERSION}-dev lua${LUA_VERSION} lua-dkjson nlohmann-json3-dev libspatialite7 libsqlite3-mod-spatialite
if [ "$FLAVOUR" == "oldstuff" ]; then
- pip3 install MarkupSafe==2.0.1 python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 sqlalchemy==1.4.31 datrie asyncpg
+ pip3 install MarkupSafe==2.0.1 python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 sqlalchemy==1.4.31 datrie asyncpg aiosqlite
else
sudo apt-get install -y -qq python3-icu python3-datrie python3-pyosmium python3-jinja2 python3-psutil python3-psycopg2 python3-dotenv python3-yaml
- pip3 install sqlalchemy psycopg
+ pip3 install sqlalchemy psycopg aiosqlite
fi
shell: bash
env:
if: matrix.flavour == 'oldstuff'
- name: Install Python webservers
- run: pip3 install falcon starlette
+ run: pip3 install falcon starlette asgi_lifespan
- name: Install latest pylint
- run: pip3 install -U pylint asgi_lifespan
+ run: pip3 install -U pylint
+ if: matrix.flavour != 'oldstuff'
- name: PHP linting
run: phpcs --report-width=120 .
working-directory: Nominatim
+ if: matrix.flavour != 'oldstuff'
- name: Python linting
run: python3 -m pylint nominatim
working-directory: Nominatim
+ if: matrix.flavour != 'oldstuff'
- name: PHP unit tests
run: phpunit ./
Next you need to set up the service that runs the Nominatim frontend. This is
easiest done with a systemd job.
+First you need to tell systemd to create a socket file to be used by
+gunicorn. Create the following file `/etc/systemd/system/nominatim.socket`:
+
+``` systemd
+[Unit]
+Description=Gunicorn socket for Nominatim
+
+[Socket]
+ListenStream=/run/nominatim.sock
+SocketUser=www-data
+
+[Install]
+WantedBy=multi-user.target
+```
+
+Now you can add the systemd service for Nominatim itself.
Create the following file `/etc/systemd/system/nominatim.service`:
``` systemd
[`NOMINATIM_API_POOL_SIZE`](../customize/Settings.md#nominatim_api_pool_size)
connections to the database to serve requests in parallel.
-Make the new service known to systemd and start it:
+Make the new services known to systemd and start them:
``` sh
sudo systemctl daemon-reload
-sudo systemctl enable nominatim
-sudo systemctl start nominatim
+sudo systemctl enable nominatim.socket
+sudo systemctl start nominatim.socket
+sudo systemctl enable nominatim.service
+sudo systemctl start nominatim.service
```
This sets the service up, so that Nominatim is automatically started
if self._engine:
return
- dsn = self.config.get_database_params()
- pool_size = self.config.get_int('API_POOL_SIZE')
-
- query = {k: v for k, v in dsn.items()
- if k not in ('user', 'password', 'dbname', 'host', 'port')}
-
- dburl = sa.engine.URL.create(
- f'postgresql+{PGCORE_LIB}',
- database=dsn.get('dbname'),
- username=dsn.get('user'), password=dsn.get('password'),
- host=dsn.get('host'), port=int(dsn['port']) if 'port' in dsn else None,
- query=query)
- engine = sa_asyncio.create_async_engine(dburl, future=True,
- max_overflow=0, pool_size=pool_size,
- echo=self.config.get_bool('DEBUG_SQL'))
+ extra_args: Dict[str, Any] = {'future': True,
+ 'echo': self.config.get_bool('DEBUG_SQL')}
+
+ is_sqlite = self.config.DATABASE_DSN.startswith('sqlite:')
+
+ if is_sqlite:
+ params = dict((p.split('=', 1)
+ for p in self.config.DATABASE_DSN[7:].split(';')))
+ dburl = sa.engine.URL.create('sqlite+aiosqlite',
+ database=params.get('dbname'))
+
+ else:
+ dsn = self.config.get_database_params()
+ query = {k: v for k, v in dsn.items()
+ if k not in ('user', 'password', 'dbname', 'host', 'port')}
+
+ dburl = sa.engine.URL.create(
+ f'postgresql+{PGCORE_LIB}',
+ database=dsn.get('dbname'),
+ username=dsn.get('user'),
+ password=dsn.get('password'),
+ host=dsn.get('host'),
+ port=int(dsn['port']) if 'port' in dsn else None,
+ query=query)
+ extra_args['max_overflow'] = 0
+ extra_args['pool_size'] = self.config.get_int('API_POOL_SIZE')
+
+ engine = sa_asyncio.create_async_engine(dburl, **extra_args)
try:
async with engine.begin() as conn:
except (PGCORE_ERROR, sa.exc.OperationalError):
server_version = 0
- if server_version >= 110000:
+ if server_version >= 110000 and not is_sqlite:
@sa.event.listens_for(engine.sync_engine, "connect")
def _on_connect(dbapi_con: Any, _: Any) -> None:
cursor = dbapi_con.cursor()
# Make sure that all connections get the new settings
await self.close()
+ if is_sqlite:
+ @sa.event.listens_for(engine.sync_engine, "connect")
+ def _on_sqlite_connect(dbapi_con: Any, _: Any) -> None:
+ dbapi_con.run_async(lambda conn: conn.enable_load_extension(True))
+ cursor = dbapi_con.cursor()
+ cursor.execute("SELECT load_extension('mod_spatialite')")
+ cursor.execute('SELECT SetDecimalPrecision(7)')
+ dbapi_con.run_async(lambda conn: conn.enable_load_extension(False))
+
self._property_cache['DB:server_version'] = server_version
self._tables = SearchTables(sa.MetaData(), engine.name) # pylint: disable=no-member
sql = sql.where(t.c.osm_id == place.osm_id).limit(1)
if place.osm_class and place.osm_class.isdigit():
sql = sql.order_by(sa.func.greatest(0,
- sa.func.least(int(place.osm_class) - t.c.endnumber),
- t.c.startnumber - int(place.osm_class)))
+ int(place.osm_class) - t.c.endnumber,
+ t.c.startnumber - int(place.osm_class)))
else:
return None
if details.geometry_output & ntyp.GeometryFormat.GEOJSON:
def _add_geometry(sql: SaSelect, column: SaColumn) -> SaSelect:
- return sql.add_columns(sa.literal_column(f"""
- ST_AsGeoJSON(CASE WHEN ST_NPoints({column.name}) > 5000
- THEN ST_SimplifyPreserveTopology({column.name}, 0.0001)
- ELSE {column.name} END)
- """).label('geometry_geojson'))
+ return sql.add_columns(sa.func.ST_AsGeoJSON(
+ sa.case((sa.func.ST_NPoints(column) > 5000,
+ sa.func.ST_SimplifyPreserveTopology(column, 0.0001)),
+ else_=column), 7).label('geometry_geojson'))
else:
def _add_geometry(sql: SaSelect, column: SaColumn) -> SaSelect:
return sql.add_columns(sa.func.ST_GeometryType(column).label('geometry_type'))
# add missing details
assert result is not None
+ if 'type' in result.geometry:
+ result.geometry['type'] = GEOMETRY_TYPE_MAP.get(result.geometry['type'],
+ result.geometry['type'])
indexed_date = getattr(row, 'indexed_date', None)
if indexed_date is not None:
result.indexed_date = indexed_date.replace(tzinfo=dt.timezone.utc)
col = sa.func.ST_SimplifyPreserveTopology(col, details.geometry_simplification)
if details.geometry_output & ntyp.GeometryFormat.GEOJSON:
- out.append(sa.func.ST_AsGeoJSON(col).label('geometry_geojson'))
+ out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
if details.geometry_output & ntyp.GeometryFormat.TEXT:
out.append(sa.func.ST_AsText(col).label('geometry_text'))
if details.geometry_output & ntyp.GeometryFormat.KML:
- out.append(sa.func.ST_AsKML(col).label('geometry_kml'))
+ out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
if details.geometry_output & ntyp.GeometryFormat.SVG:
- out.append(sa.func.ST_AsSVG(col).label('geometry_svg'))
+ out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
return sql.add_columns(*out)
await nres.add_result_details(conn, [result], details)
return result
+
+
+GEOMETRY_TYPE_MAP = {
+ 'POINT': 'ST_Point',
+ 'MULTIPOINT': 'ST_MultiPoint',
+ 'LINESTRING': 'ST_LineString',
+ 'MULTILINESTRING': 'ST_MultiLineString',
+ 'POLYGON': 'ST_Polygon',
+ 'MULTIPOLYGON': 'ST_MultiPolygon',
+ 'GEOMETRYCOLLECTION': 'ST_GeometryCollection'
+}
import sqlalchemy as sa
from nominatim.typing import SaSelect, SaRow
-from nominatim.db.sqlalchemy_functions import CrosscheckNames
+from nominatim.db.sqlalchemy_types import Geometry
from nominatim.api.types import Point, Bbox, LookupDetails
from nominatim.api.connection import SearchConnection
from nominatim.api.logging import log
async def _finalize_entry(conn: SearchConnection, result: BaseResultT) -> None:
- assert result.address_rows
+ assert result.address_rows is not None
postcode = result.postcode
if not postcode and result.address:
postcode = result.address.get('postcode')
if not lookup_ids:
return
- ltab = sa.func.json_array_elements(sa.type_coerce(lookup_ids, sa.JSON))\
+ ltab = sa.func.JsonArrayEach(sa.type_coerce(lookup_ids, sa.JSON))\
.table_valued(sa.column('value', type_=sa.JSON)) # type: ignore[no-untyped-call]
t = conn.t.placex
.order_by('src_place_id')\
.order_by(sa.column('rank_address').desc())\
.order_by((taddr.c.place_id == ltab.c.value['pid'].as_integer()).desc())\
- .order_by(sa.case((CrosscheckNames(t.c.name, ltab.c.value['names']), 2),
+ .order_by(sa.case((sa.func.CrosscheckNames(t.c.name, ltab.c.value['names']), 2),
(taddr.c.isaddress, 0),
(sa.and_(taddr.c.fromarea,
t.c.geometry.ST_Contains(
parent_lookup_ids = list(filter(lambda e: e['pid'] != e['lid'], lookup_ids))
if parent_lookup_ids:
- ltab = sa.func.json_array_elements(sa.type_coerce(parent_lookup_ids, sa.JSON))\
+ ltab = sa.func.JsonArrayEach(sa.type_coerce(parent_lookup_ids, sa.JSON))\
.table_valued(sa.column('value', type_=sa.JSON)) # type: ignore[no-untyped-call]
sql = sa.select(ltab.c.value['pid'].as_integer().label('src_place_id'),
t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
return sa.select(t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
t.c.class_.label('class'), t.c.type,
t.c.admin_level, t.c.housenumber,
- sa.literal_column("""ST_GeometryType(geometry) in
- ('ST_Polygon','ST_MultiPolygon')""").label('fromarea'),
+ t.c.geometry.is_area().label('fromarea'),
t.c.rank_address,
- sa.literal_column(
- f"""ST_DistanceSpheroid(geometry,
- 'SRID=4326;{centroid.to_wkt()}'::geometry,
- 'SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]]')
- """).label('distance'))
+ t.c.geometry.distance_spheroid(
+ sa.bindparam('centroid', value=centroid, type_=Geometry)).label('distance'))
async def complete_linked_places(conn: SearchConnection, result: BaseResult) -> None:
sel = sa.select(t.c.word_id, t.c.word_token, t.c.word)
for name_tokens, address_tokens in await conn.execute(sql):
- for row in await conn.execute(sel.where(t.c.word_id == sa.any_(name_tokens))):
+ for row in await conn.execute(sel.where(t.c.word_id.in_(name_tokens))):
result.name_keywords.append(WordInfo(*row))
- for row in await conn.execute(sel.where(t.c.word_id == sa.any_(address_tokens))):
+ for row in await conn.execute(sel.where(t.c.word_id.in_(address_tokens))):
result.address_keywords.append(WordInfo(*row))
from nominatim.api.logging import log
from nominatim.api.types import AnyPoint, DataLayer, ReverseDetails, GeometryFormat, Bbox
from nominatim.db.sqlalchemy_types import Geometry
-import nominatim.db.sqlalchemy_functions as snfn
# In SQLAlchemy expression which compare with NULL need to be expressed with
# the equal sign.
else_=0).label('position')
-def _is_address_point(table: SaFromClause) -> SaColumn:
- return sa.and_(table.c.rank_address == 30,
- sa.or_(table.c.housenumber != None,
- table.c.name.has_key('addr:housename')))
-
-
def _get_closest(*rows: Optional[SaRow]) -> Optional[SaRow]:
return min(rows, key=lambda row: 1000 if row is None else row.distance)
col = sa.func.ST_SimplifyPreserveTopology(col, self.params.geometry_simplification)
if self.params.geometry_output & GeometryFormat.GEOJSON:
- out.append(sa.func.ST_AsGeoJSON(col).label('geometry_geojson'))
+ out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
if self.params.geometry_output & GeometryFormat.TEXT:
out.append(sa.func.ST_AsText(col).label('geometry_text'))
if self.params.geometry_output & GeometryFormat.KML:
- out.append(sa.func.ST_AsKML(col).label('geometry_kml'))
+ out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
if self.params.geometry_output & GeometryFormat.SVG:
- out.append(sa.func.ST_AsSVG(col).label('geometry_svg'))
+ out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
return sql.add_columns(*out)
max_rank = min(29, self.max_rank)
restrict.append(lambda: no_index(t.c.rank_address).between(26, max_rank))
if self.max_rank == 30:
- restrict.append(lambda: _is_address_point(t))
+ restrict.append(lambda: sa.func.IsAddressPoint(t))
if self.layer_enabled(DataLayer.POI) and self.max_rank == 30:
restrict.append(lambda: sa.and_(no_index(t.c.rank_search) == 30,
t.c.class_.not_in(('place', 'building')),
sql: SaLambdaSelect = sa.lambda_stmt(lambda: _select_from_placex(t)
.where(t.c.geometry.ST_DWithin(WKT_PARAM, 0.001))
.where(t.c.parent_place_id == parent_place_id)
- .where(_is_address_point(t))
+ .where(sa.func.IsAddressPoint(t))
.where(t.c.indexed_status == 0)
.where(t.c.linked_place_id == None)
.order_by('distance')
inner = sa.select(t, sa.literal(0.0).label('distance'))\
.where(t.c.rank_search.between(5, MAX_RANK_PARAM))\
.where(t.c.geometry.intersects(WKT_PARAM))\
- .where(snfn.select_index_placex_geometry_reverse_lookuppolygon('placex'))\
+ .where(sa.func.PlacexGeometryReverseLookuppolygon())\
.order_by(sa.desc(t.c.rank_search))\
.limit(50)\
.subquery('area')
.where(t.c.rank_search > address_rank)\
.where(t.c.rank_search <= MAX_RANK_PARAM)\
.where(t.c.indexed_status == 0)\
- .where(snfn.select_index_placex_geometry_reverse_lookupplacenode('placex'))\
- .where(t.c.geometry
- .ST_Buffer(sa.func.reverse_place_diameter(t.c.rank_search))
- .intersects(WKT_PARAM))\
+ .where(sa.func.IntersectsReverseDistance(t, WKT_PARAM))\
.order_by(sa.desc(t.c.rank_search))\
.limit(50)\
.subquery('places')
return _select_from_placex(inner, False)\
.join(touter, touter.c.geometry.ST_Contains(inner.c.geometry))\
.where(touter.c.place_id == address_id)\
- .where(inner.c.distance < sa.func.reverse_place_diameter(inner.c.rank_search))\
+ .where(sa.func.IsBelowReverseDistance(inner.c.distance, inner.c.rank_search))\
.order_by(sa.desc(inner.c.rank_search), inner.c.distance)\
.limit(1)
.where(t.c.indexed_status == 0)\
.where(t.c.linked_place_id == None)\
.where(self._filter_by_layer(t))\
- .where(t.c.geometry
- .ST_Buffer(sa.func.reverse_place_diameter(t.c.rank_search))
- .intersects(WKT_PARAM))\
+ .where(t.c.geometry.intersects(sa.func.ST_Expand(WKT_PARAM, 0.007)))\
.order_by(sa.desc(t.c.rank_search))\
+ .order_by('distance')\
.limit(50)\
.subquery()
.where(t.c.rank_search <= MAX_RANK_PARAM)\
.where(t.c.indexed_status == 0)\
.where(t.c.country_code.in_(ccodes))\
- .where(snfn.select_index_placex_geometry_reverse_lookupplacenode('placex'))\
- .where(t.c.geometry
- .ST_Buffer(sa.func.reverse_place_diameter(t.c.rank_search))
- .intersects(WKT_PARAM))\
+ .where(sa.func.IntersectsReverseDistance(t, WKT_PARAM))\
.order_by(sa.desc(t.c.rank_search))\
.limit(50)\
.subquery('area')
return _select_from_placex(inner, False)\
- .where(inner.c.distance < sa.func.reverse_place_diameter(inner.c.rank_search))\
+ .where(sa.func.IsBelowReverseDistance(inner.c.distance, inner.c.rank_search))\
.order_by(sa.desc(inner.c.rank_search), inner.c.distance)\
.limit(1)
col = sa.func.ST_SimplifyPreserveTopology(col, details.geometry_simplification)
if details.geometry_output & GeometryFormat.GEOJSON:
- out.append(sa.func.ST_AsGeoJSON(col).label('geometry_geojson'))
+ out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
if details.geometry_output & GeometryFormat.TEXT:
out.append(sa.func.ST_AsText(col).label('geometry_text'))
if details.geometry_output & GeometryFormat.KML:
- out.append(sa.func.ST_AsKML(col).label('geometry_kml'))
+ out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
if details.geometry_output & GeometryFormat.SVG:
- out.append(sa.func.ST_AsSVG(col).label('geometry_svg'))
+ out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
return sql.add_columns(*out)
sql = sa.select(conn.t.import_status.c.lastimportdate).limit(1)
status.data_updated = await conn.scalar(sql)
+ if status.data_updated is not None:
+ status.data_updated = status.data_updated.replace(tzinfo=dt.timezone.utc)
+
# Database version
try:
verstr = await conn.get_property('database_version')
parser.add_subcommand('admin', clicmd.AdminFuncs())
parser.add_subcommand('export', clicmd.QueryExport())
+ parser.add_subcommand('convert', clicmd.ConvertDB())
parser.add_subcommand('serve', AdminServe())
parser.add_subcommand('search', clicmd.APISearch())
from nominatim.clicmd.freeze import SetupFreeze as SetupFreeze
from nominatim.clicmd.special_phrases import ImportSpecialPhrases as ImportSpecialPhrases
from nominatim.clicmd.export import QueryExport as QueryExport
+from nominatim.clicmd.convert import ConvertDB as ConvertDB
language: Optional[str]
restrict_to_country: Optional[str]
+ # Arguments to 'convert'
+ output: Path
+
# Arguments to 'refresh'
postcodes: bool
word_tokens: bool
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2023 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of the 'convert' subcommand.
+"""
+from typing import Set, Any, Union, Optional, Sequence
+import argparse
+import asyncio
+from pathlib import Path
+
+from nominatim.clicmd.args import NominatimArgs
+from nominatim.errors import UsageError
+
+# Do not repeat documentation of subcommand classes.
+# pylint: disable=C0111
+# Using non-top-level imports to avoid eventually unused imports.
+# pylint: disable=E0012,C0415
+
+class WithAction(argparse.Action):
+ """ Special action that saves a list of flags, given on the command-line
+ as `--with-foo` or `--without-foo`.
+ """
+ def __init__(self, option_strings: Sequence[str], dest: Any,
+ default: bool = True, **kwargs: Any) -> None:
+ if 'nargs' in kwargs:
+ raise ValueError("nargs not allowed.")
+ if option_strings is None:
+ raise ValueError("Positional parameter not allowed.")
+
+ self.dest_set = kwargs.pop('dest_set')
+ full_option_strings = []
+ for opt in option_strings:
+ if not opt.startswith('--'):
+ raise ValueError("short-form options not allowed")
+ if default:
+ self.dest_set.add(opt[2:])
+ full_option_strings.append(f"--with-{opt[2:]}")
+ full_option_strings.append(f"--without-{opt[2:]}")
+
+ super().__init__(full_option_strings, argparse.SUPPRESS, nargs=0, **kwargs)
+
+
+ def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
+ values: Union[str, Sequence[Any], None],
+ option_string: Optional[str] = None) -> None:
+ assert option_string
+ if option_string.startswith('--with-'):
+ self.dest_set.add(option_string[7:])
+ if option_string.startswith('--without-'):
+ self.dest_set.discard(option_string[10:])
+
+
+class ConvertDB:
+ """ Convert an existing database into a different format. (EXPERIMENTAL)
+
+ Dump a read-only version of the database in a different format.
+ At the moment only a SQLite database suitable for reverse lookup
+ can be created.
+ """
+
+ def __init__(self) -> None:
+ self.options: Set[str] = set()
+
+ def add_args(self, parser: argparse.ArgumentParser) -> None:
+ parser.add_argument('--format', default='sqlite',
+ choices=('sqlite', ),
+ help='Format of the output database (must be sqlite currently)')
+ parser.add_argument('--output', '-o', required=True, type=Path,
+ help='File to write the database to.')
+ group = parser.add_argument_group('Switches to define database layout'
+ '(currently no effect)')
+ group.add_argument('--reverse', action=WithAction, dest_set=self.options, default=True,
+ help='Enable/disable support for reverse and lookup API'
+ ' (default: enabled)')
+ group.add_argument('--search', action=WithAction, dest_set=self.options, default=False,
+ help='Enable/disable support for search API (default: disabled)')
+ group.add_argument('--details', action=WithAction, dest_set=self.options, default=True,
+ help='Enable/disable support for details API (default: enabled)')
+
+
+ def run(self, args: NominatimArgs) -> int:
+ if args.output.exists():
+ raise UsageError(f"File '{args.output}' already exists. Refusing to overwrite.")
+
+ if args.format == 'sqlite':
+ from ..tools import convert_sqlite
+
+ asyncio.run(convert_sqlite.convert(args.project_dir, args.output, self.options))
+ return 0
+
+ return 1
"""
Custom functions and expressions for SQLAlchemy.
"""
+from __future__ import annotations
from typing import Any
import sqlalchemy as sa
-from sqlalchemy.sql.expression import FunctionElement
from sqlalchemy.ext.compiler import compiles
from nominatim.typing import SaColumn
-# pylint: disable=abstract-method,missing-function-docstring,consider-using-f-string
+# pylint: disable=all
-def select_index_placex_geometry_reverse_lookuppolygon(table: str) -> 'sa.TextClause':
- """ Create an expression with the necessary conditions over a placex
- table that the index 'idx_placex_geometry_reverse_lookupPolygon'
- can be used.
+class PlacexGeometryReverseLookuppolygon(sa.sql.functions.GenericFunction[Any]):
+ """ Check for conditions that allow partial index use on
+ 'idx_placex_geometry_reverse_lookupPolygon'.
+
+ Needs to be constant, so that the query planner picks them up correctly
+ in prepared statements.
"""
- return sa.text(f"ST_GeometryType({table}.geometry) in ('ST_Polygon', 'ST_MultiPolygon')"
- f" AND {table}.rank_address between 4 and 25"
- f" AND {table}.type != 'postcode'"
- f" AND {table}.name is not null"
- f" AND {table}.indexed_status = 0"
- f" AND {table}.linked_place_id is null")
+ name = 'PlacexGeometryReverseLookuppolygon'
+ inherit_cache = True
+
+
+@compiles(PlacexGeometryReverseLookuppolygon) # type: ignore[no-untyped-call, misc]
+def _default_intersects(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ return ("(ST_GeometryType(placex.geometry) in ('ST_Polygon', 'ST_MultiPolygon')"
+ " AND placex.rank_address between 4 and 25"
+ " AND placex.type != 'postcode'"
+ " AND placex.name is not null"
+ " AND placex.indexed_status = 0"
+ " AND placex.linked_place_id is null)")
+
+
+@compiles(PlacexGeometryReverseLookuppolygon, 'sqlite') # type: ignore[no-untyped-call, misc]
+def _sqlite_intersects(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ return ("(ST_GeometryType(placex.geometry) in ('POLYGON', 'MULTIPOLYGON')"
+ " AND placex.rank_address between 4 and 25"
+ " AND placex.type != 'postcode'"
+ " AND placex.name is not null"
+ " AND placex.indexed_status = 0"
+ " AND placex.linked_place_id is null)")
+
+
+class IntersectsReverseDistance(sa.sql.functions.GenericFunction[Any]):
+ name = 'IntersectsReverseDistance'
+ inherit_cache = True
+
+ def __init__(self, table: sa.Table, geom: SaColumn) -> None:
+ super().__init__(table.c.geometry, # type: ignore[no-untyped-call]
+ table.c.rank_search, geom)
+ self.tablename = table.name
+
+
+@compiles(IntersectsReverseDistance) # type: ignore[no-untyped-call, misc]
+def default_reverse_place_diameter(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ table = element.tablename
+ return f"({table}.rank_address between 4 and 25"\
+ f" AND {table}.type != 'postcode'"\
+ f" AND {table}.name is not null"\
+ f" AND {table}.linked_place_id is null"\
+ f" AND {table}.osm_type = 'N'" + \
+ " AND ST_Buffer(%s, reverse_place_diameter(%s)) && %s)" % \
+ tuple(map(lambda c: compiler.process(c, **kw), element.clauses))
+
+
+@compiles(IntersectsReverseDistance, 'sqlite') # type: ignore[no-untyped-call, misc]
+def sqlite_reverse_place_diameter(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ geom1, rank, geom2 = list(element.clauses)
+ table = element.tablename
+
+ return (f"({table}.rank_address between 4 and 25"\
+ f" AND {table}.type != 'postcode'"\
+ f" AND {table}.name is not null"\
+ f" AND {table}.linked_place_id is null"\
+ f" AND {table}.osm_type = 'N'"\
+ " AND MbrIntersects(%s, ST_Expand(%s, 14.0 * exp(-0.2 * %s) - 0.03))"\
+ f" AND {table}.place_id IN"\
+ " (SELECT place_id FROM placex_place_node_areas"\
+ " WHERE ROWID IN (SELECT ROWID FROM SpatialIndex"\
+ " WHERE f_table_name = 'placex_place_node_areas'"\
+ " AND search_frame = %s)))") % (
+ compiler.process(geom1, **kw),
+ compiler.process(geom2, **kw),
+ compiler.process(rank, **kw),
+ compiler.process(geom2, **kw))
+
+
+class IsBelowReverseDistance(sa.sql.functions.GenericFunction[Any]):
+ name = 'IsBelowReverseDistance'
+ inherit_cache = True
+
+
+@compiles(IsBelowReverseDistance) # type: ignore[no-untyped-call, misc]
+def default_is_below_reverse_distance(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ dist, rank = list(element.clauses)
+ return "%s < reverse_place_diameter(%s)" % (compiler.process(dist, **kw),
+ compiler.process(rank, **kw))
+
+
+@compiles(IsBelowReverseDistance, 'sqlite') # type: ignore[no-untyped-call, misc]
+def sqlite_is_below_reverse_distance(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ dist, rank = list(element.clauses)
+ return "%s < 14.0 * exp(-0.2 * %s) - 0.03" % (compiler.process(dist, **kw),
+ compiler.process(rank, **kw))
+
def select_index_placex_geometry_reverse_lookupplacenode(table: str) -> 'sa.TextClause':
""" Create an expression with the necessary conditions over a placex
f" AND {table}.osm_type = 'N'")
-class CrosscheckNames(FunctionElement[Any]):
+class IsAddressPoint(sa.sql.functions.GenericFunction[Any]):
+ name = 'IsAddressPoint'
+ inherit_cache = True
+
+ def __init__(self, table: sa.Table) -> None:
+ super().__init__(table.c.rank_address, # type: ignore[no-untyped-call]
+ table.c.housenumber, table.c.name)
+
+
+@compiles(IsAddressPoint) # type: ignore[no-untyped-call, misc]
+def default_is_address_point(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ rank, hnr, name = list(element.clauses)
+ return "(%s = 30 AND (%s IS NOT NULL OR %s ? 'addr:housename'))" % (
+ compiler.process(rank, **kw),
+ compiler.process(hnr, **kw),
+ compiler.process(name, **kw))
+
+
+@compiles(IsAddressPoint, 'sqlite') # type: ignore[no-untyped-call, misc]
+def sqlite_is_address_point(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ rank, hnr, name = list(element.clauses)
+ return "(%s = 30 AND coalesce(%s, json_extract(%s, '$.addr:housename')) IS NOT NULL)" % (
+ compiler.process(rank, **kw),
+ compiler.process(hnr, **kw),
+ compiler.process(name, **kw))
+
+
+class CrosscheckNames(sa.sql.functions.GenericFunction[Any]):
""" Check if in the given list of names in parameters 1 any of the names
from the JSON array in parameter 2 are contained.
"""
arg1, arg2 = list(element.clauses)
return "coalesce(avals(%s) && ARRAY(SELECT * FROM json_array_elements_text(%s)), false)" % (
compiler.process(arg1, **kw), compiler.process(arg2, **kw))
+
+
+@compiles(CrosscheckNames, 'sqlite') # type: ignore[no-untyped-call, misc]
+def compile_sqlite_crosscheck_names(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ arg1, arg2 = list(element.clauses)
+ return "EXISTS(SELECT *"\
+ " FROM json_each(%s) as name, json_each(%s) as match_name"\
+ " WHERE name.value = match_name.value)"\
+ % (compiler.process(arg1, **kw), compiler.process(arg2, **kw))
+
+
+class JsonArrayEach(sa.sql.functions.GenericFunction[Any]):
+ """ Return elements of a json array as a set.
+ """
+ name = 'JsonArrayEach'
+ inherit_cache = True
+
+
+@compiles(JsonArrayEach) # type: ignore[no-untyped-call, misc]
+def default_json_array_each(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "json_array_elements(%s)" % compiler.process(element.clauses, **kw)
+
+
+@compiles(JsonArrayEach, 'sqlite') # type: ignore[no-untyped-call, misc]
+def sqlite_json_array_each(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "json_each(%s)" % compiler.process(element.clauses, **kw)
+
+
+class Greatest(sa.sql.functions.GenericFunction[Any]):
+ """ Function to compute maximum of all its input parameters.
+ """
+ name = 'greatest'
+ inherit_cache = True
+
+
+@compiles(Greatest, 'sqlite') # type: ignore[no-untyped-call, misc]
+def sqlite_greatest(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "max(%s)" % compiler.process(element.clauses, **kw)
from sqlalchemy.dialects.postgresql import HSTORE, ARRAY, JSONB, array
from sqlalchemy.dialects.sqlite import JSON as sqlite_json
+import nominatim.db.sqlalchemy_functions #pylint: disable=unused-import
from nominatim.db.sqlalchemy_types import Geometry
class PostgresTypes:
#pylint: disable=too-many-instance-attributes
class SearchTables:
""" Data class that holds the tables of the Nominatim database.
+
+ This schema strictly reflects the read-access view of the database.
+ Any data used for updates only will not be visible.
"""
def __init__(self, meta: sa.MetaData, engine_name: str) -> None:
sa.Column('value', sa.Text))
self.placex = sa.Table('placex', meta,
- sa.Column('place_id', sa.BigInteger, nullable=False, unique=True),
+ sa.Column('place_id', sa.BigInteger, nullable=False),
sa.Column('parent_place_id', sa.BigInteger),
sa.Column('linked_place_id', sa.BigInteger),
sa.Column('importance', sa.Float),
sa.Column('indexed_date', sa.DateTime),
sa.Column('rank_address', sa.SmallInteger),
sa.Column('rank_search', sa.SmallInteger),
- sa.Column('partition', sa.SmallInteger),
sa.Column('indexed_status', sa.SmallInteger),
sa.Column('osm_type', sa.String(1), nullable=False),
sa.Column('osm_id', sa.BigInteger, nullable=False),
sa.Column('centroid', Geometry))
self.addressline = sa.Table('place_addressline', meta,
- sa.Column('place_id', sa.BigInteger, index=True),
- sa.Column('address_place_id', sa.BigInteger, index=True),
+ sa.Column('place_id', sa.BigInteger),
+ sa.Column('address_place_id', sa.BigInteger),
sa.Column('distance', sa.Float),
- sa.Column('cached_rank_address', sa.SmallInteger),
sa.Column('fromarea', sa.Boolean),
sa.Column('isaddress', sa.Boolean))
self.postcode = sa.Table('location_postcode', meta,
- sa.Column('place_id', sa.BigInteger, unique=True),
+ sa.Column('place_id', sa.BigInteger),
sa.Column('parent_place_id', sa.BigInteger),
sa.Column('rank_search', sa.SmallInteger),
sa.Column('rank_address', sa.SmallInteger),
sa.Column('indexed_status', sa.SmallInteger),
sa.Column('indexed_date', sa.DateTime),
sa.Column('country_code', sa.String(2)),
- sa.Column('postcode', sa.Text, index=True),
+ sa.Column('postcode', sa.Text),
sa.Column('geometry', Geometry))
self.osmline = sa.Table('location_property_osmline', meta,
- sa.Column('place_id', sa.BigInteger, nullable=False, unique=True),
+ sa.Column('place_id', sa.BigInteger, nullable=False),
sa.Column('osm_id', sa.BigInteger),
sa.Column('parent_place_id', sa.BigInteger),
sa.Column('indexed_date', sa.DateTime),
sa.Column('startnumber', sa.Integer),
sa.Column('endnumber', sa.Integer),
sa.Column('step', sa.SmallInteger),
- sa.Column('partition', sa.SmallInteger),
sa.Column('indexed_status', sa.SmallInteger),
sa.Column('linegeo', Geometry),
sa.Column('address', self.types.Composite),
sa.Column('country_code', sa.String(2)),
sa.Column('name', self.types.Composite),
sa.Column('derived_name', self.types.Composite),
- sa.Column('country_default_language_code', sa.Text),
sa.Column('partition', sa.Integer))
self.country_grid = sa.Table('country_osm_grid', meta,
# The following tables are not necessarily present.
self.search_name = sa.Table('search_name', meta,
- sa.Column('place_id', sa.BigInteger, index=True),
+ sa.Column('place_id', sa.BigInteger),
sa.Column('importance', sa.Float),
sa.Column('search_rank', sa.SmallInteger),
sa.Column('address_rank', sa.SmallInteger),
- sa.Column('name_vector', self.types.IntArray, index=True),
- sa.Column('nameaddress_vector', self.types.IntArray, index=True),
+ sa.Column('name_vector', self.types.IntArray),
+ sa.Column('nameaddress_vector', self.types.IntArray),
sa.Column('country_code', sa.String(2)),
sa.Column('centroid', Geometry))
sa.Column('startnumber', sa.Integer),
sa.Column('endnumber', sa.Integer),
sa.Column('step', sa.SmallInteger),
- sa.Column('partition', sa.SmallInteger),
sa.Column('linegeo', Geometry),
sa.Column('postcode', sa.Text))
"""
Custom types for SQLAlchemy.
"""
+from __future__ import annotations
from typing import Callable, Any, cast
import sys
import sqlalchemy as sa
+from sqlalchemy.ext.compiler import compiles
from sqlalchemy import types
from nominatim.typing import SaColumn, SaBind
#pylint: disable=all
+class Geometry_DistanceSpheroid(sa.sql.expression.FunctionElement[float]):
+ """ Function to compute the spherical distance in meters.
+ """
+ type = sa.Float()
+ name = 'Geometry_DistanceSpheroid'
+ inherit_cache = True
+
+
+@compiles(Geometry_DistanceSpheroid) # type: ignore[no-untyped-call, misc]
+def _default_distance_spheroid(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "ST_DistanceSpheroid(%s,"\
+ " 'SPHEROID[\"WGS 84\",6378137,298.257223563, AUTHORITY[\"EPSG\",\"7030\"]]')"\
+ % compiler.process(element.clauses, **kw)
+
+
+@compiles(Geometry_DistanceSpheroid, 'sqlite') # type: ignore[no-untyped-call, misc]
+def _spatialite_distance_spheroid(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "COALESCE(Distance(%s, true), 0.0)" % compiler.process(element.clauses, **kw)
+
+
+class Geometry_IsLineLike(sa.sql.expression.FunctionElement[Any]):
+ """ Check if the geometry is a line or multiline.
+ """
+ name = 'Geometry_IsLineLike'
+ inherit_cache = True
+
+
+@compiles(Geometry_IsLineLike) # type: ignore[no-untyped-call, misc]
+def _default_is_line_like(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "ST_GeometryType(%s) IN ('ST_LineString', 'ST_MultiLineString')" % \
+ compiler.process(element.clauses, **kw)
+
+
+@compiles(Geometry_IsLineLike, 'sqlite') # type: ignore[no-untyped-call, misc]
+def _sqlite_is_line_like(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "ST_GeometryType(%s) IN ('LINESTRING', 'MULTILINESTRING')" % \
+ compiler.process(element.clauses, **kw)
+
+
+class Geometry_IsAreaLike(sa.sql.expression.FunctionElement[Any]):
+ """ Check if the geometry is a polygon or multipolygon.
+ """
+    name = 'Geometry_IsAreaLike'
+ inherit_cache = True
+
+
+@compiles(Geometry_IsAreaLike) # type: ignore[no-untyped-call, misc]
+def _default_is_area_like(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "ST_GeometryType(%s) IN ('ST_Polygon', 'ST_MultiPolygon')" % \
+ compiler.process(element.clauses, **kw)
+
+
+@compiles(Geometry_IsAreaLike, 'sqlite') # type: ignore[no-untyped-call, misc]
+def _sqlite_is_area_like(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "ST_GeometryType(%s) IN ('POLYGON', 'MULTIPOLYGON')" % \
+ compiler.process(element.clauses, **kw)
+
+
+class Geometry_IntersectsBbox(sa.sql.expression.FunctionElement[Any]):
+ """ Check if the bounding boxes of the given geometries intersect.
+ """
+ name = 'Geometry_IntersectsBbox'
+ inherit_cache = True
+
+
+@compiles(Geometry_IntersectsBbox) # type: ignore[no-untyped-call, misc]
+def _default_intersects(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ arg1, arg2 = list(element.clauses)
+ return "%s && %s" % (compiler.process(arg1, **kw), compiler.process(arg2, **kw))
+
+
+@compiles(Geometry_IntersectsBbox, 'sqlite') # type: ignore[no-untyped-call, misc]
+def _sqlite_intersects(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "MbrIntersects(%s) = 1" % compiler.process(element.clauses, **kw)
+
+
+class Geometry_ColumnIntersectsBbox(sa.sql.expression.FunctionElement[Any]):
+ """ Check if the bounding box of the geometry intersects with the
+ given table column, using the spatial index for the column.
+
+ The index must exist or the query may return nothing.
+ """
+ name = 'Geometry_ColumnIntersectsBbox'
+ inherit_cache = True
+
+
+@compiles(Geometry_ColumnIntersectsBbox) # type: ignore[no-untyped-call, misc]
+def default_intersects_column(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ arg1, arg2 = list(element.clauses)
+ return "%s && %s" % (compiler.process(arg1, **kw), compiler.process(arg2, **kw))
+
+
+@compiles(Geometry_ColumnIntersectsBbox, 'sqlite') # type: ignore[no-untyped-call, misc]
+def spatialite_intersects_column(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ arg1, arg2 = list(element.clauses)
+ return "MbrIntersects(%s, %s) = 1 and "\
+ "%s.ROWID IN (SELECT ROWID FROM SpatialIndex "\
+ "WHERE f_table_name = '%s' AND f_geometry_column = '%s' "\
+ "AND search_frame = %s)" %(
+ compiler.process(arg1, **kw),
+ compiler.process(arg2, **kw),
+ arg1.table.name, arg1.table.name, arg1.name,
+ compiler.process(arg2, **kw))
+
+
+class Geometry_ColumnDWithin(sa.sql.expression.FunctionElement[Any]):
+ """ Check if the geometry is within the distance of the
+ given table column, using the spatial index for the column.
+
+ The index must exist or the query may return nothing.
+ """
+ name = 'Geometry_ColumnDWithin'
+ inherit_cache = True
+
+
+@compiles(Geometry_ColumnDWithin) # type: ignore[no-untyped-call, misc]
+def default_dwithin_column(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ return "ST_DWithin(%s)" % compiler.process(element.clauses, **kw)
+
+@compiles(Geometry_ColumnDWithin, 'sqlite') # type: ignore[no-untyped-call, misc]
+def spatialite_dwithin_column(element: SaColumn,
+ compiler: 'sa.Compiled', **kw: Any) -> str:
+ geom1, geom2, dist = list(element.clauses)
+ return "ST_Distance(%s, %s) < %s and "\
+ "%s.ROWID IN (SELECT ROWID FROM SpatialIndex "\
+ "WHERE f_table_name = '%s' AND f_geometry_column = '%s' "\
+ "AND search_frame = ST_Expand(%s, %s))" %(
+ compiler.process(geom1, **kw),
+ compiler.process(geom2, **kw),
+ compiler.process(dist, **kw),
+ geom1.table.name, geom1.table.name, geom1.name,
+ compiler.process(geom2, **kw),
+ compiler.process(dist, **kw))
+
+
+
class Geometry(types.UserDefinedType): # type: ignore[type-arg]
""" Simplified type decorator for PostGIS geometry. This type
only supports geometries in 4326 projection.
return process
+ def column_expression(self, col: SaColumn) -> SaColumn:
+ return sa.func.ST_AsEWKB(col)
+
+
def bind_expression(self, bindvalue: SaBind) -> SaColumn:
return sa.func.ST_GeomFromText(bindvalue, sa.text('4326'), type_=self)
class comparator_factory(types.UserDefinedType.Comparator): # type: ignore[type-arg]
def intersects(self, other: SaColumn) -> 'sa.Operators':
- return self.op('&&')(other)
+ if isinstance(self.expr, sa.Column):
+ return Geometry_ColumnIntersectsBbox(self.expr, other)
+
+ return Geometry_IntersectsBbox(self.expr, other)
+
def is_line_like(self) -> SaColumn:
- return sa.func.ST_GeometryType(self, type_=sa.String).in_(('ST_LineString',
- 'ST_MultiLineString'))
+ return Geometry_IsLineLike(self)
+
def is_area(self) -> SaColumn:
- return sa.func.ST_GeometryType(self, type_=sa.String).in_(('ST_Polygon',
- 'ST_MultiPolygon'))
+ return Geometry_IsAreaLike(self)
def ST_DWithin(self, other: SaColumn, distance: SaColumn) -> SaColumn:
- return sa.func.ST_DWithin(self, other, distance, type_=sa.Boolean)
+ if isinstance(self.expr, sa.Column):
+ return Geometry_ColumnDWithin(self.expr, other, distance)
+
+ return sa.func.ST_DWithin(self.expr, other, distance)
def ST_DWithin_no_index(self, other: SaColumn, distance: SaColumn) -> SaColumn:
return sa.func.ST_DWithin(sa.func.coalesce(sa.null(), self),
- other, distance, type_=sa.Boolean)
+ other, distance)
def ST_Intersects_no_index(self, other: SaColumn) -> 'sa.Operators':
- return sa.func.coalesce(sa.null(), self).op('&&')(other)
+ return Geometry_IntersectsBbox(sa.func.coalesce(sa.null(), self), other)
def ST_Distance(self, other: SaColumn) -> SaColumn:
def ST_ClosestPoint(self, other: SaColumn) -> SaColumn:
- return sa.func.ST_ClosestPoint(self, other, type_=Geometry)
+ return sa.func.coalesce(sa.func.ST_ClosestPoint(self, other, type_=Geometry),
+ other)
def ST_Buffer(self, other: SaColumn) -> SaColumn:
def ST_LineLocatePoint(self, other: SaColumn) -> SaColumn:
return sa.func.ST_LineLocatePoint(self, other, type_=sa.Float)
+
+
+ def distance_spheroid(self, other: SaColumn) -> SaColumn:
+ return Geometry_DistanceSpheroid(self, other)
+
+
+@compiles(Geometry, 'sqlite') # type: ignore[no-untyped-call]
+def get_col_spec(self, *args, **kwargs): # type: ignore[no-untyped-def]
+ return 'GEOMETRY'
+
+
+SQLITE_FUNCTION_ALIAS = (
+ ('ST_AsEWKB', sa.Text, 'AsEWKB'),
+ ('ST_GeomFromEWKT', Geometry, 'GeomFromEWKT'),
+ ('ST_AsGeoJSON', sa.Text, 'AsGeoJSON'),
+ ('ST_AsKML', sa.Text, 'AsKML'),
+ ('ST_AsSVG', sa.Text, 'AsSVG'),
+ ('ST_LineLocatePoint', sa.Float, 'ST_Line_Locate_Point'),
+    ('ST_LineInterpolatePoint', Geometry, 'ST_Line_Interpolate_Point'),
+)
+
+def _add_function_alias(func: str, ftype: type, alias: str) -> None:
+ _FuncDef = type(func, (sa.sql.functions.GenericFunction, ), {
+ "type": ftype(),
+ "name": func,
+ "identifier": func,
+ "inherit_cache": True})
+
+ func_templ = f"{alias}(%s)"
+
+ def _sqlite_impl(element: Any, compiler: Any, **kw: Any) -> Any:
+ return func_templ % compiler.process(element.clauses, **kw)
+
+ compiles(_FuncDef, 'sqlite')(_sqlite_impl) # type: ignore[no-untyped-call]
+
+for alias in SQLITE_FUNCTION_ALIAS:
+ _add_function_alias(*alias)
+
+
+class ST_DWithin(sa.sql.functions.GenericFunction[Any]):
+ name = 'ST_DWithin'
+ inherit_cache = True
+
+
+@compiles(ST_DWithin, 'sqlite') # type: ignore[no-untyped-call, misc]
+def _sqlite_st_dwithin(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
+ geom1, geom2, dist = list(element.clauses)
+ return "(MbrIntersects(%s, ST_Expand(%s, %s)) = 1 AND ST_Distance(%s, %s) <= %s)" % (
+ compiler.process(geom1, **kw), compiler.process(geom2, **kw),
+ compiler.process(dist, **kw),
+ compiler.process(geom1, **kw), compiler.process(geom2, **kw),
+ compiler.process(dist, **kw))
--- /dev/null
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2023 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Exporting a Nominatim database to SQLite.
+"""
+from typing import Set
+import logging
+from pathlib import Path
+
+import sqlalchemy as sa
+
+from nominatim.typing import SaSelect
+from nominatim.db.sqlalchemy_types import Geometry
+import nominatim.api as napi
+
+LOG = logging.getLogger()
+
+async def convert(project_dir: Path, outfile: Path, options: Set[str]) -> None:
+ """ Export an existing database to sqlite. The resulting database
+ will be usable against the Python frontend of Nominatim.
+ """
+ api = napi.NominatimAPIAsync(project_dir)
+
+ try:
+ outapi = napi.NominatimAPIAsync(project_dir,
+ {'NOMINATIM_DATABASE_DSN': f"sqlite:dbname={outfile}"})
+
+ async with api.begin() as src, outapi.begin() as dest:
+ writer = SqliteWriter(src, dest, options)
+ await writer.write()
+ finally:
+ await api.close()
+
+
+class SqliteWriter:
+ """ Worker class which creates a new SQLite database.
+ """
+
+ def __init__(self, src: napi.SearchConnection,
+ dest: napi.SearchConnection, options: Set[str]) -> None:
+ self.src = src
+ self.dest = dest
+ self.options = options
+
+
+ async def write(self) -> None:
+ """ Create the database structure and copy the data from
+ the source database to the destination.
+ """
+ await self.dest.execute(sa.select(sa.func.InitSpatialMetaData(True, 'WGS84')))
+
+ await self.create_tables()
+ await self.copy_data()
+ await self.create_indexes()
+
+
+ async def create_tables(self) -> None:
+ """ Set up the database tables.
+ """
+ if 'search' not in self.options:
+ self.dest.t.meta.remove(self.dest.t.search_name)
+
+ await self.dest.connection.run_sync(self.dest.t.meta.create_all)
+
+ # Convert all Geometry columns to Spatialite geometries
+ for table in self.dest.t.meta.sorted_tables:
+ for col in table.c:
+ if isinstance(col.type, Geometry):
+ await self.dest.execute(sa.select(
+ sa.func.RecoverGeometryColumn(table.name, col.name, 4326,
+ col.type.subtype.upper(), 'XY')))
+
+
+ async def copy_data(self) -> None:
+ """ Copy data for all registered tables.
+ """
+ for table in self.dest.t.meta.sorted_tables:
+ LOG.warning("Copying '%s'", table.name)
+ async_result = await self.src.connection.stream(self.select_from(table.name))
+
+ async for partition in async_result.partitions(10000):
+ data = [{('class_' if k == 'class' else k): getattr(r, k) for k in r._fields}
+ for r in partition]
+ await self.dest.execute(table.insert(), data)
+
+
+ async def create_indexes(self) -> None:
+ """ Add indexes necessary for the frontend.
+ """
+ # reverse place node lookup needs an extra table to simulate a
+ # partial index with adaptive buffering.
+ await self.dest.execute(sa.text(
+ """ CREATE TABLE placex_place_node_areas AS
+ SELECT place_id, ST_Expand(geometry,
+ 14.0 * exp(-0.2 * rank_search) - 0.03) as geometry
+ FROM placex
+ WHERE rank_address between 5 and 25
+ and osm_type = 'N'
+ and linked_place_id is NULL """))
+ await self.dest.execute(sa.select(
+ sa.func.RecoverGeometryColumn('placex_place_node_areas', 'geometry',
+ 4326, 'GEOMETRY', 'XY')))
+ await self.dest.execute(sa.select(sa.func.CreateSpatialIndex(
+ 'placex_place_node_areas', 'geometry')))
+
+ # Remaining indexes.
+ await self.create_spatial_index('country_grid', 'geometry')
+ await self.create_spatial_index('placex', 'geometry')
+ await self.create_spatial_index('osmline', 'linegeo')
+ await self.create_spatial_index('tiger', 'linegeo')
+ await self.create_index('placex', 'place_id')
+ await self.create_index('placex', 'parent_place_id')
+ await self.create_index('placex', 'rank_address')
+ await self.create_index('addressline', 'place_id')
+
+
+ async def create_spatial_index(self, table: str, column: str) -> None:
+ """ Create a spatial index on the given table and column.
+ """
+ await self.dest.execute(sa.select(
+ sa.func.CreateSpatialIndex(getattr(self.dest.t, table).name, column)))
+
+
+ async def create_index(self, table_name: str, column: str) -> None:
+ """ Create a simple index on the given table and column.
+ """
+ table = getattr(self.dest.t, table_name)
+ await self.dest.connection.run_sync(
+ sa.Index(f"idx_{table}_{column}", getattr(table.c, column)).create)
+
+
+ def select_from(self, table: str) -> SaSelect:
+ """ Create the SQL statement to select the source columns and rows.
+ """
+ columns = self.src.t.meta.tables[table].c
+
+ if table == 'placex':
+ # SQLite struggles with Geometries that are larger than 5MB,
+ # so simplify those.
+ return sa.select(*(c for c in columns if not isinstance(c.type, Geometry)),
+ sa.func.ST_AsText(columns.centroid).label('centroid'),
+ sa.func.ST_AsText(
+ sa.case((sa.func.ST_MemSize(columns.geometry) < 5000000,
+ columns.geometry),
+ else_=sa.func.ST_SimplifyPreserveTopology(
+ columns.geometry, 0.0001)
+ )).label('geometry'))
+
+ sql = sa.select(*(sa.func.ST_AsText(c).label(c.name)
+ if isinstance(c.type, Geometry) else c for c in columns))
+
+ return sql
flex.set_main_tags{
boundary = {administrative = 'named'},
- landuse = 'fallback',
- place = 'always'
+ landuse = {residential = 'fallback',
+ farm = 'fallback',
+ farmyard = 'fallback',
+ industrial = 'fallback',
+ commercial = 'fallback',
+ allotments = 'fallback',
+ retail = 'fallback'},
+ place = {county = 'always',
+ district = 'always',
+ municipality = 'always',
+ city = 'always',
+ town = 'always',
+ borough = 'always',
+ village = 'always',
+ suburb = 'always',
+ hamlet = 'always',
+ croft = 'always',
+ subdivision = 'always',
+ allotments = 'always',
+ neighbourhood = 'always',
+ quarter = 'always',
+ isolated_dwelling = 'always',
+ farm = 'always',
+ city_block = 'always',
+ mountain_pass = 'always',
+ square = 'always',
+ locality = 'always'}
}
flex.set_prefilters{delete_keys = {'building', 'source', 'highway',
'addr:housenumber', 'addr:street', 'addr:city',
+ 'addr:interpolation',
'source', '*source', 'type',
'is_in:postcode', '*:wikidata', '*:wikipedia',
'*:prefix', '*:suffix', 'name:prefix:*', 'name:suffix:*',
+@SQLITE
@APIDB
Feature: Localization of search results
Feature: Object details
Testing different parameter options for details API.
+ @SQLITE
Scenario: JSON Details
When sending json details query for W297699560
Then the result is valid json
| type |
| Point |
+ @SQLITE
Scenario: JSON Details with pretty printing
When sending json details query for W297699560
| pretty |
And result has attributes geometry
And result has not attributes keywords,address,linked_places,parentof
+ @SQLITE
Scenario: JSON Details with addressdetails
When sending json details query for W297699560
| addressdetails |
Then the result is valid json
And result has attributes address
+ @SQLITE
Scenario: JSON Details with linkedplaces
When sending json details query for R123924
| linkedplaces |
Then the result is valid json
And result has attributes linked_places
+ @SQLITE
Scenario: JSON Details with hierarchy
When sending json details query for W297699560
| hierarchy |
Then the result is valid json
And result has attributes hierarchy
+ @SQLITE
Scenario: JSON Details with grouped hierarchy
When sending json details query for W297699560
| hierarchy | group_hierarchy |
Then the result is valid json
And result has attributes keywords
+ @SQLITE
Scenario Outline: JSON details with full geometry
When sending json details query for <osmid>
| polygon_geojson |
+@SQLITE
@APIDB
Feature: Object details
Check details page for correctness
+@SQLITE
@APIDB
Feature: Places by osm_type and osm_id Tests
Simple tests for errors in various response formats.
+@SQLITE
@APIDB
Feature: Places by osm_type and osm_id Tests
Simple tests for response format.
+@SQLITE
@APIDB
Feature: Geometries for reverse geocoding
Tests for returning geometries with reverse
| 1 |
Then results contain
| geotext |
- | POLYGON((9.5225302 47.138066,9.5225348 47.1379282,9.5226142 47.1379294,9.5226143 47.1379257,9.522615 47.137917,9.5226225 47.1379098,9.5226334 47.1379052,9.5226461 47.1379037,9.5226588 47.1379056,9.5226693 47.1379107,9.5226762 47.1379181,9.5226762 47.1379268,9.5226761 47.1379308,9.5227366 47.1379317,9.5227352 47.1379753,9.5227608 47.1379757,9.5227595 47.1380148,9.5227355 47.1380145,9.5227337 47.1380692,9.5225302 47.138066)) |
+ | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5226142 47.1379294, ?9.5226143 47.1379257, ?9.522615 47.137917, ?9.5226225 47.1379098, ?9.5226334 47.1379052, ?9.5226461 47.1379037, ?9.5226588 47.1379056, ?9.5226693 47.1379107, ?9.5226762 47.1379181, ?9.5226762 47.1379268, ?9.5226761 47.1379308, ?9.5227366 47.1379317, ?9.5227352 47.1379753, ?9.5227608 47.1379757, ?9.5227595 47.1380148, ?9.5227355 47.1380145, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
Scenario: Polygons can be slightly simplified
| 1 | 0.00001 |
Then results contain
| geotext |
- | POLYGON((9.5225302 47.138066,9.5225348 47.1379282,9.5226142 47.1379294,9.5226225 47.1379098,9.5226588 47.1379056,9.5226761 47.1379308,9.5227366 47.1379317,9.5227352 47.1379753,9.5227608 47.1379757,9.5227595 47.1380148,9.5227355 47.1380145,9.5227337 47.1380692,9.5225302 47.138066)) |
+ | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5226142 47.1379294, ?9.5226225 47.1379098, ?9.5226588 47.1379056, ?9.5226761 47.1379308, ?9.5227366 47.1379317, ?9.5227352 47.1379753, ?9.5227608 47.1379757, ?9.5227595 47.1380148, ?9.5227355 47.1380145, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
Scenario: Polygons can be much simplified
| 1 | 0.9 |
Then results contain
| geotext |
- | POLYGON((9.5225302 47.138066,9.5225348 47.1379282,9.5227608 47.1379757,9.5227337 47.1380692,9.5225302 47.138066)) |
+ | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5227608 47.1379757, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
Scenario: For polygons return the centroid as center point
+@SQLITE
@APIDB
Feature: Localization of reverse search results
+@SQLITE
@APIDB
Feature: Layer parameter in reverse geocoding
Testing correct function of layer selection while reverse geocoding
@v1-api-python-only
Scenario Outline: Search for mountain peaks begins at level 12
- When sending v1/reverse at 47.08221,9.56769
+ When sending v1/reverse at 47.08293,9.57109
| layer | zoom |
| natural | <zoom> |
Then results contain
@v1-api-python-only
- Scenario Outline: Reverse serach with manmade layers
+ Scenario Outline: Reverse search with manmade layers
When sending v1/reverse at 32.46904,-86.44439
| layer |
| <layer> |
| manmade | leisure | park |
| address | highway | residential |
| poi | leisure | pitch |
- | natural | waterway | stream |
+ | natural | waterway | river |
| natural,manmade | leisure | park |
+@SQLITE
@APIDB
Feature: Reverse geocoding
Testing the reverse function
+@SQLITE
@APIDB
Feature: Geocodejson for Reverse API
Testing correctness of geocodejson output (API version v1).
+@SQLITE
@APIDB
Feature: Geojson for Reverse API
Testing correctness of geojson output (API version v1).
+@SQLITE
@APIDB
Feature: Json output for Reverse API
Testing correctness of json and jsonv2 output (API version v1).
| polygon_text | 1 |
Then results contain
| geotext |
- | LINESTRING(9.5039353 47.0657546,9.5040437 47.0657781,9.5040808 47.065787,9.5054298 47.0661407) |
+ | ^LINESTRING\(9.5039353 47.0657546, ?9.5040437 47.0657781, ?9.5040808 47.065787, ?9.5054298 47.0661407\) |
Examples:
| format |
+@SQLITE
@APIDB
Feature: v1/reverse Parameter Tests
Tests for parameter inputs for the v1 reverse endpoint.
+@SQLITE
@APIDB
Feature: XML output for Reverse API
Testing correctness of xml output (API version v1).
| polygon_text | 1 |
Then results contain
| geotext |
- | LINESTRING(9.5039353 47.0657546,9.5040437 47.0657781,9.5040808 47.065787,9.5054298 47.0661407) |
+ | ^LINESTRING\(9.5039353 47.0657546, ?9.5040437 47.0657781, ?9.5040808 47.065787, ?9.5054298 47.0661407\) |
Scenario: Output of SVG
+@SQLITE
@APIDB
Feature: Status queries
Testing status query
def before_scenario(context, scenario):
- if 'DB' in context.tags:
+    if 'SQLITE' not in context.tags \
+ and context.config.userdata['API_TEST_DB'].startswith('sqlite:'):
+ context.scenario.skip("Not usable with Sqlite database.")
+ elif 'DB' in context.tags:
context.nominatim.setup_db(context)
elif 'APIDB' in context.tags:
context.nominatim.setup_api_db()
be picked up by dotenv and creates a project directory with the
appropriate website scripts.
"""
- dsn = 'pgsql:dbname={}'.format(dbname)
+ if dbname.startswith('sqlite:'):
+ dsn = 'sqlite:dbname={}'.format(dbname[7:])
+ else:
+ dsn = 'pgsql:dbname={}'.format(dbname)
if self.db_host:
dsn += ';host=' + self.db_host
if self.db_port:
"""
self.write_nominatim_config(self.api_test_db)
+ if self.api_test_db.startswith('sqlite:'):
+ return
+
if not self.api_db_done:
self.api_db_done = True
import nominatim.api as napi
from nominatim.db.sql_preprocessor import SQLPreprocessor
+from nominatim.tools import convert_sqlite
import nominatim.api.logging as loglib
class APITester:
testapi.async_to_sync(testapi.create_tables())
proc = SQLPreprocessor(temp_db_conn, testapi.api.config)
- proc.run_sql_file(temp_db_conn, 'functions/address_lookup.sql')
proc.run_sql_file(temp_db_conn, 'functions/ranking.sql')
loglib.set_log_output('text')
print(loglib.get_and_disable())
testapi.api.close()
+
+
+@pytest.fixture(params=['postgres_db', 'sqlite_db'])
+def frontend(request, event_loop, tmp_path):
+ if request.param == 'sqlite_db':
+ db = str(tmp_path / 'test_nominatim_python_unittest.sqlite')
+
+ def mkapi(apiobj, options={'reverse'}):
+ event_loop.run_until_complete(convert_sqlite.convert(Path('/invalid'),
+ db, options))
+ return napi.NominatimAPI(Path('/invalid'),
+ {'NOMINATIM_DATABASE_DSN': f"sqlite:dbname={db}",
+ 'NOMINATIM_USE_US_TIGER_DATA': 'yes'})
+ elif request.param == 'postgres_db':
+ def mkapi(apiobj, options=None):
+ return apiobj.api
+
+ return mkapi
@pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4),
napi.OsmID('W', 4, 'highway')))
-def test_lookup_in_placex(apiobj, idobj):
+def test_lookup_in_placex(apiobj, frontend, idobj):
import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential',
indexed_date=import_date,
geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
- result = apiobj.api.details(idobj)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(idobj)
assert result is not None
assert result.geometry == {'type': 'ST_LineString'}
-def test_lookup_in_placex_minimal_info(apiobj):
+def test_lookup_in_placex_minimal_info(apiobj, frontend):
import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential',
indexed_date=import_date,
geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
- result = apiobj.api.details(napi.PlaceID(332))
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(332))
assert result is not None
assert result.geometry == {'type': 'ST_LineString'}
-def test_lookup_in_placex_with_geometry(apiobj):
+def test_lookup_in_placex_with_geometry(apiobj, frontend):
apiobj.add_placex(place_id=332,
geometry='LINESTRING(23 34, 23.1 34)')
- result = apiobj.api.details(napi.PlaceID(332), geometry_output=napi.GeometryFormat.GEOJSON)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(332), geometry_output=napi.GeometryFormat.GEOJSON)
assert result.geometry == {'geojson': '{"type":"LineString","coordinates":[[23,34],[23.1,34]]}'}
-def test_lookup_placex_with_address_details(apiobj):
+def test_lookup_placex_with_address_details(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', name='Street',
country_code='pl',
country_code='pl',
rank_search=17, rank_address=16)
- result = apiobj.api.details(napi.PlaceID(332), address_details=True)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(332), address_details=True)
assert result.address_rows == [
napi.AddressLine(place_id=332, osm_object=('W', 4),
]
-def test_lookup_place_with_linked_places_none_existing(apiobj):
+def test_lookup_place_with_linked_places_none_existing(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', name='Street',
country_code='pl', linked_place_id=45,
rank_search=27, rank_address=26)
- result = apiobj.api.details(napi.PlaceID(332), linked_places=True)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(332), linked_places=True)
assert result.linked_rows == []
-def test_lookup_place_with_linked_places_existing(apiobj):
+def test_lookup_place_with_linked_places_existing(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', name='Street',
country_code='pl', linked_place_id=45,
country_code='pl', linked_place_id=332,
rank_search=27, rank_address=26)
- result = apiobj.api.details(napi.PlaceID(332), linked_places=True)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(332), linked_places=True)
assert result.linked_rows == [
napi.AddressLine(place_id=1001, osm_object=('W', 5),
]
-def test_lookup_place_with_parented_places_not_existing(apiobj):
+def test_lookup_place_with_parented_places_not_existing(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', name='Street',
country_code='pl', parent_place_id=45,
rank_search=27, rank_address=26)
- result = apiobj.api.details(napi.PlaceID(332), parented_places=True)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(332), parented_places=True)
assert result.parented_rows == []
-def test_lookup_place_with_parented_places_existing(apiobj):
+def test_lookup_place_with_parented_places_existing(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', name='Street',
country_code='pl', parent_place_id=45,
country_code='pl', parent_place_id=332,
rank_search=27, rank_address=26)
- result = apiobj.api.details(napi.PlaceID(332), parented_places=True)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(332), parented_places=True)
assert result.parented_rows == [
napi.AddressLine(place_id=1001, osm_object=('N', 5),
@pytest.mark.parametrize('idobj', (napi.PlaceID(4924), napi.OsmID('W', 9928)))
-def test_lookup_in_osmline(apiobj, idobj):
+def test_lookup_in_osmline(apiobj, frontend, idobj):
import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
apiobj.add_osmline(place_id=4924, osm_id=9928,
parent_place_id=12,
indexed_date=import_date,
geometry='LINESTRING(23 34, 23 35)')
- result = apiobj.api.details(idobj)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(idobj)
assert result is not None
assert result.geometry == {'type': 'ST_LineString'}
-def test_lookup_in_osmline_split_interpolation(apiobj):
+def test_lookup_in_osmline_split_interpolation(apiobj, frontend):
apiobj.add_osmline(place_id=1000, osm_id=9,
startnumber=2, endnumber=4, step=1)
apiobj.add_osmline(place_id=1001, osm_id=9,
apiobj.add_osmline(place_id=1002, osm_id=9,
startnumber=11, endnumber=20, step=1)
+ api = frontend(apiobj, options={'details'})
for i in range(1, 6):
- result = apiobj.api.details(napi.OsmID('W', 9, str(i)))
+ result = api.details(napi.OsmID('W', 9, str(i)))
assert result.place_id == 1000
for i in range(7, 11):
- result = apiobj.api.details(napi.OsmID('W', 9, str(i)))
+ result = api.details(napi.OsmID('W', 9, str(i)))
assert result.place_id == 1001
for i in range(12, 22):
- result = apiobj.api.details(napi.OsmID('W', 9, str(i)))
+ result = api.details(napi.OsmID('W', 9, str(i)))
assert result.place_id == 1002
-def test_lookup_osmline_with_address_details(apiobj):
+def test_lookup_osmline_with_address_details(apiobj, frontend):
apiobj.add_osmline(place_id=9000, osm_id=9,
startnumber=2, endnumber=4, step=1,
parent_place_id=332)
country_code='pl',
rank_search=17, rank_address=16)
- result = apiobj.api.details(napi.PlaceID(9000), address_details=True)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(9000), address_details=True)
assert result.address_rows == [
napi.AddressLine(place_id=332, osm_object=('W', 4),
]
-def test_lookup_in_tiger(apiobj):
+def test_lookup_in_tiger(apiobj, frontend):
apiobj.add_tiger(place_id=4924,
parent_place_id=12,
startnumber=1, endnumber=4, step=1,
osm_type='W', osm_id=6601223,
geometry='LINESTRING(23 34, 23 35)')
- result = apiobj.api.details(napi.PlaceID(4924))
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(4924))
assert result is not None
assert result.geometry == {'type': 'ST_LineString'}
-def test_lookup_tiger_with_address_details(apiobj):
+def test_lookup_tiger_with_address_details(apiobj, frontend):
apiobj.add_tiger(place_id=9000,
startnumber=2, endnumber=4, step=1,
parent_place_id=332)
country_code='us',
rank_search=17, rank_address=16)
- result = apiobj.api.details(napi.PlaceID(9000), address_details=True)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(9000), address_details=True)
assert result.address_rows == [
napi.AddressLine(place_id=332, osm_object=('W', 4),
]
-def test_lookup_in_postcode(apiobj):
+def test_lookup_in_postcode(apiobj, frontend):
import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
apiobj.add_postcode(place_id=554,
parent_place_id=152,
indexed_date=import_date,
geometry='POINT(-9.45 5.6)')
- result = apiobj.api.details(napi.PlaceID(554))
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(554))
assert result is not None
assert result.geometry == {'type': 'ST_Point'}
-def test_lookup_postcode_with_address_details(apiobj):
+def test_lookup_postcode_with_address_details(apiobj, frontend):
apiobj.add_postcode(place_id=9000,
parent_place_id=332,
postcode='34 425',
country_code='gb',
rank_search=17, rank_address=16)
- result = apiobj.api.details(napi.PlaceID(9000), address_details=True)
+ api = frontend(apiobj, options={'details'})
+ result = api.details(napi.PlaceID(9000), address_details=True)
assert result.address_rows == [
napi.AddressLine(place_id=9000, osm_object=None,
@pytest.mark.parametrize('objid', [napi.PlaceID(1736),
napi.OsmID('W', 55),
napi.OsmID('N', 55, 'amenity')])
-def test_lookup_missing_object(apiobj, objid):
+def test_lookup_missing_object(apiobj, frontend, objid):
apiobj.add_placex(place_id=1, osm_type='N', osm_id=55,
class_='place', type='suburb')
- assert apiobj.api.details(objid) is None
+ api = frontend(apiobj, options={'details'})
+ assert api.details(objid) is None
@pytest.mark.parametrize('gtype', (napi.GeometryFormat.KML,
napi.GeometryFormat.SVG,
napi.GeometryFormat.TEXT))
-def test_lookup_unsupported_geometry(apiobj, gtype):
+def test_lookup_unsupported_geometry(apiobj, frontend, gtype):
apiobj.add_placex(place_id=332)
+ api = frontend(apiobj, options={'details'})
with pytest.raises(ValueError):
- apiobj.api.details(napi.PlaceID(332), geometry_output=gtype)
+ api.details(napi.PlaceID(332), geometry_output=gtype)
"""
Tests for lookup API call.
"""
+import json
+
import pytest
import nominatim.api as napi
-def test_lookup_empty_list(apiobj):
- assert apiobj.api.lookup([]) == []
+def test_lookup_empty_list(apiobj, frontend):
+ api = frontend(apiobj, options={'details'})
+ assert api.lookup([]) == []
-def test_lookup_non_existing(apiobj):
- assert apiobj.api.lookup((napi.PlaceID(332), napi.OsmID('W', 4),
- napi.OsmID('W', 4, 'highway'))) == []
+def test_lookup_non_existing(apiobj, frontend):
+ api = frontend(apiobj, options={'details'})
+ assert api.lookup((napi.PlaceID(332), napi.OsmID('W', 4),
+ napi.OsmID('W', 4, 'highway'))) == []
@pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4),
napi.OsmID('W', 4, 'highway')))
-def test_lookup_single_placex(apiobj, idobj):
+def test_lookup_single_placex(apiobj, frontend, idobj):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential',
name={'name': 'Road'}, address={'city': 'Barrow'},
centroid=(23, 34),
geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
- result = apiobj.api.lookup([idobj])
+ api = frontend(apiobj, options={'details'})
+ result = api.lookup([idobj])
assert len(result) == 1
assert result.geometry == {}
-def test_lookup_multiple_places(apiobj):
+def test_lookup_multiple_places(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential',
name={'name': 'Road'}, address={'city': 'Barrow'},
geometry='LINESTRING(23 34, 23 35)')
- result = apiobj.api.lookup((napi.OsmID('W', 1),
- napi.OsmID('W', 4),
- napi.OsmID('W', 9928)))
+ api = frontend(apiobj, options={'details'})
+ result = api.lookup((napi.OsmID('W', 1),
+ napi.OsmID('W', 4),
+ napi.OsmID('W', 9928)))
assert len(result) == 2
@pytest.mark.parametrize('gtype', list(napi.GeometryFormat))
-def test_simple_place_with_geometry(apiobj, gtype):
+def test_simple_place_with_geometry(apiobj, frontend, gtype):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential',
name={'name': 'Road'}, address={'city': 'Barrow'},
centroid=(23, 34),
geometry='POLYGON((23 34, 23.1 34, 23.1 34.1, 23 34))')
- result = apiobj.api.lookup([napi.OsmID('W', 4)],
- geometry_output=gtype)
+ api = frontend(apiobj, options={'details'})
+ result = api.lookup([napi.OsmID('W', 4)], geometry_output=gtype)
assert len(result) == 1
assert result[0].place_id == 332
assert list(result[0].geometry.keys()) == [gtype.name.lower()]
-def test_simple_place_with_geometry_simplified(apiobj):
+def test_simple_place_with_geometry_simplified(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential',
name={'name': 'Road'}, address={'city': 'Barrow'},
centroid=(23, 34),
geometry='POLYGON((23 34, 22.999 34, 23.1 34, 23.1 34.1, 23 34))')
- result = apiobj.api.lookup([napi.OsmID('W', 4)],
- geometry_output=napi.GeometryFormat.TEXT,
- geometry_simplification=0.1)
+ api = frontend(apiobj, options={'details'})
+ result = api.lookup([napi.OsmID('W', 4)],
+ geometry_output=napi.GeometryFormat.GEOJSON,
+ geometry_simplification=0.1)
assert len(result) == 1
assert result[0].place_id == 332
- assert result[0].geometry == {'text': 'POLYGON((23 34,23.1 34,23.1 34.1,23 34))'}
+ geom = json.loads(result[0].geometry['geojson'])
+
+ assert geom['type'] == 'Polygon'
+ assert geom['coordinates'] == [[[23, 34], [23.1, 34], [23.1, 34.1], [23, 34]]]
import nominatim.api as napi
-def test_reverse_rank_30(apiobj):
+API_OPTIONS = {'reverse'}
+
+def test_reverse_rank_30(apiobj, frontend):
apiobj.add_placex(place_id=223, class_='place', type='house',
housenumber='1',
centroid=(1.3, 0.7),
geometry='POINT(1.3 0.7)')
- result = apiobj.api.reverse((1.3, 0.7))
+ api = frontend(apiobj, options=API_OPTIONS)
+ result = api.reverse((1.3, 0.7))
assert result is not None
assert result.place_id == 223
@pytest.mark.parametrize('country', ['de', 'us'])
-def test_reverse_street(apiobj, country):
+def test_reverse_street(apiobj, frontend, country):
apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27,
name = {'name': 'My Street'},
country_code=country,
geometry='LINESTRING(9.995 10, 10.005 10)')
- assert apiobj.api.reverse((9.995, 10)).place_id == 990
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((9.995, 10)).place_id == 990
-def test_reverse_ignore_unindexed(apiobj):
+def test_reverse_ignore_unindexed(apiobj, frontend):
apiobj.add_placex(place_id=223, class_='place', type='house',
housenumber='1',
indexed_status=2,
centroid=(1.3, 0.7),
geometry='POINT(1.3 0.7)')
- result = apiobj.api.reverse((1.3, 0.7))
+ api = frontend(apiobj, options=API_OPTIONS)
+ result = api.reverse((1.3, 0.7))
assert result is None
(0.70003, napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225),
(0.70003, napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225),
(5, napi.DataLayer.ADDRESS, 229)])
-def test_reverse_rank_30_layers(apiobj, y, layer, place_id):
+def test_reverse_rank_30_layers(apiobj, frontend, y, layer, place_id):
apiobj.add_placex(place_id=223, class_='place', type='house',
housenumber='1',
rank_address=30,
rank_search=30,
centroid=(1.3, 5))
- assert apiobj.api.reverse((1.3, y), layers=layer).place_id == place_id
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((1.3, y), layers=layer).place_id == place_id
-def test_reverse_poi_layer_with_no_pois(apiobj):
+def test_reverse_poi_layer_with_no_pois(apiobj, frontend):
apiobj.add_placex(place_id=223, class_='place', type='house',
housenumber='1',
rank_address=30,
rank_search=30,
centroid=(1.3, 0.70001))
- assert apiobj.api.reverse((1.3, 0.70001), max_rank=29,
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((1.3, 0.70001), max_rank=29,
layers=napi.DataLayer.POI) is None
-def test_reverse_housenumber_on_street(apiobj):
+def test_reverse_housenumber_on_street(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27,
name = {'name': 'My Street'},
housenumber='23',
centroid=(10.0, 10.00001))
- assert apiobj.api.reverse((10.0, 10.0), max_rank=30).place_id == 991
- assert apiobj.api.reverse((10.0, 10.0), max_rank=27).place_id == 990
- assert apiobj.api.reverse((10.0, 10.00001), max_rank=30).place_id == 991
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((10.0, 10.0), max_rank=30).place_id == 991
+ assert api.reverse((10.0, 10.0), max_rank=27).place_id == 990
+ assert api.reverse((10.0, 10.00001), max_rank=30).place_id == 991
-def test_reverse_housenumber_interpolation(apiobj):
+def test_reverse_housenumber_interpolation(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27,
name = {'name': 'My Street'},
centroid=(10.0, 10.00001),
geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
- assert apiobj.api.reverse((10.0, 10.0)).place_id == 992
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((10.0, 10.0)).place_id == 992
-def test_reverse_housenumber_point_interpolation(apiobj):
+def test_reverse_housenumber_point_interpolation(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27,
name = {'name': 'My Street'},
centroid=(10.0, 10.00001),
geometry='POINT(10.0 10.00001)')
- res = apiobj.api.reverse((10.0, 10.0))
+ api = frontend(apiobj, options=API_OPTIONS)
+ res = api.reverse((10.0, 10.0))
assert res.place_id == 992
assert res.housenumber == '42'
-def test_reverse_tiger_number(apiobj):
+def test_reverse_tiger_number(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27,
name = {'name': 'My Street'},
centroid=(10.0, 10.00001),
geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
- assert apiobj.api.reverse((10.0, 10.0)).place_id == 992
- assert apiobj.api.reverse((10.0, 10.00001)).place_id == 992
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((10.0, 10.0)).place_id == 992
+ assert api.reverse((10.0, 10.00001)).place_id == 992
-def test_reverse_point_tiger(apiobj):
+def test_reverse_point_tiger(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27,
name = {'name': 'My Street'},
centroid=(10.0, 10.00001),
geometry='POINT(10.0 10.00001)')
- res = apiobj.api.reverse((10.0, 10.0))
+ api = frontend(apiobj, options=API_OPTIONS)
+ res = api.reverse((10.0, 10.0))
assert res.place_id == 992
assert res.housenumber == '1'
-def test_reverse_low_zoom_address(apiobj):
+def test_reverse_low_zoom_address(apiobj, frontend):
apiobj.add_placex(place_id=1001, class_='place', type='house',
housenumber='1',
rank_address=30,
geometry="""POLYGON((59.3 80.70001, 59.3001 80.70001,
59.3001 80.70101, 59.3 80.70101, 59.3 80.70001))""")
- assert apiobj.api.reverse((59.30005, 80.7005)).place_id == 1001
- assert apiobj.api.reverse((59.30005, 80.7005), max_rank=18).place_id == 1002
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((59.30005, 80.7005)).place_id == 1001
+ assert api.reverse((59.30005, 80.7005), max_rank=18).place_id == 1002
-def test_reverse_place_node_in_area(apiobj):
+def test_reverse_place_node_in_area(apiobj, frontend):
apiobj.add_placex(place_id=1002, class_='place', type='town',
name={'name': 'Town Area'},
rank_address=16,
rank_search=18,
centroid=(59.30004, 80.70055))
- assert apiobj.api.reverse((59.30004, 80.70055)).place_id == 1003
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((59.30004, 80.70055)).place_id == 1003
@pytest.mark.parametrize('layer,place_id', [(napi.DataLayer.MANMADE, 225),
(napi.DataLayer.NATURAL, 227),
(napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225),
(napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225)])
-def test_reverse_larger_area_layers(apiobj, layer, place_id):
+def test_reverse_larger_area_layers(apiobj, frontend, layer, place_id):
apiobj.add_placex(place_id=225, class_='man_made', type='dam',
name={'name': 'Dam'},
rank_address=0,
rank_search=16,
centroid=(1.3, 0.70005))
- assert apiobj.api.reverse((1.3, 0.7), layers=layer).place_id == place_id
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((1.3, 0.7), layers=layer).place_id == place_id
-def test_reverse_country_lookup_no_objects(apiobj):
+def test_reverse_country_lookup_no_objects(apiobj, frontend):
apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
- assert apiobj.api.reverse((0.5, 0.5)) is None
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((0.5, 0.5)) is None
@pytest.mark.parametrize('rank', [4, 30])
-def test_reverse_country_lookup_country_only(apiobj, rank):
+def test_reverse_country_lookup_country_only(apiobj, frontend, rank):
apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
apiobj.add_placex(place_id=225, class_='place', type='country',
name={'name': 'My Country'},
country_code='xx',
centroid=(0.7, 0.7))
- assert apiobj.api.reverse((0.5, 0.5), max_rank=rank).place_id == 225
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((0.5, 0.5), max_rank=rank).place_id == 225
-def test_reverse_country_lookup_place_node_inside(apiobj):
+def test_reverse_country_lookup_place_node_inside(apiobj, frontend):
apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
apiobj.add_placex(place_id=225, class_='place', type='state',
osm_type='N',
country_code='xx',
centroid=(0.5, 0.505))
- assert apiobj.api.reverse((0.5, 0.5)).place_id == 225
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((0.5, 0.5)).place_id == 225
@pytest.mark.parametrize('gtype', list(napi.GeometryFormat))
-def test_reverse_geometry_output_placex(apiobj, gtype):
+def test_reverse_geometry_output_placex(apiobj, frontend, gtype):
apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
apiobj.add_placex(place_id=1001, class_='place', type='house',
housenumber='1',
country_code='xx',
centroid=(0.5, 0.5))
- assert apiobj.api.reverse((59.3, 80.70001), geometry_output=gtype).place_id == 1001
- assert apiobj.api.reverse((0.5, 0.5), geometry_output=gtype).place_id == 1003
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((59.3, 80.70001), geometry_output=gtype).place_id == 1001
+ assert api.reverse((0.5, 0.5), geometry_output=gtype).place_id == 1003
-def test_reverse_simplified_geometry(apiobj):
+def test_reverse_simplified_geometry(apiobj, frontend):
apiobj.add_placex(place_id=1001, class_='place', type='house',
housenumber='1',
rank_address=30,
rank_search=30,
centroid=(59.3, 80.70001))
+ api = frontend(apiobj, options=API_OPTIONS)
details = dict(geometry_output=napi.GeometryFormat.GEOJSON,
geometry_simplification=0.1)
- assert apiobj.api.reverse((59.3, 80.70001), **details).place_id == 1001
+ assert api.reverse((59.3, 80.70001), **details).place_id == 1001
-def test_reverse_interpolation_geometry(apiobj):
+def test_reverse_interpolation_geometry(apiobj, frontend):
apiobj.add_osmline(place_id=992,
parent_place_id=990,
startnumber=1, endnumber=3, step=1,
centroid=(10.0, 10.00001),
geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
- assert apiobj.api.reverse((10.0, 10.0), geometry_output=napi.GeometryFormat.TEXT)\
+ api = frontend(apiobj, options=API_OPTIONS)
+ assert api.reverse((10.0, 10.0), geometry_output=napi.GeometryFormat.TEXT)\
.geometry['text'] == 'POINT(10 10.00001)'
-def test_reverse_tiger_geometry(apiobj):
+def test_reverse_tiger_geometry(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27,
name = {'name': 'My Street'},
centroid=(10.0, 10.00001),
geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
- output = apiobj.api.reverse((10.0, 10.0),
+ api = frontend(apiobj, options=API_OPTIONS)
+ output = api.reverse((10.0, 10.0),
geometry_output=napi.GeometryFormat.GEOJSON).geometry['geojson']
assert json.loads(output) == {'coordinates': [10, 10.00001], 'type': 'Point'}
from nominatim.version import NOMINATIM_VERSION, NominatimVersion
import nominatim.api as napi
-def test_status_no_extra_info(apiobj):
- result = apiobj.api.status()
+def test_status_no_extra_info(apiobj, frontend):
+ api = frontend(apiobj)
+ result = api.status()
assert result.status == 0
assert result.message == 'OK'
assert result.data_updated is None
-def test_status_full(apiobj):
+def test_status_full(apiobj, frontend):
import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0, tzinfo=dt.timezone.utc)
apiobj.add_data('import_status',
[{'lastimportdate': import_date}])
apiobj.add_data('properties',
[{'property': 'database_version', 'value': '99.5.4-2'}])
- result = apiobj.api.status()
+ api = frontend(apiobj)
+ result = api.status()
assert result.status == 0
assert result.message == 'OK'