git.openstreetmap.org Git - nominatim.git/commitdiff
Merge remote-tracking branch 'upstream/master'
authorSarah Hoffmann <lonvia@denofr.de>
Tue, 31 Oct 2023 20:12:00 +0000 (21:12 +0100)
committerSarah Hoffmann <lonvia@denofr.de>
Tue, 31 Oct 2023 20:12:00 +0000 (21:12 +0100)
40 files changed:
.github/actions/build-nominatim/action.yml
.github/workflows/ci-tests.yml
docs/admin/Deployment-Python.md
nominatim/api/core.py
nominatim/api/lookup.py
nominatim/api/results.py
nominatim/api/reverse.py
nominatim/api/search/db_searches.py
nominatim/api/status.py
nominatim/cli.py
nominatim/clicmd/__init__.py
nominatim/clicmd/args.py
nominatim/clicmd/convert.py [new file with mode: 0644]
nominatim/db/sqlalchemy_functions.py
nominatim/db/sqlalchemy_schema.py
nominatim/db/sqlalchemy_types.py
nominatim/tools/convert_sqlite.py [new file with mode: 0644]
settings/import-admin.lua
test/bdd/api/details/language.feature
test/bdd/api/details/params.feature
test/bdd/api/details/simple.feature
test/bdd/api/errors/formats.feature
test/bdd/api/lookup/simple.feature
test/bdd/api/reverse/geometry.feature
test/bdd/api/reverse/language.feature
test/bdd/api/reverse/layers.feature
test/bdd/api/reverse/queries.feature
test/bdd/api/reverse/v1_geocodejson.feature
test/bdd/api/reverse/v1_geojson.feature
test/bdd/api/reverse/v1_json.feature
test/bdd/api/reverse/v1_params.feature
test/bdd/api/reverse/v1_xml.feature
test/bdd/api/status/simple.feature
test/bdd/environment.py
test/bdd/steps/nominatim_environment.py
test/python/api/conftest.py
test/python/api/test_api_details.py
test/python/api/test_api_lookup.py
test/python/api/test_api_reverse.py
test/python/api/test_api_status.py

index 70392d79284728218a4ad593b42323c178dc25c7..17ff0ccfc14d391958309bae4020e598523b69e5 100644 (file)
@@ -25,12 +25,12 @@ runs:
           shell: bash
         - name: Install${{ matrix.flavour }} prerequisites
           run: |
-            sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev liblua${LUA_VERSION}-dev lua${LUA_VERSION} lua-dkjson nlohmann-json3-dev
+            sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev liblua${LUA_VERSION}-dev lua${LUA_VERSION} lua-dkjson nlohmann-json3-dev libspatialite7 libsqlite3-mod-spatialite
             if [ "$FLAVOUR" == "oldstuff" ]; then
-                pip3 install MarkupSafe==2.0.1 python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 sqlalchemy==1.4.31 datrie asyncpg
+                pip3 install MarkupSafe==2.0.1 python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 sqlalchemy==1.4.31 datrie asyncpg aiosqlite
             else
                 sudo apt-get install -y -qq python3-icu python3-datrie python3-pyosmium python3-jinja2 python3-psutil python3-psycopg2 python3-dotenv python3-yaml
-                pip3 install sqlalchemy psycopg
+                pip3 install sqlalchemy psycopg aiosqlite
             fi
           shell: bash
           env:
index 1d34ed1ac312edcfd5cf3b5ac9b97fdfb02cbbb3..1dade3bcfa28c3b4a43e2b73aa5882ba66238d33 100644 (file)
@@ -113,18 +113,21 @@ jobs:
               if: matrix.flavour == 'oldstuff'
 
             - name: Install Python webservers
-              run: pip3 install falcon starlette
+              run: pip3 install falcon starlette asgi_lifespan
 
             - name: Install latest pylint
-              run: pip3 install -U pylint asgi_lifespan
+              run: pip3 install -U pylint
+              if: matrix.flavour != 'oldstuff'
 
             - name: PHP linting
               run: phpcs --report-width=120 .
               working-directory: Nominatim
+              if: matrix.flavour != 'oldstuff'
 
             - name: Python linting
               run: python3 -m pylint nominatim
               working-directory: Nominatim
+              if: matrix.flavour != 'oldstuff'
 
             - name: PHP unit tests
               run: phpunit ./
index 4da840086a84e87954f2578ef86e278156a3e131..6fd24167d24433d0509bec5c35f7deb8825d3f9a 100644 (file)
@@ -43,6 +43,22 @@ virtualenv /srv/nominatim-venv
 Next you need to set up the service that runs the Nominatim frontend. This is
 easiest done with a systemd job.
 
+First you need to tell systemd to create a socket file to be used by
+gunicorn. Create the following file `/etc/systemd/system/nominatim.socket`:
+
+``` systemd
+[Unit]
+Description=Gunicorn socket for Nominatim
+
+[Socket]
+ListenStream=/run/nominatim.sock
+SocketUser=www-data
+
+[Install]
+WantedBy=multi-user.target
+```
+
+Now you can add the systemd service for Nominatim itself.
 Create the following file `/etc/systemd/system/nominatim.service`:
 
 ``` systemd
@@ -74,12 +90,14 @@ its own Python process using
 [`NOMINATIM_API_POOL_SIZE`](../customize/Settings.md#nominatim_api_pool_size)
 connections to the database to serve requests in parallel.
 
-Make the new service known to systemd and start it:
+Make the new services known to systemd and start them:
 
 ``` sh
 sudo systemctl daemon-reload
-sudo systemctl enable nominatim
-sudo systemctl start nominatim
+sudo systemctl enable nominatim.socket
+sudo systemctl start nominatim.socket
+sudo systemctl enable nominatim.service
+sudo systemctl start nominatim.service
 ```
 
 This sets the service up, so that Nominatim is automatically started
index a6b49404a02536b99a0eaf346b6ba824f98cc1bc..44ac91606fef90a746bb26d06b2a9fc6da0e61e4 100644 (file)
@@ -81,21 +81,34 @@ class NominatimAPIAsync: #pylint: disable=too-many-instance-attributes
             if self._engine:
                 return
 
-            dsn = self.config.get_database_params()
-            pool_size = self.config.get_int('API_POOL_SIZE')
-
-            query = {k: v for k, v in dsn.items()
-                      if k not in ('user', 'password', 'dbname', 'host', 'port')}
-
-            dburl = sa.engine.URL.create(
-                       f'postgresql+{PGCORE_LIB}',
-                       database=dsn.get('dbname'),
-                       username=dsn.get('user'), password=dsn.get('password'),
-                       host=dsn.get('host'), port=int(dsn['port']) if 'port' in dsn else None,
-                       query=query)
-            engine = sa_asyncio.create_async_engine(dburl, future=True,
-                                                    max_overflow=0, pool_size=pool_size,
-                                                    echo=self.config.get_bool('DEBUG_SQL'))
+            extra_args: Dict[str, Any] = {'future': True,
+                                          'echo': self.config.get_bool('DEBUG_SQL')}
+
+            is_sqlite = self.config.DATABASE_DSN.startswith('sqlite:')
+
+            if is_sqlite:
+                params = dict((p.split('=', 1)
+                              for p in self.config.DATABASE_DSN[7:].split(';')))
+                dburl = sa.engine.URL.create('sqlite+aiosqlite',
+                                             database=params.get('dbname'))
+
+            else:
+                dsn = self.config.get_database_params()
+                query = {k: v for k, v in dsn.items()
+                         if k not in ('user', 'password', 'dbname', 'host', 'port')}
+
+                dburl = sa.engine.URL.create(
+                           f'postgresql+{PGCORE_LIB}',
+                           database=dsn.get('dbname'),
+                           username=dsn.get('user'),
+                           password=dsn.get('password'),
+                           host=dsn.get('host'),
+                           port=int(dsn['port']) if 'port' in dsn else None,
+                           query=query)
+                extra_args['max_overflow'] = 0
+                extra_args['pool_size'] = self.config.get_int('API_POOL_SIZE')
+
+            engine = sa_asyncio.create_async_engine(dburl, **extra_args)
 
             try:
                 async with engine.begin() as conn:
@@ -104,7 +117,7 @@ class NominatimAPIAsync: #pylint: disable=too-many-instance-attributes
             except (PGCORE_ERROR, sa.exc.OperationalError):
                 server_version = 0
 
-            if server_version >= 110000:
+            if server_version >= 110000 and not is_sqlite:
                 @sa.event.listens_for(engine.sync_engine, "connect")
                 def _on_connect(dbapi_con: Any, _: Any) -> None:
                     cursor = dbapi_con.cursor()
@@ -113,6 +126,15 @@ class NominatimAPIAsync: #pylint: disable=too-many-instance-attributes
                 # Make sure that all connections get the new settings
                 await self.close()
 
+            if is_sqlite:
+                @sa.event.listens_for(engine.sync_engine, "connect")
+                def _on_sqlite_connect(dbapi_con: Any, _: Any) -> None:
+                    dbapi_con.run_async(lambda conn: conn.enable_load_extension(True))
+                    cursor = dbapi_con.cursor()
+                    cursor.execute("SELECT load_extension('mod_spatialite')")
+                    cursor.execute('SELECT SetDecimalPrecision(7)')
+                    dbapi_con.run_async(lambda conn: conn.enable_load_extension(False))
+
             self._property_cache['DB:server_version'] = server_version
 
             self._tables = SearchTables(sa.MetaData(), engine.name) # pylint: disable=no-member
index e9181f473784aec219c91f08acc3708e7dd3e516..402b85316853173967c13c73f7f0a30aabc97c92 100644 (file)
@@ -77,8 +77,8 @@ async def find_in_osmline(conn: SearchConnection, place: ntyp.PlaceRef,
         sql = sql.where(t.c.osm_id == place.osm_id).limit(1)
         if place.osm_class and place.osm_class.isdigit():
             sql = sql.order_by(sa.func.greatest(0,
-                                    sa.func.least(int(place.osm_class) - t.c.endnumber),
-                                           t.c.startnumber - int(place.osm_class)))
+                                                int(place.osm_class) - t.c.endnumber,
+                                                t.c.startnumber - int(place.osm_class)))
     else:
         return None
 
@@ -163,11 +163,10 @@ async def get_detailed_place(conn: SearchConnection, place: ntyp.PlaceRef,
 
     if details.geometry_output & ntyp.GeometryFormat.GEOJSON:
         def _add_geometry(sql: SaSelect, column: SaColumn) -> SaSelect:
-            return sql.add_columns(sa.literal_column(f"""
-                      ST_AsGeoJSON(CASE WHEN ST_NPoints({column.name}) > 5000
-                                   THEN ST_SimplifyPreserveTopology({column.name}, 0.0001)
-                                   ELSE {column.name} END)
-                       """).label('geometry_geojson'))
+            return sql.add_columns(sa.func.ST_AsGeoJSON(
+                                    sa.case((sa.func.ST_NPoints(column) > 5000,
+                                             sa.func.ST_SimplifyPreserveTopology(column, 0.0001)),
+                                            else_=column), 7).label('geometry_geojson'))
     else:
         def _add_geometry(sql: SaSelect, column: SaColumn) -> SaSelect:
             return sql.add_columns(sa.func.ST_GeometryType(column).label('geometry_type'))
@@ -183,6 +182,9 @@ async def get_detailed_place(conn: SearchConnection, place: ntyp.PlaceRef,
 
     # add missing details
     assert result is not None
+    if 'type' in result.geometry:
+        result.geometry['type'] = GEOMETRY_TYPE_MAP.get(result.geometry['type'],
+                                                        result.geometry['type'])
     indexed_date = getattr(row, 'indexed_date', None)
     if indexed_date is not None:
         result.indexed_date = indexed_date.replace(tzinfo=dt.timezone.utc)
@@ -208,13 +210,13 @@ async def get_simple_place(conn: SearchConnection, place: ntyp.PlaceRef,
             col = sa.func.ST_SimplifyPreserveTopology(col, details.geometry_simplification)
 
         if details.geometry_output & ntyp.GeometryFormat.GEOJSON:
-            out.append(sa.func.ST_AsGeoJSON(col).label('geometry_geojson'))
+            out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
         if details.geometry_output & ntyp.GeometryFormat.TEXT:
             out.append(sa.func.ST_AsText(col).label('geometry_text'))
         if details.geometry_output & ntyp.GeometryFormat.KML:
-            out.append(sa.func.ST_AsKML(col).label('geometry_kml'))
+            out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
         if details.geometry_output & ntyp.GeometryFormat.SVG:
-            out.append(sa.func.ST_AsSVG(col).label('geometry_svg'))
+            out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
 
         return sql.add_columns(*out)
 
@@ -236,3 +238,14 @@ async def get_simple_place(conn: SearchConnection, place: ntyp.PlaceRef,
     await nres.add_result_details(conn, [result], details)
 
     return result
+
+
+GEOMETRY_TYPE_MAP = {
+    'POINT': 'ST_Point',
+    'MULTIPOINT': 'ST_MultiPoint',
+    'LINESTRING': 'ST_LineString',
+    'MULTILINESTRING': 'ST_MultiLineString',
+    'POLYGON': 'ST_Polygon',
+    'MULTIPOLYGON': 'ST_MultiPolygon',
+    'GEOMETRYCOLLECTION': 'ST_GeometryCollection'
+}
index 166f501386fe3e33785064c4de08bbd3d6c86db4..829008fb3ec316a09a753b39cb720404b1c8d98b 100644 (file)
@@ -19,7 +19,7 @@ import datetime as dt
 import sqlalchemy as sa
 
 from nominatim.typing import SaSelect, SaRow
-from nominatim.db.sqlalchemy_functions import CrosscheckNames
+from nominatim.db.sqlalchemy_types import Geometry
 from nominatim.api.types import Point, Bbox, LookupDetails
 from nominatim.api.connection import SearchConnection
 from nominatim.api.logging import log
@@ -500,7 +500,7 @@ def _get_address_lookup_id(result: BaseResultT) -> int:
 
 
 async def _finalize_entry(conn: SearchConnection, result: BaseResultT) -> None:
-    assert result.address_rows
+    assert result.address_rows is not None
     postcode = result.postcode
     if not postcode and result.address:
         postcode = result.address.get('postcode')
@@ -589,7 +589,7 @@ async def complete_address_details(conn: SearchConnection, results: List[BaseRes
     if not lookup_ids:
         return
 
-    ltab = sa.func.json_array_elements(sa.type_coerce(lookup_ids, sa.JSON))\
+    ltab = sa.func.JsonArrayEach(sa.type_coerce(lookup_ids, sa.JSON))\
              .table_valued(sa.column('value', type_=sa.JSON)) # type: ignore[no-untyped-call]
 
     t = conn.t.placex
@@ -608,7 +608,7 @@ async def complete_address_details(conn: SearchConnection, results: List[BaseRes
             .order_by('src_place_id')\
             .order_by(sa.column('rank_address').desc())\
             .order_by((taddr.c.place_id == ltab.c.value['pid'].as_integer()).desc())\
-            .order_by(sa.case((CrosscheckNames(t.c.name, ltab.c.value['names']), 2),
+            .order_by(sa.case((sa.func.CrosscheckNames(t.c.name, ltab.c.value['names']), 2),
                               (taddr.c.isaddress, 0),
                               (sa.and_(taddr.c.fromarea,
                                        t.c.geometry.ST_Contains(
@@ -652,7 +652,7 @@ async def complete_address_details(conn: SearchConnection, results: List[BaseRes
 
     parent_lookup_ids = list(filter(lambda e: e['pid'] != e['lid'], lookup_ids))
     if parent_lookup_ids:
-        ltab = sa.func.json_array_elements(sa.type_coerce(parent_lookup_ids, sa.JSON))\
+        ltab = sa.func.JsonArrayEach(sa.type_coerce(parent_lookup_ids, sa.JSON))\
                  .table_valued(sa.column('value', type_=sa.JSON)) # type: ignore[no-untyped-call]
         sql = sa.select(ltab.c.value['pid'].as_integer().label('src_place_id'),
                         t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
@@ -687,14 +687,10 @@ def _placex_select_address_row(conn: SearchConnection,
     return sa.select(t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
                      t.c.class_.label('class'), t.c.type,
                      t.c.admin_level, t.c.housenumber,
-                     sa.literal_column("""ST_GeometryType(geometry) in
-                                        ('ST_Polygon','ST_MultiPolygon')""").label('fromarea'),
+                     t.c.geometry.is_area().label('fromarea'),
                      t.c.rank_address,
-                     sa.literal_column(
-                         f"""ST_DistanceSpheroid(geometry,
-                                                 'SRID=4326;{centroid.to_wkt()}'::geometry,
-                              'SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]]')
-                         """).label('distance'))
+                     t.c.geometry.distance_spheroid(
+                       sa.bindparam('centroid', value=centroid, type_=Geometry)).label('distance'))
 
 
 async def complete_linked_places(conn: SearchConnection, result: BaseResult) -> None:
@@ -728,10 +724,10 @@ async def complete_keywords(conn: SearchConnection, result: BaseResult) -> None:
     sel = sa.select(t.c.word_id, t.c.word_token, t.c.word)
 
     for name_tokens, address_tokens in await conn.execute(sql):
-        for row in await conn.execute(sel.where(t.c.word_id == sa.any_(name_tokens))):
+        for row in await conn.execute(sel.where(t.c.word_id.in_(name_tokens))):
             result.name_keywords.append(WordInfo(*row))
 
-        for row in await conn.execute(sel.where(t.c.word_id == sa.any_(address_tokens))):
+        for row in await conn.execute(sel.where(t.c.word_id.in_(address_tokens))):
             result.address_keywords.append(WordInfo(*row))
 
 
index d46733f0d950fe9eb8a051f3e21e6b916dec14de..fb4c0b23d0f2fd4790d942b31508126f39a2d379 100644 (file)
@@ -19,7 +19,6 @@ import nominatim.api.results as nres
 from nominatim.api.logging import log
 from nominatim.api.types import AnyPoint, DataLayer, ReverseDetails, GeometryFormat, Bbox
 from nominatim.db.sqlalchemy_types import Geometry
-import nominatim.db.sqlalchemy_functions as snfn
 
 # In SQLAlchemy expression which compare with NULL need to be expressed with
 # the equal sign.
@@ -85,12 +84,6 @@ def _locate_interpolation(table: SaFromClause) -> SaLabel:
                    else_=0).label('position')
 
 
-def _is_address_point(table: SaFromClause) -> SaColumn:
-    return sa.and_(table.c.rank_address == 30,
-                   sa.or_(table.c.housenumber != None,
-                          table.c.name.has_key('addr:housename')))
-
-
 def _get_closest(*rows: Optional[SaRow]) -> Optional[SaRow]:
     return min(rows, key=lambda row: 1000 if row is None else row.distance)
 
@@ -147,13 +140,13 @@ class ReverseGeocoder:
             col = sa.func.ST_SimplifyPreserveTopology(col, self.params.geometry_simplification)
 
         if self.params.geometry_output & GeometryFormat.GEOJSON:
-            out.append(sa.func.ST_AsGeoJSON(col).label('geometry_geojson'))
+            out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
         if self.params.geometry_output & GeometryFormat.TEXT:
             out.append(sa.func.ST_AsText(col).label('geometry_text'))
         if self.params.geometry_output & GeometryFormat.KML:
-            out.append(sa.func.ST_AsKML(col).label('geometry_kml'))
+            out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
         if self.params.geometry_output & GeometryFormat.SVG:
-            out.append(sa.func.ST_AsSVG(col).label('geometry_svg'))
+            out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
 
         return sql.add_columns(*out)
 
@@ -204,7 +197,7 @@ class ReverseGeocoder:
             max_rank = min(29, self.max_rank)
             restrict.append(lambda: no_index(t.c.rank_address).between(26, max_rank))
             if self.max_rank == 30:
-                restrict.append(lambda: _is_address_point(t))
+                restrict.append(lambda: sa.func.IsAddressPoint(t))
         if self.layer_enabled(DataLayer.POI) and self.max_rank == 30:
             restrict.append(lambda: sa.and_(no_index(t.c.rank_search) == 30,
                                             t.c.class_.not_in(('place', 'building')),
@@ -228,7 +221,7 @@ class ReverseGeocoder:
         sql: SaLambdaSelect = sa.lambda_stmt(lambda: _select_from_placex(t)
                 .where(t.c.geometry.ST_DWithin(WKT_PARAM, 0.001))
                 .where(t.c.parent_place_id == parent_place_id)
-                .where(_is_address_point(t))
+                .where(sa.func.IsAddressPoint(t))
                 .where(t.c.indexed_status == 0)
                 .where(t.c.linked_place_id == None)
                 .order_by('distance')
@@ -371,7 +364,7 @@ class ReverseGeocoder:
             inner = sa.select(t, sa.literal(0.0).label('distance'))\
                       .where(t.c.rank_search.between(5, MAX_RANK_PARAM))\
                       .where(t.c.geometry.intersects(WKT_PARAM))\
-                      .where(snfn.select_index_placex_geometry_reverse_lookuppolygon('placex'))\
+                      .where(sa.func.PlacexGeometryReverseLookuppolygon())\
                       .order_by(sa.desc(t.c.rank_search))\
                       .limit(50)\
                       .subquery('area')
@@ -401,10 +394,7 @@ class ReverseGeocoder:
                       .where(t.c.rank_search > address_rank)\
                       .where(t.c.rank_search <= MAX_RANK_PARAM)\
                       .where(t.c.indexed_status == 0)\
-                      .where(snfn.select_index_placex_geometry_reverse_lookupplacenode('placex'))\
-                      .where(t.c.geometry
-                                .ST_Buffer(sa.func.reverse_place_diameter(t.c.rank_search))
-                                .intersects(WKT_PARAM))\
+                      .where(sa.func.IntersectsReverseDistance(t, WKT_PARAM))\
                       .order_by(sa.desc(t.c.rank_search))\
                       .limit(50)\
                       .subquery('places')
@@ -413,7 +403,7 @@ class ReverseGeocoder:
                 return _select_from_placex(inner, False)\
                     .join(touter, touter.c.geometry.ST_Contains(inner.c.geometry))\
                     .where(touter.c.place_id == address_id)\
-                    .where(inner.c.distance < sa.func.reverse_place_diameter(inner.c.rank_search))\
+                    .where(sa.func.IsBelowReverseDistance(inner.c.distance, inner.c.rank_search))\
                     .order_by(sa.desc(inner.c.rank_search), inner.c.distance)\
                     .limit(1)
 
@@ -440,10 +430,9 @@ class ReverseGeocoder:
                   .where(t.c.indexed_status == 0)\
                   .where(t.c.linked_place_id == None)\
                   .where(self._filter_by_layer(t))\
-                  .where(t.c.geometry
-                                .ST_Buffer(sa.func.reverse_place_diameter(t.c.rank_search))
-                                .intersects(WKT_PARAM))\
+                  .where(t.c.geometry.intersects(sa.func.ST_Expand(WKT_PARAM, 0.007)))\
                   .order_by(sa.desc(t.c.rank_search))\
+                  .order_by('distance')\
                   .limit(50)\
                   .subquery()
 
@@ -514,16 +503,13 @@ class ReverseGeocoder:
                       .where(t.c.rank_search <= MAX_RANK_PARAM)\
                       .where(t.c.indexed_status == 0)\
                       .where(t.c.country_code.in_(ccodes))\
-                      .where(snfn.select_index_placex_geometry_reverse_lookupplacenode('placex'))\
-                      .where(t.c.geometry
-                                .ST_Buffer(sa.func.reverse_place_diameter(t.c.rank_search))
-                                .intersects(WKT_PARAM))\
+                      .where(sa.func.IntersectsReverseDistance(t, WKT_PARAM))\
                       .order_by(sa.desc(t.c.rank_search))\
                       .limit(50)\
                       .subquery('area')
 
                 return _select_from_placex(inner, False)\
-                    .where(inner.c.distance < sa.func.reverse_place_diameter(inner.c.rank_search))\
+                    .where(sa.func.IsBelowReverseDistance(inner.c.distance, inner.c.rank_search))\
                     .order_by(sa.desc(inner.c.rank_search), inner.c.distance)\
                     .limit(1)
 
index e07f7906fa2e8296952841659ee493659b2df8a7..97c4292e56710ea22fe3d10fd48e9dc6bb8d04c5 100644 (file)
@@ -73,13 +73,13 @@ def _add_geometry_columns(sql: SaLambdaSelect, col: SaColumn, details: SearchDet
         col = sa.func.ST_SimplifyPreserveTopology(col, details.geometry_simplification)
 
     if details.geometry_output & GeometryFormat.GEOJSON:
-        out.append(sa.func.ST_AsGeoJSON(col).label('geometry_geojson'))
+        out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
     if details.geometry_output & GeometryFormat.TEXT:
         out.append(sa.func.ST_AsText(col).label('geometry_text'))
     if details.geometry_output & GeometryFormat.KML:
-        out.append(sa.func.ST_AsKML(col).label('geometry_kml'))
+        out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
     if details.geometry_output & GeometryFormat.SVG:
-        out.append(sa.func.ST_AsSVG(col).label('geometry_svg'))
+        out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
 
     return sql.add_columns(*out)
 
index 61e36cc36488c097f58dd77d604b3975879500b4..adccf7a55409ef240f040c83a87c2f112f73af1a 100644 (file)
@@ -36,6 +36,9 @@ async def get_status(conn: SearchConnection) -> StatusResult:
     sql = sa.select(conn.t.import_status.c.lastimportdate).limit(1)
     status.data_updated = await conn.scalar(sql)
 
+    if status.data_updated is not None:
+        status.data_updated = status.data_updated.replace(tzinfo=dt.timezone.utc)
+
     # Database version
     try:
         verstr = await conn.get_property('database_version')
index 1029ee7a91247f693c75d990c2de46df7e35e3ef..88a6078284424b4dc3beacf7d45757eddc1af3bb 100644 (file)
@@ -206,6 +206,7 @@ def get_set_parser() -> CommandlineParser:
     parser.add_subcommand('admin', clicmd.AdminFuncs())
 
     parser.add_subcommand('export', clicmd.QueryExport())
+    parser.add_subcommand('convert', clicmd.ConvertDB())
     parser.add_subcommand('serve', AdminServe())
 
     parser.add_subcommand('search', clicmd.APISearch())
index 235dff0cec3a44938ea2bb144cc2d5fc8f04ef66..c8de68c144eb53704d0a96e05bd82f44f209d2df 100644 (file)
@@ -25,3 +25,4 @@ from nominatim.clicmd.admin import AdminFuncs as AdminFuncs
 from nominatim.clicmd.freeze import SetupFreeze as SetupFreeze
 from nominatim.clicmd.special_phrases import ImportSpecialPhrases as ImportSpecialPhrases
 from nominatim.clicmd.export import QueryExport as QueryExport
+from nominatim.clicmd.convert import ConvertDB as ConvertDB
index e632e4c709c0b3c14535d285189584b89a2315b3..eb3a3b6145f3c76dc569ebbcdbd6cbbc4fa48b37 100644 (file)
@@ -101,6 +101,9 @@ class NominatimArgs:
     language: Optional[str]
     restrict_to_country: Optional[str]
 
+    # Arguments to 'convert'
+    output: Path
+
     # Arguments to 'refresh'
     postcodes: bool
     word_tokens: bool
diff --git a/nominatim/clicmd/convert.py b/nominatim/clicmd/convert.py
new file mode 100644 (file)
index 0000000..26b3fb1
--- /dev/null
@@ -0,0 +1,95 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2023 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Implementation of the 'convert' subcommand.
+"""
+from typing import Set, Any, Union, Optional, Sequence
+import argparse
+import asyncio
+from pathlib import Path
+
+from nominatim.clicmd.args import NominatimArgs
+from nominatim.errors import UsageError
+
+# Do not repeat documentation of subcommand classes.
+# pylint: disable=C0111
+# Using non-top-level imports to avoid eventually unused imports.
+# pylint: disable=E0012,C0415
+
+class WithAction(argparse.Action):
+    """ Special action that saves a list of flags, given on the command-line
+        as `--with-foo` or `--without-foo`.
+    """
+    def __init__(self, option_strings: Sequence[str], dest: Any,
+                 default: bool = True, **kwargs: Any) -> None:
+        if 'nargs' in kwargs:
+            raise ValueError("nargs not allowed.")
+        if option_strings is None:
+            raise ValueError("Positional parameter not allowed.")
+
+        self.dest_set = kwargs.pop('dest_set')
+        full_option_strings = []
+        for opt in option_strings:
+            if not opt.startswith('--'):
+                raise ValueError("short-form options not allowed")
+            if default:
+                self.dest_set.add(opt[2:])
+            full_option_strings.append(f"--with-{opt[2:]}")
+            full_option_strings.append(f"--without-{opt[2:]}")
+
+        super().__init__(full_option_strings, argparse.SUPPRESS, nargs=0, **kwargs)
+
+
+    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
+                 values: Union[str, Sequence[Any], None],
+                 option_string: Optional[str] = None) -> None:
+        assert option_string
+        if option_string.startswith('--with-'):
+            self.dest_set.add(option_string[7:])
+        if option_string.startswith('--without-'):
+            self.dest_set.discard(option_string[10:])
+
+
+class ConvertDB:
+    """ Convert an existing database into a different format. (EXPERIMENTAL)
+
+        Dump a read-only version of the database in a different format.
+        At the moment only a SQLite database suitable for reverse lookup
+        can be created.
+    """
+
+    def __init__(self) -> None:
+        self.options: Set[str] = set()
+
+    def add_args(self, parser: argparse.ArgumentParser) -> None:
+        parser.add_argument('--format', default='sqlite',
+                            choices=('sqlite', ),
+                            help='Format of the output database (must be sqlite currently)')
+        parser.add_argument('--output', '-o', required=True, type=Path,
+                            help='File to write the database to.')
+        group = parser.add_argument_group('Switches to define database layout'
+                                          '(currently no effect)')
+        group.add_argument('--reverse', action=WithAction, dest_set=self.options, default=True,
+                           help='Enable/disable support for reverse and lookup API'
+                                ' (default: enabled)')
+        group.add_argument('--search', action=WithAction, dest_set=self.options, default=False,
+                           help='Enable/disable support for search API (default: disabled)')
+        group.add_argument('--details', action=WithAction, dest_set=self.options, default=True,
+                           help='Enable/disable support for details API (default: enabled)')
+
+
+    def run(self, args: NominatimArgs) -> int:
+        if args.output.exists():
+            raise UsageError(f"File '{args.output}' already exists. Refusing to overwrite.")
+
+        if args.format == 'sqlite':
+            from ..tools import convert_sqlite
+
+            asyncio.run(convert_sqlite.convert(args.project_dir, args.output, self.options))
+            return 0
+
+        return 1
index 064fa6a3d6d182d603950d2d7cb5f09c160d7d3e..cb04f7626f08b97f2ee602900849e132f65f6272 100644 (file)
 """
 Custom functions and expressions for SQLAlchemy.
 """
+from __future__ import annotations
 from typing import Any
 
 import sqlalchemy as sa
-from sqlalchemy.sql.expression import FunctionElement
 from sqlalchemy.ext.compiler import compiles
 
 from nominatim.typing import SaColumn
 
-# pylint: disable=abstract-method,missing-function-docstring,consider-using-f-string
+# pylint: disable=all
 
-def select_index_placex_geometry_reverse_lookuppolygon(table: str) -> 'sa.TextClause':
-    """ Create an expression with the necessary conditions over a placex
-        table that the index 'idx_placex_geometry_reverse_lookupPolygon'
-        can be used.
class PlacexGeometryReverseLookuppolygon(sa.sql.functions.GenericFunction[Any]):
    """ Check for conditions that allow partial index use on
        'idx_placex_geometry_reverse_lookupPolygon'.

        The conditions are rendered as constants (not parameterized by
        table name), so that the query planner picks them up correctly
        in prepared statements.
    """
    name = 'PlacexGeometryReverseLookuppolygon'
    # Safe to cache: rendering has no per-instance state.
    inherit_cache = True
+
+
@compiles(PlacexGeometryReverseLookuppolygon) # type: ignore[no-untyped-call, misc]
def _default_intersects(element: SaColumn,
                        compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL rendering: spell out the filter conditions of the
        partial index 'idx_placex_geometry_reverse_lookupPolygon'.
    """
    conditions = ("ST_GeometryType(placex.geometry) in ('ST_Polygon', 'ST_MultiPolygon')",
                  "placex.rank_address between 4 and 25",
                  "placex.type != 'postcode'",
                  "placex.name is not null",
                  "placex.indexed_status = 0",
                  "placex.linked_place_id is null")
    return '(' + ' AND '.join(conditions) + ')'
+
+
@compiles(PlacexGeometryReverseLookuppolygon, 'sqlite') # type: ignore[no-untyped-call, misc]
def _sqlite_intersects(element: SaColumn,
                       compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: same conditions, but Spatialite geometry type
        names carry no 'ST_' prefix.
    """
    conditions = ("ST_GeometryType(placex.geometry) in ('POLYGON', 'MULTIPOLYGON')",
                  "placex.rank_address between 4 and 25",
                  "placex.type != 'postcode'",
                  "placex.name is not null",
                  "placex.indexed_status = 0",
                  "placex.linked_place_id is null")
    return '(' + ' AND '.join(conditions) + ')'
+
+
class IntersectsReverseDistance(sa.sql.functions.GenericFunction[Any]):
    """ Check if a geometry intersects the rank-dependent lookup area of
        a place node (rendering is defined per dialect via @compiles).
    """
    name = 'IntersectsReverseDistance'
    inherit_cache = True

    def __init__(self, table: sa.Table, geom: SaColumn) -> None:
        # Clause order matters to the compilers:
        # (node geometry, node search rank, probe geometry).
        super().__init__(table.c.geometry, # type: ignore[no-untyped-call]
                         table.c.rank_search, geom)
        # Table name is kept so the compilers can qualify the extra
        # filter columns (rank_address, type, name, ...).
        self.tablename = table.name
+
+
@compiles(IntersectsReverseDistance) # type: ignore[no-untyped-call, misc]
def default_reverse_place_diameter(element: SaColumn,
                                   compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL rendering: filter place nodes and test bbox overlap
        against the geometry buffered by reverse_place_diameter().
    """
    geom1, rank, geom2 = (compiler.process(c, **kw) for c in element.clauses)
    table = element.tablename

    return (f"({table}.rank_address between 4 and 25"
            f" AND {table}.type != 'postcode'"
            f" AND {table}.name is not null"
            f" AND {table}.linked_place_id is null"
            f" AND {table}.osm_type = 'N'"
            f" AND ST_Buffer({geom1}, reverse_place_diameter({rank})) && {geom2})")
+
+
@compiles(IntersectsReverseDistance, 'sqlite') # type: ignore[no-untyped-call, misc]
def sqlite_reverse_place_diameter(element: SaColumn,
                                  compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: emulate the buffered intersection with
        MbrIntersects/ST_Expand and route the lookup through the helper
        table 'placex_place_node_areas' and its spatial index.
    """
    geom1, rank, geom2 = (compiler.process(c, **kw) for c in element.clauses)
    table = element.tablename

    return (f"({table}.rank_address between 4 and 25"
            f" AND {table}.type != 'postcode'"
            f" AND {table}.name is not null"
            f" AND {table}.linked_place_id is null"
            f" AND {table}.osm_type = 'N'"
            f" AND MbrIntersects({geom1}, ST_Expand({geom2}, 14.0 * exp(-0.2 * {rank}) - 0.03))"
            f" AND {table}.place_id IN"
            " (SELECT place_id FROM placex_place_node_areas"
            "  WHERE ROWID IN (SELECT ROWID FROM SpatialIndex"
            "  WHERE f_table_name = 'placex_place_node_areas'"
            f"  AND search_frame = {geom2})))")
+
+
class IsBelowReverseDistance(sa.sql.functions.GenericFunction[Any]):
    """ Check if a distance (clause 1) is below the reverse-lookup
        diameter for a given search rank (clause 2).
    """
    name = 'IsBelowReverseDistance'
    inherit_cache = True
+
+
@compiles(IsBelowReverseDistance) # type: ignore[no-untyped-call, misc]
def default_is_below_reverse_distance(element: SaColumn,
                                      compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL rendering: compare against reverse_place_diameter().
    """
    dist, rank = (compiler.process(c, **kw) for c in element.clauses)
    return f"{dist} < reverse_place_diameter({rank})"
+
+
@compiles(IsBelowReverseDistance, 'sqlite') # type: ignore[no-untyped-call, misc]
def sqlite_is_below_reverse_distance(element: SaColumn,
                                     compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: inline the diameter formula because the
        reverse_place_diameter() server-side function is unavailable.
    """
    dist, rank = (compiler.process(c, **kw) for c in element.clauses)
    return f"{dist} < 14.0 * exp(-0.2 * {rank}) - 0.03"
+
 
 def select_index_placex_geometry_reverse_lookupplacenode(table: str) -> 'sa.TextClause':
     """ Create an expression with the necessary conditions over a placex
@@ -41,7 +129,36 @@ def select_index_placex_geometry_reverse_lookupplacenode(table: str) -> 'sa.Text
                    f" AND {table}.osm_type = 'N'")
 
 
-class CrosscheckNames(FunctionElement[Any]):
class IsAddressPoint(sa.sql.functions.GenericFunction[Any]):
    """ Check if the row describes an address point: rank 30 with either
        a house number or an 'addr:housename' entry in the name field
        (see the dialect compilers below).
    """
    name = 'IsAddressPoint'
    inherit_cache = True

    def __init__(self, table: sa.Table) -> None:
        # Clause order relied upon by the compilers:
        # (rank_address, housenumber, name).
        super().__init__(table.c.rank_address, # type: ignore[no-untyped-call]
                         table.c.housenumber, table.c.name)
+
+
@compiles(IsAddressPoint) # type: ignore[no-untyped-call, misc]
def default_is_address_point(element: SaColumn,
                             compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL rendering: uses the hstore '?' containment operator
        on the name column.
    """
    rank, hnr, name = (compiler.process(c, **kw) for c in element.clauses)
    return f"({rank} = 30 AND ({hnr} IS NOT NULL OR {name} ? 'addr:housename'))"
+
+
@compiles(IsAddressPoint, 'sqlite') # type: ignore[no-untyped-call, misc]
def sqlite_is_address_point(element: SaColumn,
                            compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: names are stored as JSON here, so use
        json_extract() instead of the hstore operator.
    """
    rank, hnr, name = (compiler.process(c, **kw) for c in element.clauses)
    return f"({rank} = 30 AND coalesce({hnr}, json_extract({name}, '$.addr:housename')) IS NOT NULL)"
+
+
+class CrosscheckNames(sa.sql.functions.GenericFunction[Any]):
     """ Check if in the given list of names in parameters 1 any of the names
         from the JSON array in parameter 2 are contained.
     """
@@ -54,3 +171,42 @@ def compile_crosscheck_names(element: SaColumn,
     arg1, arg2 = list(element.clauses)
     return "coalesce(avals(%s) && ARRAY(SELECT * FROM json_array_elements_text(%s)), false)" % (
             compiler.process(arg1, **kw), compiler.process(arg2, **kw))
+
+
@compiles(CrosscheckNames, 'sqlite') # type: ignore[no-untyped-call, misc]
def compile_sqlite_crosscheck_names(element: SaColumn,
                                    compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: join the two JSON arrays with json_each() and
        test for any common value.
    """
    names, candidates = (compiler.process(c, **kw) for c in element.clauses)
    return ("EXISTS(SELECT *"
            f" FROM json_each({names}) as name, json_each({candidates}) as match_name"
            " WHERE name.value = match_name.value)")
+
+
class JsonArrayEach(sa.sql.functions.GenericFunction[Any]):
    """ Return elements of a json array as a set.

        Renders as json_array_elements() on PostgreSQL and json_each()
        on SQLite (see the compilers below).
    """
    name = 'JsonArrayEach'
    inherit_cache = True
+
+
@compiles(JsonArrayEach) # type: ignore[no-untyped-call, misc]
def default_json_array_each(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL rendering of JsonArrayEach. """
    return f"json_array_elements({compiler.process(element.clauses, **kw)})"
+
+
@compiles(JsonArrayEach, 'sqlite') # type: ignore[no-untyped-call, misc]
def sqlite_json_array_each(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering of JsonArrayEach. """
    return f"json_each({compiler.process(element.clauses, **kw)})"
+
+
class Greatest(sa.sql.functions.GenericFunction[Any]):
    """ Function to compute maximum of all its input parameters.

        Uses the generic rendering ('greatest(...)') on PostgreSQL and
        max() on SQLite (see the compiler below).
    """
    name = 'greatest'
    inherit_cache = True
+
+
@compiles(Greatest, 'sqlite') # type: ignore[no-untyped-call, misc]
def sqlite_greatest(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite has no greatest(); its multi-argument max() is equivalent. """
    return f"max({compiler.process(element.clauses, **kw)})"
index 2ca518cad06cc888113434ba3a168b5247602f49..7dd1e0ce0b046182b6224eab7b5ec16769719b96 100644 (file)
@@ -13,6 +13,7 @@ import sqlalchemy as sa
 from sqlalchemy.dialects.postgresql import HSTORE, ARRAY, JSONB, array
 from sqlalchemy.dialects.sqlite import JSON as sqlite_json
 
+import nominatim.db.sqlalchemy_functions #pylint: disable=unused-import
 from nominatim.db.sqlalchemy_types import Geometry
 
 class PostgresTypes:
@@ -41,6 +42,9 @@ class SqliteTypes:
 #pylint: disable=too-many-instance-attributes
 class SearchTables:
     """ Data class that holds the tables of the Nominatim database.
+
+        This schema strictly reflects the read-access view of the database.
+        Any data used for updates only will not be visible.
     """
 
     def __init__(self, meta: sa.MetaData, engine_name: str) -> None:
@@ -63,14 +67,13 @@ class SearchTables:
             sa.Column('value', sa.Text))
 
         self.placex = sa.Table('placex', meta,
-            sa.Column('place_id', sa.BigInteger, nullable=False, unique=True),
+            sa.Column('place_id', sa.BigInteger, nullable=False),
             sa.Column('parent_place_id', sa.BigInteger),
             sa.Column('linked_place_id', sa.BigInteger),
             sa.Column('importance', sa.Float),
             sa.Column('indexed_date', sa.DateTime),
             sa.Column('rank_address', sa.SmallInteger),
             sa.Column('rank_search', sa.SmallInteger),
-            sa.Column('partition', sa.SmallInteger),
             sa.Column('indexed_status', sa.SmallInteger),
             sa.Column('osm_type', sa.String(1), nullable=False),
             sa.Column('osm_id', sa.BigInteger, nullable=False),
@@ -88,33 +91,31 @@ class SearchTables:
             sa.Column('centroid', Geometry))
 
         self.addressline = sa.Table('place_addressline', meta,
-            sa.Column('place_id', sa.BigInteger, index=True),
-            sa.Column('address_place_id', sa.BigInteger, index=True),
+            sa.Column('place_id', sa.BigInteger),
+            sa.Column('address_place_id', sa.BigInteger),
             sa.Column('distance', sa.Float),
-            sa.Column('cached_rank_address', sa.SmallInteger),
             sa.Column('fromarea', sa.Boolean),
             sa.Column('isaddress', sa.Boolean))
 
         self.postcode = sa.Table('location_postcode', meta,
-            sa.Column('place_id', sa.BigInteger, unique=True),
+            sa.Column('place_id', sa.BigInteger),
             sa.Column('parent_place_id', sa.BigInteger),
             sa.Column('rank_search', sa.SmallInteger),
             sa.Column('rank_address', sa.SmallInteger),
             sa.Column('indexed_status', sa.SmallInteger),
             sa.Column('indexed_date', sa.DateTime),
             sa.Column('country_code', sa.String(2)),
-            sa.Column('postcode', sa.Text, index=True),
+            sa.Column('postcode', sa.Text),
             sa.Column('geometry', Geometry))
 
         self.osmline = sa.Table('location_property_osmline', meta,
-            sa.Column('place_id', sa.BigInteger, nullable=False, unique=True),
+            sa.Column('place_id', sa.BigInteger, nullable=False),
             sa.Column('osm_id', sa.BigInteger),
             sa.Column('parent_place_id', sa.BigInteger),
             sa.Column('indexed_date', sa.DateTime),
             sa.Column('startnumber', sa.Integer),
             sa.Column('endnumber', sa.Integer),
             sa.Column('step', sa.SmallInteger),
-            sa.Column('partition', sa.SmallInteger),
             sa.Column('indexed_status', sa.SmallInteger),
             sa.Column('linegeo', Geometry),
             sa.Column('address', self.types.Composite),
@@ -125,7 +126,6 @@ class SearchTables:
             sa.Column('country_code', sa.String(2)),
             sa.Column('name', self.types.Composite),
             sa.Column('derived_name', self.types.Composite),
-            sa.Column('country_default_language_code', sa.Text),
             sa.Column('partition', sa.Integer))
 
         self.country_grid = sa.Table('country_osm_grid', meta,
@@ -135,12 +135,12 @@ class SearchTables:
 
         # The following tables are not necessarily present.
         self.search_name = sa.Table('search_name', meta,
-            sa.Column('place_id', sa.BigInteger, index=True),
+            sa.Column('place_id', sa.BigInteger),
             sa.Column('importance', sa.Float),
             sa.Column('search_rank', sa.SmallInteger),
             sa.Column('address_rank', sa.SmallInteger),
-            sa.Column('name_vector', self.types.IntArray, index=True),
-            sa.Column('nameaddress_vector', self.types.IntArray, index=True),
+            sa.Column('name_vector', self.types.IntArray),
+            sa.Column('nameaddress_vector', self.types.IntArray),
             sa.Column('country_code', sa.String(2)),
             sa.Column('centroid', Geometry))
 
@@ -150,6 +150,5 @@ class SearchTables:
             sa.Column('startnumber', sa.Integer),
             sa.Column('endnumber', sa.Integer),
             sa.Column('step', sa.SmallInteger),
-            sa.Column('partition', sa.SmallInteger),
             sa.Column('linegeo', Geometry),
             sa.Column('postcode', sa.Text))
index 7b9590363db2a055962e61ac2d850397cfc0e5d2..a36e8c462acfce3b4cc5e730b2eb5c008f1dfa14 100644 (file)
 """
 Custom types for SQLAlchemy.
 """
+from __future__ import annotations
 from typing import Callable, Any, cast
 import sys
 
 import sqlalchemy as sa
+from sqlalchemy.ext.compiler import compiles
 from sqlalchemy import types
 
 from nominatim.typing import SaColumn, SaBind
 
 #pylint: disable=all
 
class Geometry_DistanceSpheroid(sa.sql.expression.FunctionElement[float]):
    """ Function to compute the spherical distance in meters.
    """
    # Explicit result type so expressions built on this render as floats.
    type = sa.Float()
    name = 'Geometry_DistanceSpheroid'
    inherit_cache = True
+
+
@compiles(Geometry_DistanceSpheroid) # type: ignore[no-untyped-call, misc]
def _default_distance_spheroid(element: SaColumn,
                               compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS rendering: ST_DistanceSpheroid over the WGS 84 spheroid. """
    args = compiler.process(element.clauses, **kw)
    spheroid = '\'SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]]\''
    return "ST_DistanceSpheroid(" + args + ", " + spheroid + ")"
+
+
@compiles(Geometry_DistanceSpheroid, 'sqlite') # type: ignore[no-untyped-call, misc]
def _spatialite_distance_spheroid(element: SaColumn,
                                  compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Spatialite rendering: Distance(..., true) computes the ellipsoidal
        distance; COALESCE guards against a NULL result.
    """
    return f"COALESCE(Distance({compiler.process(element.clauses, **kw)}, true), 0.0)"
+
+
class Geometry_IsLineLike(sa.sql.expression.FunctionElement[Any]):
    """ Check if the geometry is a line or multiline.
    """
    name = 'Geometry_IsLineLike'
    inherit_cache = True
+
+
@compiles(Geometry_IsLineLike) # type: ignore[no-untyped-call, misc]
def _default_is_line_like(element: SaColumn,
                          compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS rendering: type names carry the 'ST_' prefix. """
    geom = compiler.process(element.clauses, **kw)
    return f"ST_GeometryType({geom}) IN ('ST_LineString', 'ST_MultiLineString')"
+
+
@compiles(Geometry_IsLineLike, 'sqlite') # type: ignore[no-untyped-call, misc]
def _sqlite_is_line_like(element: SaColumn,
                         compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Spatialite rendering: type names have no 'ST_' prefix. """
    geom = compiler.process(element.clauses, **kw)
    return f"ST_GeometryType({geom}) IN ('LINESTRING', 'MULTILINESTRING')"
+
+
class Geometry_IsAreaLike(sa.sql.expression.FunctionElement[Any]):
    """ Check if the geometry is a polygon or multipolygon.
    """
    # Fix: was 'Geometry_IsLineLike', copied from the class above. The
    # name is what SQLAlchemy renders for dialects without a @compiles
    # override, so each FunctionElement needs its own distinct name.
    name = 'Geometry_IsAreaLike'
    inherit_cache = True
+
+
@compiles(Geometry_IsAreaLike) # type: ignore[no-untyped-call, misc]
def _default_is_area_like(element: SaColumn,
                          compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS rendering: type names carry the 'ST_' prefix. """
    geom = compiler.process(element.clauses, **kw)
    return f"ST_GeometryType({geom}) IN ('ST_Polygon', 'ST_MultiPolygon')"
+
+
@compiles(Geometry_IsAreaLike, 'sqlite') # type: ignore[no-untyped-call, misc]
def _sqlite_is_area_like(element: SaColumn,
                         compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Spatialite rendering: type names have no 'ST_' prefix. """
    geom = compiler.process(element.clauses, **kw)
    return f"ST_GeometryType({geom}) IN ('POLYGON', 'MULTIPOLYGON')"
+
+
class Geometry_IntersectsBbox(sa.sql.expression.FunctionElement[Any]):
    """ Check if the bounding boxes of the given geometries intersect.
    """
    name = 'Geometry_IntersectsBbox'
    inherit_cache = True
+
+
@compiles(Geometry_IntersectsBbox) # type: ignore[no-untyped-call, misc]
def _default_intersects(element: SaColumn,
                        compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS rendering: the '&&' bbox-overlap operator. """
    left, right = (compiler.process(c, **kw) for c in element.clauses)
    return f"{left} && {right}"
+
+
@compiles(Geometry_IntersectsBbox, 'sqlite') # type: ignore[no-untyped-call, misc]
def _sqlite_intersects(element: SaColumn,
                       compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Spatialite rendering: MbrIntersects() returns 0/1. """
    return f"MbrIntersects({compiler.process(element.clauses, **kw)}) = 1"
+
+
class Geometry_ColumnIntersectsBbox(sa.sql.expression.FunctionElement[Any]):
    """ Check if the bounding box of the geometry intersects with the
        given table column, using the spatial index for the column.

        The index must exist or the query may return nothing.
        (On SQLite the SpatialIndex virtual table is consulted explicitly;
        an absent index makes the subquery come up empty.)
    """
    name = 'Geometry_ColumnIntersectsBbox'
    inherit_cache = True
+
+
@compiles(Geometry_ColumnIntersectsBbox) # type: ignore[no-untyped-call, misc]
def default_intersects_column(element: SaColumn,
                              compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS rendering: '&&' alone; the planner uses the GiST index. """
    left, right = (compiler.process(c, **kw) for c in element.clauses)
    return f"{left} && {right}"
+
+
@compiles(Geometry_ColumnIntersectsBbox, 'sqlite') # type: ignore[no-untyped-call, misc]
def spatialite_intersects_column(element: SaColumn,
                                 compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Spatialite rendering: bbox test plus an explicit ROWID lookup
        against the SpatialIndex virtual table of the column.
    """
    arg1, arg2 = list(element.clauses)
    col = compiler.process(arg1, **kw)
    frame = compiler.process(arg2, **kw)
    tab = arg1.table.name

    return (f"MbrIntersects({col}, {frame}) = 1 and "
            f"{tab}.ROWID IN (SELECT ROWID FROM SpatialIndex "
            f"WHERE f_table_name = '{tab}' AND f_geometry_column = '{arg1.name}' "
            f"AND search_frame = {frame})")
+
+
class Geometry_ColumnDWithin(sa.sql.expression.FunctionElement[Any]):
    """ Check if the geometry is within the distance of the
        given table column, using the spatial index for the column.

        The index must exist or the query may return nothing.
        Clause order: (indexed column, probe geometry, distance).
    """
    name = 'Geometry_ColumnDWithin'
    inherit_cache = True
+
+
@compiles(Geometry_ColumnDWithin) # type: ignore[no-untyped-call, misc]
def default_dwithin_column(element: SaColumn,
                           compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS rendering: plain ST_DWithin; the index is used implicitly. """
    return f"ST_DWithin({compiler.process(element.clauses, **kw)})"
+
@compiles(Geometry_ColumnDWithin, 'sqlite') # type: ignore[no-untyped-call, misc]
def spatialite_dwithin_column(element: SaColumn,
                              compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Spatialite rendering: exact distance test plus an explicit ROWID
        lookup against the SpatialIndex virtual table, with the search
        frame expanded by the distance.
    """
    geom1, geom2, dist = list(element.clauses)
    col = compiler.process(geom1, **kw)
    probe = compiler.process(geom2, **kw)
    radius = compiler.process(dist, **kw)
    tab = geom1.table.name

    return (f"ST_Distance({col}, {probe}) < {radius} and "
            f"{tab}.ROWID IN (SELECT ROWID FROM SpatialIndex "
            f"WHERE f_table_name = '{tab}' AND f_geometry_column = '{geom1.name}' "
            f"AND search_frame = ST_Expand({probe}, {radius}))")
+
+
+
 class Geometry(types.UserDefinedType): # type: ignore[type-arg]
     """ Simplified type decorator for PostGIS geometry. This type
         only supports geometries in 4326 projection.
@@ -47,6 +196,10 @@ class Geometry(types.UserDefinedType): # type: ignore[type-arg]
         return process
 
 
+    def column_expression(self, col: SaColumn) -> SaColumn:
+        return sa.func.ST_AsEWKB(col)
+
+
     def bind_expression(self, bindvalue: SaBind) -> SaColumn:
         return sa.func.ST_GeomFromText(bindvalue, sa.text('4326'), type_=self)
 
@@ -54,28 +207,34 @@ class Geometry(types.UserDefinedType): # type: ignore[type-arg]
     class comparator_factory(types.UserDefinedType.Comparator): # type: ignore[type-arg]
 
         def intersects(self, other: SaColumn) -> 'sa.Operators':
-            return self.op('&&')(other)
+            if isinstance(self.expr, sa.Column):
+                return Geometry_ColumnIntersectsBbox(self.expr, other)
+
+            return Geometry_IntersectsBbox(self.expr, other)
+
 
         def is_line_like(self) -> SaColumn:
-            return sa.func.ST_GeometryType(self, type_=sa.String).in_(('ST_LineString',
-                                                                       'ST_MultiLineString'))
+            return Geometry_IsLineLike(self)
+
 
         def is_area(self) -> SaColumn:
-            return sa.func.ST_GeometryType(self, type_=sa.String).in_(('ST_Polygon',
-                                                                       'ST_MultiPolygon'))
+            return Geometry_IsAreaLike(self)
 
 
         def ST_DWithin(self, other: SaColumn, distance: SaColumn) -> SaColumn:
-            return sa.func.ST_DWithin(self, other, distance, type_=sa.Boolean)
+            if isinstance(self.expr, sa.Column):
+                return Geometry_ColumnDWithin(self.expr, other, distance)
+
+            return sa.func.ST_DWithin(self.expr, other, distance)
 
 
         def ST_DWithin_no_index(self, other: SaColumn, distance: SaColumn) -> SaColumn:
             return sa.func.ST_DWithin(sa.func.coalesce(sa.null(), self),
-                                      other, distance, type_=sa.Boolean)
+                                      other, distance)
 
 
         def ST_Intersects_no_index(self, other: SaColumn) -> 'sa.Operators':
-            return sa.func.coalesce(sa.null(), self).op('&&')(other)
+            return Geometry_IntersectsBbox(sa.func.coalesce(sa.null(), self), other)
 
 
         def ST_Distance(self, other: SaColumn) -> SaColumn:
@@ -91,7 +250,8 @@ class Geometry(types.UserDefinedType): # type: ignore[type-arg]
 
 
         def ST_ClosestPoint(self, other: SaColumn) -> SaColumn:
-            return sa.func.ST_ClosestPoint(self, other, type_=Geometry)
+            return sa.func.coalesce(sa.func.ST_ClosestPoint(self, other, type_=Geometry),
+                                    other)
 
 
         def ST_Buffer(self, other: SaColumn) -> SaColumn:
@@ -116,3 +276,55 @@ class Geometry(types.UserDefinedType): # type: ignore[type-arg]
 
         def ST_LineLocatePoint(self, other: SaColumn) -> SaColumn:
             return sa.func.ST_LineLocatePoint(self, other, type_=sa.Float)
+
+
+        def distance_spheroid(self, other: SaColumn) -> SaColumn:
+            return Geometry_DistanceSpheroid(self, other)
+
+
@compiles(Geometry, 'sqlite') # type: ignore[no-untyped-call]
def get_col_spec(self, *args, **kwargs): # type: ignore[no-untyped-def]
    # DDL column type for Geometry columns on SQLite; the column is then
    # upgraded via RecoverGeometryColumn() after table creation.
    return 'GEOMETRY'
+
+
# 1:1 mappings (postgres function name, SQLAlchemy return type,
# Spatialite function name) registered below via _add_function_alias().
# NOTE(review): sa.Float for ST_LineInterpolatePoint looks suspicious —
# the function yields a point geometry; confirm the intended type.
SQLITE_FUNCTION_ALIAS = (
    ('ST_AsEWKB', sa.Text, 'AsEWKB'),
    ('ST_GeomFromEWKT', Geometry, 'GeomFromEWKT'),
    ('ST_AsGeoJSON', sa.Text, 'AsGeoJSON'),
    ('ST_AsKML', sa.Text, 'AsKML'),
    ('ST_AsSVG', sa.Text, 'AsSVG'),
    ('ST_LineLocatePoint', sa.Float, 'ST_Line_Locate_Point'),
    ('ST_LineInterpolatePoint', sa.Float, 'ST_Line_Interpolate_Point'),
)
+
def _add_function_alias(func: str, ftype: type, alias: str) -> None:
    """ Register a generic SQL function 'func' with the given return type
        and make it render as 'alias' on the sqlite dialect.
    """
    # Create a GenericFunction subclass dynamically; registering it makes
    # sa.func.<func> pick up the declared return type.
    _FuncDef = type(func, (sa.sql.functions.GenericFunction, ), {
        "type": ftype(),
        "name": func,
        "identifier": func,
        "inherit_cache": True})

    func_templ = f"{alias}(%s)"

    def _sqlite_impl(element: Any, compiler: Any, **kw: Any) -> Any:
        # Renders alias(<comma-joined arguments>).
        return func_templ % compiler.process(element.clauses, **kw)

    compiles(_FuncDef, 'sqlite')(_sqlite_impl) # type: ignore[no-untyped-call]
+
# Register all alias functions from the table above.
for alias in SQLITE_FUNCTION_ALIAS:
    _add_function_alias(*alias)
+
+
class ST_DWithin(sa.sql.functions.GenericFunction[Any]):
    """ Generic registration of ST_DWithin so the sqlite dialect can
        supply an emulation (see the compiler below); PostgreSQL uses
        the native function.
    """
    name = 'ST_DWithin'
    inherit_cache = True
+
+
@compiles(ST_DWithin, 'sqlite') # type: ignore[no-untyped-call, misc]
def default_json_array_each(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
    # NOTE(review): the function name is a copy-paste leftover from the
    # JsonArrayEach compiler in sqlalchemy_functions.py. It only serves
    # as a @compiles registration target, but should be renamed.
    geom1, geom2, dist = list(element.clauses)
    # Emulate ST_DWithin: cheap bbox pre-filter (MbrIntersects over an
    # expanded frame) followed by the exact distance test.
    return "(MbrIntersects(%s, ST_Expand(%s, %s)) = 1 AND ST_Distance(%s, %s) <= %s)" % (
        compiler.process(geom1, **kw), compiler.process(geom2, **kw),
        compiler.process(dist, **kw),
        compiler.process(geom1, **kw), compiler.process(geom2, **kw),
        compiler.process(dist, **kw))
diff --git a/nominatim/tools/convert_sqlite.py b/nominatim/tools/convert_sqlite.py
new file mode 100644 (file)
index 0000000..0702e5d
--- /dev/null
@@ -0,0 +1,156 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2023 by the Nominatim developer community.
+# For a full list of authors see the git log.
+"""
+Exporting a Nominatim database to SQlite.
+"""
+from typing import Set
+import logging
+from pathlib import Path
+
+import sqlalchemy as sa
+
+from nominatim.typing import SaSelect
+from nominatim.db.sqlalchemy_types import Geometry
+import nominatim.api as napi
+
+LOG = logging.getLogger()
+
async def convert(project_dir: Path, outfile: Path, options: Set[str]) -> None:
    """ Export an existing database to sqlite. The resulting database
        will be usable against the Python frontend of Nominatim.

        Parameters:
            project_dir: project directory of the source installation.
            outfile: path of the SQLite file to create.
            options: set of layout switches ('reverse', 'search', 'details').
    """
    api = napi.NominatimAPIAsync(project_dir)

    try:
        outapi = napi.NominatimAPIAsync(project_dir,
                                        {'NOMINATIM_DATABASE_DSN': f"sqlite:dbname={outfile}"})

        try:
            async with api.begin() as src, outapi.begin() as dest:
                writer = SqliteWriter(src, dest, options)
                await writer.write()
        finally:
            # Fix: the output API was previously never closed, leaking its
            # engine/connection pool on both success and error paths.
            await outapi.close()
    finally:
        await api.close()
+
+
class SqliteWriter:
    """ Worker class which creates a new SQLite database.

        Works on two open connections: 'src' (the PostgreSQL source) and
        'dest' (the SQLite target). The 'options' set controls which
        optional tables are exported (currently only 'search').
    """

    def __init__(self, src: napi.SearchConnection,
                 dest: napi.SearchConnection, options: Set[str]) -> None:
        self.src = src
        self.dest = dest
        self.options = options


    async def write(self) -> None:
        """ Create the database structure and copy the data from
            the source database to the destination.
        """
        # Initialise the Spatialite metadata tables first; geometry
        # columns cannot be registered without them.
        await self.dest.execute(sa.select(sa.func.InitSpatialMetaData(True, 'WGS84')))

        await self.create_tables()
        await self.copy_data()
        await self.create_indexes()


    async def create_tables(self) -> None:
        """ Set up the database tables.
        """
        # Without search support the search_name table is dropped from the
        # metadata, so it is neither created nor copied.
        if 'search' not in self.options:
            self.dest.t.meta.remove(self.dest.t.search_name)

        await self.dest.connection.run_sync(self.dest.t.meta.create_all)

        # Convert all Geometry columns to Spatialite geometries
        for table in self.dest.t.meta.sorted_tables:
            for col in table.c:
                if isinstance(col.type, Geometry):
                    await self.dest.execute(sa.select(
                        sa.func.RecoverGeometryColumn(table.name, col.name, 4326,
                                                      col.type.subtype.upper(), 'XY')))


    async def copy_data(self) -> None:
        """ Copy data for all registered tables.
        """
        for table in self.dest.t.meta.sorted_tables:
            LOG.warning("Copying '%s'", table.name)
            async_result = await self.src.connection.stream(self.select_from(table.name))

            # Stream in batches of 10000 rows to bound memory use.
            async for partition in async_result.partitions(10000):
                # 'class' is a reserved word for the ORM layer; the schema
                # maps it to the attribute 'class_'.
                data = [{('class_' if k == 'class' else k): getattr(r, k) for k in r._fields}
                        for r in partition]
                await self.dest.execute(table.insert(), data)


    async def create_indexes(self) -> None:
        """ Add indexes necessary for the frontend.
        """
        # reverse place node lookup needs an extra table to simulate a
        # partial index with adaptive buffering.
        await self.dest.execute(sa.text(
            """ CREATE TABLE placex_place_node_areas AS
                  SELECT place_id, ST_Expand(geometry,
                                             14.0 * exp(-0.2 * rank_search) - 0.03) as geometry
                  FROM placex
                  WHERE rank_address between 5 and 25
                        and osm_type = 'N'
                        and linked_place_id is NULL """))
        await self.dest.execute(sa.select(
            sa.func.RecoverGeometryColumn('placex_place_node_areas', 'geometry',
                                          4326, 'GEOMETRY', 'XY')))
        await self.dest.execute(sa.select(sa.func.CreateSpatialIndex(
                                             'placex_place_node_areas', 'geometry')))

        # Remaining indexes.
        await self.create_spatial_index('country_grid', 'geometry')
        await self.create_spatial_index('placex', 'geometry')
        await self.create_spatial_index('osmline', 'linegeo')
        await self.create_spatial_index('tiger', 'linegeo')
        await self.create_index('placex', 'place_id')
        await self.create_index('placex', 'parent_place_id')
        await self.create_index('placex', 'rank_address')
        await self.create_index('addressline', 'place_id')


    async def create_spatial_index(self, table: str, column: str) -> None:
        """ Create a spatial index on the given table and column.
        """
        await self.dest.execute(sa.select(
                  sa.func.CreateSpatialIndex(getattr(self.dest.t, table).name, column)))


    async def create_index(self, table_name: str, column: str) -> None:
        """ Create a simple index on the given table and column.
        """
        table = getattr(self.dest.t, table_name)
        # NOTE(review): the f-string interpolates the Table object; this
        # relies on str(Table) rendering the plain table name.
        await self.dest.connection.run_sync(
            sa.Index(f"idx_{table}_{column}", getattr(table.c, column)).create)


    def select_from(self, table: str) -> SaSelect:
        """ Create the SQL statement to select the source columns and rows.

            Geometries are exported as WKT text; the Geometry type's
            bind handling converts them back on insert.
        """
        columns = self.src.t.meta.tables[table].c

        if table == 'placex':
            # SQLite struggles with Geometries that are larger than 5MB,
            # so simplify those.
            return sa.select(*(c for c in columns if not isinstance(c.type, Geometry)),
                             sa.func.ST_AsText(columns.centroid).label('centroid'),
                             sa.func.ST_AsText(
                               sa.case((sa.func.ST_MemSize(columns.geometry) < 5000000,
                                        columns.geometry),
                                       else_=sa.func.ST_SimplifyPreserveTopology(
                                                columns.geometry, 0.0001)
                                )).label('geometry'))

        sql = sa.select(*(sa.func.ST_AsText(c).label(c.name)
                             if isinstance(c.type, Geometry) else c for c in columns))

        return sql
index 5fca4002a6e867ad2592625e0acda3da502da447..78eac5f5221d77ed69d9234f8593ffc4ae7ec952 100644 (file)
@@ -2,12 +2,38 @@ local flex = require('flex-base')
 
 flex.set_main_tags{
     boundary = {administrative = 'named'},
-    landuse = 'fallback',
-    place = 'always'
+    landuse = {residential = 'fallback',
+               farm = 'fallback',
+               farmyard = 'fallback',
+               industrial = 'fallback',
+               commercial = 'fallback',
+               allotments = 'fallback',
+               retail = 'fallback'},
+    place = {county = 'always',
+             district = 'always',
+             municipality = 'always',
+             city = 'always',
+             town = 'always',
+             borough = 'always',
+             village = 'always',
+             suburb = 'always',
+             hamlet = 'always',
+             croft = 'always',
+             subdivision = 'always',
+             allotments = 'always',
+             neighbourhood = 'always',
+             quarter = 'always',
+             isolated_dwelling = 'always',
+             farm = 'always',
+             city_block = 'always',
+             mountain_pass = 'always',
+             square = 'always',
+             locality = 'always'}
 }
 
 flex.set_prefilters{delete_keys = {'building', 'source', 'highway',
                                    'addr:housenumber', 'addr:street', 'addr:city',
+                                   'addr:interpolation',
                                    'source', '*source', 'type',
                                    'is_in:postcode', '*:wikidata', '*:wikipedia',
                                    '*:prefix', '*:suffix', 'name:prefix:*', 'name:suffix:*',
index 6611c81b527df0f791a1078ba660c61a22741c13..5351ce417185a5a813e9ee5ffcf1ce18491828a1 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Localization of search results
 
index 3d5635de126c6136c575a2f7e860e6f7492d7234..0fb641712446a268a6a877239c83fcd6bc2c1db9 100644 (file)
@@ -2,6 +2,7 @@
 Feature: Object details
     Testing different parameter options for details API.
 
+    @SQLITE
     Scenario: JSON Details
         When sending json details query for W297699560
         Then the result is valid json
@@ -11,6 +12,7 @@ Feature: Object details
             | type  |
             | Point |
 
+    @SQLITE
     Scenario: JSON Details with pretty printing
         When sending json details query for W297699560
             | pretty |
@@ -19,6 +21,7 @@ Feature: Object details
         And result has attributes geometry
         And result has not attributes keywords,address,linked_places,parentof
 
+    @SQLITE
      Scenario: JSON Details with addressdetails
         When sending json details query for W297699560
             | addressdetails |
@@ -26,6 +29,7 @@ Feature: Object details
         Then the result is valid json
         And result has attributes address
 
+    @SQLITE
     Scenario: JSON Details with linkedplaces
         When sending json details query for R123924
             | linkedplaces |
@@ -33,6 +37,7 @@ Feature: Object details
         Then the result is valid json
         And result has attributes linked_places
 
+    @SQLITE
     Scenario: JSON Details with hierarchy
         When sending json details query for W297699560
             | hierarchy |
@@ -40,6 +45,7 @@ Feature: Object details
         Then the result is valid json
         And result has attributes hierarchy
 
+    @SQLITE
     Scenario: JSON Details with grouped hierarchy
         When sending json details query for W297699560
             | hierarchy | group_hierarchy |
@@ -69,6 +75,7 @@ Feature: Object details
         Then the result is valid json
         And result has attributes keywords
 
+    @SQLITE
     Scenario Outline: JSON details with full geometry
         When sending json details query for <osmid>
             | polygon_geojson |
index 4c54b0d67c0fe8d6a8e78153da771638003dc450..99d3422334b3baf36b18040dc883727b72c76bd7 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Object details
     Check details page for correctness
index 8a8e656113cf81daf6e8801a5007ed0f1ee58b2c..e279a8fa9e12978b8c93622538b2bffa640c8360 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Places by osm_type and osm_id Tests
     Simple tests for errors in various response formats.
index 9ea388122868c54b53d9b2211b942bfb33ecdab2..1e5b8ee77744672571a0603d9b7b0e8981f0f032 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Places by osm_type and osm_id Tests
     Simple tests for response format.
index 2c14dd5f2a5d19d40b6b7264dc41debd5074afff..33fadbbdff5a106d26c8679f8c0fbf31d270b6be 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Geometries for reverse geocoding
     Tests for returning geometries with reverse
@@ -9,7 +10,7 @@ Feature: Geometries for reverse geocoding
           | 1            |
         Then results contain
           | geotext |
-          | POLYGON((9.5225302 47.138066,9.5225348 47.1379282,9.5226142 47.1379294,9.5226143 47.1379257,9.522615 47.137917,9.5226225 47.1379098,9.5226334 47.1379052,9.5226461 47.1379037,9.5226588 47.1379056,9.5226693 47.1379107,9.5226762 47.1379181,9.5226762 47.1379268,9.5226761 47.1379308,9.5227366 47.1379317,9.5227352 47.1379753,9.5227608 47.1379757,9.5227595 47.1380148,9.5227355 47.1380145,9.5227337 47.1380692,9.5225302 47.138066)) |
+          | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5226142 47.1379294, ?9.5226143 47.1379257, ?9.522615 47.137917, ?9.5226225 47.1379098, ?9.5226334 47.1379052, ?9.5226461 47.1379037, ?9.5226588 47.1379056, ?9.5226693 47.1379107, ?9.5226762 47.1379181, ?9.5226762 47.1379268, ?9.5226761 47.1379308, ?9.5227366 47.1379317, ?9.5227352 47.1379753, ?9.5227608 47.1379757, ?9.5227595 47.1380148, ?9.5227355 47.1380145, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
 
 
     Scenario: Polygons can be slightly simplified
@@ -18,7 +19,7 @@ Feature: Geometries for reverse geocoding
           | 1            | 0.00001            |
         Then results contain
           | geotext |
-          | POLYGON((9.5225302 47.138066,9.5225348 47.1379282,9.5226142 47.1379294,9.5226225 47.1379098,9.5226588 47.1379056,9.5226761 47.1379308,9.5227366 47.1379317,9.5227352 47.1379753,9.5227608 47.1379757,9.5227595 47.1380148,9.5227355 47.1380145,9.5227337 47.1380692,9.5225302 47.138066)) |
+          | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5226142 47.1379294, ?9.5226225 47.1379098, ?9.5226588 47.1379056, ?9.5226761 47.1379308, ?9.5227366 47.1379317, ?9.5227352 47.1379753, ?9.5227608 47.1379757, ?9.5227595 47.1380148, ?9.5227355 47.1380145, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
 
 
     Scenario: Polygons can be much simplified
@@ -27,7 +28,7 @@ Feature: Geometries for reverse geocoding
           | 1            | 0.9               |
         Then results contain
           | geotext |
-          | POLYGON((9.5225302 47.138066,9.5225348 47.1379282,9.5227608 47.1379757,9.5227337 47.1380692,9.5225302 47.138066)) |
+          | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5227608 47.1379757, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
 
 
     Scenario: For polygons return the centroid as center point
index e42689f73d12a9aeb0df1bd7e783398e274e93b4..69f84ebc4a030cdbe4cd89d47307d90b9b86c046 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Localization of reverse search results
 
index d4c334a54e170fa5ff28e908767851835458b7e2..ef02886478cbc81aae2358b99f15f2a6692c8b55 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Layer parameter in reverse geocoding
     Testing correct function of layer selection while reverse geocoding
@@ -57,7 +58,7 @@ Feature: Layer parameter in reverse geocoding
 
     @v1-api-python-only
     Scenario Outline: Search for mountain peaks begins at level 12
-        When sending v1/reverse at 47.08221,9.56769
+        When sending v1/reverse at 47.08293,9.57109
           | layer   | zoom   |
           | natural | <zoom> |
         Then results contain
@@ -71,7 +72,7 @@ Feature: Layer parameter in reverse geocoding
 
 
     @v1-api-python-only
-     Scenario Outline: Reverse serach with manmade layers
+     Scenario Outline: Reverse search with manmade layers
         When sending v1/reverse at 32.46904,-86.44439
           | layer   |
           | <layer> |
@@ -84,5 +85,5 @@ Feature: Layer parameter in reverse geocoding
           | manmade         | leisure  | park        |
           | address         | highway  | residential |
           | poi             | leisure  | pitch       |
-          | natural         | waterway | stream      |
+          | natural         | waterway | river       |
           | natural,manmade | leisure  | park        |
index d51378d6443dab6e2a0254dc7a23bf969daba2b6..37abb22d4095317a397c2bf35fff71924efdbd63 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Reverse geocoding
     Testing the reverse function
index c9112b94c591439779163a0b4f8626b71b125326..56b85e2009d2618c0db9a071eff528ab031dc7dd 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Geocodejson for Reverse API
     Testing correctness of geocodejson output (API version v1).
index 0b6ad0d3a3ff6744a277855abcec1a88d7a1b577..e705529d38be7a6e8730dc71128c1adb2dbbb6ae 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Geojson for Reverse API
     Testing correctness of geojson output (API version v1).
index ac3c799ed8d71c9c9956ad6bc450a1f8fa39f57a..1f629c0fa110941d1e4ea70e6c3b3b6fbce8d00b 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Json output for Reverse API
     Testing correctness of json and jsonv2 output (API version v1).
@@ -93,7 +94,7 @@ Feature: Json output for Reverse API
           | polygon_text | 1     |
        Then results contain
           | geotext |
-          | LINESTRING(9.5039353 47.0657546,9.5040437 47.0657781,9.5040808 47.065787,9.5054298 47.0661407) |
+          | ^LINESTRING\(9.5039353 47.0657546, ?9.5040437 47.0657781, ?9.5040808 47.065787, ?9.5054298 47.0661407\) |
 
        Examples:
           | format |
index 70a6505bfa7730e042348860d23bc58512dcb606..a1f08afd37d6919e20af7f00741012ff07ba0bae 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: v1/reverse Parameter Tests
     Tests for parameter inputs for the v1 reverse endpoint.
index 75f27220497009eb65c2d7e0daee3b0b426eed50..95e7478ca6daadf2bcbd54f23cccab3187ae0ef6 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: XML output for Reverse API
     Testing correctness of xml output (API version v1).
@@ -66,7 +67,7 @@ Feature: XML output for Reverse API
           | polygon_text | 1     |
        Then results contain
           | geotext |
-          | LINESTRING(9.5039353 47.0657546,9.5040437 47.0657781,9.5040808 47.065787,9.5054298 47.0661407) |
+          | ^LINESTRING\(9.5039353 47.0657546, ?9.5040437 47.0657781, ?9.5040808 47.065787, ?9.5054298 47.0661407\) |
 
 
     Scenario: Output of SVG
index 1323caa161ec581b2700401edb3e3f5c95b913b8..993fa1ecb48e31bcbc876831476957df3f65ab40 100644 (file)
@@ -1,3 +1,4 @@
+@SQLITE
 @APIDB
 Feature: Status queries
     Testing status query
index afaa51512a7c1041e659725e2bc6ca9bfaaae77e..664b5ac79e7d2013182ebff5036f04889870f586 100644 (file)
@@ -46,7 +46,10 @@ def before_all(context):
 
 
 def before_scenario(context, scenario):
-    if 'DB' in context.tags:
+    if not 'SQLITE' in context.tags \
+       and context.config.userdata['API_TEST_DB'].startswith('sqlite:'):
+        context.scenario.skip("Not usable with Sqlite database.")
+    elif 'DB' in context.tags:
         context.nominatim.setup_db(context)
     elif 'APIDB' in context.tags:
         context.nominatim.setup_api_db()
index 7299988b2330d5369d27d312c350afed48f53500..11dede3049854a323388fceae13fa61b428fb689 100644 (file)
@@ -86,7 +86,10 @@ class NominatimEnvironment:
             be picked up by dotenv and creates a project directory with the
             appropriate website scripts.
         """
-        dsn = 'pgsql:dbname={}'.format(dbname)
+        if dbname.startswith('sqlite:'):
+            dsn = 'sqlite:dbname={}'.format(dbname[7:])
+        else:
+            dsn = 'pgsql:dbname={}'.format(dbname)
         if self.db_host:
             dsn += ';host=' + self.db_host
         if self.db_port:
@@ -197,6 +200,9 @@ class NominatimEnvironment:
         """
         self.write_nominatim_config(self.api_test_db)
 
+        if self.api_test_db.startswith('sqlite:'):
+            return
+
         if not self.api_db_done:
             self.api_db_done = True
 
index 1b5b88ed5f15bfee12e964a4be6aaf63cc1df98c..cb7f324a393fa24e2ddb097e710b92662fc96bd2 100644 (file)
@@ -16,6 +16,7 @@ import sqlalchemy as sa
 
 import nominatim.api as napi
 from nominatim.db.sql_preprocessor import SQLPreprocessor
+from nominatim.tools import convert_sqlite
 import nominatim.api.logging as loglib
 
 class APITester:
@@ -178,7 +179,6 @@ def apiobj(temp_db_with_extensions, temp_db_conn, monkeypatch):
     testapi.async_to_sync(testapi.create_tables())
 
     proc = SQLPreprocessor(temp_db_conn, testapi.api.config)
-    proc.run_sql_file(temp_db_conn, 'functions/address_lookup.sql')
     proc.run_sql_file(temp_db_conn, 'functions/ranking.sql')
 
     loglib.set_log_output('text')
@@ -186,3 +186,21 @@ def apiobj(temp_db_with_extensions, temp_db_conn, monkeypatch):
     print(loglib.get_and_disable())
 
     testapi.api.close()
+
+
+@pytest.fixture(params=['postgres_db', 'sqlite_db'])
+def frontend(request, event_loop, tmp_path):
+    if request.param == 'sqlite_db':
+        db = str(tmp_path / 'test_nominatim_python_unittest.sqlite')
+
+        def mkapi(apiobj, options={'reverse'}):
+            event_loop.run_until_complete(convert_sqlite.convert(Path('/invalid'),
+                                                                 db, options))
+            return napi.NominatimAPI(Path('/invalid'),
+                                     {'NOMINATIM_DATABASE_DSN': f"sqlite:dbname={db}",
+                                      'NOMINATIM_USE_US_TIGER_DATA': 'yes'})
+    elif request.param == 'postgres_db':
+        def mkapi(apiobj, options=None):
+            return apiobj.api
+
+    return mkapi
index ca14b93c178e60cbd019cd667922f5fda71c02ed..596876d4503dac42d94ab8a06d4cd3f74154a41c 100644 (file)
@@ -15,7 +15,7 @@ import nominatim.api as napi
 
 @pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4),
                                    napi.OsmID('W', 4, 'highway')))
-def test_lookup_in_placex(apiobj, idobj):
+def test_lookup_in_placex(apiobj, frontend, idobj):
     import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',
@@ -31,7 +31,8 @@ def test_lookup_in_placex(apiobj, idobj):
                      indexed_date=import_date,
                      geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
 
-    result = apiobj.api.details(idobj)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(idobj)
 
     assert result is not None
 
@@ -69,7 +70,7 @@ def test_lookup_in_placex(apiobj, idobj):
     assert result.geometry == {'type': 'ST_LineString'}
 
 
-def test_lookup_in_placex_minimal_info(apiobj):
+def test_lookup_in_placex_minimal_info(apiobj, frontend):
     import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',
@@ -79,7 +80,8 @@ def test_lookup_in_placex_minimal_info(apiobj):
                      indexed_date=import_date,
                      geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
 
-    result = apiobj.api.details(napi.PlaceID(332))
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332))
 
     assert result is not None
 
@@ -117,16 +119,17 @@ def test_lookup_in_placex_minimal_info(apiobj):
     assert result.geometry == {'type': 'ST_LineString'}
 
 
-def test_lookup_in_placex_with_geometry(apiobj):
+def test_lookup_in_placex_with_geometry(apiobj, frontend):
     apiobj.add_placex(place_id=332,
                       geometry='LINESTRING(23 34, 23.1 34)')
 
-    result = apiobj.api.details(napi.PlaceID(332), geometry_output=napi.GeometryFormat.GEOJSON)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332), geometry_output=napi.GeometryFormat.GEOJSON)
 
     assert result.geometry == {'geojson': '{"type":"LineString","coordinates":[[23,34],[23.1,34]]}'}
 
 
-def test_lookup_placex_with_address_details(apiobj):
+def test_lookup_placex_with_address_details(apiobj, frontend):
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',  name='Street',
                      country_code='pl',
@@ -143,7 +146,8 @@ def test_lookup_placex_with_address_details(apiobj):
                               country_code='pl',
                               rank_search=17, rank_address=16)
 
-    result = apiobj.api.details(napi.PlaceID(332), address_details=True)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332), address_details=True)
 
     assert result.address_rows == [
                napi.AddressLine(place_id=332, osm_object=('W', 4),
@@ -172,18 +176,19 @@ def test_lookup_placex_with_address_details(apiobj):
            ]
 
 
-def test_lookup_place_with_linked_places_none_existing(apiobj):
+def test_lookup_place_with_linked_places_none_existing(apiobj, frontend):
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',  name='Street',
                      country_code='pl', linked_place_id=45,
                      rank_search=27, rank_address=26)
 
-    result = apiobj.api.details(napi.PlaceID(332), linked_places=True)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332), linked_places=True)
 
     assert result.linked_rows == []
 
 
-def test_lookup_place_with_linked_places_existing(apiobj):
+def test_lookup_place_with_linked_places_existing(apiobj, frontend):
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',  name='Street',
                      country_code='pl', linked_place_id=45,
@@ -197,7 +202,8 @@ def test_lookup_place_with_linked_places_existing(apiobj):
                      country_code='pl', linked_place_id=332,
                      rank_search=27, rank_address=26)
 
-    result = apiobj.api.details(napi.PlaceID(332), linked_places=True)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332), linked_places=True)
 
     assert result.linked_rows == [
                napi.AddressLine(place_id=1001, osm_object=('W', 5),
@@ -213,18 +219,19 @@ def test_lookup_place_with_linked_places_existing(apiobj):
     ]
 
 
-def test_lookup_place_with_parented_places_not_existing(apiobj):
+def test_lookup_place_with_parented_places_not_existing(apiobj, frontend):
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',  name='Street',
                      country_code='pl', parent_place_id=45,
                      rank_search=27, rank_address=26)
 
-    result = apiobj.api.details(napi.PlaceID(332), parented_places=True)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332), parented_places=True)
 
     assert result.parented_rows == []
 
 
-def test_lookup_place_with_parented_places_existing(apiobj):
+def test_lookup_place_with_parented_places_existing(apiobj, frontend):
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',  name='Street',
                      country_code='pl', parent_place_id=45,
@@ -238,7 +245,8 @@ def test_lookup_place_with_parented_places_existing(apiobj):
                      country_code='pl', parent_place_id=332,
                      rank_search=27, rank_address=26)
 
-    result = apiobj.api.details(napi.PlaceID(332), parented_places=True)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(332), parented_places=True)
 
     assert result.parented_rows == [
                napi.AddressLine(place_id=1001, osm_object=('N', 5),
@@ -250,7 +258,7 @@ def test_lookup_place_with_parented_places_existing(apiobj):
 
 
 @pytest.mark.parametrize('idobj', (napi.PlaceID(4924), napi.OsmID('W', 9928)))
-def test_lookup_in_osmline(apiobj, idobj):
+def test_lookup_in_osmline(apiobj, frontend, idobj):
     import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
     apiobj.add_osmline(place_id=4924, osm_id=9928,
                        parent_place_id=12,
@@ -260,7 +268,8 @@ def test_lookup_in_osmline(apiobj, idobj):
                        indexed_date=import_date,
                        geometry='LINESTRING(23 34, 23 35)')
 
-    result = apiobj.api.details(idobj)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(idobj)
 
     assert result is not None
 
@@ -298,7 +307,7 @@ def test_lookup_in_osmline(apiobj, idobj):
     assert result.geometry == {'type': 'ST_LineString'}
 
 
-def test_lookup_in_osmline_split_interpolation(apiobj):
+def test_lookup_in_osmline_split_interpolation(apiobj, frontend):
     apiobj.add_osmline(place_id=1000, osm_id=9,
                        startnumber=2, endnumber=4, step=1)
     apiobj.add_osmline(place_id=1001, osm_id=9,
@@ -306,18 +315,19 @@ def test_lookup_in_osmline_split_interpolation(apiobj):
     apiobj.add_osmline(place_id=1002, osm_id=9,
                        startnumber=11, endnumber=20, step=1)
 
+    api = frontend(apiobj, options={'details'})
     for i in range(1, 6):
-        result = apiobj.api.details(napi.OsmID('W', 9, str(i)))
+        result = api.details(napi.OsmID('W', 9, str(i)))
         assert result.place_id == 1000
     for i in range(7, 11):
-        result = apiobj.api.details(napi.OsmID('W', 9, str(i)))
+        result = api.details(napi.OsmID('W', 9, str(i)))
         assert result.place_id == 1001
     for i in range(12, 22):
-        result = apiobj.api.details(napi.OsmID('W', 9, str(i)))
+        result = api.details(napi.OsmID('W', 9, str(i)))
         assert result.place_id == 1002
 
 
-def test_lookup_osmline_with_address_details(apiobj):
+def test_lookup_osmline_with_address_details(apiobj, frontend):
     apiobj.add_osmline(place_id=9000, osm_id=9,
                        startnumber=2, endnumber=4, step=1,
                        parent_place_id=332)
@@ -337,7 +347,8 @@ def test_lookup_osmline_with_address_details(apiobj):
                               country_code='pl',
                               rank_search=17, rank_address=16)
 
-    result = apiobj.api.details(napi.PlaceID(9000), address_details=True)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(9000), address_details=True)
 
     assert result.address_rows == [
                napi.AddressLine(place_id=332, osm_object=('W', 4),
@@ -366,7 +377,7 @@ def test_lookup_osmline_with_address_details(apiobj):
            ]
 
 
-def test_lookup_in_tiger(apiobj):
+def test_lookup_in_tiger(apiobj, frontend):
     apiobj.add_tiger(place_id=4924,
                      parent_place_id=12,
                      startnumber=1, endnumber=4, step=1,
@@ -377,7 +388,8 @@ def test_lookup_in_tiger(apiobj):
                       osm_type='W', osm_id=6601223,
                       geometry='LINESTRING(23 34, 23 35)')
 
-    result = apiobj.api.details(napi.PlaceID(4924))
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(4924))
 
     assert result is not None
 
@@ -415,7 +427,7 @@ def test_lookup_in_tiger(apiobj):
     assert result.geometry == {'type': 'ST_LineString'}
 
 
-def test_lookup_tiger_with_address_details(apiobj):
+def test_lookup_tiger_with_address_details(apiobj, frontend):
     apiobj.add_tiger(place_id=9000,
                      startnumber=2, endnumber=4, step=1,
                      parent_place_id=332)
@@ -435,7 +447,8 @@ def test_lookup_tiger_with_address_details(apiobj):
                               country_code='us',
                               rank_search=17, rank_address=16)
 
-    result = apiobj.api.details(napi.PlaceID(9000), address_details=True)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(9000), address_details=True)
 
     assert result.address_rows == [
                napi.AddressLine(place_id=332, osm_object=('W', 4),
@@ -464,7 +477,7 @@ def test_lookup_tiger_with_address_details(apiobj):
            ]
 
 
-def test_lookup_in_postcode(apiobj):
+def test_lookup_in_postcode(apiobj, frontend):
     import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
     apiobj.add_postcode(place_id=554,
                         parent_place_id=152,
@@ -474,7 +487,8 @@ def test_lookup_in_postcode(apiobj):
                         indexed_date=import_date,
                         geometry='POINT(-9.45 5.6)')
 
-    result = apiobj.api.details(napi.PlaceID(554))
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(554))
 
     assert result is not None
 
@@ -512,7 +526,7 @@ def test_lookup_in_postcode(apiobj):
     assert result.geometry == {'type': 'ST_Point'}
 
 
-def test_lookup_postcode_with_address_details(apiobj):
+def test_lookup_postcode_with_address_details(apiobj, frontend):
     apiobj.add_postcode(place_id=9000,
                         parent_place_id=332,
                         postcode='34 425',
@@ -528,7 +542,8 @@ def test_lookup_postcode_with_address_details(apiobj):
                               country_code='gb',
                               rank_search=17, rank_address=16)
 
-    result = apiobj.api.details(napi.PlaceID(9000), address_details=True)
+    api = frontend(apiobj, options={'details'})
+    result = api.details(napi.PlaceID(9000), address_details=True)
 
     assert result.address_rows == [
                napi.AddressLine(place_id=9000, osm_object=None,
@@ -559,18 +574,20 @@ def test_lookup_postcode_with_address_details(apiobj):
 @pytest.mark.parametrize('objid', [napi.PlaceID(1736),
                                    napi.OsmID('W', 55),
                                    napi.OsmID('N', 55, 'amenity')])
-def test_lookup_missing_object(apiobj, objid):
+def test_lookup_missing_object(apiobj, frontend, objid):
     apiobj.add_placex(place_id=1, osm_type='N', osm_id=55,
                       class_='place', type='suburb')
 
-    assert apiobj.api.details(objid) is None
+    api = frontend(apiobj, options={'details'})
+    assert api.details(objid) is None
 
 
 @pytest.mark.parametrize('gtype', (napi.GeometryFormat.KML,
                                     napi.GeometryFormat.SVG,
                                     napi.GeometryFormat.TEXT))
-def test_lookup_unsupported_geometry(apiobj, gtype):
+def test_lookup_unsupported_geometry(apiobj, frontend, gtype):
     apiobj.add_placex(place_id=332)
 
+    api = frontend(apiobj, options={'details'})
     with pytest.raises(ValueError):
-        apiobj.api.details(napi.PlaceID(332), geometry_output=gtype)
+        api.details(napi.PlaceID(332), geometry_output=gtype)
index 8f5dd17c5c256a1c2de65085592a8aa022dc8256..48b0777667c8199934e4cd7994e82feffde54570 100644 (file)
@@ -7,22 +7,26 @@
 """
 Tests for lookup API call.
 """
+import json
+
 import pytest
 
 import nominatim.api as napi
 
-def test_lookup_empty_list(apiobj):
-    assert apiobj.api.lookup([]) == []
+def test_lookup_empty_list(apiobj, frontend):
+    api = frontend(apiobj, options={'details'})
+    assert api.lookup([]) == []
 
 
-def test_lookup_non_existing(apiobj):
-    assert apiobj.api.lookup((napi.PlaceID(332), napi.OsmID('W', 4),
-                              napi.OsmID('W', 4, 'highway'))) == []
+def test_lookup_non_existing(apiobj, frontend):
+    api = frontend(apiobj, options={'details'})
+    assert api.lookup((napi.PlaceID(332), napi.OsmID('W', 4),
+                       napi.OsmID('W', 4, 'highway'))) == []
 
 
 @pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4),
                                    napi.OsmID('W', 4, 'highway')))
-def test_lookup_single_placex(apiobj, idobj):
+def test_lookup_single_placex(apiobj, frontend, idobj):
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',
                      name={'name': 'Road'}, address={'city': 'Barrow'},
@@ -36,7 +40,8 @@ def test_lookup_single_placex(apiobj, idobj):
                      centroid=(23, 34),
                      geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
 
-    result = apiobj.api.lookup([idobj])
+    api = frontend(apiobj, options={'details'})
+    result = api.lookup([idobj])
 
     assert len(result) == 1
 
@@ -72,7 +77,7 @@ def test_lookup_single_placex(apiobj, idobj):
     assert result.geometry == {}
 
 
-def test_lookup_multiple_places(apiobj):
+def test_lookup_multiple_places(apiobj, frontend):
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',
                      name={'name': 'Road'}, address={'city': 'Barrow'},
@@ -93,9 +98,10 @@ def test_lookup_multiple_places(apiobj):
                        geometry='LINESTRING(23 34, 23 35)')
 
 
-    result = apiobj.api.lookup((napi.OsmID('W', 1),
-                                napi.OsmID('W', 4),
-                                napi.OsmID('W', 9928)))
+    api = frontend(apiobj, options={'details'})
+    result = api.lookup((napi.OsmID('W', 1),
+                         napi.OsmID('W', 4),
+                         napi.OsmID('W', 9928)))
 
     assert len(result) == 2
 
@@ -103,7 +109,7 @@ def test_lookup_multiple_places(apiobj):
 
 
 @pytest.mark.parametrize('gtype', list(napi.GeometryFormat))
-def test_simple_place_with_geometry(apiobj, gtype):
+def test_simple_place_with_geometry(apiobj, frontend, gtype):
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',
                      name={'name': 'Road'}, address={'city': 'Barrow'},
@@ -117,8 +123,8 @@ def test_simple_place_with_geometry(apiobj, gtype):
                      centroid=(23, 34),
                      geometry='POLYGON((23 34, 23.1 34, 23.1 34.1, 23 34))')
 
-    result = apiobj.api.lookup([napi.OsmID('W', 4)],
-                               geometry_output=gtype)
+    api = frontend(apiobj, options={'details'})
+    result = api.lookup([napi.OsmID('W', 4)], geometry_output=gtype)
 
     assert len(result) == 1
     assert result[0].place_id == 332
@@ -129,7 +135,7 @@ def test_simple_place_with_geometry(apiobj, gtype):
         assert list(result[0].geometry.keys()) == [gtype.name.lower()]
 
 
-def test_simple_place_with_geometry_simplified(apiobj):
+def test_simple_place_with_geometry_simplified(apiobj, frontend):
     apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
                      class_='highway', type='residential',
                      name={'name': 'Road'}, address={'city': 'Barrow'},
@@ -143,11 +149,15 @@ def test_simple_place_with_geometry_simplified(apiobj):
                      centroid=(23, 34),
                      geometry='POLYGON((23 34, 22.999 34, 23.1 34, 23.1 34.1, 23 34))')
 
-    result = apiobj.api.lookup([napi.OsmID('W', 4)],
-                               geometry_output=napi.GeometryFormat.TEXT,
-                               geometry_simplification=0.1)
+    api = frontend(apiobj, options={'details'})
+    result = api.lookup([napi.OsmID('W', 4)],
+                        geometry_output=napi.GeometryFormat.GEOJSON,
+                        geometry_simplification=0.1)
 
     assert len(result) == 1
     assert result[0].place_id == 332
-    assert result[0].geometry == {'text': 'POLYGON((23 34,23.1 34,23.1 34.1,23 34))'}
 
+    geom = json.loads(result[0].geometry['geojson'])
+
+    assert geom['type']  == 'Polygon'
+    assert geom['coordinates'] == [[[23, 34], [23.1, 34], [23.1, 34.1], [23, 34]]]
index d9ab2cb87efbc945059339935838cd3924d716c5..414115e113783a575c6a8d52453e1e04f1a80a98 100644 (file)
@@ -16,20 +16,23 @@ import pytest
 
 import nominatim.api as napi
 
-def test_reverse_rank_30(apiobj):
+API_OPTIONS = {'reverse'}
+
+def test_reverse_rank_30(apiobj, frontend):
     apiobj.add_placex(place_id=223, class_='place', type='house',
                       housenumber='1',
                       centroid=(1.3, 0.7),
                       geometry='POINT(1.3 0.7)')
 
-    result = apiobj.api.reverse((1.3, 0.7))
+    api = frontend(apiobj, options=API_OPTIONS)
+    result = api.reverse((1.3, 0.7))
 
     assert result is not None
     assert result.place_id == 223
 
 
 @pytest.mark.parametrize('country', ['de', 'us'])
-def test_reverse_street(apiobj, country):
+def test_reverse_street(apiobj, frontend, country):
     apiobj.add_placex(place_id=990, class_='highway', type='service',
                       rank_search=27, rank_address=27,
                       name = {'name': 'My Street'},
@@ -37,17 +40,19 @@ def test_reverse_street(apiobj, country):
                       country_code=country,
                       geometry='LINESTRING(9.995 10, 10.005 10)')
 
-    assert apiobj.api.reverse((9.995, 10)).place_id == 990
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((9.995, 10)).place_id == 990
 
 
-def test_reverse_ignore_unindexed(apiobj):
+def test_reverse_ignore_unindexed(apiobj, frontend):
     apiobj.add_placex(place_id=223, class_='place', type='house',
                       housenumber='1',
                       indexed_status=2,
                       centroid=(1.3, 0.7),
                       geometry='POINT(1.3 0.7)')
 
-    result = apiobj.api.reverse((1.3, 0.7))
+    api = frontend(apiobj, options=API_OPTIONS)
+    result = api.reverse((1.3, 0.7))
 
     assert result is None
 
@@ -62,7 +67,7 @@ def test_reverse_ignore_unindexed(apiobj):
                                               (0.70003, napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225),
                                               (0.70003, napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225),
                                               (5, napi.DataLayer.ADDRESS, 229)])
-def test_reverse_rank_30_layers(apiobj, y, layer, place_id):
+def test_reverse_rank_30_layers(apiobj, frontend, y, layer, place_id):
     apiobj.add_placex(place_id=223, class_='place', type='house',
                       housenumber='1',
                       rank_address=30,
@@ -90,21 +95,23 @@ def test_reverse_rank_30_layers(apiobj, y, layer, place_id):
                       rank_search=30,
                       centroid=(1.3, 5))
 
-    assert apiobj.api.reverse((1.3, y), layers=layer).place_id == place_id
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((1.3, y), layers=layer).place_id == place_id
 
 
-def test_reverse_poi_layer_with_no_pois(apiobj):
+def test_reverse_poi_layer_with_no_pois(apiobj, frontend):
     apiobj.add_placex(place_id=223, class_='place', type='house',
                       housenumber='1',
                       rank_address=30,
                       rank_search=30,
                       centroid=(1.3, 0.70001))
 
-    assert apiobj.api.reverse((1.3, 0.70001), max_rank=29,
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((1.3, 0.70001), max_rank=29,
                               layers=napi.DataLayer.POI) is None
 
 
-def test_reverse_housenumber_on_street(apiobj):
+def test_reverse_housenumber_on_street(apiobj, frontend):
     apiobj.add_placex(place_id=990, class_='highway', type='service',
                       rank_search=27, rank_address=27,
                       name = {'name': 'My Street'},
@@ -116,12 +123,13 @@ def test_reverse_housenumber_on_street(apiobj):
                       housenumber='23',
                       centroid=(10.0, 10.00001))
 
-    assert apiobj.api.reverse((10.0, 10.0), max_rank=30).place_id == 991
-    assert apiobj.api.reverse((10.0, 10.0), max_rank=27).place_id == 990
-    assert apiobj.api.reverse((10.0, 10.00001), max_rank=30).place_id == 991
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((10.0, 10.0), max_rank=30).place_id == 991
+    assert api.reverse((10.0, 10.0), max_rank=27).place_id == 990
+    assert api.reverse((10.0, 10.00001), max_rank=30).place_id == 991
 
 
-def test_reverse_housenumber_interpolation(apiobj):
+def test_reverse_housenumber_interpolation(apiobj, frontend):
     apiobj.add_placex(place_id=990, class_='highway', type='service',
                       rank_search=27, rank_address=27,
                       name = {'name': 'My Street'},
@@ -138,10 +146,11 @@ def test_reverse_housenumber_interpolation(apiobj):
                        centroid=(10.0, 10.00001),
                        geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
 
-    assert apiobj.api.reverse((10.0, 10.0)).place_id == 992
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((10.0, 10.0)).place_id == 992
 
 
-def test_reverse_housenumber_point_interpolation(apiobj):
+def test_reverse_housenumber_point_interpolation(apiobj, frontend):
     apiobj.add_placex(place_id=990, class_='highway', type='service',
                       rank_search=27, rank_address=27,
                       name = {'name': 'My Street'},
@@ -153,12 +162,13 @@ def test_reverse_housenumber_point_interpolation(apiobj):
                        centroid=(10.0, 10.00001),
                        geometry='POINT(10.0 10.00001)')
 
-    res = apiobj.api.reverse((10.0, 10.0))
+    api = frontend(apiobj, options=API_OPTIONS)
+    res = api.reverse((10.0, 10.0))
     assert res.place_id == 992
     assert res.housenumber == '42'
 
 
-def test_reverse_tiger_number(apiobj):
+def test_reverse_tiger_number(apiobj, frontend):
     apiobj.add_placex(place_id=990, class_='highway', type='service',
                       rank_search=27, rank_address=27,
                       name = {'name': 'My Street'},
@@ -171,11 +181,12 @@ def test_reverse_tiger_number(apiobj):
                      centroid=(10.0, 10.00001),
                      geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
 
-    assert apiobj.api.reverse((10.0, 10.0)).place_id == 992
-    assert apiobj.api.reverse((10.0, 10.00001)).place_id == 992
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((10.0, 10.0)).place_id == 992
+    assert api.reverse((10.0, 10.00001)).place_id == 992
 
 
-def test_reverse_point_tiger(apiobj):
+def test_reverse_point_tiger(apiobj, frontend):
     apiobj.add_placex(place_id=990, class_='highway', type='service',
                       rank_search=27, rank_address=27,
                       name = {'name': 'My Street'},
@@ -188,12 +199,13 @@ def test_reverse_point_tiger(apiobj):
                      centroid=(10.0, 10.00001),
                      geometry='POINT(10.0 10.00001)')
 
-    res = apiobj.api.reverse((10.0, 10.0))
+    api = frontend(apiobj, options=API_OPTIONS)
+    res = api.reverse((10.0, 10.0))
     assert res.place_id == 992
     assert res.housenumber == '1'
 
 
-def test_reverse_low_zoom_address(apiobj):
+def test_reverse_low_zoom_address(apiobj, frontend):
     apiobj.add_placex(place_id=1001, class_='place', type='house',
                       housenumber='1',
                       rank_address=30,
@@ -207,11 +219,12 @@ def test_reverse_low_zoom_address(apiobj):
                       geometry="""POLYGON((59.3 80.70001, 59.3001 80.70001,
                                         59.3001 80.70101, 59.3 80.70101, 59.3 80.70001))""")
 
-    assert apiobj.api.reverse((59.30005, 80.7005)).place_id == 1001
-    assert apiobj.api.reverse((59.30005, 80.7005), max_rank=18).place_id == 1002
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((59.30005, 80.7005)).place_id == 1001
+    assert api.reverse((59.30005, 80.7005), max_rank=18).place_id == 1002
 
 
-def test_reverse_place_node_in_area(apiobj):
+def test_reverse_place_node_in_area(apiobj, frontend):
     apiobj.add_placex(place_id=1002, class_='place', type='town',
                       name={'name': 'Town Area'},
                       rank_address=16,
@@ -226,7 +239,8 @@ def test_reverse_place_node_in_area(apiobj):
                       rank_search=18,
                       centroid=(59.30004, 80.70055))
 
-    assert apiobj.api.reverse((59.30004, 80.70055)).place_id == 1003
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((59.30004, 80.70055)).place_id == 1003
 
 
 @pytest.mark.parametrize('layer,place_id', [(napi.DataLayer.MANMADE, 225),
@@ -234,7 +248,7 @@ def test_reverse_place_node_in_area(apiobj):
                                             (napi.DataLayer.NATURAL, 227),
                                             (napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225),
                                             (napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225)])
-def test_reverse_larger_area_layers(apiobj, layer, place_id):
+def test_reverse_larger_area_layers(apiobj, frontend, layer, place_id):
     apiobj.add_placex(place_id=225, class_='man_made', type='dam',
                       name={'name': 'Dam'},
                       rank_address=0,
@@ -251,17 +265,19 @@ def test_reverse_larger_area_layers(apiobj, layer, place_id):
                       rank_search=16,
                       centroid=(1.3, 0.70005))
 
-    assert apiobj.api.reverse((1.3, 0.7), layers=layer).place_id == place_id
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((1.3, 0.7), layers=layer).place_id == place_id
 
 
-def test_reverse_country_lookup_no_objects(apiobj):
+def test_reverse_country_lookup_no_objects(apiobj, frontend):
     apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
 
-    assert apiobj.api.reverse((0.5, 0.5)) is None
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((0.5, 0.5)) is None
 
 
 @pytest.mark.parametrize('rank', [4, 30])
-def test_reverse_country_lookup_country_only(apiobj, rank):
+def test_reverse_country_lookup_country_only(apiobj, frontend, rank):
     apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
     apiobj.add_placex(place_id=225, class_='place', type='country',
                       name={'name': 'My Country'},
@@ -270,10 +286,11 @@ def test_reverse_country_lookup_country_only(apiobj, rank):
                       country_code='xx',
                       centroid=(0.7, 0.7))
 
-    assert apiobj.api.reverse((0.5, 0.5), max_rank=rank).place_id == 225
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((0.5, 0.5), max_rank=rank).place_id == 225
 
 
-def test_reverse_country_lookup_place_node_inside(apiobj):
+def test_reverse_country_lookup_place_node_inside(apiobj, frontend):
     apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
     apiobj.add_placex(place_id=225, class_='place', type='state',
                       osm_type='N',
@@ -283,11 +300,12 @@ def test_reverse_country_lookup_place_node_inside(apiobj):
                       country_code='xx',
                       centroid=(0.5, 0.505))
 
-    assert apiobj.api.reverse((0.5, 0.5)).place_id == 225
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((0.5, 0.5)).place_id == 225
 
 
 @pytest.mark.parametrize('gtype', list(napi.GeometryFormat))
-def test_reverse_geometry_output_placex(apiobj, gtype):
+def test_reverse_geometry_output_placex(apiobj, frontend, gtype):
     apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
     apiobj.add_placex(place_id=1001, class_='place', type='house',
                       housenumber='1',
@@ -302,34 +320,37 @@ def test_reverse_geometry_output_placex(apiobj, gtype):
                       country_code='xx',
                       centroid=(0.5, 0.5))
 
-    assert apiobj.api.reverse((59.3, 80.70001), geometry_output=gtype).place_id == 1001
-    assert apiobj.api.reverse((0.5, 0.5), geometry_output=gtype).place_id == 1003
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((59.3, 80.70001), geometry_output=gtype).place_id == 1001
+    assert api.reverse((0.5, 0.5), geometry_output=gtype).place_id == 1003
 
 
-def test_reverse_simplified_geometry(apiobj):
+def test_reverse_simplified_geometry(apiobj, frontend):
     apiobj.add_placex(place_id=1001, class_='place', type='house',
                       housenumber='1',
                       rank_address=30,
                       rank_search=30,
                       centroid=(59.3, 80.70001))
 
+    api = frontend(apiobj, options=API_OPTIONS)
     details = dict(geometry_output=napi.GeometryFormat.GEOJSON,
                    geometry_simplification=0.1)
-    assert apiobj.api.reverse((59.3, 80.70001), **details).place_id == 1001
+    assert api.reverse((59.3, 80.70001), **details).place_id == 1001
 
 
-def test_reverse_interpolation_geometry(apiobj):
+def test_reverse_interpolation_geometry(apiobj, frontend):
     apiobj.add_osmline(place_id=992,
                        parent_place_id=990,
                        startnumber=1, endnumber=3, step=1,
                        centroid=(10.0, 10.00001),
                        geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
 
-    assert apiobj.api.reverse((10.0, 10.0), geometry_output=napi.GeometryFormat.TEXT)\
+    api = frontend(apiobj, options=API_OPTIONS)
+    assert api.reverse((10.0, 10.0), geometry_output=napi.GeometryFormat.TEXT)\
                      .geometry['text'] == 'POINT(10 10.00001)'
 
 
-def test_reverse_tiger_geometry(apiobj):
+def test_reverse_tiger_geometry(apiobj, frontend):
     apiobj.add_placex(place_id=990, class_='highway', type='service',
                       rank_search=27, rank_address=27,
                       name = {'name': 'My Street'},
@@ -342,7 +363,8 @@ def test_reverse_tiger_geometry(apiobj):
                      centroid=(10.0, 10.00001),
                      geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
 
-    output = apiobj.api.reverse((10.0, 10.0),
+    api = frontend(apiobj, options=API_OPTIONS)
+    output = api.reverse((10.0, 10.0),
                                 geometry_output=napi.GeometryFormat.GEOJSON).geometry['geojson']
 
     assert json.loads(output) == {'coordinates': [10, 10.00001], 'type': 'Point'}
index 036a235c9a0181d4b2a92fa853fb9215d9eff9d3..2acde2ecb49ee23f3f2c1722112f7dd1e43d0224 100644 (file)
@@ -14,8 +14,9 @@ import pytest
 from nominatim.version import NOMINATIM_VERSION, NominatimVersion
 import nominatim.api as napi
 
-def test_status_no_extra_info(apiobj):
-    result = apiobj.api.status()
+def test_status_no_extra_info(apiobj, frontend):
+    api = frontend(apiobj)
+    result = api.status()
 
     assert result.status == 0
     assert result.message == 'OK'
@@ -24,14 +25,15 @@ def test_status_no_extra_info(apiobj):
     assert result.data_updated is None
 
 
-def test_status_full(apiobj):
+def test_status_full(apiobj, frontend):
     import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0, tzinfo=dt.timezone.utc)
     apiobj.add_data('import_status',
                     [{'lastimportdate': import_date}])
     apiobj.add_data('properties',
                     [{'property': 'database_version', 'value': '99.5.4-2'}])
 
-    result = apiobj.api.status()
+    api = frontend(apiobj)
+    result = api.status()
 
     assert result.status == 0
     assert result.message == 'OK'