--- /dev/null
+# https://github.com/codespell-project/codespell
+
+[codespell]
+skip = ./man/nominatim.1,data,./docs/styles.css,lib-php,module,munin,osm2pgsql,./test,./settings/*.lua,./settings/*.yaml,./settings/**/*.yaml,./settings/icu-rules,./nominatim/tokenizer/token_analysis/config_variants.py
+# Words in ignore-words-list need to be lowercase
+# Unter = Unter den Linden (an example address)
+ignore-words-list = inout,unter
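+#
+# Illustrative local run (assumes codespell is installed and picks up this
+# configuration file automatically when invoked from the repository root):
+#   codespell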
needs: create-archive
runs-on: ubuntu-20.04
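+ # Run the full-import check against more than one PostgreSQL major version.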
+ strategy:
+ matrix:
+ postgresql: ["13", "16"]
+
steps:
- uses: actions/download-artifact@v4
with:
- uses: ./Nominatim/.github/actions/setup-postgresql
with:
- postgresql-version: 13
+ postgresql-version: ${{ matrix.postgresql }}
postgis-version: 3
- name: Install Postgresql server dev
- run: sudo apt-get install postgresql-server-dev-13
+ run: sudo apt-get install postgresql-server-dev-$PGVER
+ env:
+ PGVER: ${{ matrix.postgresql }}
- uses: ./Nominatim/.github/actions/build-nominatim
with:
- name: Check full import
run: nominatim admin --check-database
+
+ codespell:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: codespell-project/actions-codespell@v2
+ with:
+ only_warn: 1
project(nominatim)
set(NOMINATIM_VERSION_MAJOR 4)
-set(NOMINATIM_VERSION_MINOR 3)
+set(NOMINATIM_VERSION_MINOR 4)
set(NOMINATIM_VERSION_PATCH 0)
set(NOMINATIM_VERSION "${NOMINATIM_VERSION_MAJOR}.${NOMINATIM_VERSION_MINOR}.${NOMINATIM_VERSION_PATCH}")
+4.4.0
+ * add export to SQLite database and SQLite support for the frontend
+ * switch to Python frontend as the default frontend
+ * update to osm2pgsql 1.11.0
+ * add support for new osm2pgsql middle table format
+ * simplify geometry for large polygon objects not used in addresses
+ * various performance tweaks for search in Python frontend
+ * fix regression in search with categories where it was confused with near
+ search
+ * partially roll back use of SQLAlchemy lambda statements due to bugs
+ in SQLAlchemy
+ * fix handling of timezones for timestamps from the database
+ * fix handling of full address searches in connection with a viewbox
+ * fix postcode computation of highway areas
+ * fix handling of timeout errors for Python <= 3.10
+ * fix address computation for postcode areas
+ * fix variable shadowing in osm2pgsql flex script, causing bugs with LuaJIT
+ * make sure extratags are always null when empty
+ * reduce importance of places without wikipedia reference
+ * improve performance of word count computations
+ * drop support for wikipedia tags with full URLs
+ * replace get_addressdata() SQL implementation with a Python function
+ * improve display name for non-address features
+ * fix postcode validation for postcodes with country code
+ (thanks @pawel-wroniszewski)
+ * add possibility to run imports without superuser database rights
+ (thanks @robbe-haesendonck)
+ * new CLI command for cleaning deleted relations (thanks @lujoh)
+ * add check for database version in the CLI check command
+ * updates to import styles ignoring more unused objects
+ * various typo fixes (thanks @kumarUjjawal)
+
+4.3.2
+ * fix potential SQL injection issue for 'nominatim admin --collect-os-info'
+ * PHP frontend: fix on-the-fly lookup of postcode areas near boundaries
+ * Python frontend: improve handling of viewbox
+ * Python frontend: correct deployment instructions
+
+4.3.1
+ * reintroduce result rematching
+ * improve search of multi-part names
+ * fix accidentally switched meaning of --reverse-only and --search-only in
+ warm command
+
4.3.0
* fix failing importance recalculation command
* fix merging of linked names into unnamed boundaries
| Version | End of support for security updates |
| ------- | ----------------------------------- |
+| 4.4.x | 2026-03-07 |
| 4.3.x | 2025-09-07 |
| 4.2.x | 2024-11-24 |
| 4.1.x | 2024-08-05 |
ON placex USING BTREE (parent_place_id) {{db.tablespace.search_index}}
WHERE parent_place_id IS NOT NULL;
---
+-- Used to find postcode areas after a search in location_postcode.
+CREATE INDEX IF NOT EXISTS idx_placex_postcode_areas
+ ON placex USING BTREE (country_code, postcode) {{db.tablespace.search_index}}
+ WHERE osm_type = 'R' AND class = 'boundary' AND type = 'postal_code';
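+-- Illustrative form of the lookup this index serves (values are made up):
+--   SELECT place_id FROM placex
+--    WHERE country_code = 'de' AND postcode = '12345'
+--      AND osm_type = 'R' AND class = 'boundary' AND type = 'postal_code';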
+---
CREATE INDEX IF NOT EXISTS idx_placex_geometry ON placex
USING GIST (geometry) {{db.tablespace.search_index}};
-- Index is needed during import but can be dropped as soon as a full
# just use the pgxs makefile
-foreach(suffix ${PostgreSQL_ADDITIONAL_VERSIONS} "15" "14" "13" "12" "11" "10" "9.6")
+foreach(suffix ${PostgreSQL_ADDITIONAL_VERSIONS} "16" "15" "14" "13" "12" "11" "10" "9.6")
list(APPEND PG_CONFIG_HINTS
"/usr/pgsql-${suffix}/bin")
endforeach()
#include "mb/pg_wchar.h"
#include <utfasciitable.h>
-#ifdef PG_MODULE_MAGIC
-PG_MODULE_MAGIC;
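+/* PostgreSQL 16 moved the varlena access macros (VARDATA, VARSIZE, ...) out of
+   postgres.h into the new varatt.h header, hence the explicit include. */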
+#if PG_MAJORVERSION_NUM > 15
+#include "varatt.h"
#endif
+PG_MODULE_MAGIC;
+
Datum transliteration( PG_FUNCTION_ARGS );
Datum gettokenstring( PG_FUNCTION_ARGS );
void str_replace(char* buffer, int* len, int* changes, char* from, int fromlen, char* to, int tolen, int);
# Copyright (C) 2023 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
-Convertion from token assignment to an abstract DB search.
+Conversion from token assignment to an abstract DB search.
"""
from typing import Optional, List, Tuple, Iterator, Dict
import heapq
results = nres.SearchResults()
for row in await conn.execute(sql, _details_to_bind_params(details)):
- result = nres.create_from_postcode_row(row, nres.SearchResult)
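+ # A postal_code boundary area covering the same postcode is preferred
+ # over the plain point from location_postcode, so look for one first.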
+ p = conn.t.placex
+ placex_sql = _select_placex(p).add_columns(p.c.importance)\
+ .where(sa.text("""class = 'boundary'
+ AND type = 'postal_code'
+ AND osm_type = 'R'"""))\
+ .where(p.c.country_code == row.country_code)\
+ .where(p.c.postcode == row.postcode)\
+ .limit(1)
+ for prow in await conn.execute(placex_sql, _details_to_bind_params(details)):
+ result = nres.create_from_placex_row(prow, nres.SearchResult)
+ break
+ else:
+ result = nres.create_from_postcode_row(row, nres.SearchResult)
+
assert result
- result.accuracy = row.accuracy
- results.append(result)
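+ # Skip any results the request explicitly excluded.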
+ if result.place_id not in details.excluded:
+ result.accuracy = row.accuracy
+ results.append(result)
return results
""" Determine the database date and set the status accordingly.
"""
with connect(dsn) as conn:
- if not offline:
- try:
- dbdate = status.compute_database_date(conn)
- status.set_status(conn, dbdate)
- LOG.info('Database is at %s.', dbdate)
- except Exception as exc: # pylint: disable=broad-except
- LOG.error('Cannot determine date of database: %s', exc)
-
properties.set_property(conn, 'database_version', str(NOMINATIM_VERSION))
+
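+ # The database date is informational only; a failure to determine it is
+ # logged but does not abort the operation.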
+ try:
+ dbdate = status.compute_database_date(conn, offline)
+ status.set_status(conn, dbdate)
+ LOG.info('Database is at %s.', dbdate)
+ except Exception as exc: # pylint: disable=broad-except
+ LOG.error('Cannot determine date of database: %s', exc)
def get_pg_env(dsn: str,
base_env: Optional[SysEnv] = None) -> Dict[str, str]:
""" Return a copy of `base_env` with the environment variables for
- PostgresSQL set up from the given database connection string.
+ PostgreSQL set up from the given database connection string.
If `base_env` is None, then the OS environment is used as a base
environment.
"""
indexed: Optional[bool]
-def compute_database_date(conn: Connection) -> dt.datetime:
+def compute_database_date(conn: Connection, offline: bool = False) -> dt.datetime:
""" Determine the date of the database from the newest object in the
data base.
"""
- # First, find the node with the highest ID in the database
+ # If there is a date from osm2pgsql available, use that.
+ if conn.table_exists('osm2pgsql_properties'):
+ with conn.cursor() as cur:
+ cur.execute(""" SELECT value FROM osm2pgsql_properties
+ WHERE property = 'current_timestamp' """)
+ row = cur.fetchone()
+ if row is not None:
+ return dt.datetime.strptime(row[0], "%Y-%m-%dT%H:%M:%SZ")\
+ .replace(tzinfo=dt.timezone.utc)
+
+ if offline:
+ raise UsageError("Cannot determine database date from data in offline mode.")
+
+ # Otherwise, find the node with the highest ID in the database
with conn.cursor() as cur:
if conn.table_exists('place'):
osmid = cur.scalar("SELECT max(osm_id) FROM place WHERE osm_type='N'")
WHERE rank_address between 4 and 25 AND type != 'postcode'
AND name is not null AND linked_place_id is null AND osm_type = 'N'
""")
+
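+# Applied via 'nominatim admin --migrate' to databases created with older versions.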
+@_migration(4, 4, 99, 0)
+def create_postcode_area_lookup_index(conn: Connection, **_: Any) -> None:
+ """ Create index needed for looking up postcode areas from postcode points.
+ """
+ with conn.cursor() as cur:
+ cur.execute("""CREATE INDEX IF NOT EXISTS idx_placex_postcode_areas
+ ON placex USING BTREE (country_code, postcode)
+ WHERE osm_type = 'R' AND class = 'boundary' AND type = 'postal_code'
+ """)
return f"{self.major}.{self.minor}.{self.patch_level}-{self.db_patch_level}"
-NOMINATIM_VERSION = NominatimVersion(4, 3, 0, 0)
+NOMINATIM_VERSION = NominatimVersion(4, 4, 99, 0)
POSTGRESQL_REQUIRED_VERSION = (9, 6)
POSTGIS_REQUIRED_VERSION = (2, 2)
# Tablespace for indexes used during address computation. Used for import and update only.
NOMINATIM_TABLESPACE_ADDRESS_INDEX=
-# Tablespace for tables for auxilary data, e.g. TIGER data, postcodes.
+# Tablespace for tables for auxiliary data, e.g. TIGER data, postcodes.
NOMINATIM_TABLESPACE_AUX_DATA=
-# Tablespace for indexes for auxilary data, e.g. TIGER data, postcodes.
+# Tablespace for indexes for auxiliary data, e.g. TIGER data, postcodes.
NOMINATIM_TABLESPACE_AUX_INDEX=
# Threshold for searches by name only.
# Threshold where the lookup strategy in the database is switched. If there
-# are less occurences of a tem than given, the search does the lookup only
+# are fewer occurrences of a term than given, the search does the lookup only
# against the name, otherwise it uses indexes for name and address.
NOMINATIM_SEARCH_NAME_ONLY_THRESHOLD=500
NOMINATIM_QUERY_TIMEOUT=10
# Maximum time a single request is allowed to take. When the timeout is
-# exceeeded, the available results are returned.
-# When empty, then timouts are disabled.
+# exceeded, the available results are returned.
+# When empty, timeouts are disabled.
NOMINATIM_REQUEST_TIMEOUT=60
# Search elements just within countries
Feature: Object details
Check details page for correctness
- Scenario: Details by place ID
- When sending details query for 107077
- Then the result is valid json
- And results contain
- | place_id |
- | 107077 |
-
-
Scenario Outline: Details via OSM id
When sending details query for <type><id>
Then the result is valid json
| type | display_name |
| postcode | E4 7EA, United Kingdom |
+
+ @fail-legacy
+ @v1-api-python-only
+ Scenario: Postcode areas are preferred over postcode points
+ Given the grid with origin DE
+ | 1 | 2 |
+ | 4 | 3 |
+ Given the places
+ | osm | class | type | postcode | geometry |
+ | R23 | boundary | postal_code | 12345 | (1,2,3,4,1) |
+ When importing
+ Then location_postcode contains exactly
+ | country | postcode |
+ | de | 12345 |
+ When sending search query "12345, de"
+ Then results contain
+ | osm |
+ | R23 |
pass
+@pytest.mark.parametrize('offline', [True, False])
+def test_compute_database_date_from_osm2pgsql(table_factory, temp_db_conn, offline):
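+ # Recent osm2pgsql versions record the data timestamp in the
+ # osm2pgsql_properties table; it is usable in both online and offline mode.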
+ table_factory('osm2pgsql_properties', 'property TEXT, value TEXT',
+ content=(('current_timestamp', '2024-01-03T23:45:54Z'), ))
+
+ date = nominatim.db.status.compute_database_date(temp_db_conn, offline=offline)
+ assert date == iso_date('2024-01-03T23:45:54')
+
+
+def test_compute_database_date_from_osm2pgsql_nodata(table_factory, temp_db_conn):
+ table_factory('osm2pgsql_properties', 'property TEXT, value TEXT')
+
+ with pytest.raises(UsageError, match='Cannot determine database date from data in offline mode'):
+ nominatim.db.status.compute_database_date(temp_db_conn, offline=True)
+
+
def test_compute_database_date_place_empty(place_table, temp_db_conn):
with pytest.raises(UsageError):
nominatim.db.status.compute_database_date(temp_db_conn)