X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/a21d4d3ac483d34355efae76f77b01ad3b48407a..e0aea0f27a2f6b3475c82763ef7afbb1dabc6517:/nominatim/tools/database_import.py?ds=inline

diff --git a/nominatim/tools/database_import.py b/nominatim/tools/database_import.py
index eda01013..cb620d41 100644
--- a/nominatim/tools/database_import.py
+++ b/nominatim/tools/database_import.py
@@ -75,6 +75,11 @@ def setup_database_skeleton(dsn: str, rouser: Optional[str] = None) -> None:
         with conn.cursor() as cur:
             cur.execute('CREATE EXTENSION IF NOT EXISTS hstore')
             cur.execute('CREATE EXTENSION IF NOT EXISTS postgis')
+
+            postgis_version = conn.postgis_version_tuple()
+            if postgis_version[0] >= 3:
+                cur.execute('CREATE EXTENSION IF NOT EXISTS postgis_raster')
+
         conn.commit()
 
         _require_version('PostGIS',
@@ -82,7 +87,7 @@ def setup_database_skeleton(dsn: str, rouser: Optional[str] = None) -> None:
                          POSTGIS_REQUIRED_VERSION)
 
 
-def import_osm_data(osm_files: Union[str, Sequence[str]],
+def import_osm_data(osm_files: Union[Path, Sequence[Path]],
                     options: MutableMapping[str, Any],
                     drop: bool = False, ignore_errors: bool = False) -> None:
     """ Import the given OSM files. 'options' contains the list of
@@ -95,7 +100,7 @@ def import_osm_data(osm_files: Union[str, Sequence[str]],
     if not options['flatnode_file'] and options['osm2pgsql_cache'] == 0:
         # Make some educated guesses about cache size based on the size
         # of the import file and the available memory.
-        mem = psutil.virtual_memory() # type: ignore[no-untyped-call]
+        mem = psutil.virtual_memory()
         fsize = 0
         if isinstance(osm_files, list):
             for fname in osm_files:
@@ -225,7 +230,8 @@ def load_data(dsn: str, threads: int) -> None:
         cur.execute('ANALYSE')
 
 
-def create_search_indices(conn: Connection, config: Configuration, drop: bool = False) -> None:
+def create_search_indices(conn: Connection, config: Configuration,
+                          drop: bool = False, threads: int = 1) -> None:
     """ Create tables that have explicit partitioning.
     """
 
@@ -243,4 +249,5 @@ def create_search_indices(conn: Connection, config: Configuration, drop: bool =
 
     sql = SQLPreprocessor(conn, config)
 
-    sql.run_sql_file(conn, 'indices.sql', drop=drop)
+    sql.run_parallel_sql_file(config.get_libpq_dsn(),
+                              'indices.sql', min(8, threads), drop=drop)
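
The first hunk gates creation of the postgis_raster extension on the installed PostGIS major version, since raster support was split out of the core postgis extension with PostGIS 3. The following is a minimal sketch of the same check using plain psycopg2 rather than Nominatim's connection wrapper; the function name ensure_postgis_extensions and the use of postgis_lib_version() are illustrative assumptions, not the project's actual helpers.

import psycopg2

def ensure_postgis_extensions(dsn: str) -> None:
    # Illustrative only: Nominatim reads the version through its own
    # conn.postgis_version_tuple() helper instead of issuing raw SQL.
    conn = psycopg2.connect(dsn)
    try:
        with conn.cursor() as cur:
            cur.execute('CREATE EXTENSION IF NOT EXISTS hstore')
            cur.execute('CREATE EXTENSION IF NOT EXISTS postgis')
            # postgis_lib_version() returns e.g. '3.3.2'; raster support
            # lives in a separate extension from PostGIS 3 onwards.
            cur.execute('SELECT postgis_lib_version()')
            major = int(cur.fetchone()[0].split('.')[0])
            if major >= 3:
                cur.execute('CREATE EXTENSION IF NOT EXISTS postgis_raster')
        conn.commit()
    finally:
        conn.close()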
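
The last two hunks thread a worker count through create_search_indices() and hand index creation to run_parallel_sql_file() with at most eight workers. The sketch below shows only the general idea of fanning independent SQL statements out over several connections; it is not Nominatim's SQLPreprocessor, and the function and parameter names are made up for illustration.

from concurrent.futures import ThreadPoolExecutor
from typing import Sequence

import psycopg2

def run_statements_parallel(dsn: str, statements: Sequence[str],
                            threads: int = 1) -> None:
    # Each worker opens its own connection so long-running CREATE INDEX
    # statements do not serialise on a single session.
    def _run(stmt: str) -> None:
        conn = psycopg2.connect(dsn)
        try:
            with conn.cursor() as cur:
                cur.execute(stmt)
            conn.commit()
        finally:
            conn.close()

    # Mirror the diff's cap of min(8, threads) parallel workers.
    with ThreadPoolExecutor(max_workers=min(8, threads)) as pool:
        # Consume the iterator so exceptions raised in workers propagate.
        list(pool.map(_run, statements))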