Preprocessing of SQL files.
"""
from typing import Set, Dict, Any, cast
+
import jinja2
-from .connection import Connection, server_version_tuple, postgis_version_tuple
-from .async_connection import WorkerPool
+from .connection import Connection
from ..config import Configuration
+from ..db.query_pool import QueryPool
+
def _get_partitions(conn: Connection) -> Set[int]:
    """ Get the set of partitions currently in use.
    """
    with conn.cursor() as cur:
        cur.execute('SELECT DISTINCT partition FROM country_name')
        return set((row[0] for row in list(cur)))
+
def _get_middle_db_format(conn: Connection, tables: Set[str]) -> str:
    """ Returns the version of the slim middle tables.
    """
    if 'osm2pgsql_properties' not in tables:
        return '1'

    with conn.cursor() as cur:
        cur.execute("SELECT value FROM osm2pgsql_properties WHERE property = 'db_format'")
        row = cur.fetchone()

        return cast(str, row[0]) if row is not None else '1'


def _setup_postgresql_features(conn: Connection) -> Dict[str, Any]:
    """ Set up a dictionary with various optional Postgresql/Postgis features that
        depend on the database version.
    """
-    pg_version = server_version_tuple(conn)
-    postgis_version = postgis_version_tuple(conn)
-    pg11plus = pg_version >= (11, 0, 0)
-    ps3 = postgis_version >= (3, 0)
-    return {
-        'has_index_non_key_column': pg11plus,
-        'spgist_geom' : 'SPGIST' if pg11plus and ps3 else 'GIST'
-    }
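+    # The version checks above are obsolete, so there are no optional
+    # features left to report.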
+    return {}
+
class SQLPreprocessor:
    """ An environment for preprocessing SQL files from the
        lib-sql directory.

        The preprocessor provides a number of default filters and variables.
        The variables may be overwritten when rendering an SQL file.
    """

    def __init__(self, conn: Connection, config: Configuration) -> None:
        self.env = jinja2.Environment(autoescape=False,
                                      loader=jinja2.FileSystemLoader(str(config.lib_dir.sql)))

        db_info: Dict[str, Any] = {}
        db_info['partitions'] = _get_partitions(conn)
        db_info['tables'] = _get_tables(conn)
        db_info['middle_db_format'] = _get_middle_db_format(conn, db_info['tables'])

        self.env.globals['config'] = config
        self.env.globals['db'] = db_info
        self.env.globals['postgres'] = _setup_postgresql_features(conn)
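+        # The globals are accessible inside the templates, e.g. as {{db.partitions}}.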
-
    def run_string(self, conn: Connection, template: str, **kwargs: Any) -> None:
        """ Execute the given SQL template string on the connection.
            The keyword arguments may supply additional parameters
            for preprocessing.
        """
        sql = self.env.from_string(template).render(**kwargs)

        with conn.cursor() as cur:
            cur.execute(sql)
        conn.commit()
-
    def run_sql_file(self, conn: Connection, name: str, **kwargs: Any) -> None:
        """ Execute the given SQL file on the connection. The keyword arguments
            may supply additional parameters for preprocessing.
        """
        sql = self.env.get_template(name).render(**kwargs)

        with conn.cursor() as cur:
            cur.execute(sql)
        conn.commit()
-
-    def run_parallel_sql_file(self, dsn: str, name: str, num_threads: int = 1,
-                              **kwargs: Any) -> None:
+    async def run_parallel_sql_file(self, dsn: str, name: str, num_threads: int = 1,
+                                    **kwargs: Any) -> None:
""" Execute the given SQL files using parallel asynchronous connections.
The keyword arguments may supply additional parameters for
preprocessing.
parts = sql.split('\n---\n')
-        with WorkerPool(dsn, num_threads) as pool:
+        async with QueryPool(dsn, num_threads) as pool:
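+            # QueryPool is assumed to keep `num_threads` open connections
+            # and to execute queued statements on the next free one.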
for part in parts:
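+                # No bind parameters are needed; each chunk is self-contained SQL.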
-                pool.next_free_worker().perform(part)
+                await pool.put_query(part, None)
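
A minimal usage sketch for the now-asynchronous method; the DSN, the file name
and the thread count are placeholder values:

    import asyncio

    def create_indices(sqlproc: 'SQLPreprocessor', dsn: str) -> None:
        # run_parallel_sql_file() is a coroutine now, so synchronous callers
        # must drive it through an event loop.
        coro = sqlproc.run_parallel_sql_file(dsn, 'indices.sql', num_threads=4)
        asyncio.run(coro)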