From 1b7c8240baeb383a2481f895fc85a74aaa16e94e Mon Sep 17 00:00:00 2001
From: Sarah Hoffmann
Date: Tue, 5 Dec 2023 12:22:00 +0100
Subject: [PATCH] enable connection pools for sqlite

Connecting is reasonably expensive because the spatialite extension
needs to be loaded. Disable pooling for tests because there is some
memory leak when quickly opening and closing QueuePools with sqlite
connections.
---
 nominatim/api/core.py       | 42 ++++++++++++++++++++++---------------
 test/python/api/conftest.py |  3 ++-
 2 files changed, 27 insertions(+), 18 deletions(-)

diff --git a/nominatim/api/core.py b/nominatim/api/core.py
index c8045c2d..b2624227 100644
--- a/nominatim/api/core.py
+++ b/nominatim/api/core.py
@@ -84,6 +84,14 @@ class NominatimAPIAsync: #pylint: disable=too-many-instance-attributes
             extra_args: Dict[str, Any] = {'future': True,
                                           'echo': self.config.get_bool('DEBUG_SQL')}
 
+            if self.config.get_int('API_POOL_SIZE') == 0:
+                extra_args['poolclass'] = sa.pool.NullPool
+            else:
+                extra_args['poolclass'] = sa.pool.QueuePool
+                extra_args['max_overflow'] = 0
+                extra_args['pool_size'] = self.config.get_int('API_POOL_SIZE')
+
+
             is_sqlite = self.config.DATABASE_DSN.startswith('sqlite:')
 
             if is_sqlite:
@@ -105,28 +113,12 @@ class NominatimAPIAsync: #pylint: disable=too-many-instance-attributes
                                host=dsn.get('host'),
                                port=int(dsn['port']) if 'port' in dsn else None,
                                query=query)
-                extra_args['max_overflow'] = 0
-                extra_args['pool_size'] = self.config.get_int('API_POOL_SIZE')
 
             engine = sa_asyncio.create_async_engine(dburl, **extra_args)
 
-            try:
-                async with engine.begin() as conn:
-                    result = await conn.scalar(sa.text('SHOW server_version_num'))
-                    server_version = int(result)
-            except (PGCORE_ERROR, sa.exc.OperationalError):
+            if is_sqlite:
                 server_version = 0
 
-            if server_version >= 110000 and not is_sqlite:
-                @sa.event.listens_for(engine.sync_engine, "connect")
-                def _on_connect(dbapi_con: Any, _: Any) -> None:
-                    cursor = dbapi_con.cursor()
-                    cursor.execute("SET jit_above_cost TO '-1'")
-                    cursor.execute("SET max_parallel_workers_per_gather TO '0'")
-                # Make sure that all connections get the new settings
-                await self.close()
-
-            if is_sqlite:
                 @sa.event.listens_for(engine.sync_engine, "connect")
                 def _on_sqlite_connect(dbapi_con: Any, _: Any) -> None:
                     dbapi_con.run_async(lambda conn: conn.enable_load_extension(True))
@@ -134,6 +126,22 @@ class NominatimAPIAsync: #pylint: disable=too-many-instance-attributes
                     cursor.execute("SELECT load_extension('mod_spatialite')")
                     cursor.execute('SELECT SetDecimalPrecision(7)')
                     dbapi_con.run_async(lambda conn: conn.enable_load_extension(False))
+            else:
+                try:
+                    async with engine.begin() as conn:
+                        result = await conn.scalar(sa.text('SHOW server_version_num'))
+                        server_version = int(result)
+                except (PGCORE_ERROR, sa.exc.OperationalError):
+                    server_version = 0
+
+                if server_version >= 110000:
+                    @sa.event.listens_for(engine.sync_engine, "connect")
+                    def _on_connect(dbapi_con: Any, _: Any) -> None:
+                        cursor = dbapi_con.cursor()
+                        cursor.execute("SET jit_above_cost TO '-1'")
+                        cursor.execute("SET max_parallel_workers_per_gather TO '0'")
+                    # Make sure that all connections get the new settings
+                    await engine.dispose()
 
             self._property_cache['DB:server_version'] = server_version
 
diff --git a/test/python/api/conftest.py b/test/python/api/conftest.py
index cb7f324a..8f0604d4 100644
--- a/test/python/api/conftest.py
+++ b/test/python/api/conftest.py
@@ -198,7 +198,8 @@ def frontend(request, event_loop, tmp_path):
                                                              db, options))
         return napi.NominatimAPI(Path('/invalid'),
                                  {'NOMINATIM_DATABASE_DSN': f"sqlite:dbname={db}",
-                                  'NOMINATIM_USE_US_TIGER_DATA': 'yes'})
+                                  'NOMINATIM_USE_US_TIGER_DATA': 'yes',
+                                  'NOMINATIM_API_POOL_SIZE': '0'})
     elif request.param == 'postgres_db':
         def mkapi(apiobj, options=None):
             return apiobj.api
-- 
2.39.5