README.md (1 addition, 0 deletions)
@@ -24,6 +24,7 @@ Environment variables
 | `TENANT_HEADER` | `<empty>` | The name of the HTTP header which contains the tenant name for multi-tenant setups. |
 | `TENANT_PATH_PREFIX` | `@service_prefix@/@tenant@` | URL path prefix for all QWC services for multi-tenant setups. |
 | `TENANT_ACCESS_COOKIE_PATH` | `<tenant_path_prefix>` | Path for which the access cookie is valid for multi-tenant setups. |
+| `ENABLE_POOLING` | `False` | Enable DB connection pooling. Defaults are used if the following environment variables are not set. |
 | `POOL_SIZE` | `5` | Maximum number of database connections kept in the pool. |
 | `MAX_OVERFLOW` | `10` | Additional connections beyond `pool_size` during peak load. |
 | `POOL_TIMEOUT` | `30` | Time (in seconds) to wait for a connection to become available. |
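As an aside, here is a minimal sketch of how the new `ENABLE_POOLING` flag is interpreted, mirroring the check added in `qwc_services_core/database.py` below; the environment value set here is purely illustrative:

```python
import os

# Illustrative value only: any setting other than "t" or "true"
# (case-insensitive), including leaving the variable unset,
# keeps pooling disabled.
os.environ['ENABLE_POOLING'] = 'true'

enable_pooling = os.environ.get('ENABLE_POOLING', 'False').lower() in ('t', 'true')
print(enable_pooling)  # True
```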
qwc_services_core/database.py (17 additions, 12 deletions)
@@ -18,21 +18,26 @@ def db_engine(self, conn_str):
         see https://docs.sqlalchemy.org/en/latest/core/engines.html#postgresql
         """
 
-        db_pool_size = os.environ.get('POOL_SIZE', 5)
-        db_max_overflow = os.environ.get('MAX_OVERFLOW', 10)
-        db_pool_timeout = os.environ.get('POOL_TIMEOUT', 30)
-        db_pool_recycle = os.environ.get('POOL_RECYCLE', -1)
+        db_enable_pooling = os.environ.get('ENABLE_POOLING', 'False').lower() in ('t', 'true')
+        db_pool_size = int(os.environ.get('POOL_SIZE', 5))
+        db_max_overflow = int(os.environ.get('MAX_OVERFLOW', 10))
+        db_pool_timeout = int(os.environ.get('POOL_TIMEOUT', 30))
+        db_pool_recycle = int(os.environ.get('POOL_RECYCLE', -1))
 
         engine = self.engines.get(conn_str)
         if not engine:
-            engine = create_engine(
-                conn_str,
-                poolclass=QueuePool,
-                pool_size=db_pool_size,
-                max_overflow=db_max_overflow,
-                pool_timeout=db_pool_timeout,
-                pool_recycle=db_pool_recycle,
-                pool_pre_ping=True, echo=False)
+            if db_enable_pooling:
+                engine = create_engine(
+                    conn_str,
+                    poolclass=QueuePool,
+                    pool_size=db_pool_size,
+                    max_overflow=db_max_overflow,
+                    pool_timeout=db_pool_timeout,
+                    pool_recycle=db_pool_recycle,
+                    pool_pre_ping=True, echo=False)
+            else:
+                engine = create_engine(
+                    conn_str, pool_pre_ping=True, echo=False)
             self.engines[conn_str] = engine
         return engine
 
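A minimal usage sketch of the changed method, under stated assumptions: the enclosing class is exposed as `DatabaseEngine` (the class name is not visible in this hunk), and the connection string and environment values are placeholders:

```python
import os

# Placeholder pool settings; POOL_RECYCLE falls back to -1 (no recycling)
# when unset, matching the default in the diff above.
os.environ['ENABLE_POOLING'] = 'true'
os.environ['POOL_SIZE'] = '5'
os.environ['MAX_OVERFLOW'] = '10'
os.environ['POOL_TIMEOUT'] = '30'

# Assumes the class containing db_engine() is named DatabaseEngine.
from qwc_services_core.database import DatabaseEngine

db = DatabaseEngine()
# Placeholder connection string; any SQLAlchemy URL accepted by
# create_engine() works here.
engine = db.db_engine('postgresql:///?service=qwc_configdb')

# Repeated calls with the same connection string return the cached engine.
assert db.db_engine('postgresql:///?service=qwc_configdb') is engine
```

With `ENABLE_POOLING` unset or false, the `else` branch above creates the engine with only `pool_pre_ping=True` and `echo=False`, leaving pool sizing to SQLAlchemy's defaults.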