From 0325f3cafa7c27918e79cfa68a77b37e71a7e900 Mon Sep 17 00:00:00 2001 From: Artur Shiriev Date: Thu, 19 Jun 2025 10:38:50 +0300 Subject: [PATCH 1/4] init commit --- .github/workflows/ci.yml | 62 ++++++++++++++++++++++ .github/workflows/publish.yml | 20 ++++++++ .gitignore | 22 ++++++++ Dockerfile | 28 ++++++++++ Justfile | 33 ++++++++++++ docker-compose.yml | 26 ++++++++++ pg_tools/__init__.py | 14 +++++ pg_tools/connections.py | 79 ++++++++++++++++++++++++++++ pg_tools/decorators.py | 72 ++++++++++++++++++++++++++ pg_tools/helpers.py | 34 ++++++++++++ pg_tools/py.typed | 0 pg_tools/settings.py | 6 +++ pg_tools/transaction.py | 19 +++++++ pyproject.toml | 88 ++++++++++++++++++++++++++++++++ tests/__init__.py | 0 tests/conftest.py | 14 +++++ tests/test_connection_factory.py | 69 +++++++++++++++++++++++++ tests/test_decorators.py | 67 ++++++++++++++++++++++++ tests/test_helpers.py | 22 ++++++++ tests/test_transaction.py | 35 +++++++++++++ 20 files changed, 710 insertions(+) create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/publish.yml create mode 100644 .gitignore create mode 100644 Dockerfile create mode 100644 Justfile create mode 100644 docker-compose.yml create mode 100644 pg_tools/__init__.py create mode 100644 pg_tools/connections.py create mode 100644 pg_tools/decorators.py create mode 100644 pg_tools/helpers.py create mode 100644 pg_tools/py.typed create mode 100644 pg_tools/settings.py create mode 100644 pg_tools/transaction.py create mode 100644 pyproject.toml create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/test_connection_factory.py create mode 100644 tests/test_decorators.py create mode 100644 tests/test_helpers.py create mode 100644 tests/test_transaction.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..67a3ca2 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,62 @@ +name: main + +on: + push: + branches: + - main + pull_request: {} + +concurrency: + group: ${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: extractions/setup-just@v2 + - uses: astral-sh/setup-uv@v3 + with: + enable-cache: true + cache-dependency-glob: "**/pyproject.toml" + - run: uv python install 3.10 + - run: just install lint-ci + + pytest: + runs-on: ubuntu-latest + services: + postgres: + image: postgres:latest + env: + POSTGRES_DB: postgres + POSTGRES_PASSWORD: password + POSTGRES_USER: postgres + ports: + - 5432:5432 + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + steps: + - uses: actions/checkout@v3 + - uses: astral-sh/setup-uv@v3 + - run: uv python install 3.13 + - run: | + uv sync --all-extras --frozen --no-install-project + uv run alembic upgrade head + uv run pytest . --cov=. 
--cov-report xml + env: + PYTHONDONTWRITEBYTECODE: 1 + PYTHONUNBUFFERED: 1 + DB_DSN: postgresql+asyncpg://postgres:password@127.0.0.1/postgres + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4.0.1 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + with: + files: ./coverage.xml + flags: unittests + name: codecov-${{ matrix.python-version }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..c0699ac --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,20 @@ +name: Publish Package + +on: + release: + types: + - published + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: extractions/setup-just@v2 + - uses: astral-sh/setup-uv@v3 + with: + enable-cache: true + cache-dependency-glob: "**/pyproject.toml" + - run: just publish + env: + PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..068012f --- /dev/null +++ b/.gitignore @@ -0,0 +1,22 @@ +# Generic things +*.pyc +*~ +__pycache__/* +*.swp +*.sqlite3 +*.map +.vscode +.idea +.DS_Store +.env +.mypy_cache +.pytest_cache +.ruff_cache +.coverage +htmlcov/ +coverage.xml +pytest.xml +dist/ +.python-version +.venv +uv.lock diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..47087e6 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,28 @@ +FROM python:3.13-slim + +# required for psycopg2 +RUN apt update \ + && apt install -y --no-install-recommends \ + build-essential \ + libpq-dev \ + && apt clean \ + && rm -rf /var/lib/apt/lists/* + +COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/uv +RUN useradd --no-create-home --gid root runner + +ENV UV_PYTHON_PREFERENCE=only-system +ENV UV_NO_CACHE=true + +WORKDIR /code + +COPY pyproject.toml . +COPY uv.lock . + +RUN uv sync --all-extras --frozen + +COPY . . + +RUN chown -R runner:root /code && chmod -R g=u /code + +USER runner diff --git a/Justfile b/Justfile new file mode 100644 index 0000000..cbf8684 --- /dev/null +++ b/Justfile @@ -0,0 +1,33 @@ +default: install lint build test + +down: + docker compose down --remove-orphans + +sh: + docker compose run --service-ports application bash + +test *args: down && down + docker compose run application uv run pytest {{ args }} + + +build: + docker compose build application + +install: + uv lock --upgrade + uv sync --all-extras --frozen + +lint: + uv run --frozen ruff format + uv run --frozen ruff check --fix + uv run --frozen mypy . + +lint-ci: + uv run --frozen ruff format --check + uv run --frozen ruff check --no-fix + uv run --frozen mypy . + +publish: + rm -rf dist + uv build + uv publish --token $PYPI_TOKEN diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..f027435 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,26 @@ +services: + application: + build: + context: . 
+ dockerfile: ./Dockerfile + restart: always + volumes: + - .:/srv/www/ + depends_on: + db: + condition: service_healthy + environment: + - DB_DSN=postgresql+asyncpg://postgres:password@db/postgres + stdin_open: true + tty: true + + db: + image: postgres + restart: always + environment: + - POSTGRES_PASSWORD=password + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres -d postgres"] + interval: 1s + timeout: 5s + retries: 15 diff --git a/pg_tools/__init__.py b/pg_tools/__init__.py new file mode 100644 index 0000000..c4372da --- /dev/null +++ b/pg_tools/__init__.py @@ -0,0 +1,14 @@ +from pg_tools.connections import build_connection_factory +from pg_tools.decorators import postgres_reconnect, transaction_retry +from pg_tools.helpers import build_db_dsn, is_dsn_multihost +from pg_tools.transaction import Transaction + + +__all__ = [ + "Transaction", + "build_connection_factory", + "build_db_dsn", + "is_dsn_multihost", + "postgres_reconnect", + "transaction_retry", +] diff --git a/pg_tools/connections.py b/pg_tools/connections.py new file mode 100644 index 0000000..db2082f --- /dev/null +++ b/pg_tools/connections.py @@ -0,0 +1,79 @@ +import logging +import random +import typing +from operator import itemgetter + +import asyncpg +import sqlalchemy +from asyncpg.connect_utils import SessionAttribute +from sqlalchemy.dialects.postgresql.asyncpg import PGDialect_asyncpg + + +if typing.TYPE_CHECKING: + ConnectionType = asyncpg.Connection[typing.Any] + + +logger = logging.getLogger(__name__) + + +def build_connection_factory( + url: sqlalchemy.URL, + timeout: float, +) -> typing.Callable[[], typing.Awaitable["ConnectionType"]]: + connect_args: typing.Final[dict[str, typing.Any]] = PGDialect_asyncpg().create_connect_args(url)[1] # type: ignore[no-untyped-call] + raw_target_session_attrs: typing.Final[str | None] = connect_args.pop("target_session_attrs", None) + target_session_attrs: typing.Final[SessionAttribute | None] = ( + SessionAttribute(raw_target_session_attrs) if raw_target_session_attrs else None + ) + + raw_hosts: typing.Final[str | list[str]] = connect_args.pop("host") + raw_ports: typing.Final[int | list[int] | None] = connect_args.pop("port", None) + hosts_and_ports: list[tuple[str, int]] + hosts: str | list[str] + ports: int | list[int] | None + if isinstance(raw_hosts, list) and isinstance(raw_ports, list): + hosts_and_ports = list(zip(raw_hosts, raw_ports, strict=True)) + random.shuffle(hosts_and_ports) + hosts = list(map(itemgetter(0), hosts_and_ports)) + ports = list(map(itemgetter(1), hosts_and_ports)) + else: + hosts_and_ports = [] + hosts = raw_hosts + ports = raw_ports + + async def _connection_factory() -> "ConnectionType": + connection: ConnectionType + nonlocal hosts_and_ports + try: + connection = await asyncpg.connect( + **connect_args, + host=hosts, + port=ports, + timeout=timeout, + target_session_attrs=target_session_attrs, + ) + return connection # noqa: TRY300 + except TimeoutError: + if not hosts_and_ports: + raise + + logger.warning("Failed to fetch asyncpg connection. 
Trying host by host.") + + hosts_and_ports_copy: typing.Final = hosts_and_ports.copy() + random.shuffle(hosts_and_ports_copy) + for one_host, one_port in hosts_and_ports_copy: + try: + connection = await asyncpg.connect( + **connect_args, + host=one_host, + port=one_port, + timeout=timeout, + target_session_attrs=target_session_attrs, + ) + return connection # noqa: TRY300 + except (TimeoutError, OSError, asyncpg.TargetServerAttributeNotMatched) as exc: # noqa: PERF203 + logger.warning("Failed to fetch asyncpg connection from %s, %s", one_host, exc) + msg: typing.Final = f"None of the hosts match the target attribute requirement {target_session_attrs}" + raise asyncpg.TargetServerAttributeNotMatched(msg) + + return _connection_factory diff --git a/pg_tools/decorators.py b/pg_tools/decorators.py new file mode 100644 index 0000000..4dae560 --- /dev/null +++ b/pg_tools/decorators.py @@ -0,0 +1,72 @@ +import functools +import logging +import typing + +import asyncpg +import tenacity +from sqlalchemy.exc import DBAPIError + +from pg_tools import settings + + +P = typing.ParamSpec("P") +T = typing.TypeVar("T") +logger = logging.getLogger(__name__) + + +def _connection_retry_handler(exception: BaseException) -> bool: + if ( + isinstance(exception, DBAPIError) + and hasattr(exception, "orig") + and isinstance(exception.orig.__cause__, asyncpg.PostgresConnectionError) # type: ignore[union-attr] + ): + logger.debug("postgres_reconnect, backoff triggered") + return True + + logger.debug("postgres_reconnect, giving up on backoff") + return False + + +def postgres_reconnect(func: typing.Callable[P, typing.Awaitable[T]]) -> typing.Callable[P, typing.Awaitable[T]]: + @tenacity.retry( + stop=tenacity.stop_after_attempt(settings.DB_UTILS_CONNECTION_TRIES), + wait=tenacity.wait_exponential_jitter(), + retry=tenacity.retry_if_exception(_connection_retry_handler), + reraise=True, + before=tenacity.before_log(logger, logging.DEBUG), + ) + @functools.wraps(func) + async def wrapped_method(*args: P.args, **kwargs: P.kwargs) -> T: + return await func(*args, **kwargs) + + return wrapped_method + + +def _transaction_retry_handler(exception: BaseException) -> bool: + if ( + isinstance(exception, DBAPIError) + and hasattr(exception, "orig") + and isinstance(exception.orig.__cause__, asyncpg.SerializationError) # type: ignore[union-attr] + ): + logger.debug("transaction_retry, backoff triggered") + return True + + logger.debug("transaction_retry, giving up on backoff") + return False + + +def transaction_retry( + func: typing.Callable[P, typing.Coroutine[typing.Any, typing.Any, T]], +) -> typing.Callable[P, typing.Coroutine[typing.Any, typing.Any, T]]: + @tenacity.retry( + stop=tenacity.stop_after_attempt(settings.DB_UTILS_TRANSACTIONS_TRIES), + wait=tenacity.wait_exponential_jitter(), + retry=tenacity.retry_if_exception(_transaction_retry_handler), + reraise=True, + before=tenacity.before_log(logger, logging.DEBUG), + ) + @functools.wraps(func) + async def wrapped_method(*args: P.args, **kwargs: P.kwargs) -> T: + return await func(*args, **kwargs) + + return wrapped_method diff --git a/pg_tools/helpers.py b/pg_tools/helpers.py new file mode 100644 index 0000000..273b612 --- /dev/null +++ b/pg_tools/helpers.py @@ -0,0 +1,34 @@ +import typing + +import sqlalchemy as sa + + +def build_db_dsn( + db_dsn: str, + database_name: str, + use_replica: bool = False, + drivername: str = "postgresql", +) -> sa.URL: + """Parse DSN variable and replace some parts. 
+ + - DSN stored in format postgresql://login:password@/db_placeholder?host=host1&host=host2 + https://docs.sqlalchemy.org/en/20/dialects/postgresql.html#specifying-multiple-fallback-hosts + - 'db_placeholder' is replaced here with service database name + - `target_session_attrs` is chosen based on `use_replica` arg + """ + parsed_db_dsn: typing.Final = sa.make_url(db_dsn) + db_dsn_query: typing.Final[dict[str, typing.Any]] = dict(parsed_db_dsn.query or {}) + return parsed_db_dsn.set( + database=database_name, + drivername=drivername, + query=db_dsn_query + | { + "target_session_attrs": "prefer-standby" if use_replica else "read-write", + }, + ) + + +def is_dsn_multihost(db_dsn: str) -> bool: + parsed_db_dsn: typing.Final = sa.make_url(db_dsn) + db_dsn_query: typing.Final[dict[str, typing.Any]] = dict(parsed_db_dsn.query or {}) + return bool((hosts := db_dsn_query.get("host")) and isinstance(hosts, tuple) and len(hosts) > 1) diff --git a/pg_tools/py.typed b/pg_tools/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/pg_tools/settings.py b/pg_tools/settings.py new file mode 100644 index 0000000..e0ed936 --- /dev/null +++ b/pg_tools/settings.py @@ -0,0 +1,6 @@ +import os +import typing + + +DB_UTILS_CONNECTION_TRIES: typing.Final = int(os.getenv("DB_UTILS_CONNECTION_TRIES", "3")) +DB_UTILS_TRANSACTIONS_TRIES: typing.Final = int(os.getenv("DB_UTILS_TRANSACTIONS_TRIES", "3")) diff --git a/pg_tools/transaction.py b/pg_tools/transaction.py new file mode 100644 index 0000000..3209cc6 --- /dev/null +++ b/pg_tools/transaction.py @@ -0,0 +1,19 @@ +import dataclasses + +import typing_extensions +from sqlalchemy.ext import asyncio as sa_async + + +@dataclasses.dataclass(kw_only=True, frozen=True, slots=True) +class Transaction: + session: sa_async.AsyncSession + + async def __aenter__(self) -> typing_extensions.Self: + await self.session.begin() + return self + + async def __aexit__(self, *_: object) -> None: + await self.session.close() + + async def commit(self) -> None: + await self.session.commit() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..101f851 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,88 @@ +[project] +name = "pg-tools" +description = "PostgreSQL Tools" +authors = [ + { name = "Artur Shiriev", email = "me@shiriev.ru" }, +] +readme = "README.md" +requires-python = ">=3.10,<4" +license = "MIT" +keywords = [ + "python", + "postgresql", + "sqlalchemy", +] +classifiers = [ + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Typing :: Typed", + "Topic :: Software Development :: Libraries", +] +dynamic = ["version"] +packages = [ + { include = "pg_tools" }, +] +dependencies = [ + "tenacity", + "sqlalchemy[asyncio]", + "asyncpg", +] + +[project.urls] +repository = "https://github.com/modern-python/pg-tools" + +[tool.uv] +dev-dependencies = [ + "pytest", + "pytest-cov", + "pytest-asyncio", + "asyncpg-stubs", + "ruff", + "mypy", +] + +[build-system] +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[tool.hatch.version] +source = "vcs" + +[tool.mypy] +python_version = "3.10" +strict = true + +[tool.ruff] +fix = false +unsafe-fixes = true +line-length = 120 +target-version = "py310" + +[tool.ruff.format] +docstring-code-format = true + +[tool.ruff.lint] +select = ["ALL"] +ignore = [ + "D1", # allow missing docstrings + "S101", # allow asserts + "TCH", # ignore flake8-type-checking + "FBT", # allow 
boolean args + "D203", # "one-blank-line-before-class" conflicting with D211 + "D213", # "multi-line-summary-second-line" conflicting with D212 + "COM812", # flake8-commas "Trailing comma missing" + "ISC001", # flake8-implicit-str-concat + "G004", # allow f-strings in logging +] +isort.lines-after-imports = 2 +isort.no-lines-before = ["standard-library", "local-folder"] + +[tool.pytest.ini_options] +addopts = "--cov=. --cov-report term-missing" +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" + +[tool.coverage.run] +concurrency = ["thread", "greenlet"] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..534a99d --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,14 @@ +import os +import typing + +import pytest +from sqlalchemy.ext import asyncio as sa_async + + +@pytest.fixture +async def async_engine() -> typing.AsyncIterator[sa_async.AsyncEngine]: + engine: typing.Final = sa_async.create_async_engine(url=os.getenv("DB_DSN", ""), echo=True, echo_pool=True) + try: + yield engine + finally: + await engine.dispose() diff --git a/tests/test_connection_factory.py b/tests/test_connection_factory.py new file mode 100644 index 0000000..e7cf197 --- /dev/null +++ b/tests/test_connection_factory.py @@ -0,0 +1,69 @@ +import os +import typing +from unittest import mock + +import asyncpg +import pytest +import sqlalchemy +from sqlalchemy.ext import asyncio as sa_async + +from pg_tools.connections import build_connection_factory + + +async def test_connection_factory_success() -> None: + url: typing.Final = sqlalchemy.make_url(os.getenv("DB_DSN", "")) + engine: typing.Final = sa_async.create_async_engine( + url=url, echo=True, echo_pool=True, async_creator=build_connection_factory(url=url, timeout=1.0) + ) + try: + async with engine.connect() as connection: + await connection.execute(sqlalchemy.text("""SELECT 1""")) + finally: + await engine.dispose() + + +async def test_connection_factory_failure_single_host(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr("asyncpg.connect", mock.Mock(side_effect=TimeoutError)) + url: typing.Final = sqlalchemy.make_url(os.getenv("DB_DSN", "")) + engine: typing.Final = sa_async.create_async_engine( + url=url, echo=True, echo_pool=True, async_creator=build_connection_factory(url=url, timeout=1.0) + ) + try: + with pytest.raises(TimeoutError): + await engine.connect().__aenter__() + finally: + await engine.dispose() + + +@pytest.mark.parametrize("target_session_attrs", ["read-only", "read-write"]) +async def test_connection_factory_failure_several_hosts( + monkeypatch: pytest.MonkeyPatch, target_session_attrs: str +) -> None: + monkeypatch.setattr("asyncpg.connect", mock.Mock(side_effect=TimeoutError)) + url: typing.Final = sqlalchemy.make_url( + f"postgresql+asyncpg://user:password@/database?host=host1:5432&host=host2:5432&" + f"target_session_attrs={target_session_attrs}" + ) + engine: typing.Final = sa_async.create_async_engine( + url=url, echo=True, echo_pool=True, async_creator=build_connection_factory(url=url, timeout=1.0) + ) + try: + with pytest.raises(asyncpg.TargetServerAttributeNotMatched): + await engine.connect().__aenter__() + finally: + await engine.dispose() + + +async def test_connection_factory_failure_and_success(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr("asyncpg.connect", mock.AsyncMock(side_effect=(TimeoutError, ""))) + url: typing.Final = sqlalchemy.make_url( + 
"postgresql+asyncpg://user:password@/database?host=host1:5432&host=host2:5432" + ) + engine: typing.Final = sa_async.create_async_engine( + url=url, echo=True, echo_pool=True, async_creator=build_connection_factory(url=url, timeout=1.0) + ) + try: + with pytest.raises(AttributeError): + await engine.connect().__aenter__() + finally: + await engine.dispose() diff --git a/tests/test_decorators.py b/tests/test_decorators.py new file mode 100644 index 0000000..d586460 --- /dev/null +++ b/tests/test_decorators.py @@ -0,0 +1,67 @@ +import pytest +import sqlalchemy +from sqlalchemy.exc import DBAPIError +from sqlalchemy.ext import asyncio as sa_async + +from pg_tools.decorators import postgres_reconnect, transaction_retry + + +@pytest.mark.parametrize( + "error_code", + [ + "40001", # SerializationError - backoff triggered + "40002", # StatementCompletionUnknownError - backoff not triggered + ], +) +async def test_transaction_retry(async_engine: sa_async.AsyncEngine, error_code: str) -> None: + async with async_engine.connect() as connection: + await connection.execute( + sqlalchemy.text( + f""" + CREATE OR REPLACE FUNCTION raise_error() + RETURNS VOID AS $$ + BEGIN + RAISE SQLSTATE '{error_code}'; + END; + $$ LANGUAGE plpgsql; + """, + ), + ) + + @transaction_retry + async def raise_error() -> None: + await connection.execute(sqlalchemy.text("SELECT raise_error()")) + + with pytest.raises(DBAPIError): + await raise_error() + + +@pytest.mark.parametrize( + "error_code", + [ + "08000", # PostgresConnectionError - backoff triggered + "08003", # subclass of PostgresConnectionError - backoff triggered + "03000", # backoff not triggered + ], +) +async def test_postgres_reconnect(async_engine: sa_async.AsyncEngine, error_code: str) -> None: + async with async_engine.connect() as connection: + await connection.execute( + sqlalchemy.text( + f""" + CREATE OR REPLACE FUNCTION raise_error() + RETURNS VOID AS $$ + BEGIN + RAISE SQLSTATE '{error_code}'; + END; + $$ LANGUAGE plpgsql; + """, + ), + ) + + @postgres_reconnect + async def other_error() -> None: + await connection.execute(sqlalchemy.text("SELECT raise_error()")) + + with pytest.raises(DBAPIError): + await other_error() diff --git a/tests/test_helpers.py b/tests/test_helpers.py new file mode 100644 index 0000000..ff01c69 --- /dev/null +++ b/tests/test_helpers.py @@ -0,0 +1,22 @@ +import typing + +from pg_tools import helpers, is_dsn_multihost + + +def test_build_db_dsn() -> None: + database_name: typing.Final = "new_db_name" + drivername: typing.Final = "postgresql+asyncpg" + result_dsn: typing.Final = helpers.build_db_dsn( + db_dsn="postgresql://login:password@/db_placeholder?host=host1&host=host2", + database_name=database_name, + drivername=drivername, + ) + + assert database_name in result_dsn + assert drivername in result_dsn + + +def test_is_dsn_multihost() -> None: + assert is_dsn_multihost("postgresql://login:password@/db_placeholder?host=host1&host=host2") + assert not is_dsn_multihost("postgresql://login:password@/db_placeholder?host=host1") + assert not is_dsn_multihost("postgresql://login:password@host/db_placeholder") diff --git a/tests/test_transaction.py b/tests/test_transaction.py new file mode 100644 index 0000000..2621619 --- /dev/null +++ b/tests/test_transaction.py @@ -0,0 +1,35 @@ +import contextlib +import typing + +import pytest +from sqlalchemy.ext import asyncio as sa_async + +from pg_tools import Transaction + + +@pytest.fixture +async def transaction(async_engine: sa_async.AsyncEngine) -> typing.AsyncIterator[Transaction]: 
+ async with sa_async.AsyncSession(async_engine, expire_on_commit=False, autoflush=False) as session: + yield Transaction(session=session) + + +async def test_transaction_with_commit(transaction: Transaction) -> None: + async with transaction: + assert transaction.session.in_transaction() + await transaction.commit() + assert not transaction.session.in_transaction() + + +async def test_transaction_without_commit(transaction: Transaction) -> None: + async with transaction: + assert transaction.session.in_transaction() + assert not transaction.session.in_transaction() + + +async def test_transaction_with_exception(transaction: Transaction) -> None: + with contextlib.suppress(Exception): + async with transaction: + assert transaction.session.in_transaction() + msg: typing.Final = "some error" + raise Exception(msg) # noqa: TRY002 + assert not transaction.session.in_transaction() From 8bd9de358d1eed3aaff6ee5650879157afd365e4 Mon Sep 17 00:00:00 2001 From: Artur Shiriev Date: Thu, 19 Jun 2025 11:41:19 +0300 Subject: [PATCH 2/4] fix --- .dockerignore | 15 +++++++++++++++ Dockerfile | 2 +- Justfile | 3 +-- pyproject.toml | 5 +++++ 4 files changed, 22 insertions(+), 3 deletions(-) create mode 100644 .dockerignore diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..4caaa77 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,15 @@ +.env +.coverage +.gitignore +.idea +.mypy_cache +.ruff_cache +.vscode +.git +.pytest_cache +.DS_Store +*.yml +Dockerfile +**/__pycache__ +.hypothesis +.venv diff --git a/Dockerfile b/Dockerfile index 47087e6..87fbb82 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,7 +19,7 @@ WORKDIR /code COPY pyproject.toml . COPY uv.lock . -RUN uv sync --all-extras --frozen +RUN uv sync --all-extras --frozen --no-install-project COPY . . diff --git a/Justfile b/Justfile index cbf8684..d8dfd81 100644 --- a/Justfile +++ b/Justfile @@ -7,8 +7,7 @@ sh: docker compose run --service-ports application bash test *args: down && down - docker compose run application uv run pytest {{ args }} - + docker compose run application uv run --no-sync pytest {{ args }} build: docker compose build application diff --git a/pyproject.toml b/pyproject.toml index 101f851..d019a5e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -86,3 +86,8 @@ asyncio_default_fixture_loop_scope = "function" [tool.coverage.run] concurrency = ["thread", "greenlet"] + +[tool.coverage.report] +exclude_also = [ + "if typing.TYPE_CHECKING:", +] From 78a0844092951cf2f1b757873ccf16d0eeb454b9 Mon Sep 17 00:00:00 2001 From: Artur Shiriev Date: Thu, 19 Jun 2025 11:50:30 +0300 Subject: [PATCH 3/4] fix --- .github/workflows/ci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 67a3ca2..75ec6d3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,8 +46,7 @@ jobs: - run: uv python install 3.13 - run: | uv sync --all-extras --frozen --no-install-project - uv run alembic upgrade head - uv run pytest . --cov=. --cov-report xml + uv run --no-sync pytest . --cov=. 
--cov-report xml env: PYTHONDONTWRITEBYTECODE: 1 PYTHONUNBUFFERED: 1 From 50341e0ae0bbc841369dacd14ef65f80e5ef3bf8 Mon Sep 17 00:00:00 2001 From: Artur Shiriev Date: Thu, 19 Jun 2025 11:54:03 +0300 Subject: [PATCH 4/4] fix --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 75ec6d3..a26d4c1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -45,7 +45,7 @@ jobs: - uses: astral-sh/setup-uv@v3 - run: uv python install 3.13 - run: | - uv sync --all-extras --frozen --no-install-project + uv sync --all-extras --no-install-project uv run --no-sync pytest . --cov=. --cov-report xml env: PYTHONDONTWRITEBYTECODE: 1
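
For reference, a minimal usage sketch of the APIs this series introduces (build_db_dsn, build_connection_factory, is_dsn_multihost, postgres_reconnect, Transaction), assuming a reachable PostgreSQL server; the DB_DSN fallback, database name, and timeout below are illustrative values, not defaults defined by the patches:

import asyncio
import os

import sqlalchemy as sa
from sqlalchemy.ext import asyncio as sa_async

import pg_tools


async def main() -> None:
    # DB_DSN and its fallback are illustrative placeholders, not values shipped by pg-tools.
    raw_dsn = os.getenv("DB_DSN", "postgresql://postgres:password@/db_placeholder?host=127.0.0.1")

    # Swap in the real database name and the asyncpg driver; build_db_dsn also sets
    # target_session_attrs ("read-write" here, "prefer-standby" with use_replica=True).
    url = pg_tools.build_db_dsn(db_dsn=raw_dsn, database_name="postgres", drivername="postgresql+asyncpg")

    # build_connection_factory shuffles multi-host DSNs and falls back host by host on timeouts.
    engine = sa_async.create_async_engine(
        url=url,
        async_creator=pg_tools.build_connection_factory(url=url, timeout=5.0),
    )

    @pg_tools.postgres_reconnect
    async def ping() -> None:
        # Retried with exponential backoff if the underlying connection is dropped.
        async with engine.connect() as connection:
            await connection.execute(sa.text("SELECT 1"))

    await ping()
    print("multi-host DSN:", pg_tools.is_dsn_multihost(raw_dsn))

    async with sa_async.AsyncSession(engine, expire_on_commit=False) as session:
        async with pg_tools.Transaction(session=session) as transaction:
            await transaction.session.execute(sa.text("SELECT 1"))
            # Without an explicit commit() the work is discarded when the context exits.
            await transaction.commit()

    await engine.dispose()


if __name__ == "__main__":
    asyncio.run(main())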