Skip to content

Commit a86e236

Browse files
authored
Merge pull request #1 from modern-python/init
init commit
2 parents a496445 + 50341e0 commit a86e236

21 files changed

+728
-0
lines changed

.dockerignore

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
# Files and directories excluded from the Docker build context.
.env
.coverage
.gitignore
.idea
.mypy_cache
.ruff_cache
.vscode
.git
.pytest_cache
.DS_Store
*.yml
Dockerfile
**/__pycache__
.hypothesis
.venv

.github/workflows/ci.yml

Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
# CI pipeline: lint on one job, run pytest against a live Postgres service on another.
name: main

on:
  push:
    branches:
      - main
  pull_request: {}

# Cancel superseded runs of the same branch/PR.
concurrency:
  group: ${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: extractions/setup-just@v2
      - uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
          cache-dependency-glob: "**/pyproject.toml"
      - run: uv python install 3.10
      - run: just install lint-ci

  pytest:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:latest
        env:
          POSTGRES_DB: postgres
          POSTGRES_PASSWORD: password
          POSTGRES_USER: postgres
        ports:
          - 5432:5432
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      # was actions/checkout@v3: aligned with the lint job's @v4
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v3
      - run: uv python install 3.13
      - run: |
          uv sync --all-extras --no-install-project
          uv run --no-sync pytest . --cov=. --cov-report xml
        env:
          PYTHONDONTWRITEBYTECODE: 1
          PYTHONUNBUFFERED: 1
          DB_DSN: postgresql+asyncpg://postgres:[email protected]/postgres
      - name: Upload coverage to Codecov
        uses: codecov/[email protected]
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        with:
          files: ./coverage.xml
          flags: unittests
          # was codecov-${{ matrix.python-version }}: no matrix is defined in
          # this job, so the expression expanded to an empty suffix
          name: codecov-umbrella

.github/workflows/publish.yml

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
# Publish the package to PyPI whenever a GitHub release is published.
name: Publish Package

on:
  release:
    types:
      - published

jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: extractions/setup-just@v2
      - uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true
          cache-dependency-glob: "**/pyproject.toml"
      # `just publish` builds the dist and uploads with the token below.
      - run: just publish
        env:
          PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}

.gitignore

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
# Generic things
*.pyc
*~
__pycache__/*
*.swp
*.sqlite3
*.map
# Editor and OS cruft
.vscode
.idea
.DS_Store
.env
# Tool caches and test/coverage artifacts
.mypy_cache
.pytest_cache
.ruff_cache
.coverage
htmlcov/
coverage.xml
pytest.xml
# Build output and local environment
dist/
.python-version
.venv
uv.lock

Dockerfile

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
FROM python:3.13-slim

# required for psycopg2
# NOTE(review): the compose/CI DSN uses asyncpg — confirm the libpq build deps
# are still needed, e.g. for an optional sync driver.
RUN apt update \
    && apt install -y --no-install-recommends \
    build-essential \
    libpq-dev \
    && apt clean \
    && rm -rf /var/lib/apt/lists/*

# Bring in the uv binary from the official distribution image.
COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/uv
# Non-root runtime user; group root so the g=u permissions below apply to it.
RUN useradd --no-create-home --gid root runner

ENV UV_PYTHON_PREFERENCE=only-system
ENV UV_NO_CACHE=true

WORKDIR /code

# Copy only the dependency manifests first so the expensive sync layer stays
# cached until they change.
COPY pyproject.toml .
COPY uv.lock .

RUN uv sync --all-extras --frozen --no-install-project

COPY . .

# Give the non-root user group-level access to the code tree.
RUN chown -R runner:root /code && chmod -R g=u /code

USER runner

Justfile

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
# Running `just` with no arguments executes the full local pipeline.
default: install lint build test

# Stop the compose stack and remove orphaned containers.
down:
    docker compose down --remove-orphans

# Open an interactive shell inside the application container.
sh:
    docker compose run --service-ports application bash

# Run pytest in the container. `down` appears before the colon (runs first) and
# after `&&` (Just's syntax for dependencies that run afterwards), so the stack
# is torn down on both sides of the test run.
test *args: down && down
    docker compose run application uv run --no-sync pytest {{ args }}

build:
    docker compose build application

# Refresh the lockfile and install all extras into the local environment.
install:
    uv lock --upgrade
    uv sync --all-extras --frozen

# Format and auto-fix locally.
lint:
    uv run --frozen ruff format
    uv run --frozen ruff check --fix
    uv run --frozen mypy .

# Check-only variants for CI: fail instead of fixing.
lint-ci:
    uv run --frozen ruff format --check
    uv run --frozen ruff check --no-fix
    uv run --frozen mypy .

# Build sdist/wheel and upload to PyPI (expects PYPI_TOKEN in the environment).
publish:
    rm -rf dist
    uv build
    uv publish --token $PYPI_TOKEN

docker-compose.yml

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
services:
  application:
    build:
      context: .
      dockerfile: ./Dockerfile
    restart: always
    volumes:
      # NOTE(review): the image's WORKDIR is /code; mounting the source at
      # /srv/www/ does not overlay the baked-in code — confirm this mount
      # path is intentional.
      - .:/srv/www/
    depends_on:
      db:
        condition: service_healthy
    environment:
      - DB_DSN=postgresql+asyncpg://postgres:password@db/postgres
    # Keep stdin/tty open so `docker compose run ... bash` is usable.
    stdin_open: true
    tty: true

  db:
    image: postgres
    restart: always
    environment:
      - POSTGRES_PASSWORD=password
    # Gate dependent services on Postgres actually accepting connections.
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres -d postgres"]
      interval: 1s
      timeout: 5s
      retries: 15

pg_tools/__init__.py

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
"""Public API of the ``pg_tools`` package.

Re-exports the connection factory, the reconnect/retry decorators, the
transaction helper and the DSN utilities so callers can import everything
directly from ``pg_tools``.
"""

from pg_tools.connections import build_connection_factory
from pg_tools.decorators import postgres_reconnect, transaction_retry
from pg_tools.helpers import build_db_dsn, is_dsn_multihost
from pg_tools.transaction import Transaction


__all__ = [
    "Transaction",
    "build_connection_factory",
    "build_db_dsn",
    "is_dsn_multihost",
    "postgres_reconnect",
    "transaction_retry",
]

pg_tools/connections.py

Lines changed: 79 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
import logging
import random
import typing
from operator import itemgetter

import asyncpg
import sqlalchemy
from asyncpg.connect_utils import SessionAttribute
from sqlalchemy.dialects.postgresql.asyncpg import PGDialect_asyncpg


if typing.TYPE_CHECKING:
    # asyncpg.Connection is generic in the type stubs only, so the alias is
    # kept behind TYPE_CHECKING.
    ConnectionType = asyncpg.Connection[typing.Any]


logger = logging.getLogger(__name__)


def build_connection_factory(
    url: sqlalchemy.URL,
    timeout: float,
) -> typing.Callable[[], typing.Awaitable["ConnectionType"]]:
    """Return an async factory that opens an asyncpg connection for ``url``.

    The SQLAlchemy URL is translated to asyncpg connect arguments through the
    asyncpg dialect.  For a multihost URL the host/port pairs are shuffled
    once (load spreading); the factory first lets asyncpg try all hosts at
    once and, on timeout, falls back to probing each pair individually.

    :param url: SQLAlchemy URL describing the target database(s).
    :param timeout: passed to every ``asyncpg.connect`` attempt.
    :raises asyncpg.TargetServerAttributeNotMatched: when the host-by-host
        fallback exhausts all pairs without a successful connection.
    """
    # create_connect_args() returns (args, kwargs); only the kwargs are used.
    connect_args: typing.Final[dict[str, typing.Any]] = PGDialect_asyncpg().create_connect_args(url)[1]  # type: ignore[no-untyped-call]
    raw_target_session_attrs: typing.Final[str | None] = connect_args.pop("target_session_attrs", None)
    target_session_attrs: typing.Final[SessionAttribute | None] = (
        SessionAttribute(raw_target_session_attrs) if raw_target_session_attrs else None
    )

    raw_hosts: typing.Final[str | list[str]] = connect_args.pop("host")
    raw_ports: typing.Final[int | list[int] | None] = connect_args.pop("port", None)
    hosts_and_ports: list[tuple[str, int]]
    hosts: str | list[str]
    ports: int | list[int] | None
    if isinstance(raw_hosts, list) and isinstance(raw_ports, list):
        # Multihost DSN: shuffle the pairs together, then split back into the
        # parallel host/port lists asyncpg expects.
        hosts_and_ports = list(zip(raw_hosts, raw_ports, strict=True))
        random.shuffle(hosts_and_ports)
        hosts = list(map(itemgetter(0), hosts_and_ports))
        ports = list(map(itemgetter(1), hosts_and_ports))
    else:
        # Single host (or no explicit port): no per-host fallback is possible.
        hosts_and_ports = []
        hosts = raw_hosts
        ports = raw_ports

    async def _connection_factory() -> "ConnectionType":
        connection: ConnectionType
        # NOTE(review): hosts_and_ports is only read here, never rebound — the
        # nonlocal declaration appears redundant.
        nonlocal hosts_and_ports
        try:
            # First attempt: hand asyncpg the full host/port lists at once.
            connection = await asyncpg.connect(
                **connect_args,
                host=hosts,
                port=ports,
                timeout=timeout,
                target_session_attrs=target_session_attrs,
            )
            return connection  # noqa: TRY300
        except TimeoutError:
            if not hosts_and_ports:
                # Single-host setup: nothing else to try.
                raise

        logger.warning("Failed to fetch asyncpg connection. Trying host by host.")

        # Fallback: probe each host/port pair individually, in a fresh random
        # order for every call.
        hosts_and_ports_copy: typing.Final = hosts_and_ports.copy()
        random.shuffle(hosts_and_ports_copy)
        for one_host, one_port in hosts_and_ports_copy:
            try:
                connection = await asyncpg.connect(
                    **connect_args,
                    host=one_host,
                    port=one_port,
                    timeout=timeout,
                    target_session_attrs=target_session_attrs,
                )
                return connection  # noqa: TRY300
            except (TimeoutError, OSError, asyncpg.TargetServerAttributeNotMatched) as exc:  # noqa: PERF203
                logger.warning("Failed to fetch asyncpg connection from %s, %s", one_host, exc)
        # NOTE(review): this is raised whenever the fallback exhausts all
        # hosts, even when the individual failures were timeouts rather than
        # attribute mismatches.
        msg: typing.Final = f"None of the hosts match the target attribute requirement {target_session_attrs}"
        raise asyncpg.TargetServerAttributeNotMatched(msg)

    return _connection_factory

pg_tools/decorators.py

Lines changed: 72 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,72 @@
1+
import functools
2+
import logging
3+
import typing
4+
5+
import asyncpg
6+
import tenacity
7+
from sqlalchemy.exc import DBAPIError
8+
9+
from pg_tools import settings
10+
11+
12+
# P/T capture the wrapped callable's signature and return type so the
# decorators below preserve it for type checkers.
P = typing.ParamSpec("P")
T = typing.TypeVar("T")
logger = logging.getLogger(__name__)
15+
16+
17+
def _connection_retry_handler(exception: BaseException) -> bool:
    """Tenacity predicate: retry only when a DBAPIError wraps an asyncpg connection failure."""
    should_retry = (
        isinstance(exception, DBAPIError)
        and hasattr(exception, "orig")
        and isinstance(exception.orig.__cause__, asyncpg.PostgresConnectionError)  # type: ignore[union-attr]
    )
    if should_retry:
        logger.debug("postgres_reconnect, backoff triggered")
    else:
        logger.debug("postgres_reconnect, giving up on backoff")
    return should_retry
28+
29+
30+
def postgres_reconnect(func: typing.Callable[P, typing.Awaitable[T]]) -> typing.Callable[P, typing.Awaitable[T]]:
    """Wrap an async callable with exponential-backoff retries on Postgres connection errors."""
    retry_policy = tenacity.retry(
        stop=tenacity.stop_after_attempt(settings.DB_UTILS_CONNECTION_TRIES),
        wait=tenacity.wait_exponential_jitter(),
        retry=tenacity.retry_if_exception(_connection_retry_handler),
        reraise=True,
        before=tenacity.before_log(logger, logging.DEBUG),
    )

    @functools.wraps(func)
    async def _call(*args: P.args, **kwargs: P.kwargs) -> T:
        return await func(*args, **kwargs)

    # Applying the policy explicitly is equivalent to stacking it as the
    # outermost decorator.
    return retry_policy(_call)
43+
44+
45+
def _transaction_retry_handler(exception: BaseException) -> bool:
    """Tenacity predicate: retry only when a DBAPIError wraps an asyncpg serialization failure."""
    should_retry = (
        isinstance(exception, DBAPIError)
        and hasattr(exception, "orig")
        and isinstance(exception.orig.__cause__, asyncpg.SerializationError)  # type: ignore[union-attr]
    )
    if should_retry:
        logger.debug("transaction_retry, backoff triggered")
    else:
        logger.debug("transaction_retry, giving up on backoff")
    return should_retry
56+
57+
58+
def transaction_retry(
    func: typing.Callable[P, typing.Coroutine[typing.Any, typing.Any, T]],
) -> typing.Callable[P, typing.Coroutine[typing.Any, typing.Any, T]]:
    """Wrap an async callable with exponential-backoff retries on serialization errors."""
    retry_policy = tenacity.retry(
        stop=tenacity.stop_after_attempt(settings.DB_UTILS_TRANSACTIONS_TRIES),
        wait=tenacity.wait_exponential_jitter(),
        retry=tenacity.retry_if_exception(_transaction_retry_handler),
        reraise=True,
        before=tenacity.before_log(logger, logging.DEBUG),
    )

    @functools.wraps(func)
    async def _call(*args: P.args, **kwargs: P.kwargs) -> T:
        return await func(*args, **kwargs)

    # Applying the policy explicitly is equivalent to stacking it as the
    # outermost decorator.
    return retry_policy(_call)

0 commit comments

Comments
 (0)