Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 13 additions & 7 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,6 @@ ci:
autoupdate_commit_msg: "[pre-commit.ci] pre-commit suggestions"
autoupdate_schedule: quarterly
# submodules: true
# docformatter v1.7.7 transitively pulls `untokenize`, whose setup.py
# uses `ast.Constant.s` (removed in Python 3.12+) and fails to install
# on pre-commit.ci's runners. The `pre-commit` GitHub Actions job still
# runs docformatter, so coverage isn't lost. Remove this once docformatter
# ships v1.7.8 (which drops the untokenize dep).
skip: [docformatter]

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
Expand All @@ -36,11 +30,23 @@ repos:
args: []

- repo: https://github.com/PyCQA/docformatter
rev: v1.7.7
rev: v1.7.8
hooks:
- id: docformatter
additional_dependencies: [tomli]
args: ["--in-place"]
# docformatter v1.7.8 disagrees with ruff-format on these files
# (blank lines after docstring-only function bodies, blank lines
# between module docstring and first class, and a multi-line
# string literal used as an `exec()` argument that docformatter
# incorrectly treats as a docstring). Exclude until upstream
# reconciles the conventions or the patterns are restructured.
exclude: |
(?x)^(
src/cachier/exporters/prometheus\.py|
tests/mongo_tests/clients\.py|
tests/test_varargs\.py
)$

- repo: https://github.com/executablebooks/mdformat
rev: 1.0.0
Expand Down
15 changes: 6 additions & 9 deletions src/cachier/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,12 +39,10 @@
class _CachierWrappedFunc(Protocol[_P, _R_co]):
"""Callable returned by ``@cachier`` with the decorated function's signature.

Preserves the original function's parameter and return types via ``ParamSpec``
while also exposing the cache-management attributes attached by the decorator.
Per-call cachier options such as ``max_age`` and ``cachier__skip_cache`` are
accepted at runtime but are not surfaced in the ``__call__`` signature here;
PEP 612 does not permit mixing ParamSpec kwargs with additional keyword-only
parameters.
Preserves the original function's parameter and return types via ``ParamSpec`` while also exposing the cache-
management attributes attached by the decorator. Per-call cachier options such as ``max_age`` and
``cachier__skip_cache`` are accepted at runtime but are not surfaced in the ``__call__`` signature here; PEP 612
does not permit mixing ParamSpec kwargs with additional keyword-only parameters.

"""

Expand Down Expand Up @@ -85,9 +83,8 @@ async def _background_recalc_async(
) -> None:
"""Run async recomputation in background and clear processing flag.

This helper ensures that the cache entry's "being calculated" state is
cleared only after the background recomputation and cache update
(performed by ``_function_thread_async``) have completed.
This helper ensures that the cache entry's "being calculated" state is cleared only after the background
recomputation and cache update (performed by ``_function_thread_async``) have completed.

"""
try:
Expand Down
4 changes: 2 additions & 2 deletions src/cachier/cores/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,8 +121,8 @@ def check_calc_timeout(self, time_spent):
def get_entry_by_key(self, key: str) -> Tuple[str, Optional[CacheEntry]]:
"""Get entry based on the given key.
Return the key and the :class:`~cachier.config.CacheEntry` mapped
to the given key in this core's cache, if such a mapping exists.
Return the key and the :class:`~cachier.config.CacheEntry` mapped to the given key in this core's cache, if such
a mapping exists.
"""

Expand Down
15 changes: 7 additions & 8 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,14 +48,13 @@ def _build_worker_url(original_url: str, schema_name: str) -> str:
def worker_sql_connection(request: pytest.FixtureRequest) -> Optional[str]:
"""Create the worker-specific PostgreSQL schema once per xdist worker session.

Returns the worker-specific connection URL, or None when schema isolation is not
needed (serial run or non-PostgreSQL backend). The schema is created with
``CREATE SCHEMA IF NOT EXISTS`` so this fixture is safe to run even if the schema
already exists from a previous interrupted run.

A non-None return value means "use this URL"; schema creation is attempted but may
fail silently (e.g. if SQLAlchemy is not installed or the DB is unreachable). Tests
that depend on the schema will fail at the DB level with a diagnostic error.
Returns the worker-specific connection URL, or None when schema isolation is not needed (serial run or non-
PostgreSQL backend). The schema is created with ``CREATE SCHEMA IF NOT EXISTS`` so this fixture is safe to run even
if the schema already exists from a previous interrupted run.

A non-None return value means "use this URL"; schema creation is attempted but may fail silently (e.g. if SQLAlchemy
is not installed or the DB is unreachable). Tests that depend on the schema will fail at the DB level with a
diagnostic error.

"""
# Avoid touching SQL backends entirely when no SQL tests are collected.
Expand Down
4 changes: 2 additions & 2 deletions tests/test_general.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def _calls_wait_for_calc_timeout_fast(res_queue):
res = _wait_for_calc_timeout_fast(1, 2)
res_queue.put(res)

""" Testing calls that avoid timeouts store the values in cache. """
# Testing calls that avoid timeouts store the values in cache.
_wait_for_calc_timeout_fast.clear_cache()
val1 = _wait_for_calc_timeout_fast(1, 2)
val2 = _wait_for_calc_timeout_fast(1, 2)
Expand Down Expand Up @@ -123,7 +123,7 @@ def _calls_wait_for_calc_timeout_slow(res_queue):
res = _wait_for_calc_timeout_slow(1, 2)
res_queue.put(res)

"""Testing for calls timing out to be performed twice when needed."""
# Testing for calls timing out to be performed twice when needed.
_wait_for_calc_timeout_slow.clear_cache()
res_queue = queue.Queue()
thread1 = threading.Thread(
Expand Down
Loading