Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
5498214
Initial plan
Copilot Jan 27, 2026
c4fb65f
Add asyncio support for caching coroutines
Copilot Jan 27, 2026
ad4e27c
Fix async concurrent call handling and update tests/examples
Copilot Jan 27, 2026
1626b92
Fix linting issues in async implementation
Copilot Jan 27, 2026
90a0dd3
Add comment explaining next_time mark/unmark pattern
Copilot Jan 27, 2026
4094e94
add pytest-asyncio
Borda Jan 27, 2026
daab0dd
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jan 27, 2026
a41c5fe
Merge branch 'master' into copilot/add-async-support-for-caching
Borda Jan 27, 2026
f1bca87
Update examples/async_example.py
Borda Jan 27, 2026
23125e3
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jan 27, 2026
496cb20
Merge branch 'master' into copilot/add-async-support-for-caching
Borda Jan 27, 2026
a72adb6
Address PR review feedback: fix test assertions, add allow_none test,…
Copilot Jan 27, 2026
6393d95
Refactor async caching tests: introduce test classes and improve orga…
Borda Jan 27, 2026
07328a6
Merge branch 'master' into copilot/add-async-support-for-caching
Borda Jan 27, 2026
daa5d1a
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jan 27, 2026
2481527
Fix redundant test assertions and add edge case tests for async funct…
Copilot Jan 28, 2026
1d706fe
Simplify line breaks in async_example.py for 120 char line length
Copilot Jan 28, 2026
21e8474
linting
Borda Jan 28, 2026
e9d4142
linting
Borda Jan 28, 2026
e6a9938
Update src/cachier/core.py
Borda Jan 28, 2026
8d99753
Merge branch 'master' into copilot/add-async-support-for-caching
Borda Jan 28, 2026
1cf9d27
Fix redundant test assertion in test_uses_cache_before_expiry
Copilot Jan 28, 2026
5fc456d
Add comprehensive tests for missing async code coverage
Copilot Jan 28, 2026
91a26b3
Apply suggestions from code review
Borda Jan 28, 2026
a2cd7a6
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jan 28, 2026
06c3ee8
Add tests for missing code coverage: exception handling and stale pro…
Copilot Jan 28, 2026
c61a1bf
linting
Borda Jan 28, 2026
935ea40
Add tests to cover remaining uncovered async code paths
Copilot Jan 29, 2026
40fd702
Remove line number references from test docstrings
Copilot Jan 29, 2026
6be5c10
Remove duplicate tests and consolidate stale entry processing tests
Copilot Jan 29, 2026
c81f4a9
Remove unreachable async code path for stale entry processing
Copilot Jan 30, 2026
093dbd3
Move common pytest marks to class level for cleaner test code
Copilot Jan 30, 2026
71cb19c
linting
Borda Jan 30, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
178 changes: 178 additions & 0 deletions examples/async_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,178 @@
"""Example demonstrating async/coroutine support in Cachier.

This example shows how to use the @cachier decorator with async functions to cache the results of HTTP requests or other
async operations.

"""

import asyncio
import time
from datetime import timedelta

from cachier import cachier


# Example 1: Basic async function caching
@cachier(backend="pickle", stale_after=timedelta(hours=1))
async def fetch_user_data(user_id: int) -> dict:
    """Pretend to look up a user record over the network."""
    print(f" Fetching user {user_id} from API...")
    await asyncio.sleep(1)  # stand-in for network latency
    record = {
        "id": user_id,
        "name": f"User{user_id}",
        "email": f"user{user_id}@example.com",
    }
    return record


# Example 2: Async function with memory backend (faster, but not persistent)
@cachier(backend="memory")
async def calculate_complex_result(x: int, y: int) -> int:
    """Stand-in for an expensive integer exponentiation."""
    print(f" Computing {x} ** {y}...")
    await asyncio.sleep(0.5)  # pretend the math takes a while
    return pow(x, y)


# Example 3: Async function with stale_after (without next_time for simplicity)
@cachier(backend="memory", stale_after=timedelta(seconds=3), next_time=False)
async def get_weather_data(city: str) -> dict:
    """Pretend to fetch a weather report; entries go stale after 3 seconds."""
    print(f" Fetching weather for {city}...")
    await asyncio.sleep(0.5)
    report = dict(city=city, temp=72, condition="sunny", timestamp=time.time())
    return report


# Example 4: Real-world HTTP request caching (requires httpx)
async def demo_http_caching():
    """Show @cachier wrapped around real HTTP requests (skipped without httpx)."""
    print("\n=== HTTP Request Caching Example ===")
    try:
        import httpx

        @cachier(backend="pickle", stale_after=timedelta(minutes=5))
        async def fetch_github_user(username: str) -> dict:
            """Fetch GitHub user data with caching."""
            print(f" Making API request for {username}...")
            async with httpx.AsyncClient() as client:
                response = await client.get(f"https://api.github.com/users/{username}")
                return response.json()

        # Cold call: really hits the GitHub API.
        t0 = time.time()
        user1 = await fetch_github_user("torvalds")
        cold = time.time() - t0
        print(f" First call took {cold:.2f}s")
        print(f" User: {user1.get('name', 'N/A')}, Repos: {user1.get('public_repos', 'N/A')}")

        # Warm call: served from the pickle cache.
        t0 = time.time()
        await fetch_github_user("torvalds")
        warm = time.time() - t0
        print(f" Second call took {warm:.2f}s (from cache)")
        if warm > 0:
            print(f" Cache speedup: {cold / warm:.1f}x")
        else:
            print(" Cache speedup: instantaneous (duration too small to measure)")

    except ImportError:
        print(" (Skipping - httpx not installed. Install with: pip install httpx)")


async def main():
    """Run all async caching examples."""
    print("=" * 60)
    print("Cachier Async/Coroutine Support Examples")
    print("=" * 60)

    # Example 1: persistent pickle-backed caching of an async function.
    print("\n=== Example 1: Basic Async Caching ===")
    t0 = time.time()
    user = await fetch_user_data(42)
    first_secs = time.time() - t0
    print(f"First call: {user} (took {first_secs:.2f}s)")

    t0 = time.time()
    user = await fetch_user_data(42)
    second_secs = time.time() - t0
    print(f"Second call: {user} (took {second_secs:.2f}s)")
    if second_secs > 0:
        print(f"Speedup: {first_secs / second_secs:.1f}x faster!")
    else:
        print("Speedup: instantaneous (duration too small to measure)")

    # Example 2: in-process memory backend.
    print("\n=== Example 2: Memory Backend (Fast, Non-Persistent) ===")
    t0 = time.time()
    result = await calculate_complex_result(2, 20)
    first_secs = time.time() - t0
    print(f"First call: 2^20 = {result} (took {first_secs:.2f}s)")

    t0 = time.time()
    result = await calculate_complex_result(2, 20)
    second_secs = time.time() - t0
    print(f"Second call: 2^20 = {result} (took {second_secs:.2f}s)")

    # Example 3: cached entries expire after stale_after elapses.
    print("\n=== Example 3: Stale-After ===")
    weather = await get_weather_data("San Francisco")
    print(f"First call: {weather}")

    weather = await get_weather_data("San Francisco")
    print(f"Second call (cached): {weather}")

    print("Waiting 4 seconds for cache to become stale...")
    await asyncio.sleep(4)

    weather = await get_weather_data("San Francisco")
    print(f"Third call (recalculates because stale): {weather}")

    # Example 4: overlapping calls through asyncio.gather.
    print("\n=== Example 4: Concurrent Async Requests ===")
    print("Making 5 concurrent requests...")
    print("(First 3 are unique and will execute, last 2 are duplicates)")
    t0 = time.time()
    await asyncio.gather(
        fetch_user_data(1),
        fetch_user_data(2),
        fetch_user_data(3),
        fetch_user_data(1),  # duplicate of the first request
        fetch_user_data(2),  # duplicate of the second request
    )
    elapsed = time.time() - t0
    print(f"All requests completed in {elapsed:.2f}s")

    # Repeat the same calls: now everything is served from cache.
    print("\nMaking the same requests again (should use cache):")
    t0 = time.time()
    await asyncio.gather(fetch_user_data(1), fetch_user_data(2), fetch_user_data(3))
    cached_secs = time.time() - t0
    print(f"Completed in {cached_secs:.2f}s - much faster!")

    # Example 5: real HTTP caching (silently skipped when httpx is missing).
    await demo_http_caching()

    # Drop every cache populated by the examples above.
    print("\n=== Cleanup ===")
    for cached_fn in (fetch_user_data, calculate_complex_result, get_weather_data):
        cached_fn.clear_cache()
    print("All caches cleared!")

    print("\n" + "=" * 60)
    print("Key Features Demonstrated:")
    print(" - Async function caching with @cachier decorator")
    print(" - Multiple backends (pickle, memory)")
    print(" - Automatic cache invalidation (stale_after)")
    print(" - Concurrent request handling")
    print(" - Significant performance improvements")
    print("\nNote: For async functions, concurrent calls with the same")
    print("arguments will execute in parallel initially. Subsequent calls")
    print("will use the cached result for significant speedup.")
    print("=" * 60)


# Entry point: run the full demo when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
5 changes: 3 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -147,8 +147,8 @@ lint.mccabe.max-complexity = 10
[tool.docformatter]
recursive = true
# some docstring start with r"""
wrap-summaries = 79
wrap-descriptions = 79
wrap-summaries = 120
wrap-descriptions = 120
blank = true

# === Testing ===
Expand Down Expand Up @@ -178,6 +178,7 @@ markers = [
"redis: test the Redis core",
"sql: test the SQL core",
"maxage: test the max_age functionality",
"asyncio: marks tests as async",
]

# --- coverage ---
Expand Down
122 changes: 119 additions & 3 deletions src/cachier/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
# http://www.opensource.org/licenses/MIT-license
# Copyright (c) 2016, Shay Palachy <shaypal5@gmail.com>

import asyncio
import inspect
import os
import threading
Expand Down Expand Up @@ -56,6 +57,14 @@ def _function_thread(core, key, func, args, kwds):
print(f"Function call failed with the following exception:\n{exc}")


async def _function_thread_async(core, key, func, args, kwds):
try:
func_res = await func(*args, **kwds)
core.set_entry(key, func_res)
except BaseException as exc:
print(f"Function call failed with the following exception:\n{exc}")


def _calc_entry(core, key, func, args, kwds, printer=lambda *_: None) -> Optional[Any]:
core.mark_entry_being_calculated(key)
try:
Expand All @@ -68,6 +77,18 @@ def _calc_entry(core, key, func, args, kwds, printer=lambda *_: None) -> Optiona
core.mark_entry_not_calculated(key)


async def _calc_entry_async(core, key, func, args, kwds, printer=lambda *_: None) -> Optional[Any]:
core.mark_entry_being_calculated(key)
try:
func_res = await func(*args, **kwds)
stored = core.set_entry(key, func_res)
if not stored:
printer("Result exceeds entry_size_limit; not cached")
return func_res
finally:
core.mark_entry_not_calculated(key)


def _convert_args_kwargs(func, _is_method: bool, args: tuple, kwds: dict) -> dict:
"""Convert mix of positional and keyword arguments to aggregated kwargs."""
# unwrap if the function is functools.partial
Expand Down Expand Up @@ -390,13 +411,108 @@ def _call(*args, max_age: Optional[timedelta] = None, **kwds):
_print("No entry found. No current calc. Calling like a boss.")
return _calc_entry(core, key, func, args, kwds, _print)

async def _call_async(*args, max_age: Optional[timedelta] = None, **kwds):
# NOTE: For async functions, wait_for_calc_timeout is not honored.
# Instead of blocking the event loop waiting for concurrent
# calculations, async functions will recalculate in parallel.
# This avoids deadlocks and maintains async efficiency.
nonlocal allow_none, last_cleanup
_allow_none = _update_with_defaults(allow_none, "allow_none", kwds)
# print('Inside async wrapper for {}.'.format(func.__name__))
ignore_cache = _pop_kwds_with_deprecation(kwds, "ignore_cache", False)
overwrite_cache = _pop_kwds_with_deprecation(kwds, "overwrite_cache", False)
verbose = _pop_kwds_with_deprecation(kwds, "verbose_cache", False)
ignore_cache = kwds.pop("cachier__skip_cache", ignore_cache)
overwrite_cache = kwds.pop("cachier__overwrite_cache", overwrite_cache)
verbose = kwds.pop("cachier__verbose", verbose)
_stale_after = _update_with_defaults(stale_after, "stale_after", kwds)
_next_time = _update_with_defaults(next_time, "next_time", kwds)
_cleanup_flag = _update_with_defaults(cleanup_stale, "cleanup_stale", kwds)
_cleanup_interval_val = _update_with_defaults(cleanup_interval, "cleanup_interval", kwds)
# merge args expanded as kwargs and the original kwds
kwargs = _convert_args_kwargs(func, _is_method=core.func_is_method, args=args, kwds=kwds)

if _cleanup_flag:
now = datetime.now()
with cleanup_lock:
if now - last_cleanup >= _cleanup_interval_val:
last_cleanup = now
_get_executor().submit(core.delete_stale_entries, _stale_after)

_print = print if verbose else lambda x: None

# Check current global caching state dynamically
from .config import _global_params

if ignore_cache or not _global_params.caching_enabled:
return await func(args[0], **kwargs) if core.func_is_method else await func(**kwargs)
key, entry = core.get_entry((), kwargs)
if overwrite_cache:
result = await _calc_entry_async(core, key, func, args, kwds, _print)
return result
if entry is None or (not entry._completed and not entry._processing):
_print("No entry found. No current calc. Calling like a boss.")
result = await _calc_entry_async(core, key, func, args, kwds, _print)
return result
_print("Entry found.")
if _allow_none or entry.value is not None:
_print("Cached result found.")
now = datetime.now()
max_allowed_age = _stale_after
nonneg_max_age = True
if max_age is not None:
if max_age < ZERO_TIMEDELTA:
_print("max_age is negative. Cached result considered stale.")
nonneg_max_age = False
else:
assert max_age is not None # noqa: S101
max_allowed_age = min(_stale_after, max_age)
# note: if max_age < 0, we always consider a value stale
if nonneg_max_age and (now - entry.time <= max_allowed_age):
_print("And it is fresh!")
return entry.value
_print("But it is stale... :(")
if _next_time:
_print("Async calc and return stale")
# Mark entry as being calculated then immediately unmark
# This matches sync behavior and ensures entry exists
# Background task will update cache when complete
core.mark_entry_being_calculated(key)
# Use asyncio.create_task for background execution
asyncio.create_task(_function_thread_async(core, key, func, args, kwds))
core.mark_entry_not_calculated(key)
return entry.value
_print("Calling decorated function and waiting")
result = await _calc_entry_async(core, key, func, args, kwds, _print)
return result
if entry._processing:
msg = "No value but being calculated. Recalculating"
_print(f"{msg} (async - no wait).")
# For async, don't wait - just recalculate
# This avoids blocking the event loop
result = await _calc_entry_async(core, key, func, args, kwds, _print)
return result
_print("No entry found. No current calc. Calling like a boss.")
return await _calc_entry_async(core, key, func, args, kwds, _print)

# MAINTAINER NOTE: The main function wrapper is now a standard function
# that passes *args and **kwargs to _call. This ensures that user
# arguments are not shifted, and max_age is only settable via keyword
# argument.
@wraps(func)
def func_wrapper(*args, **kwargs):
return _call(*args, **kwargs)
# For async functions, we create an async wrapper that calls
# _call_async.
is_coroutine = inspect.iscoroutinefunction(func)

if is_coroutine:

@wraps(func)
async def func_wrapper(*args, **kwargs):
return await _call_async(*args, **kwargs)
else:

@wraps(func)
def func_wrapper(*args, **kwargs):
return _call(*args, **kwargs)

def _clear_cache():
"""Clear the cache."""
Expand Down
11 changes: 4 additions & 7 deletions src/cachier/cores/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,8 @@ class RecalculationNeeded(Exception):
def _get_func_str(func: Callable) -> str:
"""Return a string identifier for the function (module + name).

We accept Any here because static analysis can't always prove that the
runtime object will have __module__ and __name__, but at runtime the
decorated functions always do.
We accept Any here because static analysis can't always prove that the runtime object will have __module__ and
__name__, but at runtime the decorated functions always do.

"""
return f".{func.__module__}.{func.__name__}"
Expand All @@ -52,8 +51,7 @@ def __init__(
def set_func(self, func):
"""Set the function this core will use.

This has to be set before any method is called. Also determine if the
function is an object method.
This has to be set before any method is called. Also determine if the function is an object method.

"""
# unwrap if the function is functools.partial
Expand All @@ -70,8 +68,7 @@ def get_key(self, args, kwds):
def get_entry(self, args, kwds) -> Tuple[str, Optional[CacheEntry]]:
"""Get entry based on given arguments.

Return the result mapped to the given arguments in this core's cache,
if such a mapping exists.
Return the result mapped to the given arguments in this core's cache, if such a mapping exists.

"""
key = self.get_key(args, kwds)
Expand Down
1 change: 1 addition & 0 deletions tests/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
pytest
coverage
pytest-cov
pytest-asyncio
birch
# to be able to run `python setup.py checkdocs`
collective.checkdocs
Expand Down
Loading