From 134f44d9c6e03d9349aed25627b5657a0c120628 Mon Sep 17 00:00:00 2001 From: jonathan vanasco Date: Mon, 4 Aug 2025 21:54:20 -0400 Subject: [PATCH 1/9] some typing updates Change-Id: I30619425e1e1e4ca43286be35a8c4a85658a8acb --- dogpile/cache/backends/memcached.py | 1 - dogpile/cache/backends/redis.py | 104 ++++++++++++++++++++-------- tests/cache/test_redis_backend.py | 28 +++++--- 3 files changed, 94 insertions(+), 39 deletions(-) diff --git a/dogpile/cache/backends/memcached.py b/dogpile/cache/backends/memcached.py index eee3448..1efe366 100644 --- a/dogpile/cache/backends/memcached.py +++ b/dogpile/cache/backends/memcached.py @@ -18,7 +18,6 @@ from ..api import NO_VALUE from ... import util - if typing.TYPE_CHECKING: import bmemcached import memcache diff --git a/dogpile/cache/backends/redis.py b/dogpile/cache/backends/redis.py index 68f84f5..01f266a 100644 --- a/dogpile/cache/backends/redis.py +++ b/dogpile/cache/backends/redis.py @@ -6,13 +6,29 @@ """ -import typing +from __future__ import annotations + +from typing import Any +from typing import cast +from typing import Dict +from typing import List +from typing import Mapping +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypedDict import warnings +from typing_extensions import NotRequired + from ..api import BytesBackend +from ..api import CacheMutex +from ..api import KeyType from ..api import NO_VALUE +from ..api import SerializedReturnType -if typing.TYPE_CHECKING: +if TYPE_CHECKING: import redis else: # delayed import @@ -21,6 +37,41 @@ __all__ = ("RedisBackend", "RedisSentinelBackend", "RedisClusterBackend") +class RedisKwargs(TypedDict): + """ + TypedDict of kwargs for `RedisBackend` and derived classes + .. 
versionadded:: 1.4.1 + """ + + url: NotRequired[str] + host: NotRequired[str] + username: NotRequired[Optional[str]] + password: NotRequired[Optional[str]] + port: NotRequired[int] + db: NotRequired[int] + redis_expiration_time: NotRequired[int] + distributed_lock: NotRequired[bool] + lock_timeout: NotRequired[int] + socket_timeout: NotRequired[float] + socket_connect_timeout: NotRequired[float] + socket_keepalive: NotRequired[bool] + socket_keepalive_options: NotRequired[Dict] + lock_sleep: NotRequired[int] + connection_pool: NotRequired["redis.ConnectionPool"] + thread_local_lock: NotRequired[bool] + connection_kwargs: NotRequired[Dict[str, Any]] + + +class RedisKwargs_Sentinel(RedisKwargs): + sentinels: List[Tuple[str, str]] + service_name: NotRequired[str] + sentinel_kwargs: NotRequired[Dict[str, Any]] + + +class RedisKwargs_Cluster(RedisKwargs): + startup_nodes: List["redis.cluster.ClusterNode"] + + class RedisBackend(BytesBackend): r"""A `Redis `_ backend, using the `redis-py `_ driver. @@ -114,12 +165,9 @@ class RedisBackend(BytesBackend): .. 
versionadded:: 1.1.6 - - - """ - def __init__(self, arguments): + def __init__(self, arguments: RedisKwargs): arguments = arguments.copy() self._imports() self.url = arguments.pop("url", None) @@ -152,12 +200,12 @@ def __init__(self, arguments): self.connection_pool = arguments.pop("connection_pool", None) self._create_client() - def _imports(self): + def _imports(self) -> None: # defer imports until backend is used global redis import redis # noqa - def _create_client(self): + def _create_client(self) -> None: if self.connection_pool is not None: # the connection pool already has all other connection # options present within, so here we disregard socket_timeout @@ -195,7 +243,7 @@ def _create_client(self): self.writer_client = redis.StrictRedis(**args) self.reader_client = self.writer_client - def get_mutex(self, key): + def get_mutex(self, key: KeyType) -> Optional[_RedisLockWrapper]: if self.distributed_lock: return _RedisLockWrapper( self.writer_client.lock( @@ -208,25 +256,27 @@ def get_mutex(self, key): else: return None - def get_serialized(self, key): + def get_serialized(self, key: KeyType) -> SerializedReturnType: value = self.reader_client.get(key) if value is None: return NO_VALUE - return value + return cast(SerializedReturnType, value) - def get_serialized_multi(self, keys): + def get_serialized_multi( + self, keys: Sequence[KeyType] + ) -> Sequence[SerializedReturnType]: if not keys: return [] values = self.reader_client.mget(keys) return [v if v is not None else NO_VALUE for v in values] - def set_serialized(self, key, value): + def set_serialized(self, key: KeyType, value: bytes) -> None: if self.redis_expiration_time: self.writer_client.setex(key, self.redis_expiration_time, value) else: self.writer_client.set(key, value) - def set_serialized_multi(self, mapping): + def set_serialized_multi(self, mapping: Mapping[KeyType, bytes]) -> None: if not self.redis_expiration_time: self.writer_client.mset(mapping) else: @@ -235,23 +285,23 @@ def 
set_serialized_multi(self, mapping): pipe.setex(key, self.redis_expiration_time, value) pipe.execute() - def delete(self, key): + def delete(self, key: KeyType) -> None: self.writer_client.delete(key) - def delete_multi(self, keys): + def delete_multi(self, keys: Sequence[KeyType]) -> None: self.writer_client.delete(*keys) -class _RedisLockWrapper: +class _RedisLockWrapper(CacheMutex): __slots__ = ("mutex", "__weakref__") - def __init__(self, mutex: typing.Any): + def __init__(self, mutex: Any): self.mutex = mutex - def acquire(self, wait: bool = True) -> typing.Any: + def acquire(self, wait: bool = True) -> Any: return self.mutex.acquire(blocking=wait) - def release(self) -> typing.Any: + def release(self) -> Any: return self.mutex.release() def locked(self) -> bool: @@ -356,7 +406,7 @@ class RedisSentinelBackend(RedisBackend): """ - def __init__(self, arguments): + def __init__(self, arguments: RedisKwargs_Sentinel): arguments = arguments.copy() self.sentinels = arguments.pop("sentinels", None) @@ -371,7 +421,7 @@ def __init__(self, arguments): } ) - def _imports(self): + def _imports(self) -> None: # defer imports until backend is used global redis import redis.sentinel # noqa @@ -545,17 +595,17 @@ class RedisClusterBackend(RedisBackend): """ - def __init__(self, arguments): + def __init__(self, arguments: RedisKwargs_Cluster): arguments = arguments.copy() self.startup_nodes = arguments.pop("startup_nodes", None) super().__init__(arguments) - def _imports(self): + def _imports(self) -> None: global redis import redis.cluster - def _create_client(self): - redis_cluster: redis.cluster.RedisCluster[typing.Any] + def _create_client(self) -> None: + redis_cluster: redis.cluster.RedisCluster[Any] if self.url is not None: redis_cluster = redis.cluster.RedisCluster.from_url( self.url, **self.connection_kwargs @@ -565,5 +615,5 @@ def _create_client(self): startup_nodes=self.startup_nodes, **self.connection_kwargs, ) - self.writer_client = 
typing.cast("redis.Redis[bytes]", redis_cluster) + self.writer_client = cast("redis.Redis[bytes]", redis_cluster) self.reader_client = self.writer_client diff --git a/tests/cache/test_redis_backend.py b/tests/cache/test_redis_backend.py index 01472a9..b2990f1 100644 --- a/tests/cache/test_redis_backend.py +++ b/tests/cache/test_redis_backend.py @@ -7,6 +7,7 @@ import pytest +from dogpile.cache.backends.redis import RedisKwargs from dogpile.cache.region import _backend_loader from dogpile.testing import eq_ from dogpile.testing.fixtures import _GenericBackendFixture @@ -147,7 +148,7 @@ def _test_helper(self, mock_obj, expected_args, connection_args=None): def test_connect_with_defaults(self, MockStrictRedis): # The defaults, used if keys are missing from the arguments dict. - arguments = { + arguments: RedisKwargs = { "host": "localhost", "port": 6379, "db": 0, @@ -157,7 +158,7 @@ def test_connect_with_defaults(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_basics(self, MockStrictRedis): - arguments = { + arguments: RedisKwargs = { "host": "127.0.0.1", "port": 6379, "db": 0, @@ -167,7 +168,7 @@ def test_connect_with_basics(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_password(self, MockStrictRedis): - arguments = { + arguments: RedisKwargs = { "host": "127.0.0.1", "password": "some password", "port": 6379, @@ -182,7 +183,7 @@ def test_connect_with_password(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_username_and_password(self, MockStrictRedis): - arguments = { + arguments: RedisKwargs = { "host": "127.0.0.1", "username": "redis", "password": "some password", @@ -192,7 +193,7 @@ def test_connect_with_username_and_password(self, MockStrictRedis): self._test_helper(MockStrictRedis, arguments) def test_connect_with_socket_timeout(self, MockStrictRedis): - arguments = { + arguments: RedisKwargs = { 
"host": "127.0.0.1", "port": 6379, "socket_timeout": 0.5, @@ -203,7 +204,7 @@ def test_connect_with_socket_timeout(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_socket_connect_timeout(self, MockStrictRedis): - arguments = { + arguments: RedisKwargs = { "host": "127.0.0.1", "port": 6379, "socket_timeout": 1.0, @@ -214,7 +215,7 @@ def test_connect_with_socket_connect_timeout(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_socket_keepalive(self, MockStrictRedis): - arguments = { + arguments: RedisKwargs = { "host": "127.0.0.1", "port": 6379, "socket_keepalive": True, @@ -225,7 +226,7 @@ def test_connect_with_socket_keepalive(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_socket_keepalive_options(self, MockStrictRedis): - arguments = { + arguments: RedisKwargs = { "host": "127.0.0.1", "port": 6379, "socket_keepalive": True, @@ -239,18 +240,23 @@ def test_connect_with_socket_keepalive_options(self, MockStrictRedis): def test_connect_with_connection_pool(self, MockStrictRedis): pool = Mock() - arguments = {"connection_pool": pool, "socket_timeout": 0.5} + arguments: RedisKwargs = { + "connection_pool": pool, + "socket_timeout": 0.5, + } expected_args = {"connection_pool": pool} self._test_helper( MockStrictRedis, expected_args, connection_args=arguments ) def test_connect_with_url(self, MockStrictRedis): - arguments = {"url": "redis://redis:password@127.0.0.1:6379/0"} + arguments: RedisKwargs = { + "url": "redis://redis:password@127.0.0.1:6379/0" + } self._test_helper(MockStrictRedis.from_url, arguments) def test_extra_arbitrary_args(self, MockStrictRedis): - arguments = { + arguments: RedisKwargs = { "url": "redis://redis:password@127.0.0.1:6379/0", "connection_kwargs": { "ssl": True, From 2d81ec14df0fc89d273a90c5defa53b73d1cb28c Mon Sep 17 00:00:00 2001 From: jonathan vanasco Date: Mon, 4 Aug 2025 21:59:36 
-0400 Subject: [PATCH 2/9] extend proxy docs with typing include missing NO_VALUE on docstrings Change-Id: I79c4657d5ea8778e2932bae3e81725af27bbef85 --- dogpile/cache/api.py | 6 ++++-- dogpile/cache/proxy.py | 10 +++++----- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/dogpile/cache/api.py b/dogpile/cache/api.py index 6fbe842..f0c79dd 100644 --- a/dogpile/cache/api.py +++ b/dogpile/cache/api.py @@ -348,7 +348,8 @@ def get_serialized_multi( :meth:`.CacheRegion.get_multi` method, which will also be processed by the "key mangling" function if one was present. - :return: list of bytes objects + :return: list of bytes objects or the :data:`.NO_VALUE` constant + if not present. The default implementation of this method for :class:`.CacheBackend` returns the value of the :meth:`.CacheBackend.get_multi` method. @@ -543,7 +544,8 @@ def get_serialized_multi( :meth:`.CacheRegion.get_multi` method, which will also be processed by the "key mangling" function if one was present. - :return: list of bytes objects + :return: list of bytes objects or the :data:`.NO_VALUE` + constant if not present. .. versionadded:: 1.1 diff --git a/dogpile/cache/proxy.py b/dogpile/cache/proxy.py index 08f598e..6d096c7 100644 --- a/dogpile/cache/proxy.py +++ b/dogpile/cache/proxy.py @@ -34,24 +34,24 @@ class ProxyBackend(CacheBackend): from dogpile.cache.proxy import ProxyBackend class MyFirstProxy(ProxyBackend): - def get_serialized(self, key): + def get_serialized(self, key: KeyType) -> SerializedReturnType: # ... custom code goes here ... return self.proxied.get_serialized(key) - def get(self, key): + def get(self, key: KeyType) -> BackendFormatted: # ... custom code goes here ... return self.proxied.get(key) - def set(self, key, value): + def set(self, key: KeyType, value: BackendSetType) -> None: # ... custom code goes here ... 
self.proxied.set(key) class MySecondProxy(ProxyBackend): - def get_serialized(self, key): + def get_serialized(self, key: KeyType) -> SerializedReturnType: # ... custom code goes here ... return self.proxied.get_serialized(key) - def get(self, key): + def get(self, key: KeyType) -> BackendFormatted: # ... custom code goes here ... return self.proxied.get(key) From ca15a91a712d50c770ce5d6e19e5200cb2393f0b Mon Sep 17 00:00:00 2001 From: jonathan vanasco Date: Mon, 4 Aug 2025 23:11:29 -0400 Subject: [PATCH 3/9] more typing Change-Id: I8cd5d4fbf7eabf03e48f5434e6c50dacd07dfd3c --- dogpile/cache/region.py | 50 +++++++++++++++++++++++++++++------------ dogpile/cache/util.py | 19 +++++++++------- 2 files changed, 47 insertions(+), 22 deletions(-) diff --git a/dogpile/cache/region.py b/dogpile/cache/region.py index a446e4c..75d7c72 100644 --- a/dogpile/cache/region.py +++ b/dogpile/cache/region.py @@ -12,6 +12,8 @@ from typing import Any from typing import Callable from typing import cast +from typing import Dict +from typing import List from typing import Mapping from typing import Optional from typing import Sequence @@ -25,6 +27,7 @@ from . 
import exception from .api import BackendArguments from .api import BackendFormatted +from .api import CacheBackend from .api import CachedValue from .api import CacheMutex from .api import CacheReturnType @@ -553,7 +556,7 @@ def wrap(self, proxy: Union[ProxyBackend, Type[ProxyBackend]]) -> None: self.backend = proxy_instance.wrap(self.backend) - def _mutex(self, key): + def _mutex(self, key: KeyType) -> Any: return self._lock_registry.get(key) class _LockWrapper(CacheMutex): @@ -571,7 +574,7 @@ def release(self): def locked(self): return self.lock.locked() - def _create_mutex(self, key): + def _create_mutex(self, key: KeyType) -> Optional[Any]: mutex = self.backend.get_mutex(key) if mutex is not None: return mutex @@ -579,10 +582,10 @@ def _create_mutex(self, key): return self._LockWrapper() # cached value - _actual_backend = None + _actual_backend: Optional[CacheBackend] = None @property - def actual_backend(self): + def actual_backend(self) -> CacheBackend: """Return the ultimate backend underneath any proxies. The backend might be the result of one or more ``proxy.wrap`` @@ -596,9 +599,11 @@ def actual_backend(self): while hasattr(_backend, "proxied"): _backend = _backend.proxied self._actual_backend = _backend + if TYPE_CHECKING: + assert self._actual_backend return self._actual_backend - def invalidate(self, hard=True): + def invalidate(self, hard: bool = True) -> None: """Invalidate this :class:`.CacheRegion`. The default invalidation system works by setting @@ -648,7 +653,11 @@ def invalidate(self, hard=True): """ self.region_invalidator.invalidate(hard) - def configure_from_config(self, config_dict, prefix): + def configure_from_config( + self, + config_dict: Dict[str, Any], + prefix: str, + ) -> Self: """Configure from a configuration dictionary and a prefix. @@ -680,20 +689,20 @@ def configure_from_config(self, config_dict, prefix): ), _config_argument_dict=config_dict, _config_prefix="%sarguments." 
% prefix, - wrap=config_dict.get("%swrap" % prefix, None), + wrap=config_dict.get("%swrap" % prefix, ()), replace_existing_backend=config_dict.get( "%sreplace_existing_backend" % prefix, False ), ) @memoized_property - def backend(self): + def backend(self) -> CacheBackend: raise exception.RegionNotConfigured( "No backend is configured on this region." ) @property - def is_configured(self): + def is_configured(self) -> bool: """Return True if the backend has been configured via the :meth:`.CacheRegion.configure` method already. @@ -823,7 +832,11 @@ def _get_cache_value( ) return value - def _unexpired_value_fn(self, expiration_time, ignore_expiration): + def _unexpired_value_fn( + self, + expiration_time: Optional[float], + ignore_expiration: bool = False, + ) -> Callable[[CacheReturnType], CacheReturnType]: if ignore_expiration: return lambda value: value else: @@ -849,7 +862,12 @@ def value_fn(value): return value_fn - def get_multi(self, keys, expiration_time=None, ignore_expiration=False): + def get_multi( + self, + keys: Sequence[KeyType], + expiration_time: Optional[float] = None, + ignore_expiration: bool = False, + ) -> List[Union[ValuePayload, NoValueType]]: """Return multiple values from the cache, based on the given keys. Returns values as a list matching the keys given. 
@@ -900,7 +918,7 @@ def get_multi(self, keys, expiration_time=None, ignore_expiration=False): ] @contextlib.contextmanager - def _log_time(self, keys): + def _log_time(self, keys: Sequence[KeyType]): start_time = time.time() yield seconds = time.time() - start_time @@ -910,7 +928,11 @@ def _log_time(self, keys): {"seconds": seconds, "keys": repr_obj(keys)}, ) - def _is_cache_miss(self, value, orig_key): + def _is_cache_miss( + self, + value: CacheReturnType, + orig_key: KeyType, + ) -> bool: if value is NO_VALUE: log.debug("No value present for key: %r", orig_key) elif value.metadata["v"] != value_version: @@ -1152,7 +1174,7 @@ def get_or_create_multi( """ - def get_value(key): + def get_value(key: KeyType) -> Tuple[Any, Union[float, int]]: value = values.get(key, NO_VALUE) if self._is_cache_miss(value, orig_key): diff --git a/dogpile/cache/util.py b/dogpile/cache/util.py index 7fddaa5..c1ca8ba 100644 --- a/dogpile/cache/util.py +++ b/dogpile/cache/util.py @@ -1,10 +1,12 @@ from hashlib import sha1 +from typing import Callable +from .api import KeyType from ..util import compat from ..util import langhelpers -def function_key_generator(namespace, fn, to_str=str): +def function_key_generator(namespace: str, fn, to_str=str) -> Callable: """Return a function that generates a string key, based on a given function as well as arguments to the returned function itself. 
@@ -45,7 +47,7 @@ def generate_key(*args, **kw): return generate_key -def function_multi_key_generator(namespace, fn, to_str=str): +def function_multi_key_generator(namespace: str, fn, to_str=str) -> Callable: if namespace is None: namespace = "%s:%s" % (fn.__module__, fn.__name__) else: @@ -67,7 +69,7 @@ def generate_keys(*args, **kw): return generate_keys -def kwarg_function_key_generator(namespace, fn, to_str=str): +def kwarg_function_key_generator(namespace: str, fn, to_str=str) -> Callable: """Return a function that generates a string key, based on a given function as well as arguments to the returned function itself. @@ -127,16 +129,17 @@ def generate_key(*args, **kwargs): return generate_key -def sha1_mangle_key(key): +def sha1_mangle_key(key: KeyType) -> str: """a SHA1 key mangler.""" - if isinstance(key, str): - key = key.encode("utf-8") + bkey = key.encode("utf-8") if isinstance(key, str) else key - return sha1(key).hexdigest() + return sha1(bkey).hexdigest() -def length_conditional_mangler(length, mangler): +def length_conditional_mangler( + length: int, mangler: Callable[[KeyType], str] +) -> Callable[[KeyType], str]: """a key mangler that mangles if the length of the key is past a certain threshold. 
From 1821cb91da5782cb9beb3c54e06188b089324e72 Mon Sep 17 00:00:00 2001 From: jonathan vanasco Date: Tue, 5 Aug 2025 09:53:04 -0400 Subject: [PATCH 4/9] remove CacheMutex as base class Change-Id: Ib1c0a38f072676f33254a91312e7237c129850f2 --- dogpile/cache/backends/redis.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/dogpile/cache/backends/redis.py b/dogpile/cache/backends/redis.py index 01f266a..79c152c 100644 --- a/dogpile/cache/backends/redis.py +++ b/dogpile/cache/backends/redis.py @@ -23,7 +23,6 @@ from typing_extensions import NotRequired from ..api import BytesBackend -from ..api import CacheMutex from ..api import KeyType from ..api import NO_VALUE from ..api import SerializedReturnType @@ -292,7 +291,7 @@ def delete_multi(self, keys: Sequence[KeyType]) -> None: self.writer_client.delete(*keys) -class _RedisLockWrapper(CacheMutex): +class _RedisLockWrapper: __slots__ = ("mutex", "__weakref__") def __init__(self, mutex: Any): From 23e69edcfe2fcb049d89b505ef7af45cb04b1073 Mon Sep 17 00:00:00 2001 From: jonathan vanasco Date: Sun, 10 Aug 2025 16:43:33 -0400 Subject: [PATCH 5/9] * switch TypedDicts to use total=False instead of NotRequired * applied typing to other backends * standardized all backends to operate on a copy of the Dict passed in Some backends did this, others did not. 
Change-Id: I90084c6d0ad23d7bb9988664c2cef1a739fa3ff1 --- dogpile/cache/backends/file.py | 23 ++++- dogpile/cache/backends/memcached.py | 138 ++++++++++++++++++++-------- dogpile/cache/backends/memory.py | 15 ++- dogpile/cache/backends/null.py | 8 +- dogpile/cache/backends/redis.py | 108 +++++++++++----------- dogpile/cache/backends/valkey.py | 109 +++++++++++++++------- 6 files changed, 263 insertions(+), 138 deletions(-) diff --git a/dogpile/cache/backends/file.py b/dogpile/cache/backends/file.py index 06047ae..1d70e5c 100644 --- a/dogpile/cache/backends/file.py +++ b/dogpile/cache/backends/file.py @@ -6,10 +6,15 @@ """ +from __future__ import annotations + from contextlib import contextmanager import dbm import os import threading +from typing import Literal +from typing import TypedDict +from typing import Union from ..api import BytesBackend from ..api import NO_VALUE @@ -18,6 +23,13 @@ __all__ = ["DBMBackend", "FileLock", "AbstractFileLock"] +class DBMBackendArguments(TypedDict, total=False): + filename: str + lock_factory: "AbstractFileLock" + rw_lockfile: Union[str, Literal[False], None] + dogpile_lockfile: Union[str, Literal[False], None] + + class DBMBackend(BytesBackend): """A file-backend using a dbm file to store keys. 
@@ -137,18 +149,19 @@ def release_write_lock(self): """ - def __init__(self, arguments): + def __init__(self, arguments: DBMBackendArguments): + _arguments = arguments.copy() self.filename = os.path.abspath( - os.path.normpath(arguments["filename"]) + os.path.normpath(_arguments["filename"]) ) dir_, filename = os.path.split(self.filename) - self.lock_factory = arguments.get("lock_factory", FileLock) + self.lock_factory = _arguments.get("lock_factory", FileLock) self._rw_lock = self._init_lock( - arguments.get("rw_lockfile"), ".rw.lock", dir_, filename + _arguments.get("rw_lockfile"), ".rw.lock", dir_, filename ) self._dogpile_lock = self._init_lock( - arguments.get("dogpile_lockfile"), + _arguments.get("dogpile_lockfile"), ".dogpile.lock", dir_, filename, diff --git a/dogpile/cache/backends/memcached.py b/dogpile/cache/backends/memcached.py index 1efe366..39c53a4 100644 --- a/dogpile/cache/backends/memcached.py +++ b/dogpile/cache/backends/memcached.py @@ -6,12 +6,18 @@ """ +from __future__ import annotations + import random import threading import time import typing from typing import Any from typing import Mapping +from typing import Optional +from typing import Sequence +from typing import TypedDict +from typing import Union import warnings from ..api import CacheBackend @@ -19,6 +25,8 @@ from ... 
import util if typing.TYPE_CHECKING: + import ssl + import bmemcached import memcache import pylibmc @@ -40,6 +48,50 @@ ) +class GenericMemcachedBackendArguments(TypedDict, total=False): + url: str + distributed_lock: bool + lock_timeout: int + + +class MemcachedArgsArguments(GenericMemcachedBackendArguments, total=False): + min_compress_len: int + memcached_expire_time: int + + +class MemcachedBackendArguments(GenericMemcachedBackendArguments, total=False): + min_compress_len: int + memcached_expire_time: int + dead_retry: int + socket_timeout: int + + +class BMemcachedBackendArguments( + GenericMemcachedBackendArguments, total=False +): + username: Optional[str] + password: Optional[bool] + tls_context: Optional["ssl.SSLContext"] + + +class PyMemcacheBackendArguments( + GenericMemcachedBackendArguments, total=False +): + serde: Optional[Any] + default_noreply: bool + tls_context: Optional["ssl.SSLContext"] + socket_keepalive: "pymemcache.client.base.KeepaliveOpts" + enable_retry_client: bool + retry_attempts: Optional[int] + retry_delay: Union[int, float, None] + retry_for: Optional[Sequence[Exception]] + do_not_retry_for: Optional[Sequence[Exception]] + hashclient_retry_attempts: int + hashclient_retry_timeout: int + dead_timeout: int + memcached_expire_time: int + + class MemcachedLock: """Simple distributed lock using memcached.""" @@ -116,16 +168,17 @@ class GenericMemcachedBackend(CacheBackend): serializer = None deserializer = None - def __init__(self, arguments): + def __init__(self, arguments: GenericMemcachedBackendArguments): + _arguments = arguments.copy() self._imports() # using a plain threading.local here. threading.local # automatically deletes the __dict__ when a thread ends, # so the idea is that this is superior to pylibmc's # own ThreadMappedPool which doesn't handle this # automatically. 
- self.url = util.to_list(arguments["url"]) - self.distributed_lock = arguments.get("distributed_lock", False) - self.lock_timeout = arguments.get("lock_timeout", 0) + self.url = util.to_list(_arguments["url"]) + self.distributed_lock = _arguments.get("distributed_lock", False) + self.lock_timeout = _arguments.get("lock_timeout", 0) def has_lock_timeout(self): return self.lock_timeout != 0 @@ -225,17 +278,18 @@ class MemcacheArgs(GenericMemcachedBackend): of the value using the compressor """ - def __init__(self, arguments): - self.min_compress_len = arguments.get("min_compress_len", 0) + def __init__(self, arguments: MemcachedArgsArguments): + _arguments = arguments.copy() + self.min_compress_len = _arguments.get("min_compress_len", 0) self.set_arguments = {} - if "memcached_expire_time" in arguments: - self.set_arguments["time"] = arguments["memcached_expire_time"] - if "min_compress_len" in arguments: - self.set_arguments["min_compress_len"] = arguments[ + if "memcached_expire_time" in _arguments: + self.set_arguments["time"] = _arguments["memcached_expire_time"] + if "min_compress_len" in _arguments: + self.set_arguments["min_compress_len"] = _arguments[ "min_compress_len" ] - super(MemcacheArgs, self).__init__(arguments) + super(MemcacheArgs, self).__init__(_arguments) class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend): @@ -273,9 +327,10 @@ class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend): """ def __init__(self, arguments): - self.binary = arguments.get("binary", False) - self.behaviors = arguments.get("behaviors", {}) - super(PylibmcBackend, self).__init__(arguments) + _arguments = arguments.copy() + self.binary = _arguments.get("binary", False) + self.behaviors = _arguments.get("behaviors", {}) + super(PylibmcBackend, self).__init__(_arguments) def _imports(self): global pylibmc @@ -323,10 +378,11 @@ class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend): """ - def __init__(self, arguments): - self.dead_retry = 
arguments.get("dead_retry", 30) - self.socket_timeout = arguments.get("socket_timeout", 3) - super(MemcachedBackend, self).__init__(arguments) + def __init__(self, arguments: MemcachedBackendArguments): + _arguments = arguments.copy() + self.dead_retry = _arguments.get("dead_retry", 30) + self.socket_timeout = _arguments.get("socket_timeout", 3) + super(MemcachedBackend, self).__init__(_arguments) def _imports(self): global memcache @@ -399,11 +455,12 @@ class BMemcachedBackend(GenericMemcachedBackend): """ - def __init__(self, arguments): - self.username = arguments.get("username", None) - self.password = arguments.get("password", None) - self.tls_context = arguments.get("tls_context", None) - super(BMemcachedBackend, self).__init__(arguments) + def __init__(self, arguments: BMemcachedBackendArguments): + _arguments = arguments.copy() + self.username = _arguments.get("username", None) + self.password = _arguments.get("password", None) + self.tls_context = _arguments.get("tls_context", None) + super(BMemcachedBackend, self).__init__(_arguments) def _imports(self): global bmemcached @@ -588,25 +645,26 @@ class PyMemcacheBackend(GenericMemcachedBackend): """ # noqa E501 - def __init__(self, arguments): - super().__init__(arguments) - - self.serde = arguments.get("serde", pymemcache.serde.pickle_serde) - self.default_noreply = arguments.get("default_noreply", False) - self.tls_context = arguments.get("tls_context", None) - self.socket_keepalive = arguments.get("socket_keepalive", None) - self.enable_retry_client = arguments.get("enable_retry_client", False) - self.retry_attempts = arguments.get("retry_attempts", None) - self.retry_delay = arguments.get("retry_delay", None) - self.retry_for = arguments.get("retry_for", None) - self.do_not_retry_for = arguments.get("do_not_retry_for", None) - self.hashclient_retry_attempts = arguments.get( + def __init__(self, arguments: PyMemcacheBackendArguments): + _arguments = arguments.copy() + super().__init__(_arguments) + + 
self.serde = _arguments.get("serde", pymemcache.serde.pickle_serde) + self.default_noreply = _arguments.get("default_noreply", False) + self.tls_context = _arguments.get("tls_context", None) + self.socket_keepalive = _arguments.get("socket_keepalive", None) + self.enable_retry_client = _arguments.get("enable_retry_client", False) + self.retry_attempts = _arguments.get("retry_attempts", None) + self.retry_delay = _arguments.get("retry_delay", None) + self.retry_for = _arguments.get("retry_for", None) + self.do_not_retry_for = _arguments.get("do_not_retry_for", None) + self.hashclient_retry_attempts = _arguments.get( "hashclient_retry_attempts", 2 ) - self.hashclient_retry_timeout = arguments.get( + self.hashclient_retry_timeout = _arguments.get( "hashclient_retry_timeout", 1 ) - self.dead_timeout = arguments.get("hashclient_dead_timeout", 60) + self.dead_timeout = _arguments.get("hashclient_dead_timeout", 60) if ( self.retry_delay is not None or self.retry_attempts is not None @@ -618,8 +676,8 @@ def __init__(self, arguments): "will be ignored" ) self.set_arguments = {} - if "memcached_expire_time" in arguments: - self.set_arguments["expire"] = arguments["memcached_expire_time"] + if "memcached_expire_time" in _arguments: + self.set_arguments["expire"] = _arguments["memcached_expire_time"] def _imports(self): global pymemcache diff --git a/dogpile/cache/backends/memory.py b/dogpile/cache/backends/memory.py index f62c9d4..f5f3bda 100644 --- a/dogpile/cache/backends/memory.py +++ b/dogpile/cache/backends/memory.py @@ -10,11 +10,21 @@ """ +from __future__ import annotations + +from typing import Any +from typing import Dict +from typing import TypedDict + from ..api import CacheBackend from ..api import DefaultSerialization from ..api import NO_VALUE +class MemoryBackendArguments(TypedDict): + cache_dict: Dict[Any, Any] + + class MemoryBackend(CacheBackend): """A backend that uses a plain dictionary. 
@@ -49,8 +59,9 @@ class MemoryBackend(CacheBackend): """ - def __init__(self, arguments): - self._cache = arguments.pop("cache_dict", {}) + def __init__(self, arguments: MemoryBackendArguments): + _arguments = arguments.copy() + self._cache = _arguments.pop("cache_dict", {}) def get(self, key): return self._cache.get(key, NO_VALUE) diff --git a/dogpile/cache/backends/null.py b/dogpile/cache/backends/null.py index b4ad0fb..566aae0 100644 --- a/dogpile/cache/backends/null.py +++ b/dogpile/cache/backends/null.py @@ -10,6 +10,11 @@ """ +from __future__ import annotations + +from typing import Any +from typing import Dict + from ..api import CacheBackend from ..api import NO_VALUE @@ -41,7 +46,8 @@ class NullBackend(CacheBackend): """ - def __init__(self, arguments): + def __init__(self, arguments: Dict[str, Any]): + # _arguments = arguments.copy() pass def get_mutex(self, key): diff --git a/dogpile/cache/backends/redis.py b/dogpile/cache/backends/redis.py index 79c152c..49992b8 100644 --- a/dogpile/cache/backends/redis.py +++ b/dogpile/cache/backends/redis.py @@ -20,8 +20,6 @@ from typing import TypedDict import warnings -from typing_extensions import NotRequired - from ..api import BytesBackend from ..api import KeyType from ..api import NO_VALUE @@ -36,38 +34,38 @@ __all__ = ("RedisBackend", "RedisSentinelBackend", "RedisClusterBackend") -class RedisKwargs(TypedDict): +class RedisBackendKwargs(TypedDict, total=False): """ TypedDict of kwargs for `RedisBackend` and derived classes .. 
versionadded:: 1.4.1 """ - url: NotRequired[str] - host: NotRequired[str] - username: NotRequired[Optional[str]] - password: NotRequired[Optional[str]] - port: NotRequired[int] - db: NotRequired[int] - redis_expiration_time: NotRequired[int] - distributed_lock: NotRequired[bool] - lock_timeout: NotRequired[int] - socket_timeout: NotRequired[float] - socket_connect_timeout: NotRequired[float] - socket_keepalive: NotRequired[bool] - socket_keepalive_options: NotRequired[Dict] - lock_sleep: NotRequired[int] - connection_pool: NotRequired["redis.ConnectionPool"] - thread_local_lock: NotRequired[bool] - connection_kwargs: NotRequired[Dict[str, Any]] - - -class RedisKwargs_Sentinel(RedisKwargs): + url: Optional[str] + host: str + username: Optional[str] + password: Optional[str] + port: int + db: int + redis_expiration_time: int + distributed_lock: bool + lock_timeout: int + socket_timeout: Optional[float] + socket_connect_timeout: Optional[float] + socket_keepalive: bool + socket_keepalive_options: Optional[Dict[str, Any]] + lock_sleep: float + connection_pool: Optional["redis.ConnectionPool"] + thread_local_lock: bool + connection_kwargs: Dict[str, Any] + + +class RedisSentinelBackendKwargs(RedisBackendKwargs): sentinels: List[Tuple[str, str]] - service_name: NotRequired[str] - sentinel_kwargs: NotRequired[Dict[str, Any]] + service_name: str + sentinel_kwargs: Dict[str, Any] -class RedisKwargs_Cluster(RedisKwargs): +class RedisClusterBackendKwargs(RedisBackendKwargs): startup_nodes: List["redis.cluster.ClusterNode"] @@ -166,28 +164,28 @@ class RedisBackend(BytesBackend): """ - def __init__(self, arguments: RedisKwargs): - arguments = arguments.copy() + def __init__(self, arguments: RedisBackendKwargs): + _arguments = arguments.copy() self._imports() - self.url = arguments.pop("url", None) - self.host = arguments.pop("host", "localhost") - self.username = arguments.pop("username", None) - self.password = arguments.pop("password", None) - self.port = 
arguments.pop("port", 6379) - self.db = arguments.pop("db", 0) - self.distributed_lock = arguments.pop("distributed_lock", False) - self.socket_timeout = arguments.pop("socket_timeout", None) - self.socket_connect_timeout = arguments.pop( + self.url = _arguments.pop("url", None) + self.host = _arguments.pop("host", "localhost") + self.username = _arguments.pop("username", None) + self.password = _arguments.pop("password", None) + self.port = _arguments.pop("port", 6379) + self.db = _arguments.pop("db", 0) + self.distributed_lock = _arguments.pop("distributed_lock", False) + self.socket_timeout = _arguments.pop("socket_timeout", None) + self.socket_connect_timeout = _arguments.pop( "socket_connect_timeout", None ) - self.socket_keepalive = arguments.pop("socket_keepalive", False) - self.socket_keepalive_options = arguments.pop( + self.socket_keepalive = _arguments.pop("socket_keepalive", False) + self.socket_keepalive_options = _arguments.pop( "socket_keepalive_options", None ) - self.lock_timeout = arguments.pop("lock_timeout", None) - self.lock_sleep = arguments.pop("lock_sleep", 0.1) - self.thread_local_lock = arguments.pop("thread_local_lock", True) - self.connection_kwargs = arguments.pop("connection_kwargs", {}) + self.lock_timeout = _arguments.pop("lock_timeout", None) + self.lock_sleep = _arguments.pop("lock_sleep", 0.1) + self.thread_local_lock = _arguments.pop("thread_local_lock", True) + self.connection_kwargs = _arguments.pop("connection_kwargs", {}) if self.distributed_lock and self.thread_local_lock: warnings.warn( @@ -195,8 +193,8 @@ def __init__(self, arguments: RedisKwargs): "set to False when distributed_lock is True" ) - self.redis_expiration_time = arguments.pop("redis_expiration_time", 0) - self.connection_pool = arguments.pop("connection_pool", None) + self.redis_expiration_time = _arguments.pop("redis_expiration_time", 0) + self.connection_pool = _arguments.pop("connection_pool", None) self._create_client() def _imports(self) -> None: @@ 
-405,18 +403,18 @@ class RedisSentinelBackend(RedisBackend): """ - def __init__(self, arguments: RedisKwargs_Sentinel): - arguments = arguments.copy() + def __init__(self, arguments: RedisSentinelBackendKwargs): + _arguments = arguments.copy() - self.sentinels = arguments.pop("sentinels", None) - self.service_name = arguments.pop("service_name", "mymaster") - self.sentinel_kwargs = arguments.pop("sentinel_kwargs", {}) + self.sentinels = _arguments.pop("sentinels", None) + self.service_name = _arguments.pop("service_name", "mymaster") + self.sentinel_kwargs = _arguments.pop("sentinel_kwargs", {}) super().__init__( arguments={ "distributed_lock": True, "thread_local_lock": False, - **arguments, + **_arguments, } ) @@ -594,10 +592,10 @@ class RedisClusterBackend(RedisBackend): """ - def __init__(self, arguments: RedisKwargs_Cluster): - arguments = arguments.copy() - self.startup_nodes = arguments.pop("startup_nodes", None) - super().__init__(arguments) + def __init__(self, arguments: RedisClusterBackendKwargs): + _arguments = arguments.copy() + self.startup_nodes = _arguments.pop("startup_nodes", None) + super().__init__(_arguments) def _imports(self) -> None: global redis diff --git a/dogpile/cache/backends/valkey.py b/dogpile/cache/backends/valkey.py index 8604173..a3d6d8a 100644 --- a/dogpile/cache/backends/valkey.py +++ b/dogpile/cache/backends/valkey.py @@ -6,13 +6,22 @@ """ -import typing +from __future__ import annotations + +from typing import Any +from typing import cast +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import TYPE_CHECKING +from typing import TypedDict import warnings from ..api import BytesBackend from ..api import NO_VALUE -if typing.TYPE_CHECKING: +if TYPE_CHECKING: import valkey else: # delayed import @@ -21,6 +30,36 @@ __all__ = ("ValkeyBackend", "ValkeySentinelBackend", "ValkeyClusterBackend") +class ValkeyBackendArguments(TypedDict, total=False): + url: Optional[str] 
+ host: str + username: Optional[str] + password: Optional[str] + port: int + db: int + valkey_expiration_time: int + distributed_lock: bool + lock_timeout: Optional[int] + socket_timeout: Optional[float] + socket_connect_timeout: Optional[float] + socket_keepalive: bool + socket_keepalive_options: Optional[Dict[str, Any]] + lock_sleep: Optional[int] + thread_local_lock: bool + connection_kwargs: Dict[str, Any] + connection_pool: Optional["valkey.ConnectionPool"] + + +class ValkeySentinelBackendArguments(ValkeyBackendArguments): + sentinels: Optional[List[Tuple[str, int]]] + service_name: str + sentinel_kwargs: Dict[str, Any] + + +class ValkeyClusterBackendBackendArguments(ValkeyBackendArguments): + startup_nodes: List["valkey.cluster.ClusterNode"] + + class ValkeyBackend(BytesBackend): r"""A `Valkey `_ backend, using the `valkey-py `_ driver. @@ -110,27 +149,27 @@ class ValkeyBackend(BytesBackend): """ def __init__(self, arguments): - arguments = arguments.copy() + _arguments = arguments.copy() self._imports() - self.url = arguments.pop("url", None) - self.host = arguments.pop("host", "localhost") - self.username = arguments.pop("username", None) - self.password = arguments.pop("password", None) - self.port = arguments.pop("port", 6379) - self.db = arguments.pop("db", 0) - self.distributed_lock = arguments.pop("distributed_lock", False) - self.socket_timeout = arguments.pop("socket_timeout", None) - self.socket_connect_timeout = arguments.pop( + self.url = _arguments.pop("url", None) + self.host = _arguments.pop("host", "localhost") + self.username = _arguments.pop("username", None) + self.password = _arguments.pop("password", None) + self.port = _arguments.pop("port", 6379) + self.db = _arguments.pop("db", 0) + self.distributed_lock = _arguments.pop("distributed_lock", False) + self.socket_timeout = _arguments.pop("socket_timeout", None) + self.socket_connect_timeout = _arguments.pop( "socket_connect_timeout", None ) - self.socket_keepalive = 
arguments.pop("socket_keepalive", False) - self.socket_keepalive_options = arguments.pop( + self.socket_keepalive = _arguments.pop("socket_keepalive", False) + self.socket_keepalive_options = _arguments.pop( "socket_keepalive_options", None ) - self.lock_timeout = arguments.pop("lock_timeout", None) - self.lock_sleep = arguments.pop("lock_sleep", 0.1) - self.thread_local_lock = arguments.pop("thread_local_lock", True) - self.connection_kwargs = arguments.pop("connection_kwargs", {}) + self.lock_timeout = _arguments.pop("lock_timeout", None) + self.lock_sleep = _arguments.pop("lock_sleep", 0.1) + self.thread_local_lock = _arguments.pop("thread_local_lock", True) + self.connection_kwargs = _arguments.pop("connection_kwargs", {}) if self.distributed_lock and self.thread_local_lock: warnings.warn( @@ -138,10 +177,10 @@ def __init__(self, arguments): "set to False when distributed_lock is True" ) - self.valkey_expiration_time = arguments.pop( + self.valkey_expiration_time = _arguments.pop( "valkey_expiration_time", 0 ) - self.connection_pool = arguments.pop("connection_pool", None) + self.connection_pool = _arguments.pop("connection_pool", None) self._create_client() def _imports(self): @@ -237,13 +276,13 @@ def delete_multi(self, keys): class _ValkeyLockWrapper: __slots__ = ("mutex", "__weakref__") - def __init__(self, mutex: typing.Any): + def __init__(self, mutex: Any): self.mutex = mutex - def acquire(self, wait: bool = True) -> typing.Any: + def acquire(self, wait: bool = True) -> Any: return self.mutex.acquire(blocking=wait) - def release(self) -> typing.Any: + def release(self) -> Any: return self.mutex.release() def locked(self) -> bool: @@ -348,18 +387,18 @@ class ValkeySentinelBackend(ValkeyBackend): """ - def __init__(self, arguments): - arguments = arguments.copy() + def __init__(self, arguments: ValkeySentinelBackendArguments): + _arguments = arguments.copy() - self.sentinels = arguments.pop("sentinels", None) - self.service_name = 
arguments.pop("service_name", "mymaster") - self.sentinel_kwargs = arguments.pop("sentinel_kwargs", {}) + self.sentinels = _arguments.pop("sentinels", None) + self.service_name = _arguments.pop("service_name", "mymaster") + self.sentinel_kwargs = _arguments.pop("sentinel_kwargs", {}) super().__init__( arguments={ "distributed_lock": True, "thread_local_lock": False, - **arguments, + **_arguments, } ) @@ -537,17 +576,17 @@ class ValkeyClusterBackend(ValkeyBackend): """ # noqa: E501 - def __init__(self, arguments): - arguments = arguments.copy() - self.startup_nodes = arguments.pop("startup_nodes", None) - super().__init__(arguments) + def __init__(self, arguments: ValkeyClusterBackendBackendArguments): + _arguments = arguments.copy() + self.startup_nodes = _arguments.pop("startup_nodes", None) + super().__init__(_arguments) def _imports(self): global valkey import valkey.cluster def _create_client(self): - valkey_cluster: valkey.cluster.ValkeyCluster[typing.Any] # type: ignore # noqa: E501 + valkey_cluster: valkey.cluster.ValkeyCluster[Any] # type: ignore # noqa: E501 if self.url is not None: valkey_cluster = valkey.cluster.ValkeyCluster.from_url( self.url, **self.connection_kwargs @@ -557,5 +596,5 @@ def _create_client(self): startup_nodes=self.startup_nodes, **self.connection_kwargs, ) - self.writer_client = typing.cast(valkey.Valkey[bytes], valkey_cluster) # type: ignore # noqa: E501 + self.writer_client = cast(valkey.Valkey[bytes], valkey_cluster) # type: ignore # noqa: E501 self.reader_client = self.writer_client From 9e4f016dfaf2609ea3555721e4120241e19d92e6 Mon Sep 17 00:00:00 2001 From: jonathan vanasco Date: Mon, 11 Aug 2025 12:20:57 -0400 Subject: [PATCH 6/9] more standardization and typing Change-Id: I4b912cfdb7fdae970bd95d418e059bb9965b5832 --- dogpile/cache/backends/file.py | 4 ++- dogpile/cache/backends/memcached.py | 39 ++++++++++++++++----------- dogpile/cache/backends/memory.py | 3 ++- dogpile/cache/backends/null.py | 2 +- 
dogpile/cache/backends/redis.py | 14 +++++----- dogpile/cache/backends/valkey.py | 20 +++++++------- dogpile/cache/util.py | 19 ++++++++++--- dogpile/testing/fixtures.py | 6 ++++- tests/cache/test_memcached_backend.py | 30 +++++++++++++++------ tests/cache/test_redis_backend.py | 30 ++++++++++++--------- tests/cache/test_region.py | 2 +- tests/cache/test_valkey_backend.py | 6 +++++ 12 files changed, 116 insertions(+), 59 deletions(-) diff --git a/dogpile/cache/backends/file.py b/dogpile/cache/backends/file.py index 1d70e5c..6b9b487 100644 --- a/dogpile/cache/backends/file.py +++ b/dogpile/cache/backends/file.py @@ -12,6 +12,8 @@ import dbm import os import threading +from typing import cast +from typing import Dict from typing import Literal from typing import TypedDict from typing import Union @@ -150,7 +152,7 @@ def release_write_lock(self): """ def __init__(self, arguments: DBMBackendArguments): - _arguments = arguments.copy() + _arguments = cast(Dict, arguments.copy()) self.filename = os.path.abspath( os.path.normpath(_arguments["filename"]) ) diff --git a/dogpile/cache/backends/memcached.py b/dogpile/cache/backends/memcached.py index 39c53a4..a88212a 100644 --- a/dogpile/cache/backends/memcached.py +++ b/dogpile/cache/backends/memcached.py @@ -13,6 +13,8 @@ import time import typing from typing import Any +from typing import cast +from typing import Dict from typing import Mapping from typing import Optional from typing import Sequence @@ -88,7 +90,7 @@ class PyMemcacheBackendArguments( do_not_retry_for: Optional[Sequence[Exception]] hashclient_retry_attempts: int hashclient_retry_timeout: int - dead_timeout: int + hashclient_dead_timeout: int memcached_expire_time: int @@ -169,7 +171,7 @@ class GenericMemcachedBackend(CacheBackend): deserializer = None def __init__(self, arguments: GenericMemcachedBackendArguments): - _arguments = arguments.copy() + _arguments = cast(Dict, arguments.copy()) self._imports() # using a plain threading.local here. 
threading.local # automatically deletes the __dict__ when a thread ends, @@ -279,7 +281,7 @@ class MemcacheArgs(GenericMemcachedBackend): """ def __init__(self, arguments: MemcachedArgsArguments): - _arguments = arguments.copy() + _arguments = cast(Dict, arguments.copy()) self.min_compress_len = _arguments.get("min_compress_len", 0) self.set_arguments = {} @@ -289,7 +291,8 @@ def __init__(self, arguments: MemcachedArgsArguments): self.set_arguments["min_compress_len"] = _arguments[ "min_compress_len" ] - super(MemcacheArgs, self).__init__(_arguments) + _arguments_super = cast(GenericMemcachedBackendArguments, _arguments) + super(MemcacheArgs, self).__init__(_arguments_super) class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend): @@ -327,10 +330,11 @@ class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend): """ def __init__(self, arguments): - _arguments = arguments.copy() + _arguments = cast(Dict, arguments.copy()) self.binary = _arguments.get("binary", False) self.behaviors = _arguments.get("behaviors", {}) - super(PylibmcBackend, self).__init__(_arguments) + _arguments_super = cast(MemcachedArgsArguments, _arguments) + super(PylibmcBackend, self).__init__(_arguments_super) def _imports(self): global pylibmc @@ -379,10 +383,11 @@ class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend): """ def __init__(self, arguments: MemcachedBackendArguments): - _arguments = arguments.copy() + _arguments = cast(Dict, arguments.copy()) self.dead_retry = _arguments.get("dead_retry", 30) self.socket_timeout = _arguments.get("socket_timeout", 3) - super(MemcachedBackend, self).__init__(_arguments) + _arguments_super = cast(MemcachedArgsArguments, _arguments) + super(MemcachedBackend, self).__init__(_arguments_super) def _imports(self): global memcache @@ -456,11 +461,12 @@ class BMemcachedBackend(GenericMemcachedBackend): """ def __init__(self, arguments: BMemcachedBackendArguments): - _arguments = arguments.copy() + _arguments = cast(Dict, arguments.copy()) 
self.username = _arguments.get("username", None) self.password = _arguments.get("password", None) self.tls_context = _arguments.get("tls_context", None) - super(BMemcachedBackend, self).__init__(_arguments) + _arguments_super = cast(GenericMemcachedBackendArguments, _arguments) + super(BMemcachedBackend, self).__init__(_arguments_super) def _imports(self): global bmemcached @@ -616,7 +622,7 @@ class PyMemcacheBackend(GenericMemcachedBackend): .. versionadded:: 1.1.5 - :param dead_timeout: Time in seconds before attempting to add a node + :param hashclient_dead_timeout: Time in seconds before attempting to add a node back in the pool in the HashClient's internal mechanisms. .. versionadded:: 1.1.5 @@ -646,8 +652,9 @@ class PyMemcacheBackend(GenericMemcachedBackend): """ # noqa E501 def __init__(self, arguments: PyMemcacheBackendArguments): - _arguments = arguments.copy() - super().__init__(_arguments) + _arguments = cast(Dict, arguments.copy()) + _arguments_super = cast(GenericMemcachedBackendArguments, _arguments) + super().__init__(_arguments_super) self.serde = _arguments.get("serde", pymemcache.serde.pickle_serde) self.default_noreply = _arguments.get("default_noreply", False) @@ -664,7 +671,9 @@ def __init__(self, arguments: PyMemcacheBackendArguments): self.hashclient_retry_timeout = _arguments.get( "hashclient_retry_timeout", 1 ) - self.dead_timeout = _arguments.get("hashclient_dead_timeout", 60) + self.hashclient_dead_timeout = _arguments.get( + "hashclient_dead_timeout", 60 + ) if ( self.retry_delay is not None or self.retry_attempts is not None @@ -690,7 +699,7 @@ def _create_client(self): "tls_context": self.tls_context, "retry_attempts": self.hashclient_retry_attempts, "retry_timeout": self.hashclient_retry_timeout, - "dead_timeout": self.dead_timeout, + "dead_timeout": self.hashclient_dead_timeout, } if self.socket_keepalive is not None: _kwargs.update({"socket_keepalive": self.socket_keepalive}) diff --git a/dogpile/cache/backends/memory.py 
b/dogpile/cache/backends/memory.py index f5f3bda..a5ca635 100644 --- a/dogpile/cache/backends/memory.py +++ b/dogpile/cache/backends/memory.py @@ -13,6 +13,7 @@ from __future__ import annotations from typing import Any +from typing import cast from typing import Dict from typing import TypedDict @@ -60,7 +61,7 @@ class MemoryBackend(CacheBackend): """ def __init__(self, arguments: MemoryBackendArguments): - _arguments = arguments.copy() + _arguments = cast(Dict, arguments.copy()) self._cache = _arguments.pop("cache_dict", {}) def get(self, key): diff --git a/dogpile/cache/backends/null.py b/dogpile/cache/backends/null.py index 566aae0..b84e12e 100644 --- a/dogpile/cache/backends/null.py +++ b/dogpile/cache/backends/null.py @@ -47,7 +47,7 @@ class NullBackend(CacheBackend): """ def __init__(self, arguments: Dict[str, Any]): - # _arguments = arguments.copy() + # _arguments = typing.cast(typing.Dict, arguments.copy()) pass def get_mutex(self, key): diff --git a/dogpile/cache/backends/redis.py b/dogpile/cache/backends/redis.py index 49992b8..9dabfa0 100644 --- a/dogpile/cache/backends/redis.py +++ b/dogpile/cache/backends/redis.py @@ -18,6 +18,7 @@ from typing import Tuple from typing import TYPE_CHECKING from typing import TypedDict +from typing import Union import warnings from ..api import BytesBackend @@ -52,7 +53,7 @@ class RedisBackendKwargs(TypedDict, total=False): socket_timeout: Optional[float] socket_connect_timeout: Optional[float] socket_keepalive: bool - socket_keepalive_options: Optional[Dict[str, Any]] + socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] lock_sleep: float connection_pool: Optional["redis.ConnectionPool"] thread_local_lock: bool @@ -165,7 +166,7 @@ class RedisBackend(BytesBackend): """ def __init__(self, arguments: RedisBackendKwargs): - _arguments = arguments.copy() + _arguments = cast(Dict, arguments.copy()) self._imports() self.url = _arguments.pop("url", None) self.host = _arguments.pop("host", "localhost") @@ -404,12 
+405,10 @@ class RedisSentinelBackend(RedisBackend): """ def __init__(self, arguments: RedisSentinelBackendKwargs): - _arguments = arguments.copy() - + _arguments = cast(Dict, arguments.copy()) self.sentinels = _arguments.pop("sentinels", None) self.service_name = _arguments.pop("service_name", "mymaster") self.sentinel_kwargs = _arguments.pop("sentinel_kwargs", {}) - super().__init__( arguments={ "distributed_lock": True, @@ -593,9 +592,10 @@ class RedisClusterBackend(RedisBackend): """ def __init__(self, arguments: RedisClusterBackendKwargs): - _arguments = arguments.copy() + _arguments = cast(Dict, arguments.copy()) self.startup_nodes = _arguments.pop("startup_nodes", None) - super().__init__(_arguments) + _arguments_super = cast(RedisBackendKwargs, _arguments) + super().__init__(_arguments_super) def _imports(self) -> None: global redis diff --git a/dogpile/cache/backends/valkey.py b/dogpile/cache/backends/valkey.py index a3d6d8a..ffa3da9 100644 --- a/dogpile/cache/backends/valkey.py +++ b/dogpile/cache/backends/valkey.py @@ -12,10 +12,12 @@ from typing import cast from typing import Dict from typing import List +from typing import Mapping from typing import Optional from typing import Tuple from typing import TYPE_CHECKING from typing import TypedDict +from typing import Union import warnings from ..api import BytesBackend @@ -43,7 +45,7 @@ class ValkeyBackendArguments(TypedDict, total=False): socket_timeout: Optional[float] socket_connect_timeout: Optional[float] socket_keepalive: bool - socket_keepalive_options: Optional[Dict[str, Any]] + socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] lock_sleep: Optional[int] thread_local_lock: bool connection_kwargs: Dict[str, Any] @@ -56,7 +58,7 @@ class ValkeySentinelBackendArguments(ValkeyBackendArguments): sentinel_kwargs: Dict[str, Any] -class ValkeyClusterBackendBackendArguments(ValkeyBackendArguments): +class ValkeyClusterBackendArguments(ValkeyBackendArguments): startup_nodes: 
List["valkey.cluster.ClusterNode"] @@ -148,8 +150,8 @@ class ValkeyBackend(BytesBackend): """ - def __init__(self, arguments): - _arguments = arguments.copy() + def __init__(self, arguments: ValkeyBackendArguments): + _arguments = cast(Dict, arguments.copy()) self._imports() self.url = _arguments.pop("url", None) self.host = _arguments.pop("host", "localhost") @@ -388,8 +390,7 @@ class ValkeySentinelBackend(ValkeyBackend): """ def __init__(self, arguments: ValkeySentinelBackendArguments): - _arguments = arguments.copy() - + _arguments = cast(Dict, arguments.copy()) self.sentinels = _arguments.pop("sentinels", None) self.service_name = _arguments.pop("service_name", "mymaster") self.sentinel_kwargs = _arguments.pop("sentinel_kwargs", {}) @@ -576,10 +577,11 @@ class ValkeyClusterBackend(ValkeyBackend): """ # noqa: E501 - def __init__(self, arguments: ValkeyClusterBackendBackendArguments): - _arguments = arguments.copy() + def __init__(self, arguments: ValkeyClusterBackendArguments): + _arguments = cast(Dict, arguments.copy()) self.startup_nodes = _arguments.pop("startup_nodes", None) - super().__init__(_arguments) + _arguments_super = cast(ValkeyBackendArguments, _arguments) + super().__init__(_arguments_super) def _imports(self): global valkey diff --git a/dogpile/cache/util.py b/dogpile/cache/util.py index c1ca8ba..560de14 100644 --- a/dogpile/cache/util.py +++ b/dogpile/cache/util.py @@ -1,4 +1,5 @@ from hashlib import sha1 +from typing import Any from typing import Callable from .api import KeyType @@ -6,7 +7,11 @@ from ..util import langhelpers -def function_key_generator(namespace: str, fn, to_str=str) -> Callable: +def function_key_generator( + namespace: str, + fn: Callable, + to_str: Callable[[Any], str] = str, +) -> Callable: """Return a function that generates a string key, based on a given function as well as arguments to the returned function itself. 
@@ -47,7 +52,11 @@ def generate_key(*args, **kw): return generate_key -def function_multi_key_generator(namespace: str, fn, to_str=str) -> Callable: +def function_multi_key_generator( + namespace: str, + fn: Callable, + to_str: Callable[[Any], str] = str, +) -> Callable: if namespace is None: namespace = "%s:%s" % (fn.__module__, fn.__name__) else: @@ -69,7 +78,11 @@ def generate_keys(*args, **kw): return generate_keys -def kwarg_function_key_generator(namespace: str, fn, to_str=str) -> Callable: +def kwarg_function_key_generator( + namespace: str, + fn: Callable, + to_str: Callable[[Any], str] = str, +) -> Callable: """Return a function that generates a string key, based on a given function as well as arguments to the returned function itself. diff --git a/dogpile/testing/fixtures.py b/dogpile/testing/fixtures.py index 3c364a4..db69865 100644 --- a/dogpile/testing/fixtures.py +++ b/dogpile/testing/fixtures.py @@ -1,5 +1,7 @@ # mypy: ignore-errors +from __future__ import annotations + import collections import itertools import json @@ -7,6 +9,8 @@ from threading import Lock from threading import Thread import time +from typing import Any +from typing import Dict import uuid import pytest @@ -50,7 +54,7 @@ def teardown_method(self, method): def _check_backend_available(cls, backend): pass - region_args = {} + region_args: Dict[str, Any] = {} config_args = {} extra_arguments = {} diff --git a/tests/cache/test_memcached_backend.py b/tests/cache/test_memcached_backend.py index f3b106f..d1e2afa 100644 --- a/tests/cache/test_memcached_backend.py +++ b/tests/cache/test_memcached_backend.py @@ -10,8 +10,10 @@ from dogpile.cache.backends.memcached import GenericMemcachedBackend from dogpile.cache.backends.memcached import MemcachedBackend +from dogpile.cache.backends.memcached import MemcachedBackendArguments from dogpile.cache.backends.memcached import PylibmcBackend from dogpile.cache.backends.memcached import PyMemcacheBackend +from dogpile.cache.backends.memcached 
import PyMemcacheBackendArguments from dogpile.testing import eq_ from dogpile.testing import is_ from dogpile.testing.fixtures import _GenericBackendTestSuite @@ -242,7 +244,7 @@ def _mock_pymemcache_fixture(self): ) def test_pymemcache_hashclient_retry_attempts(self): - config_args = { + config_args: PyMemcacheBackendArguments = { "url": "127.0.0.1:11211", "hashclient_retry_attempts": 4, } @@ -265,7 +267,10 @@ def test_pymemcache_hashclient_retry_attempts(self): eq_(self.retrying_client.mock_calls, []) def test_pymemcache_hashclient_retry_timeout(self): - config_args = {"url": "127.0.0.1:11211", "hashclient_retry_timeout": 4} + config_args: PyMemcacheBackendArguments = { + "url": "127.0.0.1:11211", + "hashclient_retry_timeout": 4, + } with self._mock_pymemcache_fixture(): backend = MockPyMemcacheBackend(config_args) is_(backend._create_client(), self.hash_client()) @@ -284,7 +289,7 @@ def test_pymemcache_hashclient_retry_timeout(self): eq_(self.retrying_client.mock_calls, []) def test_pymemcache_hashclient_retry_timeout_w_enable_retry(self): - config_args = { + config_args: PyMemcacheBackendArguments = { "url": "127.0.0.1:11211", "hashclient_retry_timeout": 4, "enable_retry_client": True, @@ -317,7 +322,10 @@ def test_pymemcache_hashclient_retry_timeout_w_enable_retry(self): ) def test_pymemcache_dead_timeout(self): - config_args = {"url": "127.0.0.1:11211", "hashclient_dead_timeout": 4} + config_args: PyMemcacheBackendArguments = { + "url": "127.0.0.1:11211", + "hashclient_dead_timeout": 4, + } with self._mock_pymemcache_fixture(): backend = MockPyMemcacheBackend(config_args) backend._create_client() @@ -338,7 +346,10 @@ def test_pymemcache_dead_timeout(self): eq_(self.retrying_client.mock_calls, []) def test_pymemcache_enable_retry_client_not_set(self): - config_args = {"url": "127.0.0.1:11211", "retry_attempts": 2} + config_args: PyMemcacheBackendArguments = { + "url": "127.0.0.1:11211", + "retry_attempts": 2, + } with self._mock_pymemcache_fixture(): with 
mock.patch("warnings.warn") as warn_mock: @@ -352,7 +363,10 @@ def test_pymemcache_enable_retry_client_not_set(self): ) def test_pymemcache_memacached_expire_time(self): - config_args = {"url": "127.0.0.1:11211", "memcached_expire_time": 20} + config_args: PyMemcacheBackendArguments = { + "url": "127.0.0.1:11211", + "memcached_expire_time": 20, + } with self._mock_pymemcache_fixture(): backend = MockPyMemcacheBackend(config_args) backend.set("foo", "bar") @@ -446,7 +460,7 @@ def delete(self, key): class MemcachedBackendTest: def test_memcached_dead_retry(self): - config_args = { + config_args: MemcachedBackendArguments = { "url": "127.0.0.1:11211", "dead_retry": 4, } @@ -454,7 +468,7 @@ def test_memcached_dead_retry(self): eq_(backend._create_client().kw["dead_retry"], 4) def test_memcached_socket_timeout(self): - config_args = { + config_args: MemcachedBackendArguments = { "url": "127.0.0.1:11211", "socket_timeout": 6, } diff --git a/tests/cache/test_redis_backend.py b/tests/cache/test_redis_backend.py index b2990f1..c0520cc 100644 --- a/tests/cache/test_redis_backend.py +++ b/tests/cache/test_redis_backend.py @@ -2,12 +2,14 @@ import os from threading import Event import time +from typing import Type +from typing import TYPE_CHECKING from unittest.mock import Mock from unittest.mock import patch import pytest -from dogpile.cache.backends.redis import RedisKwargs +from dogpile.cache.backends.redis import RedisBackendKwargs from dogpile.cache.region import _backend_loader from dogpile.testing import eq_ from dogpile.testing.fixtures import _GenericBackendFixture @@ -15,6 +17,9 @@ from dogpile.testing.fixtures import _GenericMutexTestSuite from dogpile.testing.fixtures import _GenericSerializerTestSuite +if TYPE_CHECKING: + import redis + REDIS_HOST = "127.0.0.1" REDIS_PORT = int(os.getenv("DOGPILE_REDIS_PORT", "6379")) expect_redis_running = os.getenv("DOGPILE_REDIS_PORT") is not None @@ -130,6 +135,7 @@ def blah(k): @patch("redis.StrictRedis", autospec=True) class 
RedisConnectionTest: backend = "dogpile.cache.redis" + backend_cls = Type["redis.StrictRedis"] @classmethod def setup_class(cls): @@ -148,7 +154,7 @@ def _test_helper(self, mock_obj, expected_args, connection_args=None): def test_connect_with_defaults(self, MockStrictRedis): # The defaults, used if keys are missing from the arguments dict. - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "host": "localhost", "port": 6379, "db": 0, @@ -158,7 +164,7 @@ def test_connect_with_defaults(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_basics(self, MockStrictRedis): - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "host": "127.0.0.1", "port": 6379, "db": 0, @@ -168,7 +174,7 @@ def test_connect_with_basics(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_password(self, MockStrictRedis): - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "host": "127.0.0.1", "password": "some password", "port": 6379, @@ -183,7 +189,7 @@ def test_connect_with_password(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_username_and_password(self, MockStrictRedis): - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "host": "127.0.0.1", "username": "redis", "password": "some password", @@ -193,7 +199,7 @@ def test_connect_with_username_and_password(self, MockStrictRedis): self._test_helper(MockStrictRedis, arguments) def test_connect_with_socket_timeout(self, MockStrictRedis): - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "host": "127.0.0.1", "port": 6379, "socket_timeout": 0.5, @@ -204,7 +210,7 @@ def test_connect_with_socket_timeout(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_socket_connect_timeout(self, MockStrictRedis): - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "host": 
"127.0.0.1", "port": 6379, "socket_timeout": 1.0, @@ -215,7 +221,7 @@ def test_connect_with_socket_connect_timeout(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_socket_keepalive(self, MockStrictRedis): - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "host": "127.0.0.1", "port": 6379, "socket_keepalive": True, @@ -226,7 +232,7 @@ def test_connect_with_socket_keepalive(self, MockStrictRedis): self._test_helper(MockStrictRedis, expected, arguments) def test_connect_with_socket_keepalive_options(self, MockStrictRedis): - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "host": "127.0.0.1", "port": 6379, "socket_keepalive": True, @@ -240,7 +246,7 @@ def test_connect_with_socket_keepalive_options(self, MockStrictRedis): def test_connect_with_connection_pool(self, MockStrictRedis): pool = Mock() - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "connection_pool": pool, "socket_timeout": 0.5, } @@ -250,13 +256,13 @@ def test_connect_with_connection_pool(self, MockStrictRedis): ) def test_connect_with_url(self, MockStrictRedis): - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "url": "redis://redis:password@127.0.0.1:6379/0" } self._test_helper(MockStrictRedis.from_url, arguments) def test_extra_arbitrary_args(self, MockStrictRedis): - arguments: RedisKwargs = { + arguments: RedisBackendKwargs = { "url": "redis://redis:password@127.0.0.1:6379/0", "connection_kwargs": { "ssl": True, diff --git a/tests/cache/test_region.py b/tests/cache/test_region.py index 6f8120e..bf5538a 100644 --- a/tests/cache/test_region.py +++ b/tests/cache/test_region.py @@ -24,7 +24,7 @@ from dogpile.testing.fixtures import MockBackend -def key_mangler(key): +def key_mangler(key: str) -> str: return "HI!" 
+ key diff --git a/tests/cache/test_valkey_backend.py b/tests/cache/test_valkey_backend.py index f0e1c25..8d83a7b 100644 --- a/tests/cache/test_valkey_backend.py +++ b/tests/cache/test_valkey_backend.py @@ -2,6 +2,8 @@ import os from threading import Event import time +from typing import Type +from typing import TYPE_CHECKING from unittest.mock import Mock from unittest.mock import patch @@ -14,6 +16,9 @@ from dogpile.testing.fixtures import _GenericMutexTestSuite from dogpile.testing.fixtures import _GenericSerializerTestSuite +if TYPE_CHECKING: + import valkey + VALKEY_HOST = "127.0.0.1" VALKEY_PORT = int(os.getenv("DOGPILE_VALKEY_PORT", "6379")) expect_valkey_running = os.getenv("DOGPILE_VALKEY_PORT") is not None @@ -129,6 +134,7 @@ def blah(k): @patch("valkey.StrictValkey", autospec=True) class ValkeyConnectionTest: backend = "dogpile.cache.valkey" + backend_cls = Type["valkey.StrictValkey"] @classmethod def setup_class(cls): From f3e42ff904c90e9e50e50980185f107107338fe1 Mon Sep 17 00:00:00 2001 From: jonathan vanasco Date: Mon, 18 Aug 2025 19:16:26 -0400 Subject: [PATCH 7/9] tests for guard against region args mutation fixed valkey default for lock_sleep Change-Id: I03c80c105be4ec3b280d57f340b050b16c4d937a --- dogpile/cache/backends/file.py | 11 +- dogpile/cache/backends/memcached.py | 75 ++++++------- dogpile/cache/backends/memory.py | 4 +- dogpile/cache/backends/null.py | 1 - dogpile/cache/backends/redis.py | 49 ++++----- dogpile/cache/backends/valkey.py | 55 +++++----- tests/cache/test_region.py | 156 ++++++++++++++++++++++++++++ 7 files changed, 244 insertions(+), 107 deletions(-) diff --git a/dogpile/cache/backends/file.py b/dogpile/cache/backends/file.py index 6b9b487..b4a26c4 100644 --- a/dogpile/cache/backends/file.py +++ b/dogpile/cache/backends/file.py @@ -12,8 +12,6 @@ import dbm import os import threading -from typing import cast -from typing import Dict from typing import Literal from typing import TypedDict from typing import Union @@ -152,18 
+150,17 @@ def release_write_lock(self): """ def __init__(self, arguments: DBMBackendArguments): - _arguments = cast(Dict, arguments.copy()) self.filename = os.path.abspath( - os.path.normpath(_arguments["filename"]) + os.path.normpath(arguments["filename"]) ) dir_, filename = os.path.split(self.filename) - self.lock_factory = _arguments.get("lock_factory", FileLock) + self.lock_factory = arguments.get("lock_factory", FileLock) self._rw_lock = self._init_lock( - _arguments.get("rw_lockfile"), ".rw.lock", dir_, filename + arguments.get("rw_lockfile"), ".rw.lock", dir_, filename ) self._dogpile_lock = self._init_lock( - _arguments.get("dogpile_lockfile"), + arguments.get("dogpile_lockfile"), ".dogpile.lock", dir_, filename, diff --git a/dogpile/cache/backends/memcached.py b/dogpile/cache/backends/memcached.py index a88212a..9278452 100644 --- a/dogpile/cache/backends/memcached.py +++ b/dogpile/cache/backends/memcached.py @@ -14,7 +14,6 @@ import typing from typing import Any from typing import cast -from typing import Dict from typing import Mapping from typing import Optional from typing import Sequence @@ -171,16 +170,15 @@ class GenericMemcachedBackend(CacheBackend): deserializer = None def __init__(self, arguments: GenericMemcachedBackendArguments): - _arguments = cast(Dict, arguments.copy()) self._imports() # using a plain threading.local here. threading.local # automatically deletes the __dict__ when a thread ends, # so the idea is that this is superior to pylibmc's # own ThreadMappedPool which doesn't handle this # automatically. 
- self.url = util.to_list(_arguments["url"]) - self.distributed_lock = _arguments.get("distributed_lock", False) - self.lock_timeout = _arguments.get("lock_timeout", 0) + self.url = util.to_list(arguments["url"]) + self.distributed_lock = arguments.get("distributed_lock", False) + self.lock_timeout = arguments.get("lock_timeout", 0) def has_lock_timeout(self): return self.lock_timeout != 0 @@ -281,17 +279,16 @@ class MemcacheArgs(GenericMemcachedBackend): """ def __init__(self, arguments: MemcachedArgsArguments): - _arguments = cast(Dict, arguments.copy()) - self.min_compress_len = _arguments.get("min_compress_len", 0) + self.min_compress_len = arguments.get("min_compress_len", 0) self.set_arguments = {} - if "memcached_expire_time" in _arguments: - self.set_arguments["time"] = _arguments["memcached_expire_time"] - if "min_compress_len" in _arguments: - self.set_arguments["min_compress_len"] = _arguments[ + if "memcached_expire_time" in arguments: + self.set_arguments["time"] = arguments["memcached_expire_time"] + if "min_compress_len" in arguments: + self.set_arguments["min_compress_len"] = arguments[ "min_compress_len" ] - _arguments_super = cast(GenericMemcachedBackendArguments, _arguments) + _arguments_super = cast(GenericMemcachedBackendArguments, arguments) super(MemcacheArgs, self).__init__(_arguments_super) @@ -330,10 +327,9 @@ class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend): """ def __init__(self, arguments): - _arguments = cast(Dict, arguments.copy()) - self.binary = _arguments.get("binary", False) - self.behaviors = _arguments.get("behaviors", {}) - _arguments_super = cast(MemcachedArgsArguments, _arguments) + self.binary = arguments.get("binary", False) + self.behaviors = arguments.get("behaviors", {}) + _arguments_super = cast(MemcachedArgsArguments, arguments) super(PylibmcBackend, self).__init__(_arguments_super) def _imports(self): @@ -383,10 +379,9 @@ class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend): """ def __init__(self, 
arguments: MemcachedBackendArguments): - _arguments = cast(Dict, arguments.copy()) - self.dead_retry = _arguments.get("dead_retry", 30) - self.socket_timeout = _arguments.get("socket_timeout", 3) - _arguments_super = cast(MemcachedArgsArguments, _arguments) + self.dead_retry = arguments.get("dead_retry", 30) + self.socket_timeout = arguments.get("socket_timeout", 3) + _arguments_super = cast(MemcachedArgsArguments, arguments) super(MemcachedBackend, self).__init__(_arguments_super) def _imports(self): @@ -461,11 +456,10 @@ class BMemcachedBackend(GenericMemcachedBackend): """ def __init__(self, arguments: BMemcachedBackendArguments): - _arguments = cast(Dict, arguments.copy()) - self.username = _arguments.get("username", None) - self.password = _arguments.get("password", None) - self.tls_context = _arguments.get("tls_context", None) - _arguments_super = cast(GenericMemcachedBackendArguments, _arguments) + self.username = arguments.get("username", None) + self.password = arguments.get("password", None) + self.tls_context = arguments.get("tls_context", None) + _arguments_super = cast(GenericMemcachedBackendArguments, arguments) super(BMemcachedBackend, self).__init__(_arguments_super) def _imports(self): @@ -652,26 +646,25 @@ class PyMemcacheBackend(GenericMemcachedBackend): """ # noqa E501 def __init__(self, arguments: PyMemcacheBackendArguments): - _arguments = cast(Dict, arguments.copy()) - _arguments_super = cast(GenericMemcachedBackendArguments, _arguments) + _arguments_super = cast(GenericMemcachedBackendArguments, arguments) super().__init__(_arguments_super) - self.serde = _arguments.get("serde", pymemcache.serde.pickle_serde) - self.default_noreply = _arguments.get("default_noreply", False) - self.tls_context = _arguments.get("tls_context", None) - self.socket_keepalive = _arguments.get("socket_keepalive", None) - self.enable_retry_client = _arguments.get("enable_retry_client", False) - self.retry_attempts = _arguments.get("retry_attempts", None) - 
self.retry_delay = _arguments.get("retry_delay", None) - self.retry_for = _arguments.get("retry_for", None) - self.do_not_retry_for = _arguments.get("do_not_retry_for", None) - self.hashclient_retry_attempts = _arguments.get( + self.serde = arguments.get("serde", pymemcache.serde.pickle_serde) + self.default_noreply = arguments.get("default_noreply", False) + self.tls_context = arguments.get("tls_context", None) + self.socket_keepalive = arguments.get("socket_keepalive", None) + self.enable_retry_client = arguments.get("enable_retry_client", False) + self.retry_attempts = arguments.get("retry_attempts", None) + self.retry_delay = arguments.get("retry_delay", None) + self.retry_for = arguments.get("retry_for", None) + self.do_not_retry_for = arguments.get("do_not_retry_for", None) + self.hashclient_retry_attempts = arguments.get( "hashclient_retry_attempts", 2 ) - self.hashclient_retry_timeout = _arguments.get( + self.hashclient_retry_timeout = arguments.get( "hashclient_retry_timeout", 1 ) - self.hashclient_dead_timeout = _arguments.get( + self.hashclient_dead_timeout = arguments.get( "hashclient_dead_timeout", 60 ) if ( @@ -685,8 +678,8 @@ def __init__(self, arguments: PyMemcacheBackendArguments): "will be ignored" ) self.set_arguments = {} - if "memcached_expire_time" in _arguments: - self.set_arguments["expire"] = _arguments["memcached_expire_time"] + if "memcached_expire_time" in arguments: + self.set_arguments["expire"] = arguments["memcached_expire_time"] def _imports(self): global pymemcache diff --git a/dogpile/cache/backends/memory.py b/dogpile/cache/backends/memory.py index a5ca635..752d6c9 100644 --- a/dogpile/cache/backends/memory.py +++ b/dogpile/cache/backends/memory.py @@ -13,7 +13,6 @@ from __future__ import annotations from typing import Any -from typing import cast from typing import Dict from typing import TypedDict @@ -61,8 +60,7 @@ class MemoryBackend(CacheBackend): """ def __init__(self, arguments: MemoryBackendArguments): - _arguments = 
cast(Dict, arguments.copy()) - self._cache = _arguments.pop("cache_dict", {}) + self._cache = arguments.get("cache_dict", {}) def get(self, key): return self._cache.get(key, NO_VALUE) diff --git a/dogpile/cache/backends/null.py b/dogpile/cache/backends/null.py index b84e12e..b349792 100644 --- a/dogpile/cache/backends/null.py +++ b/dogpile/cache/backends/null.py @@ -47,7 +47,6 @@ class NullBackend(CacheBackend): """ def __init__(self, arguments: Dict[str, Any]): - # _arguments = typing.cast(typing.Dict, arguments.copy()) pass def get_mutex(self, key): diff --git a/dogpile/cache/backends/redis.py b/dogpile/cache/backends/redis.py index 9dabfa0..9d0c78a 100644 --- a/dogpile/cache/backends/redis.py +++ b/dogpile/cache/backends/redis.py @@ -166,27 +166,26 @@ class RedisBackend(BytesBackend): """ def __init__(self, arguments: RedisBackendKwargs): - _arguments = cast(Dict, arguments.copy()) self._imports() - self.url = _arguments.pop("url", None) - self.host = _arguments.pop("host", "localhost") - self.username = _arguments.pop("username", None) - self.password = _arguments.pop("password", None) - self.port = _arguments.pop("port", 6379) - self.db = _arguments.pop("db", 0) - self.distributed_lock = _arguments.pop("distributed_lock", False) - self.socket_timeout = _arguments.pop("socket_timeout", None) - self.socket_connect_timeout = _arguments.pop( + self.url = arguments.get("url", None) + self.host = arguments.get("host", "localhost") + self.username = arguments.get("username", None) + self.password = arguments.get("password", None) + self.port = arguments.get("port", 6379) + self.db = arguments.get("db", 0) + self.distributed_lock = arguments.get("distributed_lock", False) + self.socket_timeout = arguments.get("socket_timeout", None) + self.socket_connect_timeout = arguments.get( "socket_connect_timeout", None ) - self.socket_keepalive = _arguments.pop("socket_keepalive", False) - self.socket_keepalive_options = _arguments.pop( + self.socket_keepalive = 
arguments.get("socket_keepalive", False) + self.socket_keepalive_options = arguments.get( "socket_keepalive_options", None ) - self.lock_timeout = _arguments.pop("lock_timeout", None) - self.lock_sleep = _arguments.pop("lock_sleep", 0.1) - self.thread_local_lock = _arguments.pop("thread_local_lock", True) - self.connection_kwargs = _arguments.pop("connection_kwargs", {}) + self.lock_timeout = arguments.get("lock_timeout", None) + self.lock_sleep = arguments.get("lock_sleep", 0.1) + self.thread_local_lock = arguments.get("thread_local_lock", True) + self.connection_kwargs = arguments.get("connection_kwargs", {}) if self.distributed_lock and self.thread_local_lock: warnings.warn( @@ -194,8 +193,8 @@ def __init__(self, arguments: RedisBackendKwargs): "set to False when distributed_lock is True" ) - self.redis_expiration_time = _arguments.pop("redis_expiration_time", 0) - self.connection_pool = _arguments.pop("connection_pool", None) + self.redis_expiration_time = arguments.get("redis_expiration_time", 0) + self.connection_pool = arguments.get("connection_pool", None) self._create_client() def _imports(self) -> None: @@ -405,15 +404,14 @@ class RedisSentinelBackend(RedisBackend): """ def __init__(self, arguments: RedisSentinelBackendKwargs): - _arguments = cast(Dict, arguments.copy()) - self.sentinels = _arguments.pop("sentinels", None) - self.service_name = _arguments.pop("service_name", "mymaster") - self.sentinel_kwargs = _arguments.pop("sentinel_kwargs", {}) + self.sentinels = arguments.get("sentinels", None) + self.service_name = arguments.get("service_name", "mymaster") + self.sentinel_kwargs = arguments.get("sentinel_kwargs", {}) super().__init__( arguments={ "distributed_lock": True, "thread_local_lock": False, - **_arguments, + **arguments, } ) @@ -592,9 +590,8 @@ class RedisClusterBackend(RedisBackend): """ def __init__(self, arguments: RedisClusterBackendKwargs): - _arguments = cast(Dict, arguments.copy()) - self.startup_nodes = 
_arguments.pop("startup_nodes", None) - _arguments_super = cast(RedisBackendKwargs, _arguments) + self.startup_nodes = arguments.get("startup_nodes", None) + _arguments_super = cast(RedisBackendKwargs, arguments) super().__init__(_arguments_super) def _imports(self) -> None: diff --git a/dogpile/cache/backends/valkey.py b/dogpile/cache/backends/valkey.py index ffa3da9..f6078b7 100644 --- a/dogpile/cache/backends/valkey.py +++ b/dogpile/cache/backends/valkey.py @@ -46,7 +46,7 @@ class ValkeyBackendArguments(TypedDict, total=False): socket_connect_timeout: Optional[float] socket_keepalive: bool socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] - lock_sleep: Optional[int] + lock_sleep: float = 0.1 thread_local_lock: bool connection_kwargs: Dict[str, Any] connection_pool: Optional["valkey.ConnectionPool"] @@ -126,9 +126,9 @@ class ValkeyBackend(BytesBackend): :param socket_keepalive_options: dict, socket keepalive options. Default is None (no options). - :param lock_sleep: integer, number of seconds to sleep when failed to + :param lock_sleep: float, number of seconds to sleep when failed to acquire a lock. This argument is only valid when - ``distributed_lock`` is ``True``. + ``distributed_lock`` is ``True``. Default is `0.1`, the Valkey default. :param connection_pool: ``valkey.ConnectionPool`` object. 
If provided, this object supersedes other connection arguments passed to the @@ -151,27 +151,26 @@ class ValkeyBackend(BytesBackend): """ def __init__(self, arguments: ValkeyBackendArguments): - _arguments = cast(Dict, arguments.copy()) self._imports() - self.url = _arguments.pop("url", None) - self.host = _arguments.pop("host", "localhost") - self.username = _arguments.pop("username", None) - self.password = _arguments.pop("password", None) - self.port = _arguments.pop("port", 6379) - self.db = _arguments.pop("db", 0) - self.distributed_lock = _arguments.pop("distributed_lock", False) - self.socket_timeout = _arguments.pop("socket_timeout", None) - self.socket_connect_timeout = _arguments.pop( + self.url = arguments.get("url", None) + self.host = arguments.get("host", "localhost") + self.username = arguments.get("username", None) + self.password = arguments.get("password", None) + self.port = arguments.get("port", 6379) + self.db = arguments.get("db", 0) + self.distributed_lock = arguments.get("distributed_lock", False) + self.socket_timeout = arguments.get("socket_timeout", None) + self.socket_connect_timeout = arguments.get( "socket_connect_timeout", None ) - self.socket_keepalive = _arguments.pop("socket_keepalive", False) - self.socket_keepalive_options = _arguments.pop( + self.socket_keepalive = arguments.get("socket_keepalive", False) + self.socket_keepalive_options = arguments.get( "socket_keepalive_options", None ) - self.lock_timeout = _arguments.pop("lock_timeout", None) - self.lock_sleep = _arguments.pop("lock_sleep", 0.1) - self.thread_local_lock = _arguments.pop("thread_local_lock", True) - self.connection_kwargs = _arguments.pop("connection_kwargs", {}) + self.lock_timeout = arguments.get("lock_timeout", None) + self.lock_sleep = arguments.get("lock_sleep", 0.1) + self.thread_local_lock = arguments.get("thread_local_lock", True) + self.connection_kwargs = arguments.get("connection_kwargs", {}) if self.distributed_lock and self.thread_local_lock: 
warnings.warn( @@ -179,10 +178,10 @@ def __init__(self, arguments: ValkeyBackendArguments): "set to False when distributed_lock is True" ) - self.valkey_expiration_time = _arguments.pop( + self.valkey_expiration_time = arguments.get( "valkey_expiration_time", 0 ) - self.connection_pool = _arguments.pop("connection_pool", None) + self.connection_pool = arguments.get("connection_pool", None) self._create_client() def _imports(self): @@ -390,16 +389,15 @@ class ValkeySentinelBackend(ValkeyBackend): """ def __init__(self, arguments: ValkeySentinelBackendArguments): - _arguments = cast(Dict, arguments.copy()) - self.sentinels = _arguments.pop("sentinels", None) - self.service_name = _arguments.pop("service_name", "mymaster") - self.sentinel_kwargs = _arguments.pop("sentinel_kwargs", {}) + self.sentinels = arguments.get("sentinels", None) + self.service_name = arguments.get("service_name", "mymaster") + self.sentinel_kwargs = arguments.get("sentinel_kwargs", {}) super().__init__( arguments={ "distributed_lock": True, "thread_local_lock": False, - **_arguments, + **arguments, } ) @@ -578,9 +576,8 @@ class ValkeyClusterBackend(ValkeyBackend): """ # noqa: E501 def __init__(self, arguments: ValkeyClusterBackendArguments): - _arguments = cast(Dict, arguments.copy()) - self.startup_nodes = _arguments.pop("startup_nodes", None) - _arguments_super = cast(ValkeyBackendArguments, _arguments) + self.startup_nodes = arguments.get("startup_nodes", None) + _arguments_super = cast(ValkeyBackendArguments, arguments) super().__init__(_arguments_super) def _imports(self): diff --git a/tests/cache/test_region.py b/tests/cache/test_region.py index bf5538a..3d0fc46 100644 --- a/tests/cache/test_region.py +++ b/tests/cache/test_region.py @@ -4,6 +4,7 @@ import io import itertools import time +from typing import Dict from unittest import mock from dogpile.cache import CacheRegion @@ -1046,3 +1047,158 @@ def test_log_is_hard_invalidated(self): ) ], ) + + +class BackendArgsAntiMutationTest: + 
""" + This test ensures backends arguments are not mutated + """ + + # note each backend as it's tested here + _backends_tested = [] + + @classmethod + def teardown_class(cls): + """ + this is a compliance test. it will cause an Error in the test suite if: + * any test fails + * a new backend is registered, but not tested + """ + backends_missing = [] + _backends_eligible = [ + k + for k, v in _backend_loader._unloaded.items() + if k.startswith("dogpile.cache.") + ] + for bck in _backends_eligible: + if bck not in cls._backends_tested: + backends_missing.append(bck) + assert not backends_missing + + def _test_arguments(self, backend: str, arguments: Dict): + backend_cls = _backend_loader.load(backend) + arguments_copy = arguments.copy() + _backend_inst = backend_cls(arguments_copy) # noqa: F841 + assert arguments_copy == arguments + + # note this backend has been tested; + # used by `BackendArgsAntiMutationTest.teardown_class` + BackendArgsAntiMutationTest._backends_tested.append(backend) + + def test_null(self): + backend = "dogpile.cache.null" + arguments = {} + self._test_arguments(backend, arguments) + + def test_dbm(self): + from .test_dbm_backend import test_fname + + backend = "dogpile.cache.dbm" + arguments = {"filename": test_fname} + self._test_arguments(backend, arguments) + + def test_pylibmc(self): + from .test_memcached_backend import MEMCACHED_URL + + backend = "dogpile.cache.pylibmc" + arguments = {"url": MEMCACHED_URL} + self._test_arguments(backend, arguments) + + def test_bmemcached(self): + from .test_memcached_backend import MEMCACHED_URL + + backend = "dogpile.cache.bmemcached" + arguments = {"url": MEMCACHED_URL} + self._test_arguments(backend, arguments) + + def test_memcached(self): + from .test_memcached_backend import MEMCACHED_URL + + backend = "dogpile.cache.memcached" + arguments = {"url": MEMCACHED_URL} + self._test_arguments(backend, arguments) + + def test_pymemcache(self): + from .test_memcached_backend import MEMCACHED_URL + + 
backend = "dogpile.cache.pymemcache" + arguments = {"url": MEMCACHED_URL} + self._test_arguments(backend, arguments) + + def test_memory(self): + backend = "dogpile.cache.memory" + arguments = {"cache_dict": {}} + self._test_arguments(backend, arguments) + + def test_memory_pickle(self): + backend = "dogpile.cache.memory_pickle" + arguments = {"cache_dict": {}} + self._test_arguments(backend, arguments) + + def test_redis(self): + from .test_redis_backend import REDIS_HOST + from .test_redis_backend import REDIS_PORT + + backend = "dogpile.cache.redis" + arguments = { + "host": REDIS_HOST, + "port": REDIS_PORT, + } + self._test_arguments(backend, arguments) + + def test_redis_sentinel(self): + from .test_redis_sentinel_backend import REDIS_HOST + from .test_redis_sentinel_backend import REDIS_PORT + + backend = "dogpile.cache.redis_sentinel" + arguments = { + "sentinels": [[REDIS_HOST, REDIS_PORT]], + } + self._test_arguments(backend, arguments) + + def test_redis_cluster(self): + from .test_redis_backend import REDIS_HOST + from .test_redis_backend import REDIS_PORT + from redis.cluster import ClusterNode + + backend = "dogpile.cache.redis_cluster" + arguments = { + "startup_nodes": [ + ClusterNode(REDIS_HOST, REDIS_PORT), + ], + } + self._test_arguments(backend, arguments) + + def test_valkey(self): + from .test_valkey_backend import VALKEY_HOST + from .test_valkey_backend import VALKEY_PORT + + backend = "dogpile.cache.valkey" + arguments = { + "host": VALKEY_HOST, + "port": VALKEY_PORT, + } + self._test_arguments(backend, arguments) + + def test_valkey_sentinel(self): + from .test_valkey_sentinel_backend import VALKEY_HOST + from .test_valkey_sentinel_backend import VALKEY_PORT + + backend = "dogpile.cache.valkey_sentinel" + arguments = { + "sentinels": [[VALKEY_HOST, VALKEY_PORT]], + } + self._test_arguments(backend, arguments) + + def test_valkey_cluster(self): + from .test_valkey_backend import VALKEY_HOST + from .test_valkey_backend import VALKEY_PORT + 
from valkey.cluster import ClusterNode + + backend = "dogpile.cache.valkey_cluster" + arguments = { + "startup_nodes": [ + ClusterNode(VALKEY_HOST, VALKEY_PORT), + ], + } + self._test_arguments(backend, arguments) From 0680d81b9b7da6ea28f5cf5700d59b3f599129ca Mon Sep 17 00:00:00 2001 From: jonathan vanasco Date: Tue, 19 Aug 2025 13:44:14 -0400 Subject: [PATCH 8/9] drop unnecessary cast Change-Id: Ib7bf3ebf825c0823e70b29b0ef5ee5f21010616a --- dogpile/cache/backends/memcached.py | 18 ++++++------------ dogpile/cache/backends/redis.py | 3 +-- 2 files changed, 7 insertions(+), 14 deletions(-) diff --git a/dogpile/cache/backends/memcached.py b/dogpile/cache/backends/memcached.py index 9278452..c839c96 100644 --- a/dogpile/cache/backends/memcached.py +++ b/dogpile/cache/backends/memcached.py @@ -13,7 +13,6 @@ import time import typing from typing import Any -from typing import cast from typing import Mapping from typing import Optional from typing import Sequence @@ -288,8 +287,7 @@ def __init__(self, arguments: MemcachedArgsArguments): self.set_arguments["min_compress_len"] = arguments[ "min_compress_len" ] - _arguments_super = cast(GenericMemcachedBackendArguments, arguments) - super(MemcacheArgs, self).__init__(_arguments_super) + super(MemcacheArgs, self).__init__(arguments) class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend): @@ -326,11 +324,10 @@ class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend): """ - def __init__(self, arguments): + def __init__(self, arguments: MemcachedArgsArguments): self.binary = arguments.get("binary", False) self.behaviors = arguments.get("behaviors", {}) - _arguments_super = cast(MemcachedArgsArguments, arguments) - super(PylibmcBackend, self).__init__(_arguments_super) + super(PylibmcBackend, self).__init__(arguments) def _imports(self): global pylibmc @@ -381,8 +378,7 @@ class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend): def __init__(self, arguments: MemcachedBackendArguments): self.dead_retry = 
arguments.get("dead_retry", 30) self.socket_timeout = arguments.get("socket_timeout", 3) - _arguments_super = cast(MemcachedArgsArguments, arguments) - super(MemcachedBackend, self).__init__(_arguments_super) + super(MemcachedBackend, self).__init__(arguments) def _imports(self): global memcache @@ -459,8 +455,7 @@ def __init__(self, arguments: BMemcachedBackendArguments): self.username = arguments.get("username", None) self.password = arguments.get("password", None) self.tls_context = arguments.get("tls_context", None) - _arguments_super = cast(GenericMemcachedBackendArguments, arguments) - super(BMemcachedBackend, self).__init__(_arguments_super) + super(BMemcachedBackend, self).__init__(arguments) def _imports(self): global bmemcached @@ -646,8 +641,7 @@ class PyMemcacheBackend(GenericMemcachedBackend): """ # noqa E501 def __init__(self, arguments: PyMemcacheBackendArguments): - _arguments_super = cast(GenericMemcachedBackendArguments, arguments) - super().__init__(_arguments_super) + super().__init__(arguments) self.serde = arguments.get("serde", pymemcache.serde.pickle_serde) self.default_noreply = arguments.get("default_noreply", False) diff --git a/dogpile/cache/backends/redis.py b/dogpile/cache/backends/redis.py index 9d0c78a..7618f5a 100644 --- a/dogpile/cache/backends/redis.py +++ b/dogpile/cache/backends/redis.py @@ -591,8 +591,7 @@ class RedisClusterBackend(RedisBackend): def __init__(self, arguments: RedisClusterBackendKwargs): self.startup_nodes = arguments.get("startup_nodes", None) - _arguments_super = cast(RedisBackendKwargs, arguments) - super().__init__(_arguments_super) + super().__init__(arguments) def _imports(self) -> None: global redis From 871119763a2d3de5a091759aaada49a61e7cd9d2 Mon Sep 17 00:00:00 2001 From: jonathan vanasco Date: Wed, 10 Sep 2025 20:49:50 -0400 Subject: [PATCH 9/9] remove BackendArgsAntiMutationTest in favor of merged commit Change-Id: I6a9b9f45ecb9c8acf6e030aacfb4ec2f28b0b689 --- tests/cache/test_region.py | 156 
------------------------------------- 1 file changed, 156 deletions(-) diff --git a/tests/cache/test_region.py b/tests/cache/test_region.py index 3d0fc46..bf5538a 100644 --- a/tests/cache/test_region.py +++ b/tests/cache/test_region.py @@ -4,7 +4,6 @@ import io import itertools import time -from typing import Dict from unittest import mock from dogpile.cache import CacheRegion @@ -1047,158 +1046,3 @@ def test_log_is_hard_invalidated(self): ) ], ) - - -class BackendArgsAntiMutationTest: - """ - This test ensures backends arguments are not mutated - """ - - # note each backend as it's tested here - _backends_tested = [] - - @classmethod - def teardown_class(cls): - """ - this is a compliance test. it will cause an Error in the test suite if: - * any test fails - * a new backend is registered, but not tested - """ - backends_missing = [] - _backends_eligible = [ - k - for k, v in _backend_loader._unloaded.items() - if k.startswith("dogpile.cache.") - ] - for bck in _backends_eligible: - if bck not in cls._backends_tested: - backends_missing.append(bck) - assert not backends_missing - - def _test_arguments(self, backend: str, arguments: Dict): - backend_cls = _backend_loader.load(backend) - arguments_copy = arguments.copy() - _backend_inst = backend_cls(arguments_copy) # noqa: F841 - assert arguments_copy == arguments - - # note this backend has been tested; - # used by `BackendArgsAntiMutationTest.teardown_class` - BackendArgsAntiMutationTest._backends_tested.append(backend) - - def test_null(self): - backend = "dogpile.cache.null" - arguments = {} - self._test_arguments(backend, arguments) - - def test_dbm(self): - from .test_dbm_backend import test_fname - - backend = "dogpile.cache.dbm" - arguments = {"filename": test_fname} - self._test_arguments(backend, arguments) - - def test_pylibmc(self): - from .test_memcached_backend import MEMCACHED_URL - - backend = "dogpile.cache.pylibmc" - arguments = {"url": MEMCACHED_URL} - self._test_arguments(backend, arguments) - - 
def test_bmemcached(self): - from .test_memcached_backend import MEMCACHED_URL - - backend = "dogpile.cache.bmemcached" - arguments = {"url": MEMCACHED_URL} - self._test_arguments(backend, arguments) - - def test_memcached(self): - from .test_memcached_backend import MEMCACHED_URL - - backend = "dogpile.cache.memcached" - arguments = {"url": MEMCACHED_URL} - self._test_arguments(backend, arguments) - - def test_pymemcache(self): - from .test_memcached_backend import MEMCACHED_URL - - backend = "dogpile.cache.pymemcache" - arguments = {"url": MEMCACHED_URL} - self._test_arguments(backend, arguments) - - def test_memory(self): - backend = "dogpile.cache.memory" - arguments = {"cache_dict": {}} - self._test_arguments(backend, arguments) - - def test_memory_pickle(self): - backend = "dogpile.cache.memory_pickle" - arguments = {"cache_dict": {}} - self._test_arguments(backend, arguments) - - def test_redis(self): - from .test_redis_backend import REDIS_HOST - from .test_redis_backend import REDIS_PORT - - backend = "dogpile.cache.redis" - arguments = { - "host": REDIS_HOST, - "port": REDIS_PORT, - } - self._test_arguments(backend, arguments) - - def test_redis_sentinel(self): - from .test_redis_sentinel_backend import REDIS_HOST - from .test_redis_sentinel_backend import REDIS_PORT - - backend = "dogpile.cache.redis_sentinel" - arguments = { - "sentinels": [[REDIS_HOST, REDIS_PORT]], - } - self._test_arguments(backend, arguments) - - def test_redis_cluster(self): - from .test_redis_backend import REDIS_HOST - from .test_redis_backend import REDIS_PORT - from redis.cluster import ClusterNode - - backend = "dogpile.cache.redis_cluster" - arguments = { - "startup_nodes": [ - ClusterNode(REDIS_HOST, REDIS_PORT), - ], - } - self._test_arguments(backend, arguments) - - def test_valkey(self): - from .test_valkey_backend import VALKEY_HOST - from .test_valkey_backend import VALKEY_PORT - - backend = "dogpile.cache.valkey" - arguments = { - "host": VALKEY_HOST, - "port": 
VALKEY_PORT, - } - self._test_arguments(backend, arguments) - - def test_valkey_sentinel(self): - from .test_valkey_sentinel_backend import VALKEY_HOST - from .test_valkey_sentinel_backend import VALKEY_PORT - - backend = "dogpile.cache.valkey_sentinel" - arguments = { - "sentinels": [[VALKEY_HOST, VALKEY_PORT]], - } - self._test_arguments(backend, arguments) - - def test_valkey_cluster(self): - from .test_valkey_backend import VALKEY_HOST - from .test_valkey_backend import VALKEY_PORT - from valkey.cluster import ClusterNode - - backend = "dogpile.cache.valkey_cluster" - arguments = { - "startup_nodes": [ - ClusterNode(VALKEY_HOST, VALKEY_PORT), - ], - } - self._test_arguments(backend, arguments)