
Commit a27b51d

feat(typing)!: make cache generic over value (aio-libs#608)
- Make BaseCache generic over both key and value types for improved type safety.
- Update all cache backends and usages to use BaseCache[KeyType, ValueType].
- Improve type annotations for cache methods.
- Update RedLock, OptimisticLock, and test utilities for new generics.

BREAKING CHANGE: BaseCache and all backends now require two type parameters (key, value). Existing code using a single type parameter must be updated.

Refs: aio-libs#608
Parent: 768763b

7 files changed: +83 −37 lines
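Since BaseCache (and every backend) is now generic over both key and value, annotations written against the old one-parameter form need a second argument. A minimal migration sketch, assuming the package still exports SimpleMemoryCache at the top level; the variable name is illustrative:

from typing import Any

from aiocache import SimpleMemoryCache
from aiocache.base import BaseCache

# Before this commit: BaseCache took only the key type.
#   cache: BaseCache[str] = SimpleMemoryCache()
# After: parameterize over key and value; Any keeps the old, untyped-value behaviour.
cache: BaseCache[str, Any] = SimpleMemoryCache()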

aiocache/__init__.py (+1 −1)

@@ -8,7 +8,7 @@
 
 logger = logging.getLogger(__name__)
 
-_AIOCACHE_CACHES: list[Type[BaseCache[Any]]] = [SimpleMemoryCache]
+_AIOCACHE_CACHES: list[Type[BaseCache[Any, Any]]] = [SimpleMemoryCache]
 
 try:
     import redis

aiocache/backends/memcached.py (+2 −2)

@@ -1,13 +1,13 @@
 import asyncio
-from typing import Optional
+from typing import Any, Optional
 
 import aiomcache
 
 from aiocache.base import BaseCache
 from aiocache.serializers import JsonSerializer
 
 
-class MemcachedBackend(BaseCache[bytes]):
+class MemcachedBackend(BaseCache[bytes, Any]):
     def __init__(self, host="127.0.0.1", port=11211, pool_size=2, **kwargs):
         super().__init__(**kwargs)
         self.host = host

aiocache/backends/memory.py (+8 −3)

@@ -1,11 +1,16 @@
 import asyncio
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, TypeVar
 
 from aiocache.base import BaseCache
 from aiocache.serializers import NullSerializer
 
+CacheKeyType = TypeVar('CacheKeyType')
+CacheValueType = TypeVar('CacheValueType')
 
-class SimpleMemoryBackend(BaseCache[str]):
+
+class SimpleMemoryBackend(
+    BaseCache[CacheKeyType, CacheValueType],
+):
     """
     Wrapper around dict operations to use it as a cache backend
     """
@@ -110,7 +115,7 @@ def build_key(self, key: str, namespace: Optional[str] = None) -> str:
         return self._str_build_key(key, namespace)
 
 
-class SimpleMemoryCache(SimpleMemoryBackend):
+class SimpleMemoryCache(SimpleMemoryBackend[str, Any]):
     """
     Memory cache implementation with the following components as defaults:
         - serializer: :class:`aiocache.serializers.NullSerializer`
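Because SimpleMemoryBackend is itself generic while SimpleMemoryCache pins [str, Any], a narrower parameterization is now expressible. A hypothetical sketch (IntMemoryCache and bump are illustrative names, not part of the commit):

from aiocache.backends.memory import SimpleMemoryBackend


class IntMemoryCache(SimpleMemoryBackend[str, int]):
    """Illustrative subclass pinning str keys and int values."""


async def bump(cache: IntMemoryCache, key: str) -> int:
    # get() is annotated as returning Optional[CacheValueType], i.e. Optional[int] here.
    current = await cache.get(key, default=0)
    new_value = (current or 0) + 1
    await cache.set(key, new_value)
    return new_value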

aiocache/backends/redis.py (+1 −1)

@@ -11,7 +11,7 @@
 from aiocache.serializers import BaseSerializer
 
 
-class RedisBackend(BaseCache[str]):
+class RedisBackend(BaseCache[str, Any]):
     RELEASE_SCRIPT = (
         "if redis.call('get',KEYS[1]) == ARGV[1] then"
         " return redis.call('del',KEYS[1])"

aiocache/base.py (+67 −26)

@@ -6,19 +6,21 @@
 from abc import ABC, abstractmethod
 from enum import Enum
 from types import TracebackType
-from typing import Callable, Generic, List, Optional, Set, TYPE_CHECKING, Type, TypeVar
+from typing import Any, Callable, Generic, List, Optional, Set, TYPE_CHECKING, Type, TypeVar
 
 from aiocache.serializers import StringSerializer
 
 if TYPE_CHECKING:  # pragma: no cover
     from aiocache.plugins import BasePlugin
     from aiocache.serializers import BaseSerializer
 
+CacheKeyType = TypeVar("CacheKeyType")
+CacheValueType = TypeVar("CacheValueType")
+
 
 logger = logging.getLogger(__name__)
 
 SENTINEL = object()
-CacheKeyType = TypeVar("CacheKeyType")
 
@@ -93,7 +95,7 @@ async def _plugins(self, *args, **kwargs):
         return _plugins
 
 
-class BaseCache(Generic[CacheKeyType], ABC):
+class BaseCache(Generic[CacheKeyType, CacheValueType], ABC):
     """
     Base class that agregates the common logic for the different caches that may exist. Cache
     related available options are:
@@ -110,6 +112,8 @@ class BaseCache(Generic[CacheKeyType], ABC):
         By default its 5. Use 0 or None if you want to disable it.
     :param ttl: int the expiration time in seconds to use as a default in all operations of
         the backend. It can be overriden in the specific calls.
+    :typeparam CacheKeyType: The type of the cache key (e.g., str, bytes).
+    :typeparam CacheValueType: The type of the cache value (e.g., str, int, custom object).
     """
 
     NAME: str
@@ -152,16 +156,25 @@ def plugins(self, value):
     @API.aiocache_enabled(fake_return=True)
     @API.timeout
     @API.plugins
-    async def add(self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _conn=None):
+    async def add(
+        self,
+        key: CacheKeyType,
+        value: CacheValueType,
+        ttl=SENTINEL,
+        dumps_fn: Optional[Callable[[CacheValueType], Any]] = None,
+        namespace: Optional[str] = None,
+        _conn=None,
+    ) -> bool:
         """
         Stores the value in the given key with ttl if specified. Raises an error if the
         key already exists.
 
-        :param key: str
-        :param value: obj
-        :param ttl: int the expiration time in seconds. Due to memcached
-            restrictions if you want compatibility use int. In case you
-            need miliseconds, redis and memory support float ttls
+        :param key: CacheKeyType
+        :param value: CacheValueType
+        :param ttl: int the expiration time in seconds. Due to memcached restrictions.
+            If you want compatibility use int.
+            In case you need milliseconds,
+            redis and memory support float ttls
         :param dumps_fn: callable alternative to use as dumps function
         :param namespace: str alternative namespace to use
         :param timeout: int or float in seconds specifying maximum timeout
@@ -188,17 +201,24 @@ async def _add(self, key, value, ttl, _conn=None):
     @API.aiocache_enabled()
     @API.timeout
     @API.plugins
-    async def get(self, key, default=None, loads_fn=None, namespace=None, _conn=None):
+    async def get(
+        self,
+        key: CacheKeyType,
+        default: Optional[CacheValueType] = None,
+        loads_fn: Optional[Callable[[Any], CacheValueType]] = None,
+        namespace: Optional[str] = None,
+        _conn=None,
+    ) -> Optional[CacheValueType]:
         """
         Get a value from the cache. Returns default if not found.
 
-        :param key: str
-        :param default: obj to return when key is not found
+        :param key: CacheKeyType
+        :param default: CacheValueType to return when key is not found
         :param loads_fn: callable alternative to use as loads function
         :param namespace: str alternative namespace to use
         :param timeout: int or float in seconds specifying maximum timeout
            for the operations to last
-        :returns: obj loaded
+        :returns: CacheValueType loaded
         :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
         """
         start = time.monotonic()
@@ -222,16 +242,22 @@ async def _gets(self, key, encoding="utf-8", _conn=None):
     @API.aiocache_enabled(fake_return=[])
     @API.timeout
     @API.plugins
-    async def multi_get(self, keys, loads_fn=None, namespace=None, _conn=None):
+    async def multi_get(
+        self,
+        keys: List[CacheKeyType],
+        loads_fn: Optional[Callable[[Any], CacheValueType]] = None,
+        namespace: Optional[str] = None,
+        _conn=None,
+    ) -> List[Optional[CacheValueType]]:
         """
         Get multiple values from the cache, values not found are Nones.
 
-        :param keys: list of str
+        :param keys: list of CacheKeyType
         :param loads_fn: callable alternative to use as loads function
         :param namespace: str alternative namespace to use
         :param timeout: int or float in seconds specifying maximum timeout
            for the operations to last
-        :returns: list of objs
+        :returns: list of CacheValueType
         :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
         """
         start = time.monotonic()
@@ -262,13 +288,20 @@ async def _multi_get(self, keys, encoding, _conn=None):
     @API.timeout
     @API.plugins
     async def set(
-        self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _cas_token=None, _conn=None
-    ):
+        self,
+        key: CacheKeyType,
+        value: CacheValueType,
+        ttl=SENTINEL,
+        dumps_fn: Optional[Callable[[CacheValueType], Any]] = None,
+        namespace: Optional[str] = None,
+        _cas_token=None,
+        _conn=None,
+    ) -> bool:
         """
         Stores the value in the given key with ttl if specified
 
-        :param key: str
-        :param value: obj
+        :param key: CacheKeyType
+        :param value: CacheValueType
         :param ttl: int the expiration time in seconds. Due to memcached
            restrictions if you want compatibility use int. In case you
            need miliseconds, redis and memory support float ttls
@@ -298,14 +331,22 @@ async def _set(self, key, value, ttl, _cas_token=None, _conn=None):
     @API.aiocache_enabled(fake_return=True)
     @API.timeout
     @API.plugins
-    async def multi_set(self, pairs, ttl=SENTINEL, dumps_fn=None, namespace=None, _conn=None):
+    async def multi_set(
+        self,
+        pairs: List[tuple[CacheKeyType, CacheValueType]],
+        ttl=SENTINEL,
+        dumps_fn: Optional[Callable[[CacheValueType], Any]] = None,
+        namespace: Optional[str] = None,
+        _conn=None,
+    ) -> bool:
         """
         Stores multiple values in the given keys.
 
-        :param pairs: list of two element iterables. First is key and second is value
-        :param ttl: int the expiration time in seconds. Due to memcached
-            restrictions if you want compatibility use int. In case you
-            need miliseconds, redis and memory support float ttls
+        :param pairs: list of two element iterables. First is CacheKeyType
+            and second is CacheValueType
+        :param ttl: int the expiration time in seconds. Due to memcached restrictions.
+            If you want compatibility use int. In case you need milliseconds,
+            redis and memory support float ttls
         :param dumps_fn: callable alternative to use as dumps function
         :param namespace: str alternative namespace to use
         :param timeout: int or float in seconds specifying maximum timeout
@@ -326,7 +367,7 @@ async def multi_set(self, pairs, ttl=SENTINEL, dumps_fn=None, namespace=None, _c
             "MULTI_SET %s %d (%.4f)s",
             [key for key, value in tmp_pairs],
             len(tmp_pairs),
-            time.monotonic() - start,
+            time.monotonic() - start
         )
         return True
 
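The typed signatures let a checker connect what goes into set()/multi_set() with what comes back from get()/multi_get(). A minimal sketch against a cache annotated as BaseCache[str, int]; the function name is illustrative and no particular backend is implied:

from typing import List, Optional

from aiocache.base import BaseCache


async def count_hits(cache: BaseCache[str, int]) -> List[Optional[int]]:
    # set()/multi_set() now expect CacheValueType (int here) and return bool.
    stored: bool = await cache.set("hits", 1)
    assert stored
    await cache.multi_set([("a", 1), ("b", 2)])

    # get() returns Optional[CacheValueType]; multi_get() returns List[Optional[CacheValueType]].
    single: Optional[int] = await cache.get("hits")
    assert single is None or isinstance(single, int)

    # A type checker should now reject e.g. `await cache.set("hits", "one")` (str is not int).
    return await cache.multi_get(["a", "b"])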

aiocache/lock.py (+2 −2)

@@ -62,7 +62,7 @@ class RedLock(Generic[CacheKeyType]):
 
     _EVENTS: Dict[str, asyncio.Event] = {}
 
-    def __init__(self, client: BaseCache[CacheKeyType], key: str, lease: Union[int, float]):
+    def __init__(self, client: BaseCache[CacheKeyType, Any], key: str, lease: Union[int, float]):
         self.client = client
         self.key = self.client.build_key(key + "-lock")
         self.lease = lease
@@ -133,7 +133,7 @@ class OptimisticLock(Generic[CacheKeyType]):
    If the lock is created with an unexisting key, there will never be conflicts.
    """
 
-    def __init__(self, client: BaseCache[CacheKeyType], key: str):
+    def __init__(self, client: BaseCache[CacheKeyType, Any], key: str):
         self.client = client
         self.key = key
         self.ns_key = self.client.build_key(key)
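RedLock and OptimisticLock now take a client typed as BaseCache[CacheKeyType, Any], so they accept any value parameterization. A brief usage sketch, assuming an already-constructed cache; the function name and placeholder value are illustrative:

from typing import Any

from aiocache.base import BaseCache
from aiocache.lock import RedLock


async def compute_once(cache: BaseCache[str, Any], key: str) -> Any:
    # Only one coroutine per key runs the expensive path; others wait on the lock.
    async with RedLock(cache, key, lease=20):
        value = await cache.get(key)
        if value is None:
            value = "expensive-result"  # placeholder for real work
            await cache.set(key, value, ttl=60)
        return value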

tests/utils.py (+2 −2)

@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import Optional, Union
+from typing import Any, Optional, Union
 
 from aiocache.base import BaseCache
 
@@ -19,7 +19,7 @@ def ensure_key(key: Union[str, Enum]) -> str:
         return key
 
 
-class AbstractBaseCache(BaseCache[str]):
+class AbstractBaseCache(BaseCache[str, Any]):
     """BaseCache that can be mocked for NotImplementedError tests"""
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
