From 422a0f5a40139b81e1f2b9b7cf90d7fea5b4c090 Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 1 Apr 2025 06:09:57 +0330 Subject: [PATCH 01/95] update gitignore Signed-off-by: amirreza --- .gitignore | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.gitignore b/.gitignore index a7aba6889..3b8ec93d5 100644 --- a/.gitignore +++ b/.gitignore @@ -80,6 +80,7 @@ celerybeat-schedule .env # virtualenv +.venv/ venv/ ENV/ @@ -95,3 +96,6 @@ _release_notes tags .mypy_cache/ .pytest_cache/ + +# IDEs +.idea From 10e627a2d927d658d049594e3bac31d0c6379f72 Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 1 Apr 2025 06:27:05 +0330 Subject: [PATCH 02/95] implement a valkey client instead of the redis client Signed-off-by: amirreza --- aiocache/__init__.py | 10 +- aiocache/backends/{redis.py => valkey.py} | 142 ++++++++++++++-------- 2 files changed, 93 insertions(+), 59 deletions(-) rename aiocache/backends/{redis.py => valkey.py} (55%) diff --git a/aiocache/__init__.py b/aiocache/__init__.py index 4b5abe2fb..6e8715d00 100644 --- a/aiocache/__init__.py +++ b/aiocache/__init__.py @@ -11,14 +11,14 @@ _AIOCACHE_CACHES: list[Type[BaseCache[Any]]] = [SimpleMemoryCache] try: - import redis + import glide except ImportError: - logger.debug("redis not installed, RedisCache unavailable") + logger.debug("glide not installed, ValkeyCache unavailable") else: - from aiocache.backends.redis import RedisCache + from aiocache.backends.valkey import ValkeyCache - _AIOCACHE_CACHES.append(RedisCache) - del redis + _AIOCACHE_CACHES.append(ValkeyCache) + del glide try: import aiomcache diff --git a/aiocache/backends/redis.py b/aiocache/backends/valkey.py similarity index 55% rename from aiocache/backends/redis.py rename to aiocache/backends/valkey.py index 22f175ed9..298facea5 100644 --- a/aiocache/backends/redis.py +++ b/aiocache/backends/valkey.py @@ -1,31 +1,36 @@ -import itertools -from typing import Any, Callable, Optional, TYPE_CHECKING +import logging +import time +from typing import Any, Callable, Optional, TYPE_CHECKING, List, cast -import redis.asyncio as redis -from redis.exceptions import ResponseError as IncrbyException +from glide import GlideClient, Script, Transaction, ExpirySet, ExpiryType, ConditionalChange +from glide.exceptions import RequestError as IncrbyException +from glide.protobuf.command_request_pb2 import RequestType -from aiocache.base import BaseCache +from aiocache.base import BaseCache, API from aiocache.serializers import JsonSerializer if TYPE_CHECKING: # pragma: no cover from aiocache.serializers import BaseSerializer -class RedisBackend(BaseCache[str]): - RELEASE_SCRIPT = ( - "if redis.call('get',KEYS[1]) == ARGV[1] then" - " return redis.call('del',KEYS[1])" +logger = logging.getLogger(__name__) + + +class ValkeyBackend(BaseCache[str]): + RELEASE_SCRIPT = Script( + "if server.call('get',KEYS[1]) == ARGV[1] then" + " return server.call('del',KEYS[1])" " else" " return 0" " end" ) - CAS_SCRIPT = ( - "if redis.call('get',KEYS[1]) == ARGV[2] then" + CAS_SCRIPT = Script( + "if server.call('get',KEYS[1]) == ARGV[2] then" " if #ARGV == 4 then" - " return redis.call('set', KEYS[1], ARGV[1], ARGV[3], ARGV[4])" + " return server.call('set', KEYS[1], ARGV[1], ARGV[3], ARGV[4])" " else" - " return redis.call('set', KEYS[1], ARGV[1])" + " return server.call('set', KEYS[1], ARGV[1])" " end" " else" " return 0" @@ -34,18 +39,11 @@ class RedisBackend(BaseCache[str]): def __init__( self, - client: redis.Redis, + client: GlideClient, **kwargs, ): super().__init__(**kwargs) - # NOTE: decoding can't be 
controlled on API level after switching to - # redis, we need to disable decoding on global/connection level - # (decode_responses=False), because some of the values are saved as - # bytes directly, like pickle serialized values, which may raise an - # exception when decoded with 'utf-8'. - if client.connection_pool.connection_kwargs['decode_responses']: - raise ValueError("redis client must be constructed with decode_responses set to False") self.client = client async def _get(self, key, encoding="utf-8", _conn=None): @@ -58,7 +56,7 @@ async def _gets(self, key, encoding="utf-8", _conn=None): return await self._get(key, encoding=encoding, _conn=_conn) async def _multi_get(self, keys, encoding="utf-8", _conn=None): - values = await self.client.mget(*keys) + values = await self.client.mget(keys) if encoding is None: return values return [v if v is None else v.decode(encoding) for v in values] @@ -70,47 +68,55 @@ async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None): return await self.client.set(key, value) if isinstance(ttl, float): ttl = int(ttl * 1000) - return await self.client.psetex(key, ttl, value) - return await self.client.setex(key, ttl, value) + return await self.client.set(key, value, expiry=ExpirySet(ExpiryType.MILLSEC, ttl)) + return await self.client.set(key, value, expiry=ExpirySet(ExpiryType.SEC, ttl)) async def _cas(self, key, value, token, ttl=None, _conn=None): args = () if ttl is not None: - args = ("PX", int(ttl * 1000)) if isinstance(ttl, float) else ("EX", ttl) - return await self._raw("eval", self.CAS_SCRIPT, 1, key, value, token, *args, _conn=_conn) + args = ("PX", str(int(ttl * 1000))) if isinstance(ttl, float) else ("EX", str(ttl)) + if isinstance(key, str): + key = [key] + return await self._script(self.CAS_SCRIPT, key, value, token, *args) async def _multi_set(self, pairs, ttl=None, _conn=None): ttl = ttl or 0 - flattened = list(itertools.chain.from_iterable((key, value) for key, value in pairs)) + values = {key: value for key, value in pairs} if ttl: - await self.__multi_set_ttl(flattened, ttl) + await self.__multi_set_ttl(values, ttl) else: - await self.client.execute_command("MSET", *flattened) + await self.client.mset(values) return True - async def __multi_set_ttl(self, flattened, ttl): - async with self.client.pipeline(transaction=True) as p: - p.execute_command("MSET", *flattened) - ttl, exp = (int(ttl * 1000), p.pexpire) if isinstance(ttl, float) else (ttl, p.expire) - for key in flattened[::2]: - exp(key, time=ttl) - await p.execute() + async def __multi_set_ttl(self, values, ttl): + transaction = Transaction() + transaction.mset(values) + ttl, exp = ( + (int(ttl * 1000), transaction.pexpire) + if isinstance(ttl, float) + else (ttl, transaction.expire) + ) + for key in values: + exp(key, ttl) + await self.client.exec(transaction) async def _add(self, key, value, ttl=None, _conn=None): - kwargs = {"nx": True} + kwargs = {"conditional_set": ConditionalChange.ONLY_IF_DOES_NOT_EXIST} if isinstance(ttl, float): - kwargs["px"] = int(ttl * 1000) - else: - kwargs["ex"] = ttl + kwargs["expiry"] = ExpirySet(ExpiryType.MILLSEC, int(ttl * 1000)) + elif ttl: + kwargs["expiry"] = ExpirySet(ExpiryType.SEC, ttl) was_set = await self.client.set(key, value, **kwargs) - if not was_set: + if was_set != "OK": raise ValueError("Key {} already exists, use .set to update the value".format(key)) return was_set async def _exists(self, key, _conn=None): + if isinstance(key, str): + key = [key] number = await self.client.exists(key) return bool(number) @@ -126,16 
+132,43 @@ async def _expire(self, key, ttl, _conn=None):
         return await self.client.expire(key, ttl)
 
     async def _delete(self, key, _conn=None):
+        if isinstance(key, str):
+            key = [key]
         return await self.client.delete(key)
 
     async def _clear(self, namespace=None, _conn=None):
         if namespace:
-            keys = await self.client.keys("{}:*".format(namespace))
+            cursor, keys = await self.client.scan(b"0", "{}:*".format(namespace))
             if keys:
-                await self.client.delete(*keys)
+                return bool(await self.client.delete(keys))
         else:
-            await self.client.flushdb()
-        return True
+            return await self.client.flushdb()
+
+    @API.register
+    @API.aiocache_enabled()
+    @API.timeout
+    @API.plugins
+    async def script(self, script: Script, keys: List, *args):
+        """
+        Send a raw script to the underlying client. Note that by using this
+        method you will lose compatibility with other backends.
+
+        Keys have to be provided as a list of key names; args are passed to
+        the script as given.
+
+        :param script: glide.Script object.
+        :param keys: list of keys of the script
+        :param args: arguments of the script
+        :returns: whatever the underlying client returns
+        :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
+        """
+        start = time.monotonic()
+        ret = await self._script(script, keys, *args)
+        logger.debug("%s (%.4f)s", script, time.monotonic() - start)
+        return ret
+
+    async def _script(self, script, keys: List, *args):
+        return await self.client.invoke_script(script, keys=keys, args=args)
 
     async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs):
         value = await getattr(self.client, command)(*args, **kwargs)
@@ -147,15 +180,17 @@ async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs):
         return value
 
     async def _redlock_release(self, key, value):
-        return await self._raw("eval", self.RELEASE_SCRIPT, 1, key, value)
+        if await self.client.get(key):
+            return await self.client.delete([key])
+        return 0
 
     def build_key(self, key: str, namespace: Optional[str] = None) -> str:
         return self._str_build_key(key, namespace)
 
 
-class RedisCache(RedisBackend):
+class ValkeyCache(ValkeyBackend):
     """
-    Redis cache implementation with the following components as defaults:
+    Valkey cache implementation with the following components as defaults:
 
         - serializer: :class:`aiocache.serializers.JsonSerializer`
         - plugins: []
@@ -167,14 +202,14 @@ class RedisCache(RedisBackend):
         the backend. Default is an empty string, "".
     :param timeout: int or float in seconds specifying maximum timeout
         for the operations to last. By default its 5.
- :param client: redis.Redis which is an active client for working with redis + :param client: glide.GlideClient which is an active client for working with valkey """ - NAME = "redis" + NAME = "valkey" def __init__( self, - client: redis.Redis, + client: GlideClient, serializer: Optional["BaseSerializer"] = None, namespace: str = "", key_builder: Callable[[str, str], str] = lambda k, ns: f"{ns}:{k}" if ns else k, @@ -191,7 +226,7 @@ def __init__( @classmethod def parse_uri_path(cls, path): """ - Given a uri path, return the Redis specific configuration + Given a uri path, return the Valkey specific configuration options in that path string according to iana definition http://www.iana.org/assignments/uri-schemes/prov/redis @@ -205,5 +240,4 @@ def parse_uri_path(cls, path): return options def __repr__(self): # pragma: no cover - connection_kwargs = self.client.connection_pool.connection_kwargs - return "RedisCache ({}:{})".format(connection_kwargs['host'], connection_kwargs['port']) + return "ValkeyCache" From 35246e4dffbed12cdabed92de6ad4b4d5054addb Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 1 Apr 2025 06:28:11 +0330 Subject: [PATCH 03/95] setup valkey and valkey-glide Signed-off-by: amirreza --- .github/workflows/ci.yml | 18 +++++++++--------- Makefile | 2 +- docker-compose.yml | 4 ++-- requirements-dev.txt | 1 - requirements.txt | 2 +- setup.cfg | 3 ++- setup.py | 2 +- 7 files changed, 16 insertions(+), 16 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a5ff99021..959a76ff3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -51,26 +51,26 @@ jobs: matrix: os: [ubuntu] pyver: ['3.9', '3.10', '3.11', '3.12', '3.13'] - redis: ['latest'] + valkey: ['latest'] ujson: [''] include: - os: ubuntu pyver: pypy-3.9 - redis: 'latest' + valkey: 'latest' - os: ubuntu pyver: '3.9' - redis: '5.0.14' + valkey: '5.0.14' - os: ubuntu pyver: '3.9' - redis: 'latest' + valkey: 'latest' ujson: 'ujson' services: - redis: - image: redis:${{ matrix.redis }} + valkey: + image: valkey:${{ matrix.valkey }} ports: - 6379:6379 options: >- - --health-cmd "redis-cli ping" + --health-cmd "valkey-cli ping" --health-interval 10s --health-timeout 5s --health-retries 5 @@ -104,12 +104,12 @@ jobs: - name: Run functional tests run: bash examples/run_all.sh - name: Uninstall optional backends - run: pip uninstall -y aiomcache redis + run: pip uninstall -y aiomcache valkey-glide - name: Run unittests with minimal backend set env: COLOR: 'yes' run: | - pytest --cov-report xml --cov-report html --cov-append tests/acceptance tests/ut -m "not memcached and not redis" --ignore "tests/ut/backends/test_memcached.py" --ignore "tests/ut/backends/test_redis.py" + pytest --cov-report xml --cov-report html --cov-append tests/acceptance tests/ut -m "not memcached and not valkey" --ignore "tests/ut/backends/test_memcached.py" --ignore "tests/ut/backends/test_valkey.py" - name: Produce coverage report run: python -m coverage xml - name: Upload coverage diff --git a/Makefile b/Makefile index c2f834812..2eb6b4037 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,7 @@ lint: flake8 tests/ aiocache/ install-dev: - pip install -e .[redis,memcached,msgpack,dev] + pip install -e .[valkey,memcached,msgpack,dev] pylint: pylint --disable=C0111 aiocache diff --git a/docker-compose.yml b/docker-compose.yml index 9376ef8bf..b81d6634c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ version: '2' services: - redis: - image: redis + valkey: + image: valkey/valkey ports: - 
"6379:6379" memcached: diff --git a/requirements-dev.txt b/requirements-dev.txt index 1552158d6..49b54b0e4 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,6 +6,5 @@ flake8-bugbear==24.12.12 flake8-import-order==0.18.2 flake8-requirements==2.2.1 mypy==1.15.0; implementation_name=="cpython" -types-redis==4.6.0.20241004 types-ujson==5.10.0.20250326 aiocache-dynamodb==1.0.2 # used for documentation diff --git a/requirements.txt b/requirements.txt index 430092d21..542eed684 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,4 +8,4 @@ pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.1.1 pytest-mock==3.14.0 -redis==5.2.1 +valkey-glide==1.3.1 diff --git a/setup.cfg b/setup.cfg index 679fe00eb..1c76fcf1f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,6 +7,7 @@ max-line-length=100 [tool:pytest] addopts = --cov=aiocache --cov=tests/ --cov-report term --strict-markers asyncio_mode = auto +asyncio_default_fixture_loop_scope = function junit_suite_name = aiohttp_test_suite filterwarnings= error @@ -17,7 +18,7 @@ junit_family=xunit2 xfail_strict = true markers = memcached: tests requiring memcached backend - redis: tests requiring redis backend + valkey: tests requiring valkey backend [coverage:run] branch = True diff --git a/setup.py b/setup.py index 92024e003..b3c33ad8a 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ packages=("aiocache",), install_requires=None, extras_require={ - "redis": ["redis>=5"], + "valkey": ["valkey-glide>=1.3.0"], "memcached": ["aiomcache>=0.5.2"], "msgpack": ["msgpack>=0.5.5"], }, From 2279fdc28ca48bad6a18dcac8635d957e6043c27 Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 1 Apr 2025 06:28:43 +0330 Subject: [PATCH 04/95] adjust the package's tests with valkey Signed-off-by: amirreza --- tests/acceptance/conftest.py | 12 +- tests/acceptance/test_base.py | 74 +++++---- tests/acceptance/test_lock.py | 60 ++++---- tests/acceptance/test_serializers.py | 21 ++- tests/conftest.py | 21 +-- tests/performance/conftest.py | 8 +- tests/performance/server.py | 18 +-- tests/performance/test_concurrency.py | 2 +- tests/performance/test_footprint.py | 63 ++++---- tests/ut/backends/test_memcached.py | 6 +- tests/ut/backends/test_redis.py | 209 ------------------------- tests/ut/backends/test_valkey.py | 210 ++++++++++++++++++++++++++ tests/ut/conftest.py | 27 +++- 13 files changed, 392 insertions(+), 339 deletions(-) delete mode 100644 tests/ut/backends/test_redis.py create mode 100644 tests/ut/backends/test_valkey.py diff --git a/tests/acceptance/conftest.py b/tests/acceptance/conftest.py index 28224293a..4d33df86e 100644 --- a/tests/acceptance/conftest.py +++ b/tests/acceptance/conftest.py @@ -6,9 +6,10 @@ @pytest.fixture -async def redis_cache(redis_client): - from aiocache.backends.redis import RedisCache - async with RedisCache(namespace="test", client=redis_client) as cache: +async def valkey_cache(valkey_client): + from aiocache.backends.valkey import ValkeyCache + + async with ValkeyCache(namespace="test", client=valkey_client) as cache: yield cache await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) @@ -31,9 +32,10 @@ async def memcached_cache(): @pytest.fixture( params=( - pytest.param("redis_cache", marks=pytest.mark.redis), + pytest.param("valkey_cache", marks=pytest.mark.valkey), "memory_cache", pytest.param("memcached_cache", marks=pytest.mark.memcached), - )) + ) +) def cache(request): return request.getfixturevalue(request.param) diff --git a/tests/acceptance/test_base.py b/tests/acceptance/test_base.py index 
1a9e6fc0c..725d55c77 100644 --- a/tests/acceptance/test_base.py +++ b/tests/acceptance/test_base.py @@ -3,6 +3,7 @@ import pytest from aiocache.backends.memory import SimpleMemoryCache +from aiocache.backends.valkey import ValkeyCache from aiocache.base import _Conn from ..utils import Keys @@ -42,7 +43,10 @@ async def test_delete_existing(self, cache): assert value is None async def test_set(self, cache): - assert await cache.set(Keys.KEY, "value") is True + if isinstance(cache, ValkeyCache): + assert await cache.set(Keys.KEY, "value") == "OK" + else: + assert await cache.set(Keys.KEY, "value") is True async def test_set_cancel_previous_ttl_handle(self, cache): await cache.set(Keys.KEY, "value", ttl=4) @@ -79,7 +83,7 @@ async def test_add_missing(self, cache): assert await cache.add(Keys.KEY, "value", ttl=1) is True async def test_add_existing(self, cache): - assert await cache.set(Keys.KEY, "value") is True + assert await cache.set(Keys.KEY, "value") == "OK" with pytest.raises(ValueError): await cache.add(Keys.KEY, "value") @@ -129,13 +133,19 @@ async def test_clear(self, cache): async def test_close_pool_only_clears_resources(self, cache): await cache.set(Keys.KEY, "value") await cache.close() - assert await cache.set(Keys.KEY, "value") is True + if isinstance(cache, ValkeyCache): + assert await cache.set(Keys.KEY, "value") == "OK" + else: + assert await cache.set(Keys.KEY, "value") is True assert await cache.get(Keys.KEY) == "value" async def test_single_connection(self, cache): async with cache.get_connection() as conn: assert isinstance(conn, _Conn) - assert await conn.set(Keys.KEY, "value") is True + if isinstance(cache, ValkeyCache): + assert await conn.set(Keys.KEY, "value") == "OK" + else: + assert await conn.set(Keys.KEY, "value") is True assert await conn.get(Keys.KEY) == "value" @@ -213,40 +223,52 @@ async def test_close(self, memcached_cache): assert memcached_cache.client._pool._pool.qsize() == 0 -@pytest.mark.redis -class TestRedisCache: +@pytest.mark.valkey +class TestValkeyCache: async def test_accept_explicit_args(self): - from aiocache.backends.redis import RedisCache + from aiocache.backends.valkey import ValkeyCache with pytest.raises(TypeError): - RedisCache(random_attr="wtf") + ValkeyCache(random_attr="wtf") - async def test_float_ttl(self, redis_cache): - await redis_cache.set(Keys.KEY, "value", ttl=0.1) + async def test_float_ttl(self, valkey_cache): + await valkey_cache.set(Keys.KEY, "value", ttl=0.1) await asyncio.sleep(0.15) - assert await redis_cache.get(Keys.KEY) is None + assert await valkey_cache.get(Keys.KEY) is None - async def test_multi_set_float_ttl(self, redis_cache): + async def test_multi_set_float_ttl(self, valkey_cache): pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] - assert await redis_cache.multi_set(pairs, ttl=0.1) is True + assert await valkey_cache.multi_set(pairs, ttl=0.1) is True await asyncio.sleep(0.15) - assert await redis_cache.multi_get([Keys.KEY, Keys.KEY_1]) == [None, None] + assert await valkey_cache.multi_get([Keys.KEY, Keys.KEY_1]) == [None, None] - async def test_raw(self, redis_cache): - await redis_cache.raw("set", "key", "value") - assert await redis_cache.raw("get", "key") == "value" - assert await redis_cache.raw("keys", "k*") == ["key"] + async def test_raw(self, valkey_cache): + await valkey_cache.raw("set", "key", "value") + assert await valkey_cache.raw("get", "key") == "value" + assert await valkey_cache.raw("scan", b"0", "k*") == ["key"] # .raw() doesn't build key with namespace prefix, clear it manually 
- await redis_cache.raw("delete", "key") + await valkey_cache.raw("delete", "key") + + async def test_script(self, valkey_cache): + from glide import Script + set_script = Script("return server.call('set',KEYS[1], ARGV[1])") + get_script = Script("return server.call('get',KEYS[1])") + key_script = Script("return server.call('keys',KEYS[1])") + del_script = Script("server.call('del',KEYS[1])") + await valkey_cache.script(set_script, ["key"], "value") + assert await valkey_cache.script(get_script, keys=["key"]) == b"value" + assert await valkey_cache.script(key_script, keys=["k*"]) == [b"key"] + # .raw() doesn't build key with namespace prefix, clear it manually + await valkey_cache.script(del_script, "key") - async def test_clear_with_namespace_redis(self, redis_cache): - await redis_cache.set(Keys.KEY, "value", namespace="test") - await redis_cache.clear(namespace="test") + async def test_clear_with_namespace_valkey(self, valkey_cache): + await valkey_cache.set(Keys.KEY, "value", namespace="test") + await valkey_cache.clear(namespace="test") - assert await redis_cache.exists(Keys.KEY, namespace="test") is False + assert await valkey_cache.exists(Keys.KEY, namespace="test") is False - async def test_close(self, redis_cache): - await redis_cache.set(Keys.KEY, "value") - await redis_cache._close() + async def test_close(self, valkey_cache): + await valkey_cache.set(Keys.KEY, "value") + await valkey_cache._close() diff --git a/tests/acceptance/test_lock.py b/tests/acceptance/test_lock.py index 3e5a53792..64202ae6d 100644 --- a/tests/acceptance/test_lock.py +++ b/tests/acceptance/test_lock.py @@ -21,9 +21,9 @@ def build_key_bytes(key, namespace=None): @pytest.fixture -def custom_redis_cache(mocker, redis_cache, build_key=build_key): - mocker.patch.object(redis_cache, "build_key", new=build_key) - yield redis_cache +def custom_valkey_cache(mocker, valkey_cache, build_key=build_key): + mocker.patch.object(valkey_cache, "build_key", new=build_key) + yield valkey_cache @pytest.fixture @@ -127,35 +127,35 @@ async def test_float_lease(self, memory_cache): assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None -@pytest.mark.redis -class TestRedisRedLock: +@pytest.mark.valkey +class TestValkeyRedLock: @pytest.fixture - def lock(self, redis_cache): - return RedLock(redis_cache, Keys.KEY, 20) + def lock(self, valkey_cache): + return RedLock(valkey_cache, Keys.KEY, 20) - async def test_acquire_key_builder(self, custom_redis_cache, lock): - custom_redis_cache.serializer = StringSerializer() + async def test_acquire_key_builder(self, custom_valkey_cache, lock): + custom_valkey_cache.serializer = StringSerializer() async with lock: - assert await custom_redis_cache.get(KEY_LOCK) == lock._value + assert await custom_valkey_cache.get(KEY_LOCK) == lock._value - async def test_acquire_release_key_builder(self, custom_redis_cache, lock): - custom_redis_cache.serializer = StringSerializer() + async def test_acquire_release_key_builder(self, custom_valkey_cache, lock): + custom_valkey_cache.serializer = StringSerializer() async with lock: - assert await custom_redis_cache.get(KEY_LOCK) is not None - assert await custom_redis_cache.get(KEY_LOCK) is None + assert await custom_valkey_cache.get(KEY_LOCK) is not None + assert await custom_valkey_cache.get(KEY_LOCK) is None async def test_release_wrong_token_fails(self, lock): await lock.__aenter__() lock._value = "random" assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None - async def test_release_wrong_client_fails(self, 
redis_cache, lock): - wrong_lock = RedLock(redis_cache, Keys.KEY, 20) + async def test_release_wrong_client_fails(self, valkey_cache, lock): + wrong_lock = RedLock(valkey_cache, Keys.KEY, 20) await lock.__aenter__() assert await wrong_lock.__aexit__("exc_type", "exc_value", "traceback") is None - async def test_float_lease(self, redis_cache): - lock = RedLock(redis_cache, Keys.KEY, 0.1) + async def test_float_lease(self, valkey_cache): + lock = RedLock(valkey_cache, Keys.KEY, 0.1) await lock.__aenter__() await asyncio.sleep(0.2) assert await lock.__aexit__("exc_type", "exc_value", "traceback") is None @@ -257,23 +257,23 @@ async def test_check_and_set_with_float_ttl(self, memory_cache, lock): assert await memory_cache.get(Keys.KEY) is None -@pytest.mark.redis -class TestRedisOptimisticLock: +@pytest.mark.valkey +class TestValkeyOptimisticLock: @pytest.fixture - def lock(self, redis_cache): - return OptimisticLock(redis_cache, Keys.KEY) + def lock(self, valkey_cache): + return OptimisticLock(valkey_cache, Keys.KEY) - async def test_acquire_key_builder(self, custom_redis_cache, lock): - custom_redis_cache.serializer = StringSerializer() - await custom_redis_cache.set(Keys.KEY, "value") + async def test_acquire_key_builder(self, custom_valkey_cache, lock): + custom_valkey_cache.serializer = StringSerializer() + await custom_valkey_cache.set(Keys.KEY, "value") async with lock: - assert await custom_redis_cache.get(KEY_LOCK) == lock._token - await custom_redis_cache.delete(Keys.KEY, "value") + assert await custom_valkey_cache.get(KEY_LOCK) == lock._token + await custom_valkey_cache.delete(Keys.KEY, "value") - async def test_check_and_set_with_float_ttl(self, redis_cache, lock): - await redis_cache.set(Keys.KEY, "previous_value") + async def test_check_and_set_with_float_ttl(self, valkey_cache, lock): + await valkey_cache.set(Keys.KEY, "previous_value") async with lock as locked: await locked.cas("value", ttl=0.1) await asyncio.sleep(1) - assert await redis_cache.get(Keys.KEY) is None + assert await valkey_cache.get(Keys.KEY) is None diff --git a/tests/acceptance/test_serializers.py b/tests/acceptance/test_serializers.py index 694f0a8b6..246ed10d1 100644 --- a/tests/acceptance/test_serializers.py +++ b/tests/acceptance/test_serializers.py @@ -10,6 +10,7 @@ except ImportError: import json # type: ignore[no-redef] +from aiocache.backends.valkey import ValkeyCache from aiocache.serializers import ( BaseSerializer, JsonSerializer, @@ -59,7 +60,10 @@ class TestNullSerializer: @pytest.mark.parametrize("obj", TYPES) async def test_set_get_types(self, memory_cache, obj): memory_cache.serializer = NullSerializer() - assert await memory_cache.set(Keys.KEY, obj) is True + if isinstance(memory_cache, ValkeyCache): + assert await memory_cache.set(Keys.KEY, obj) == "OK" + else: + assert await memory_cache.set(Keys.KEY, obj) is True assert await memory_cache.get(Keys.KEY) is obj @pytest.mark.parametrize("obj", TYPES) @@ -81,7 +85,10 @@ class TestStringSerializer: @pytest.mark.parametrize("obj", TYPES) async def test_set_get_types(self, cache, obj): cache.serializer = StringSerializer() - assert await cache.set(Keys.KEY, obj) is True + if isinstance(cache, ValkeyCache): + assert await cache.set(Keys.KEY, obj) == "OK" + else: + assert await cache.set(Keys.KEY, obj) is True assert await cache.get(Keys.KEY) == str(obj) @pytest.mark.parametrize("obj", TYPES) @@ -103,7 +110,10 @@ class TestJsonSerializer: @pytest.mark.parametrize("obj", TYPES) async def test_set_get_types(self, cache, obj): cache.serializer = 
JsonSerializer() - assert await cache.set(Keys.KEY, obj) is True + if isinstance(cache, ValkeyCache): + assert await cache.set(Keys.KEY, obj) == "OK" + else: + assert await cache.set(Keys.KEY, obj) is True assert await cache.get(Keys.KEY) == json.loads(json.dumps(obj)) @pytest.mark.parametrize("obj", TYPES) @@ -125,7 +135,10 @@ class TestPickleSerializer: @pytest.mark.parametrize("obj", TYPES) async def test_set_get_types(self, cache, obj): cache.serializer = PickleSerializer() - assert await cache.set(Keys.KEY, obj) is True + if isinstance(cache, ValkeyCache): + assert await cache.set(Keys.KEY, obj) == "OK" + else: + assert await cache.set(Keys.KEY, obj) is True assert await cache.get(Keys.KEY) == pickle.loads(pickle.dumps(obj)) @pytest.mark.parametrize("obj", TYPES) diff --git a/tests/conftest.py b/tests/conftest.py index 4482701d1..267a34325 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,16 +12,11 @@ def decode_responses(): @pytest.fixture -async def redis_client(max_conns, decode_responses): - import redis.asyncio as redis - - async with redis.Redis( - host="127.0.0.1", - port=6379, - db=0, - password=None, - decode_responses=decode_responses, - socket_connect_timeout=None, - max_connections=max_conns - ) as r: - yield r +async def valkey_client(max_conns, decode_responses): + from glide import GlideClient, GlideClientConfiguration, NodeAddress + + addresses = [NodeAddress("localhost", 6379)] + conf = GlideClientConfiguration(addresses=addresses, database_id=0) + client = await GlideClient.create(conf) + + yield client diff --git a/tests/performance/conftest.py b/tests/performance/conftest.py index 03066cbb3..22c41991d 100644 --- a/tests/performance/conftest.py +++ b/tests/performance/conftest.py @@ -2,11 +2,11 @@ @pytest.fixture -async def redis_cache(redis_client): - # redis connection pool raises ConnectionError but doesn't wait for conn reuse +async def valkey_cache(valkey_client): + # valkey connection pool raises ConnectionError but doesn't wait for conn reuse # when exceeding max pool size. 
- from aiocache.backends.redis import RedisCache - async with RedisCache(namespace="test", client=redis_client) as cache: + from aiocache.backends.valkey import ValkeyCache + async with ValkeyCache(namespace="test", client=valkey_client) as cache: yield cache diff --git a/tests/performance/server.py b/tests/performance/server.py index c5d536700..c05cbd789 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -2,7 +2,8 @@ import logging import uuid -import redis.asyncio as redis +from glide import GlideClient, GlideClientConfiguration, NodeAddress + from aiohttp import web logging.getLogger("aiohttp.access").propagate = False @@ -10,15 +11,12 @@ class CacheManager: def __init__(self, backend: str): - if backend == "redis": - from aiocache.backends.redis import RedisCache - cache = RedisCache( - client=redis.Redis( - host="127.0.0.1", - port=6379, - db=0, - password=None, - decode_responses=False, + if backend == "valkey": + from aiocache.backends.valkey import ValkeyCache + + cache = ValkeyCache( + client=GlideClient.create( + GlideClientConfiguration(addresses=[NodeAddress()], database_id=0), ) ) elif backend == "memcached": diff --git a/tests/performance/test_concurrency.py b/tests/performance/test_concurrency.py index 4d1803b20..4f40f9490 100644 --- a/tests/performance/test_concurrency.py +++ b/tests/performance/test_concurrency.py @@ -10,7 +10,7 @@ # TODO: Fix and readd "memcached" (currently fails >98% of requests) -@pytest.fixture(params=("memory", "redis")) +@pytest.fixture(params=("memory", "valkey")) def server(request): p = Process(target=run_server, args=(request.param,)) p.start() diff --git a/tests/performance/test_footprint.py b/tests/performance/test_footprint.py index 9595759f5..ca443e532 100644 --- a/tests/performance/test_footprint.py +++ b/tests/performance/test_footprint.py @@ -4,74 +4,77 @@ import aiomcache import pytest -import redis.asyncio as redis + +from glide import GlideClient, GlideClientConfiguration, NodeAddress @pytest.fixture -async def redis_client() -> AsyncIterator["redis.Redis[str]"]: - async with cast("redis.Redis[str]", - redis.Redis(host="127.0.0.1", port=6379, max_connections=1)) as r: - yield r +async def valkey_client() -> AsyncIterator["GlideClient"]: + addresses = [NodeAddress("localhost", 6379)] + conf = GlideClientConfiguration(addresses=addresses) + client = await GlideClient.create(conf) + + yield client @pytest.mark.skipif(platform.python_implementation() == "PyPy", reason="Too slow") -class TestRedis: - async def test_redis_getsetdel(self, redis_client, redis_cache): +class TestValkey: + async def test_valkey_getsetdel(self, valkey_client, valkey_cache): N = 10000 - redis_total_time = 0 + valkey_total_time = 0 for _n in range(N): start = time.time() - await redis_client.set("hi", "value") - await redis_client.get("hi") - await redis_client.delete("hi") - redis_total_time += time.time() - start + await valkey_client.set("hi", "value") + await valkey_client.get("hi") + await valkey_client.delete(["hi"]) + valkey_total_time += time.time() - start aiocache_total_time = 0 for _n in range(N): start = time.time() - await redis_cache.set("hi", "value", timeout=0) - await redis_cache.get("hi", timeout=0) - await redis_cache.delete("hi", timeout=0) + await valkey_cache.set("hi", "value", timeout=0) + await valkey_cache.get("hi", timeout=0) + await valkey_cache.delete("hi", timeout=0) aiocache_total_time += time.time() - start print( "\n{:0.2f}/{:0.2f}: {:0.2f}".format( - aiocache_total_time, redis_total_time, 
aiocache_total_time / redis_total_time + aiocache_total_time, valkey_total_time, aiocache_total_time / valkey_total_time ) ) print("aiocache avg call: {:0.5f}s".format(aiocache_total_time / N)) - print("redis avg call: {:0.5f}s".format(redis_total_time / N)) - assert aiocache_total_time / redis_total_time < 1.35 + print("valkey avg call: {:0.5f}s".format(valkey_total_time / N)) + assert aiocache_total_time / valkey_total_time < 1.35 - async def test_redis_multigetsetdel(self, redis_client, redis_cache): + async def test_valkey_multigetsetdel(self, valkey_client, valkey_cache): N = 5000 - redis_total_time = 0 + valkey_total_time = 0 values = ["a", "b", "c", "d", "e", "f"] for _n in range(N): start = time.time() - await redis_client.mset({x: x for x in values}) - await redis_client.mget(values) + await valkey_client.mset({x: x for x in values}) + await valkey_client.mget(values) for k in values: - await redis_client.delete(k) - redis_total_time += time.time() - start + await valkey_client.delete([k]) + valkey_total_time += time.time() - start aiocache_total_time = 0 for _n in range(N): start = time.time() - await redis_cache.multi_set([(x, x) for x in values], timeout=0) - await redis_cache.multi_get(values, timeout=0) + await valkey_cache.multi_set([(x, x) for x in values], timeout=0) + await valkey_cache.multi_get(values, timeout=0) for k in values: - await redis_cache.delete(k, timeout=0) + await valkey_cache.delete(k, timeout=0) aiocache_total_time += time.time() - start print( "\n{:0.2f}/{:0.2f}: {:0.2f}".format( - aiocache_total_time, redis_total_time, aiocache_total_time / redis_total_time + aiocache_total_time, valkey_total_time, aiocache_total_time / valkey_total_time ) ) print("aiocache avg call: {:0.5f}s".format(aiocache_total_time / N)) - print("redis_client avg call: {:0.5f}s".format(redis_total_time / N)) - assert aiocache_total_time / redis_total_time < 1.35 + print("valkey_client avg call: {:0.5f}s".format(valkey_total_time / N)) + assert aiocache_total_time / valkey_total_time < 1.35 @pytest.fixture diff --git a/tests/ut/backends/test_memcached.py b/tests/ut/backends/test_memcached.py index f0de04cb4..77c28a3a8 100644 --- a/tests/ut/backends/test_memcached.py +++ b/tests/ut/backends/test_memcached.py @@ -249,7 +249,11 @@ def test_parse_uri_path(self): @pytest.mark.parametrize( "namespace, expected", - ([None, "test" + ensure_key(Keys.KEY)], ["", ensure_key(Keys.KEY)], ["my_ns", "my_ns" + ensure_key(Keys.KEY)]), # noqa: B950 + ( + [None, "test" + ensure_key(Keys.KEY)], + ["", ensure_key(Keys.KEY)], + ["my_ns", "my_ns" + ensure_key(Keys.KEY)], + ), # noqa: B950 ) def test_build_key_bytes(self, set_test_namespace, memcached_cache, namespace, expected): assert memcached_cache.build_key(Keys.KEY, namespace) == expected.encode() diff --git a/tests/ut/backends/test_redis.py b/tests/ut/backends/test_redis.py deleted file mode 100644 index 10e5d2de2..000000000 --- a/tests/ut/backends/test_redis.py +++ /dev/null @@ -1,209 +0,0 @@ -from unittest.mock import ANY, AsyncMock, create_autospec, patch - -import pytest -from redis.asyncio.client import Pipeline -from redis.exceptions import ResponseError - -from aiocache.backends.redis import RedisBackend, RedisCache -from aiocache.base import BaseCache -from aiocache.serializers import JsonSerializer -from ...utils import Keys, ensure_key - - -@pytest.fixture -def redis(redis_client): - redis = RedisBackend(client=redis_client) - with patch.object(redis, "client", autospec=True) as m: - # These methods actually return an awaitable. 
- for method in ( - "eval", "expire", "get", "psetex", "setex", "execute_command", "exists", - "incrby", "persist", "delete", "keys", "flushdb", - ): - setattr(m, method, AsyncMock(return_value=None, spec_set=())) - m.mget = AsyncMock(return_value=[None], spec_set=()) - m.set = AsyncMock(return_value=True, spec_set=()) - - m.pipeline.return_value = create_autospec(Pipeline, instance=True) - m.pipeline.return_value.__aenter__.return_value = m.pipeline.return_value - yield redis - - -class TestRedisBackend: - - @pytest.mark.parametrize("decode_responses", [True]) - async def test_redis_backend_requires_client_decode_responses(self, redis_client): - with pytest.raises(ValueError) as ve: - RedisBackend(client=redis_client) - - assert str(ve.value) == ( - "redis client must be constructed with decode_responses set to False" - ) - - async def test_get(self, redis): - redis.client.get.return_value = b"value" - assert await redis._get(Keys.KEY) == "value" - redis.client.get.assert_called_with(Keys.KEY) - - async def test_gets(self, mocker, redis): - mocker.spy(redis, "_get") - await redis._gets(Keys.KEY) - redis._get.assert_called_with(Keys.KEY, encoding="utf-8", _conn=ANY) - - async def test_set(self, redis): - await redis._set(Keys.KEY, "value") - redis.client.set.assert_called_with(Keys.KEY, "value") - - await redis._set(Keys.KEY, "value", ttl=1) - redis.client.setex.assert_called_with(Keys.KEY, 1, "value") - - async def test_set_cas_token(self, mocker, redis): - mocker.spy(redis, "_cas") - await redis._set(Keys.KEY, "value", _cas_token="old_value", _conn=redis.client) - redis._cas.assert_called_with( - Keys.KEY, "value", "old_value", ttl=None, _conn=redis.client - ) - - async def test_cas(self, mocker, redis): - mocker.spy(redis, "_raw") - await redis._cas(Keys.KEY, "value", "old_value", ttl=10, _conn=redis.client) - redis._raw.assert_called_with( - "eval", - redis.CAS_SCRIPT, - 1, - *[Keys.KEY, "value", "old_value", "EX", 10], - _conn=redis.client, - ) - - async def test_cas_float_ttl(self, mocker, redis): - mocker.spy(redis, "_raw") - await redis._cas(Keys.KEY, "value", "old_value", ttl=0.1, _conn=redis.client) - redis._raw.assert_called_with( - "eval", - redis.CAS_SCRIPT, - 1, - *[Keys.KEY, "value", "old_value", "PX", 100], - _conn=redis.client, - ) - - async def test_multi_get(self, redis): - await redis._multi_get([Keys.KEY, Keys.KEY_1]) - redis.client.mget.assert_called_with(Keys.KEY, Keys.KEY_1) - - async def test_multi_set(self, redis): - await redis._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")]) - redis.client.execute_command.assert_called_with( - "MSET", Keys.KEY, "value", Keys.KEY_1, "random" - ) - - async def test_multi_set_with_ttl(self, redis): - await redis._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")], ttl=1) - assert redis.client.pipeline.call_count == 1 - pipeline = redis.client.pipeline.return_value - pipeline.execute_command.assert_called_with( - "MSET", Keys.KEY, "value", Keys.KEY_1, "random" - ) - pipeline.expire.assert_any_call(Keys.KEY, time=1) - pipeline.expire.assert_any_call(Keys.KEY_1, time=1) - assert pipeline.execute.call_count == 1 - - async def test_add(self, redis): - await redis._add(Keys.KEY, "value") - redis.client.set.assert_called_with(Keys.KEY, "value", nx=True, ex=None) - - await redis._add(Keys.KEY, "value", 1) - redis.client.set.assert_called_with(Keys.KEY, "value", nx=True, ex=1) - - async def test_add_existing(self, redis): - redis.client.set.return_value = False - with pytest.raises(ValueError): - await redis._add(Keys.KEY, 
"value") - - async def test_add_float_ttl(self, redis): - await redis._add(Keys.KEY, "value", 0.1) - redis.client.set.assert_called_with(Keys.KEY, "value", nx=True, px=100) - - async def test_exists(self, redis): - redis.client.exists.return_value = 1 - await redis._exists(Keys.KEY) - redis.client.exists.assert_called_with(Keys.KEY) - - async def test_increment(self, redis): - await redis._increment(Keys.KEY, delta=2) - redis.client.incrby.assert_called_with(Keys.KEY, 2) - - async def test_increment_typerror(self, redis): - redis.client.incrby.side_effect = ResponseError("msg") - with pytest.raises(TypeError): - await redis._increment(Keys.KEY, delta=2) - redis.client.incrby.assert_called_with(Keys.KEY, 2) - - async def test_expire(self, redis): - await redis._expire(Keys.KEY, 1) - redis.client.expire.assert_called_with(Keys.KEY, 1) - await redis._increment(Keys.KEY, 2) - - async def test_expire_0_ttl(self, redis): - await redis._expire(Keys.KEY, ttl=0) - redis.client.persist.assert_called_with(Keys.KEY) - - async def test_delete(self, redis): - await redis._delete(Keys.KEY) - redis.client.delete.assert_called_with(Keys.KEY) - - async def test_clear(self, redis): - redis.client.keys.return_value = ["nm:a", "nm:b"] - await redis._clear("nm") - redis.client.delete.assert_called_with("nm:a", "nm:b") - - async def test_clear_no_keys(self, redis): - redis.client.keys.return_value = [] - await redis._clear("nm") - redis.client.delete.assert_not_called() - - async def test_clear_no_namespace(self, redis): - await redis._clear() - assert redis.client.flushdb.call_count == 1 - - async def test_raw(self, redis): - await redis._raw("get", Keys.KEY) - await redis._raw("set", Keys.KEY, 1) - redis.client.get.assert_called_with(Keys.KEY) - redis.client.set.assert_called_with(Keys.KEY, 1) - - async def test_redlock_release(self, mocker, redis): - mocker.spy(redis, "_raw") - await redis._redlock_release(Keys.KEY, "random") - redis._raw.assert_called_with("eval", redis.RELEASE_SCRIPT, 1, Keys.KEY, "random") - - -class TestRedisCache: - @pytest.fixture - def set_test_namespace(self, redis_cache): - redis_cache.namespace = "test" - yield - redis_cache.namespace = None - - def test_name(self): - assert RedisCache.NAME == "redis" - - def test_inheritance(self, redis_client): - assert isinstance(RedisCache(client=redis_client), BaseCache) - - def test_default_serializer(self, redis_client): - assert isinstance(RedisCache(client=redis_client).serializer, JsonSerializer) - - @pytest.mark.parametrize( - "path,expected", [("", {}), ("/", {}), ("/1", {"db": "1"}), ("/1/2/3", {"db": "1"})] - ) - def test_parse_uri_path(self, path, expected, redis_client): - assert RedisCache(client=redis_client).parse_uri_path(path) == expected - - @pytest.mark.parametrize( - "namespace, expected", - ([None, "test:" + ensure_key(Keys.KEY)], ["", ensure_key(Keys.KEY)], ["my_ns", "my_ns:" + ensure_key(Keys.KEY)]), # noqa: B950 - ) - def test_build_key_double_dot(self, set_test_namespace, redis_cache, namespace, expected): - assert redis_cache.build_key(Keys.KEY, namespace) == expected - - def test_build_key_no_namespace(self, redis_cache): - assert redis_cache.build_key(Keys.KEY, namespace=None) == Keys.KEY diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py new file mode 100644 index 000000000..8b60a2595 --- /dev/null +++ b/tests/ut/backends/test_valkey.py @@ -0,0 +1,210 @@ +from unittest.mock import ANY, AsyncMock, create_autospec, patch + +import pytest + +from glide import Transaction, Script +from 
glide.exceptions import RequestError + +from aiocache.backends.valkey import ValkeyBackend, ValkeyCache +from aiocache.base import BaseCache +from aiocache.serializers import JsonSerializer +from ...utils import Keys, ensure_key + + +@pytest.fixture +def valkey(valkey_client): + valkey = ValkeyBackend(client=valkey_client) + with patch.object(valkey, "client", autospec=True) as m: + # These methods actually return an awaitable. + for method in ( + "eval", + "expire", + "get", + "execute_command", + "exists", + "incrby", + "persist", + "delete", + "scan", + "flushdb", + ): + setattr(m, method, AsyncMock(return_value=None, spec_set=())) + m.mget = AsyncMock(return_value=[None], spec_set=()) + m.set = AsyncMock(return_value=True, spec_set=()) + + yield valkey + + +class TestValkeyBackend: + # async def test_valkey_backend_requires_client_decode_responses(self, valkey_client): + # with pytest.raises(ValueError) as ve: + # ValkeyBackend(client=valkey_client) + # + # assert str(ve.value) == ( + # "valkey client must be constructed with decode_responses set to False" + # ) + + async def test_get(self, valkey): + valkey.client.get.return_value = b"value" + assert await valkey._get(Keys.KEY) == "value" + valkey.client.get.assert_called_with(Keys.KEY) + + async def test_gets(self, mocker, valkey): + mocker.spy(valkey, "_get") + await valkey._gets(Keys.KEY) + valkey._get.assert_called_with(Keys.KEY, encoding="utf-8", _conn=ANY) + + async def test_set(self, valkey): + await valkey._set(Keys.KEY, "value") + valkey.client.set.assert_called_with(Keys.KEY, "value") + + await valkey._set(Keys.KEY, "value", ttl=1) + valkey.client.set.assert_called_once + + async def test_set_cas_token(self, mocker, valkey): + mocker.spy(valkey, "_cas") + await valkey._set(Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client) + valkey._cas.assert_called_with( + Keys.KEY, "value", "old_value", ttl=None, _conn=valkey.client + ) + + async def test_cas(self, mocker, valkey): + mocker.spy(valkey, "_script") + await valkey._cas(Keys.KEY, "value", "old_value", ttl=10, _conn=valkey.client) + valkey._script.assert_called_with( + valkey.CAS_SCRIPT, + *[[Keys.KEY], "value", "old_value", "EX", "10"], + ) + + async def test_cas_float_ttl(self, mocker, valkey): + mocker.spy(valkey, "_script") + await valkey._cas(Keys.KEY, "value", "old_value", ttl=0.1, _conn=valkey.client) + valkey._script.assert_called_with( + valkey.CAS_SCRIPT, + *[[Keys.KEY], "value", "old_value", "PX", "100"], + ) + + async def test_multi_get(self, valkey): + await valkey._multi_get([Keys.KEY, Keys.KEY_1]) + valkey.client.mget.assert_called_with(Keys.KEY, Keys.KEY_1) + + async def test_multi_set(self, valkey): + await valkey._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")]) + valkey.client.mset.assert_called_with({Keys.KEY: "value", Keys.KEY_1: "random"}) + + async def test_multi_set_with_ttl(self, valkey, mocker): + spy_mset = mocker.spy(Transaction, "mset") + spy_expire = mocker.spy(Transaction, "expire") + await valkey._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")], ttl=1) + + valkey.client.exec.assert_called() + + assert spy_mset.call_count == 1 + assert spy_expire.call_count == 2 + spy_expire.assert_any_call(valkey.client.exec.call_args.args[0], Keys.KEY, 1) + spy_expire.assert_any_call(valkey.client.exec.call_args.args[0], Keys.KEY_1, 1) + + async def test_add(self, valkey): + await valkey._add(Keys.KEY, "value") + valkey.client.set.assert_called_with(Keys.KEY, "value", nx=True, ex=None) + + await valkey._add(Keys.KEY, "value", 1) + 
valkey.client.set.assert_called_with(Keys.KEY, "value", nx=True, ex=1) + + async def test_add_existing(self, valkey): + valkey.client.set.return_value = False + with pytest.raises(ValueError): + await valkey._add(Keys.KEY, "value") + + async def test_add_float_ttl(self, valkey): + await valkey._add(Keys.KEY, "value", 0.1) + valkey.client.set.assert_called_with(Keys.KEY, "value", nx=True, px=100) + + async def test_exists(self, valkey): + valkey.client.exists.return_value = 1 + await valkey._exists(Keys.KEY) + valkey.client.exists.assert_called_with([Keys.KEY]) + + async def test_increment(self, valkey): + await valkey._increment(Keys.KEY, delta=2) + valkey.client.incrby.assert_called_with(Keys.KEY, 2) + + async def test_increment_typerror(self, valkey): + valkey.client.incrby.side_effect = RequestError("msg") + with pytest.raises(TypeError): + await valkey._increment(Keys.KEY, delta=2) + valkey.client.incrby.assert_called_with(Keys.KEY, 2) + + async def test_expire(self, valkey): + await valkey._expire(Keys.KEY, 1) + valkey.client.expire.assert_called_with(Keys.KEY, 1) + await valkey._increment(Keys.KEY, 2) + + async def test_expire_0_ttl(self, valkey): + await valkey._expire(Keys.KEY, ttl=0) + valkey.client.persist.assert_called_with(Keys.KEY) + + async def test_delete(self, valkey): + await valkey._delete(Keys.KEY) + valkey.client.delete.assert_called_with([Keys.KEY]) + + async def test_clear(self, valkey): + valkey.client.scan.return_value = [b"0", ["nm:a", "nm:b"]] + await valkey._clear("nm") + valkey.client.delete.assert_called_with("nm:a", "nm:b") + + async def test_clear_no_keys(self, valkey): + valkey.client.scan.return_value = [b"0", []] + await valkey._clear("nm") + valkey.client.delete.assert_not_called() + + async def test_clear_no_namespace(self, valkey): + await valkey._clear() + assert valkey.client.flushdb.call_count == 1 + + async def test_script(self, valkey): + script = Script("server.call('get', Keys[1]") + await valkey._script(script, Keys.KEY) + valkey.client.invoke_script.assert_called_with(script, Keys.KEY, ()) + + async def test_redlock_release(self, mocker, valkey): + mocker.spy(valkey, "_script") + await valkey._redlock_release(Keys.KEY, "random") + valkey._script.assert_called_with(valkey.RELEASE_SCRIPT, Keys.KEY, "random") + + +class TestValkeyCache: + @pytest.fixture + def set_test_namespace(self, valkey_cache): + valkey_cache.namespace = "test" + yield + valkey_cache.namespace = None + + def test_name(self): + assert ValkeyCache.NAME == "valkey" + + def test_inheritance(self, valkey_client): + assert isinstance(ValkeyCache(client=valkey_client), BaseCache) + + def test_default_serializer(self, valkey_client): + assert isinstance(ValkeyCache(client=valkey_client).serializer, JsonSerializer) + + @pytest.mark.parametrize( + "path,expected", [("", {}), ("/", {}), ("/1", {"db": "1"}), ("/1/2/3", {"db": "1"})] + ) + def test_parse_uri_path(self, path, expected, valkey_client): + assert ValkeyCache(client=valkey_client).parse_uri_path(path) == expected + + @pytest.mark.parametrize( + "namespace, expected", + ( + [None, "test:" + ensure_key(Keys.KEY)], + ["", ensure_key(Keys.KEY)], + ["my_ns", "my_ns:" + ensure_key(Keys.KEY)], + ), # noqa: B950 + ) + def test_build_key_double_dot(self, set_test_namespace, valkey_cache, namespace, expected): + assert valkey_cache.build_key(Keys.KEY, namespace) == expected + + def test_build_key_no_namespace(self, valkey_cache): + assert valkey_cache.build_key(Keys.KEY, namespace=None) == Keys.KEY diff --git a/tests/ut/conftest.py 
b/tests/ut/conftest.py index 38dae44a0..41bc640ac 100644 --- a/tests/ut/conftest.py +++ b/tests/ut/conftest.py @@ -17,9 +17,24 @@ def mock_base_cache(): """Return BaseCache instance with unimplemented methods mocked out.""" plugin = create_autospec(BasePlugin, instance=True) cache = ConcreteBaseCache(timeout=0.002, plugins=(plugin,)) - methods = ("_add", "_get", "_gets", "_set", "_multi_get", "_multi_set", "_delete", - "_exists", "_increment", "_expire", "_clear", "_raw", "_close", - "_redlock_release", "acquire_conn", "release_conn") + methods = ( + "_add", + "_get", + "_gets", + "_set", + "_multi_get", + "_multi_set", + "_delete", + "_exists", + "_increment", + "_expire", + "_clear", + "_raw", + "_close", + "_redlock_release", + "acquire_conn", + "release_conn", + ) with ExitStack() as stack: for f in methods: stack.enter_context(patch.object(cache, f, autospec=True)) @@ -40,10 +55,10 @@ def base_cache(): @pytest.fixture -async def redis_cache(redis_client): - from aiocache.backends.redis import RedisCache +async def valkey_cache(valkey_client): + from aiocache.backends.valkey import ValkeyCache - async with RedisCache(client=redis_client) as cache: + async with ValkeyCache(client=valkey_client) as cache: yield cache From 06a8e06d896979775a9b178367fe2492dc55806f Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 1 Apr 2025 06:29:07 +0330 Subject: [PATCH 05/95] adjust the package with valkey Signed-off-by: amirreza --- aiocache/base.py | 6 +++--- aiocache/decorators.py | 2 +- aiocache/lock.py | 25 +++++++++++++++++++------ 3 files changed, 23 insertions(+), 10 deletions(-) diff --git a/aiocache/base.py b/aiocache/base.py index f64edeb68..8aed65706 100644 --- a/aiocache/base.py +++ b/aiocache/base.py @@ -161,7 +161,7 @@ async def add(self, key, value, ttl=SENTINEL, dumps_fn=None, namespace=None, _co :param value: obj :param ttl: int the expiration time in seconds. Due to memcached restrictions if you want compatibility use int. In case you - need miliseconds, redis and memory support float ttls + need miliseconds, valkey and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout @@ -271,7 +271,7 @@ async def set( :param value: obj :param ttl: int the expiration time in seconds. Due to memcached restrictions if you want compatibility use int. In case you - need miliseconds, redis and memory support float ttls + need miliseconds, valkey and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout @@ -305,7 +305,7 @@ async def multi_set(self, pairs, ttl=SENTINEL, dumps_fn=None, namespace=None, _c :param pairs: list of two element iterables. First is key and second is value :param ttl: int the expiration time in seconds. Due to memcached restrictions if you want compatibility use int. 
In case you - need miliseconds, redis and memory support float ttls + need miliseconds, valkey and memory support float ttls :param dumps_fn: callable alternative to use as dumps function :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout diff --git a/aiocache/decorators.py b/aiocache/decorators.py index d2c41b24a..3322afce6 100644 --- a/aiocache/decorators.py +++ b/aiocache/decorators.py @@ -112,7 +112,7 @@ class cached_stampede(cached): :param cache: cache instance to use when calling the ``set``/``get`` operations. Default is :class:`aiocache.SimpleMemoryCache`. :param lease: int seconds to lock function call to avoid cache stampede effects. - If 0 or None, no locking happens (default is 2). redis and memory backends support + If 0 or None, no locking happens (default is 2). valkey and memory backends support float ttls :param ttl: int seconds to store the function call. Default is None which means no expiration. :param key_from_attr: str arg or kwarg name from the function to use as a key. diff --git a/aiocache/lock.py b/aiocache/lock.py index 34e2299c9..cd8a94f08 100644 --- a/aiocache/lock.py +++ b/aiocache/lock.py @@ -7,7 +7,7 @@ class RedLock(Generic[CacheKeyType]): """ - Implementation of `Redlock `_ + Implementation of `Redlock `_ with a single instance because aiocache is focused on single instance cache. @@ -27,7 +27,7 @@ class RedLock(Generic[CacheKeyType]): Backend specific implementation: - - Redis implements correctly the redlock algorithm. It sets + - Valkey implements correctly the redlock algorithm. It sets the key if it doesn't exist. To release, it checks the value is the same as the instance trying to release and if it is, it removes the lock. If not it will do nothing @@ -43,10 +43,16 @@ class RedLock(Generic[CacheKeyType]): Example usage:: - from aiocache import Cache + from aiocache import ValkeyCache from aiocache.lock import RedLock - cache = Cache(Cache.REDIS) + from glide import GlideClient, GlideClientConfiguration, NodeAddress + + addresses = [NodeAddress("localhost", 6379)] + conf = GlideClientConfiguration(addresses=addresses, database_id=0) + client = await GlideClient.create(conf) + + cache = ValkeyCache(client) async with RedLock(cache, 'key', lease=1): # Calls will wait here result = await cache.get('key') if result is not None: @@ -110,8 +116,15 @@ class OptimisticLock(Generic[CacheKeyType]): the one we retrieved when the lock started. Example usage:: + from aiocache import ValkeyCache + + from glide import GlideClient, GlideClientConfiguration, NodeAddress + + addresses = [NodeAddress("localhost", 6379)] + conf = GlideClientConfiguration(addresses=addresses, database_id=0) + client = await GlideClient.create(conf) - cache = Cache(Cache.REDIS) + cache = ValkeyCache(client) # The value stored in 'key' will be checked here async with OptimisticLock(cache, 'key') as lock: @@ -122,7 +135,7 @@ class OptimisticLock(Generic[CacheKeyType]): an :class:`aiocache.lock.OptimisticLockError` will be raised. 
A way to make the same call crash would be to change the value inside the lock like:: - cache = Cache(Cache.REDIS) + cache = ValkeyCache(client) # The value stored in 'key' will be checked here async with OptimisticLock(cache, 'key') as lock: From 73cf620ed413bd18dd76bda6b8b123240b02c367 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 7 Apr 2025 01:05:52 +0330 Subject: [PATCH 06/95] adjust set() to return boolean instead of string --- aiocache/backends/valkey.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 298facea5..dfd58963c 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -62,6 +62,7 @@ async def _multi_get(self, keys, encoding="utf-8", _conn=None): return [v if v is None else v.decode(encoding) for v in values] async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None): + success_message = "OK" if _cas_token is not None: return await self._cas(key, value, _cas_token, ttl=ttl, _conn=_conn) if ttl is None: @@ -70,6 +71,9 @@ async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None): ttl = int(ttl * 1000) return await self.client.set(key, value, expiry=ExpirySet(ExpiryType.MILLSEC, ttl)) return await self.client.set(key, value, expiry=ExpirySet(ExpiryType.SEC, ttl)) + return await self.client.set(key, value) == success_message + + return await self.client.set(key, value, expiry=ttl) == success_message async def _cas(self, key, value, token, ttl=None, _conn=None): args = () From 08310ab532040a796e20cb21b0bc18bd4cf6b3a6 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 7 Apr 2025 01:08:19 +0330 Subject: [PATCH 07/95] replace lua scripts with python code should be noted that the lua script for cas had a bug as well --- aiocache/backends/valkey.py | 43 ++++++++++--------------------------- 1 file changed, 11 insertions(+), 32 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index dfd58963c..514cf0f18 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -17,26 +17,6 @@ class ValkeyBackend(BaseCache[str]): - RELEASE_SCRIPT = Script( - "if server.call('get',KEYS[1]) == ARGV[1] then" - " return server.call('del',KEYS[1])" - " else" - " return 0" - " end" - ) - - CAS_SCRIPT = Script( - "if server.call('get',KEYS[1]) == ARGV[2] then" - " if #ARGV == 4 then" - " return server.call('set', KEYS[1], ARGV[1], ARGV[3], ARGV[4])" - " else" - " return server.call('set', KEYS[1], ARGV[1])" - " end" - " else" - " return 0" - " end" - ) - def __init__( self, client: GlideClient, @@ -63,25 +43,24 @@ async def _multi_get(self, keys, encoding="utf-8", _conn=None): async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None): success_message = "OK" + + if isinstance(ttl, float): + ttl = ExpirySet(ExpiryType.MILLSEC, int(ttl * 1000)) + elif ttl: + ttl = ExpirySet(ExpiryType.SEC, ttl) + if _cas_token is not None: return await self._cas(key, value, _cas_token, ttl=ttl, _conn=_conn) + if ttl is None: - return await self.client.set(key, value) - if isinstance(ttl, float): - ttl = int(ttl * 1000) - return await self.client.set(key, value, expiry=ExpirySet(ExpiryType.MILLSEC, ttl)) - return await self.client.set(key, value, expiry=ExpirySet(ExpiryType.SEC, ttl)) return await self.client.set(key, value) == success_message return await self.client.set(key, value, expiry=ttl) == success_message async def _cas(self, key, value, token, ttl=None, _conn=None): - args = () - if ttl is not None: - args = ("PX", str(int(ttl * 
1000))) if isinstance(ttl, float) else ("EX", str(ttl)) - if isinstance(key, str): - key = [key] - return await self._script(self.CAS_SCRIPT, key, value, token, *args) + if await self._get(key) == token: + return await self.client.set(key, value, expiry=ttl) == "OK" + return 0 async def _multi_set(self, pairs, ttl=None, _conn=None): ttl = ttl or 0 @@ -184,7 +163,7 @@ async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs): return value async def _redlock_release(self, key, value): - if await self.client.get(key): + if await self._get(key) == value: return await self.client.delete([key]) return 0 From 21af9c67ad1d0da29a0dfde3a047d01aa4aa2a4d Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 7 Apr 2025 01:09:11 +0330 Subject: [PATCH 08/95] adjust _clear to return True if there is not key in the namespace --- aiocache/backends/valkey.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 514cf0f18..12407bcf2 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -121,12 +121,14 @@ async def _delete(self, key, _conn=None): async def _clear(self, namespace=None, _conn=None): if namespace: - cursor, keys = await self.client.scan(b"0", "{}:*".format(namespace)) + _, keys = await self.client.scan(b"0", "{}:*".format(namespace)) if keys: return bool(await self.client.delete(keys)) else: return await self.client.flushdb() + return True + @API.register @API.aiocache_enabled() @API.timeout From e3be5f94104f3ff4799970c7e63785bdf1c2dc98 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 7 Apr 2025 01:11:30 +0330 Subject: [PATCH 09/95] adjust tests' expected return values --- tests/acceptance/test_base.py | 21 ++++++--------------- tests/acceptance/test_serializers.py | 20 ++++---------------- 2 files changed, 10 insertions(+), 31 deletions(-) diff --git a/tests/acceptance/test_base.py b/tests/acceptance/test_base.py index 725d55c77..a61a02c6f 100644 --- a/tests/acceptance/test_base.py +++ b/tests/acceptance/test_base.py @@ -3,7 +3,6 @@ import pytest from aiocache.backends.memory import SimpleMemoryCache -from aiocache.backends.valkey import ValkeyCache from aiocache.base import _Conn from ..utils import Keys @@ -43,10 +42,7 @@ async def test_delete_existing(self, cache): assert value is None async def test_set(self, cache): - if isinstance(cache, ValkeyCache): - assert await cache.set(Keys.KEY, "value") == "OK" - else: - assert await cache.set(Keys.KEY, "value") is True + assert await cache.set(Keys.KEY, "value") is True async def test_set_cancel_previous_ttl_handle(self, cache): await cache.set(Keys.KEY, "value", ttl=4) @@ -83,7 +79,7 @@ async def test_add_missing(self, cache): assert await cache.add(Keys.KEY, "value", ttl=1) is True async def test_add_existing(self, cache): - assert await cache.set(Keys.KEY, "value") == "OK" + assert await cache.set(Keys.KEY, "value") is True with pytest.raises(ValueError): await cache.add(Keys.KEY, "value") @@ -133,19 +129,13 @@ async def test_clear(self, cache): async def test_close_pool_only_clears_resources(self, cache): await cache.set(Keys.KEY, "value") await cache.close() - if isinstance(cache, ValkeyCache): - assert await cache.set(Keys.KEY, "value") == "OK" - else: - assert await cache.set(Keys.KEY, "value") is True + assert await cache.set(Keys.KEY, "value") is True assert await cache.get(Keys.KEY) == "value" async def test_single_connection(self, cache): async with cache.get_connection() as conn: assert isinstance(conn, _Conn) - if 
isinstance(cache, ValkeyCache): - assert await conn.set(Keys.KEY, "value") == "OK" - else: - assert await conn.set(Keys.KEY, "value") is True + assert await conn.set(Keys.KEY, "value") is True assert await conn.get(Keys.KEY) == "value" @@ -247,12 +237,13 @@ async def test_multi_set_float_ttl(self, valkey_cache): async def test_raw(self, valkey_cache): await valkey_cache.raw("set", "key", "value") assert await valkey_cache.raw("get", "key") == "value" - assert await valkey_cache.raw("scan", b"0", "k*") == ["key"] + assert await valkey_cache.raw("scan", b"0", "k*") == [b"0", [b"key"]] # .raw() doesn't build key with namespace prefix, clear it manually await valkey_cache.raw("delete", "key") async def test_script(self, valkey_cache): from glide import Script + set_script = Script("return server.call('set',KEYS[1], ARGV[1])") get_script = Script("return server.call('get',KEYS[1])") key_script = Script("return server.call('keys',KEYS[1])") diff --git a/tests/acceptance/test_serializers.py b/tests/acceptance/test_serializers.py index 246ed10d1..0c6f40580 100644 --- a/tests/acceptance/test_serializers.py +++ b/tests/acceptance/test_serializers.py @@ -60,10 +60,7 @@ class TestNullSerializer: @pytest.mark.parametrize("obj", TYPES) async def test_set_get_types(self, memory_cache, obj): memory_cache.serializer = NullSerializer() - if isinstance(memory_cache, ValkeyCache): - assert await memory_cache.set(Keys.KEY, obj) == "OK" - else: - assert await memory_cache.set(Keys.KEY, obj) is True + assert await memory_cache.set(Keys.KEY, obj) is True assert await memory_cache.get(Keys.KEY) is obj @pytest.mark.parametrize("obj", TYPES) @@ -85,10 +82,7 @@ class TestStringSerializer: @pytest.mark.parametrize("obj", TYPES) async def test_set_get_types(self, cache, obj): cache.serializer = StringSerializer() - if isinstance(cache, ValkeyCache): - assert await cache.set(Keys.KEY, obj) == "OK" - else: - assert await cache.set(Keys.KEY, obj) is True + assert await cache.set(Keys.KEY, obj) is True assert await cache.get(Keys.KEY) == str(obj) @pytest.mark.parametrize("obj", TYPES) @@ -110,10 +104,7 @@ class TestJsonSerializer: @pytest.mark.parametrize("obj", TYPES) async def test_set_get_types(self, cache, obj): cache.serializer = JsonSerializer() - if isinstance(cache, ValkeyCache): - assert await cache.set(Keys.KEY, obj) == "OK" - else: - assert await cache.set(Keys.KEY, obj) is True + assert await cache.set(Keys.KEY, obj) is True assert await cache.get(Keys.KEY) == json.loads(json.dumps(obj)) @pytest.mark.parametrize("obj", TYPES) @@ -135,10 +126,7 @@ class TestPickleSerializer: @pytest.mark.parametrize("obj", TYPES) async def test_set_get_types(self, cache, obj): cache.serializer = PickleSerializer() - if isinstance(cache, ValkeyCache): - assert await cache.set(Keys.KEY, obj) == "OK" - else: - assert await cache.set(Keys.KEY, obj) is True + assert await cache.set(Keys.KEY, obj) is True assert await cache.get(Keys.KEY) == pickle.loads(pickle.dumps(obj)) @pytest.mark.parametrize("obj", TYPES) From 136af1d31f87783807962a5a2d4abd3450660b96 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 7 Apr 2025 02:03:44 +0330 Subject: [PATCH 10/95] fix valkey's image name in CI --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 959a76ff3..cde7cd764 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -59,14 +59,14 @@ jobs: valkey: 'latest' - os: ubuntu pyver: '3.9' - valkey: '5.0.14' + valkey: '7.2.8' - os: 
ubuntu pyver: '3.9' valkey: 'latest' ujson: 'ujson' services: valkey: - image: valkey:${{ matrix.valkey }} + image: valkey/valkey:${{ matrix.valkey }} ports: - 6379:6379 options: >- From 74ef1fad9e7f85dbd711c99eb4302f192e63aef7 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 7 Apr 2025 07:54:51 +0330 Subject: [PATCH 11/95] fix glide setup for tests --- tests/conftest.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 267a34325..66a701750 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -20,3 +20,5 @@ async def valkey_client(max_conns, decode_responses): client = await GlideClient.create(conf) yield client + + await client.close() From e9d35cc696e1d0b6214abc981de8424ef0de7543 Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 8 Apr 2025 00:10:24 +0330 Subject: [PATCH 12/95] fix unit tests --- tests/ut/backends/test_valkey.py | 94 +++++++++++++++++++------------- 1 file changed, 56 insertions(+), 38 deletions(-) diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 8b60a2595..4358367f0 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -1,8 +1,8 @@ -from unittest.mock import ANY, AsyncMock, create_autospec, patch +from unittest.mock import ANY, AsyncMock, patch import pytest -from glide import Transaction, Script +from glide import ConditionalChange, ExpirySet, ExpiryType, Transaction, Script from glide.exceptions import RequestError from aiocache.backends.valkey import ValkeyBackend, ValkeyCache @@ -30,20 +30,12 @@ def valkey(valkey_client): ): setattr(m, method, AsyncMock(return_value=None, spec_set=())) m.mget = AsyncMock(return_value=[None], spec_set=()) - m.set = AsyncMock(return_value=True, spec_set=()) + m.set = AsyncMock(return_value="OK", spec_set=()) yield valkey class TestValkeyBackend: - # async def test_valkey_backend_requires_client_decode_responses(self, valkey_client): - # with pytest.raises(ValueError) as ve: - # ValkeyBackend(client=valkey_client) - # - # assert str(ve.value) == ( - # "valkey client must be constructed with decode_responses set to False" - # ) - async def test_get(self, valkey): valkey.client.get.return_value = b"value" assert await valkey._get(Keys.KEY) == "value" @@ -62,54 +54,68 @@ async def test_set(self, valkey): valkey.client.set.assert_called_once async def test_set_cas_token(self, mocker, valkey): - mocker.spy(valkey, "_cas") - await valkey._set(Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client) + mocker.patch.object(valkey, "_cas") + await valkey._set( + Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client + ) valkey._cas.assert_called_with( Keys.KEY, "value", "old_value", ttl=None, _conn=valkey.client ) async def test_cas(self, mocker, valkey): - mocker.spy(valkey, "_script") + mocker.spy(valkey, "_get") + mocker.spy(valkey, "_cas") await valkey._cas(Keys.KEY, "value", "old_value", ttl=10, _conn=valkey.client) - valkey._script.assert_called_with( - valkey.CAS_SCRIPT, - *[[Keys.KEY], "value", "old_value", "EX", "10"], - ) + valkey._get.assert_called_with(Keys.KEY) + assert valkey._cas.spy_return == 0 async def test_cas_float_ttl(self, mocker, valkey): - mocker.spy(valkey, "_script") + spy = mocker.spy(valkey, "_get") await valkey._cas(Keys.KEY, "value", "old_value", ttl=0.1, _conn=valkey.client) - valkey._script.assert_called_with( - valkey.CAS_SCRIPT, - *[[Keys.KEY], "value", "old_value", "PX", "100"], - ) + spy.assert_called_with(Keys.KEY) + mocker.stop(spy) + mock = mocker.patch.object(valkey, 
"_get", return_value="old_value") + await valkey._cas(Keys.KEY, "value", "old_value", ttl=0.1, _conn=valkey.client) + mock.assert_called_once() + valkey.client.set.assert_called_with(Keys.KEY, "value", expiry=0.1) async def test_multi_get(self, valkey): await valkey._multi_get([Keys.KEY, Keys.KEY_1]) - valkey.client.mget.assert_called_with(Keys.KEY, Keys.KEY_1) + valkey.client.mget.assert_called_with([Keys.KEY, Keys.KEY_1]) async def test_multi_set(self, valkey): await valkey._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")]) valkey.client.mset.assert_called_with({Keys.KEY: "value", Keys.KEY_1: "random"}) async def test_multi_set_with_ttl(self, valkey, mocker): - spy_mset = mocker.spy(Transaction, "mset") - spy_expire = mocker.spy(Transaction, "expire") + mock_mset = mocker.patch.object(Transaction, "mset") + mock_expire = mocker.patch.object(Transaction, "expire") await valkey._multi_set([(Keys.KEY, "value"), (Keys.KEY_1, "random")], ttl=1) valkey.client.exec.assert_called() - assert spy_mset.call_count == 1 - assert spy_expire.call_count == 2 - spy_expire.assert_any_call(valkey.client.exec.call_args.args[0], Keys.KEY, 1) - spy_expire.assert_any_call(valkey.client.exec.call_args.args[0], Keys.KEY_1, 1) + assert mock_mset.call_count == 1 + assert mock_expire.call_count == 2 + mock_expire.assert_any_call(Keys.KEY, 1) + mock_expire.assert_any_call(Keys.KEY_1, 1) async def test_add(self, valkey): await valkey._add(Keys.KEY, "value") - valkey.client.set.assert_called_with(Keys.KEY, "value", nx=True, ex=None) + valkey.client.set.assert_called_with( + Keys.KEY, "value", conditional_set=ConditionalChange.ONLY_IF_DOES_NOT_EXIST + ) await valkey._add(Keys.KEY, "value", 1) - valkey.client.set.assert_called_with(Keys.KEY, "value", nx=True, ex=1) + # TODO: change this to `assert_called_with` once ExpirySet support `__eq__` + assert valkey.client.set.call_args.args[0] == Keys.KEY + assert ( + valkey.client.set.call_args.kwargs["conditional_set"] + == ConditionalChange.ONLY_IF_DOES_NOT_EXIST + ) + assert ( + valkey.client.set.call_args.kwargs["expiry"].get_cmd_args() + == ExpirySet(ExpiryType.SEC, 1).get_cmd_args() + ) async def test_add_existing(self, valkey): valkey.client.set.return_value = False @@ -118,7 +124,15 @@ async def test_add_existing(self, valkey): async def test_add_float_ttl(self, valkey): await valkey._add(Keys.KEY, "value", 0.1) - valkey.client.set.assert_called_with(Keys.KEY, "value", nx=True, px=100) + assert valkey.client.set.call_args.args[0] == Keys.KEY + assert ( + valkey.client.set.call_args.kwargs["conditional_set"] + == ConditionalChange.ONLY_IF_DOES_NOT_EXIST + ) + assert ( + valkey.client.set.call_args.kwargs["expiry"].get_cmd_args() + == ExpirySet(ExpiryType.MILLSEC, 100).get_cmd_args() + ) async def test_exists(self, valkey): valkey.client.exists.return_value = 1 @@ -151,7 +165,7 @@ async def test_delete(self, valkey): async def test_clear(self, valkey): valkey.client.scan.return_value = [b"0", ["nm:a", "nm:b"]] await valkey._clear("nm") - valkey.client.delete.assert_called_with("nm:a", "nm:b") + valkey.client.delete.assert_called_with(["nm:a", "nm:b"]) async def test_clear_no_keys(self, valkey): valkey.client.scan.return_value = [b"0", []] @@ -168,9 +182,10 @@ async def test_script(self, valkey): valkey.client.invoke_script.assert_called_with(script, Keys.KEY, ()) async def test_redlock_release(self, mocker, valkey): - mocker.spy(valkey, "_script") + mocker.patch.object(valkey, "_get", return_value="random") await valkey._redlock_release(Keys.KEY, "random") - 
valkey._script.assert_called_with(valkey.RELEASE_SCRIPT, Keys.KEY, "random") + valkey._get.assert_called_once_with(Keys.KEY) + valkey.client.delete.assert_called_once_with([Keys.KEY]) class TestValkeyCache: @@ -190,7 +205,8 @@ def test_default_serializer(self, valkey_client): assert isinstance(ValkeyCache(client=valkey_client).serializer, JsonSerializer) @pytest.mark.parametrize( - "path,expected", [("", {}), ("/", {}), ("/1", {"db": "1"}), ("/1/2/3", {"db": "1"})] + "path,expected", + [("", {}), ("/", {}), ("/1", {"db": "1"}), ("/1/2/3", {"db": "1"})], ) def test_parse_uri_path(self, path, expected, valkey_client): assert ValkeyCache(client=valkey_client).parse_uri_path(path) == expected @@ -203,7 +219,9 @@ def test_parse_uri_path(self, path, expected, valkey_client): ["my_ns", "my_ns:" + ensure_key(Keys.KEY)], ), # noqa: B950 ) - def test_build_key_double_dot(self, set_test_namespace, valkey_cache, namespace, expected): + def test_build_key_double_dot( + self, set_test_namespace, valkey_cache, namespace, expected + ): assert valkey_cache.build_key(Keys.KEY, namespace) == expected def test_build_key_no_namespace(self, valkey_cache): From 504690eb4ddad6009cb0b573d9041e50d5dcbae3 Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 8 Apr 2025 00:56:21 +0330 Subject: [PATCH 13/95] adjust backend's context manager protocol with glide --- aiocache/backends/valkey.py | 30 +++++++++++++++++++++++++----- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 12407bcf2..4ae38e301 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -1,10 +1,17 @@ import logging import time -from typing import Any, Callable, Optional, TYPE_CHECKING, List, cast - -from glide import GlideClient, Script, Transaction, ExpirySet, ExpiryType, ConditionalChange +from typing import Any, Callable, Optional, TYPE_CHECKING, List + +from glide import ( + ConditionalChange, + ExpirySet, + ExpiryType, + GlideClient, + GlideClientConfiguration, + Script, + Transaction, +) from glide.exceptions import RequestError as IncrbyException -from glide.protobuf.command_request_pb2 import RequestType from aiocache.base import BaseCache, API from aiocache.serializers import JsonSerializer @@ -194,10 +201,12 @@ class ValkeyCache(ValkeyBackend): def __init__( self, - client: GlideClient, + client: Optional[GlideClient] = None, serializer: Optional["BaseSerializer"] = None, namespace: str = "", key_builder: Callable[[str, str], str] = lambda k, ns: f"{ns}:{k}" if ns else k, + backend: type[GlideClient] = GlideClient, + config: GlideClientConfiguration = None, **kwargs: Any, ): super().__init__( @@ -207,6 +216,17 @@ def __init__( key_builder=key_builder, **kwargs, ) + self.backend = backend + self.config = config + + async def __aenter__(self): + if not self.config: + raise AttributeError("Configuration must be provided for context manager") + self.client = await self.backend.create(config=self.config) + return self + + async def __aexit__(self, *args, **kwargs): + await self.client.close() @classmethod def parse_uri_path(cls, path): From 3e5a99dd8b94fc650170689c6ed41142ee59c3a1 Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 8 Apr 2025 00:56:51 +0330 Subject: [PATCH 14/95] adjust tests with the new context manager --- tests/acceptance/conftest.py | 6 ++++-- tests/conftest.py | 14 +++++++++++--- tests/performance/conftest.py | 6 ++++-- tests/ut/backends/test_valkey.py | 8 ++------ tests/ut/conftest.py | 4 ++-- 5 files changed, 23 insertions(+), 15 
deletions(-) diff --git a/tests/acceptance/conftest.py b/tests/acceptance/conftest.py index 4d33df86e..ce3192b49 100644 --- a/tests/acceptance/conftest.py +++ b/tests/acceptance/conftest.py @@ -6,10 +6,10 @@ @pytest.fixture -async def valkey_cache(valkey_client): +async def valkey_cache(valkey_config): from aiocache.backends.valkey import ValkeyCache - async with ValkeyCache(namespace="test", client=valkey_client) as cache: + async with ValkeyCache(namespace="test", config=valkey_config) as cache: yield cache await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) @@ -17,6 +17,7 @@ async def valkey_cache(valkey_client): @pytest.fixture async def memory_cache(): from aiocache.backends.memory import SimpleMemoryCache + async with SimpleMemoryCache(namespace="test") as cache: yield cache await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) @@ -25,6 +26,7 @@ async def memory_cache(): @pytest.fixture async def memcached_cache(): from aiocache.backends.memcached import MemcachedCache + async with MemcachedCache(namespace="test") as cache: yield cache await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) diff --git a/tests/conftest.py b/tests/conftest.py index 66a701750..c62a19fff 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,12 +12,20 @@ def decode_responses(): @pytest.fixture -async def valkey_client(max_conns, decode_responses): - from glide import GlideClient, GlideClientConfiguration, NodeAddress +def valkey_config(): + from glide import GlideClientConfiguration, NodeAddress addresses = [NodeAddress("localhost", 6379)] conf = GlideClientConfiguration(addresses=addresses, database_id=0) - client = await GlideClient.create(conf) + + yield conf + + +@pytest.fixture +async def valkey_client(max_conns, decode_responses, valkey_config): + from glide import GlideClient + + client = await GlideClient.create(valkey_config) yield client diff --git a/tests/performance/conftest.py b/tests/performance/conftest.py index 22c41991d..02a44a37b 100644 --- a/tests/performance/conftest.py +++ b/tests/performance/conftest.py @@ -2,16 +2,18 @@ @pytest.fixture -async def valkey_cache(valkey_client): +async def valkey_cache(valkey_config): # valkey connection pool raises ConnectionError but doesn't wait for conn reuse # when exceeding max pool size. 
from aiocache.backends.valkey import ValkeyCache - async with ValkeyCache(namespace="test", client=valkey_client) as cache: + + async with ValkeyCache(namespace="test", config=valkey_config) as cache: yield cache @pytest.fixture async def memcached_cache(): from aiocache.backends.memcached import MemcachedCache + async with MemcachedCache(namespace="test", pool_size=1) as cache: yield cache diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 4358367f0..0f77fbc36 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -55,9 +55,7 @@ async def test_set(self, valkey): async def test_set_cas_token(self, mocker, valkey): mocker.patch.object(valkey, "_cas") - await valkey._set( - Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client - ) + await valkey._set(Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client) valkey._cas.assert_called_with( Keys.KEY, "value", "old_value", ttl=None, _conn=valkey.client ) @@ -219,9 +217,7 @@ def test_parse_uri_path(self, path, expected, valkey_client): ["my_ns", "my_ns:" + ensure_key(Keys.KEY)], ), # noqa: B950 ) - def test_build_key_double_dot( - self, set_test_namespace, valkey_cache, namespace, expected - ): + def test_build_key_double_dot(self, set_test_namespace, valkey_cache, namespace, expected): assert valkey_cache.build_key(Keys.KEY, namespace) == expected def test_build_key_no_namespace(self, valkey_cache): diff --git a/tests/ut/conftest.py b/tests/ut/conftest.py index 41bc640ac..1d1fbf0c4 100644 --- a/tests/ut/conftest.py +++ b/tests/ut/conftest.py @@ -55,10 +55,10 @@ def base_cache(): @pytest.fixture -async def valkey_cache(valkey_client): +async def valkey_cache(valkey_config): from aiocache.backends.valkey import ValkeyCache - async with ValkeyCache(client=valkey_client) as cache: + async with ValkeyCache(config=valkey_config) as cache: yield cache From 540304124fe995ee2ff84191fb4c5caf6e4ff9d0 Mon Sep 17 00:00:00 2001 From: amirreza Date: Sun, 13 Apr 2025 07:24:14 +0330 Subject: [PATCH 15/95] add server information to repr --- aiocache/backends/valkey.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 4ae38e301..b2ce37275 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -245,4 +245,4 @@ def parse_uri_path(cls, path): return options def __repr__(self): # pragma: no cover - return "ValkeyCache" + return f"ValkeyCache ({self.client.config.addresses[0].host}:{self.client.config.addresses[0].port})" From c69f6328f282ac21fa5f0a1d23d28841c303b40c Mon Sep 17 00:00:00 2001 From: amirreza Date: Sun, 13 Apr 2025 07:24:35 +0330 Subject: [PATCH 16/95] cleanup --- .gitignore | 3 --- aiocache/backends/valkey.py | 4 +++- setup.cfg | 1 - tests/acceptance/test_serializers.py | 1 - tests/performance/test_footprint.py | 2 +- 5 files changed, 4 insertions(+), 7 deletions(-) diff --git a/.gitignore b/.gitignore index 3b8ec93d5..c548ea880 100644 --- a/.gitignore +++ b/.gitignore @@ -96,6 +96,3 @@ _release_notes tags .mypy_cache/ .pytest_cache/ - -# IDEs -.idea diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index b2ce37275..f3b23216a 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -101,7 +101,9 @@ async def _add(self, key, value, ttl=None, _conn=None): kwargs["expiry"] = ExpirySet(ExpiryType.SEC, ttl) was_set = await self.client.set(key, value, **kwargs) if was_set != "OK": - raise ValueError("Key {} already exists, use .set to 
update the value".format(key)) + raise ValueError( + "Key {} already exists, use .set to update the value".format(key) + ) return was_set async def _exists(self, key, _conn=None): diff --git a/setup.cfg b/setup.cfg index 1c76fcf1f..0c44b7801 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,7 +7,6 @@ max-line-length=100 [tool:pytest] addopts = --cov=aiocache --cov=tests/ --cov-report term --strict-markers asyncio_mode = auto -asyncio_default_fixture_loop_scope = function junit_suite_name = aiohttp_test_suite filterwarnings= error diff --git a/tests/acceptance/test_serializers.py b/tests/acceptance/test_serializers.py index 0c6f40580..694f0a8b6 100644 --- a/tests/acceptance/test_serializers.py +++ b/tests/acceptance/test_serializers.py @@ -10,7 +10,6 @@ except ImportError: import json # type: ignore[no-redef] -from aiocache.backends.valkey import ValkeyCache from aiocache.serializers import ( BaseSerializer, JsonSerializer, diff --git a/tests/performance/test_footprint.py b/tests/performance/test_footprint.py index ca443e532..1e4bd8efa 100644 --- a/tests/performance/test_footprint.py +++ b/tests/performance/test_footprint.py @@ -1,6 +1,6 @@ import platform import time -from typing import AsyncIterator, cast +from typing import AsyncIterator import aiomcache import pytest From 1037de792bbfe98c6f101a67881494d01b08568b Mon Sep 17 00:00:00 2001 From: amirreza Date: Sun, 13 Apr 2025 07:55:04 +0330 Subject: [PATCH 17/95] refactor cache instantiation only context manager is supported to instantiate ValkeyBackend --- aiocache/backends/valkey.py | 39 ++++++++++++++------------------ tests/conftest.py | 11 --------- tests/ut/backends/test_valkey.py | 26 +++++++++++++-------- 3 files changed, 33 insertions(+), 43 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index f3b23216a..bf704818c 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -1,6 +1,6 @@ import logging import time -from typing import Any, Callable, Optional, TYPE_CHECKING, List +from typing import Any, Callable, List, Optional, Self, TYPE_CHECKING from glide import ( ConditionalChange, @@ -24,14 +24,24 @@ class ValkeyBackend(BaseCache[str]): - def __init__( - self, - client: GlideClient, - **kwargs, - ): + def __init__(self, config: GlideClientConfiguration = None, **kwargs): + self.config = config super().__init__(**kwargs) - self.client = client + async def __aenter__(self) -> Self: + if not self.config: + raise AttributeError("Configuration must be provided for context manager") + self.client = await self._connect(self.config) + return self + + async def __aexit__(self, *args, **kwargs) -> None: + await self._disconnect() + + async def _connect(self, config: GlideClientConfiguration) -> GlideClient: + return await GlideClient.create(config=config) + + async def _disconnect(self) -> None: + await self.client.close() async def _get(self, key, encoding="utf-8", _conn=None): value = await self.client.get(key) @@ -203,32 +213,17 @@ class ValkeyCache(ValkeyBackend): def __init__( self, - client: Optional[GlideClient] = None, serializer: Optional["BaseSerializer"] = None, namespace: str = "", key_builder: Callable[[str, str], str] = lambda k, ns: f"{ns}:{k}" if ns else k, - backend: type[GlideClient] = GlideClient, - config: GlideClientConfiguration = None, **kwargs: Any, ): super().__init__( - client=client, serializer=serializer or JsonSerializer(), namespace=namespace, key_builder=key_builder, **kwargs, ) - self.backend = backend - self.config = config - - async def __aenter__(self): 
- if not self.config: - raise AttributeError("Configuration must be provided for context manager") - self.client = await self.backend.create(config=self.config) - return self - - async def __aexit__(self, *args, **kwargs): - await self.client.close() @classmethod def parse_uri_path(cls, path): diff --git a/tests/conftest.py b/tests/conftest.py index c62a19fff..e9c6c81c3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -19,14 +19,3 @@ def valkey_config(): conf = GlideClientConfiguration(addresses=addresses, database_id=0) yield conf - - -@pytest.fixture -async def valkey_client(max_conns, decode_responses, valkey_config): - from glide import GlideClient - - client = await GlideClient.create(valkey_config) - - yield client - - await client.close() diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 0f77fbc36..040c9fc19 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -12,8 +12,8 @@ @pytest.fixture -def valkey(valkey_client): - valkey = ValkeyBackend(client=valkey_client) +async def valkey(valkey_config): + valkey = await ValkeyBackend(config=valkey_config).__aenter__() with patch.object(valkey, "client", autospec=True) as m: # These methods actually return an awaitable. for method in ( @@ -34,6 +34,8 @@ def valkey(valkey_client): yield valkey + await valkey.__aexit__() + class TestValkeyBackend: async def test_get(self, valkey): @@ -55,7 +57,9 @@ async def test_set(self, valkey): async def test_set_cas_token(self, mocker, valkey): mocker.patch.object(valkey, "_cas") - await valkey._set(Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client) + await valkey._set( + Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client + ) valkey._cas.assert_called_with( Keys.KEY, "value", "old_value", ttl=None, _conn=valkey.client ) @@ -196,18 +200,18 @@ def set_test_namespace(self, valkey_cache): def test_name(self): assert ValkeyCache.NAME == "valkey" - def test_inheritance(self, valkey_client): - assert isinstance(ValkeyCache(client=valkey_client), BaseCache) + def test_inheritance(self, valkey_config): + assert isinstance(ValkeyCache(config=valkey_config), BaseCache) - def test_default_serializer(self, valkey_client): - assert isinstance(ValkeyCache(client=valkey_client).serializer, JsonSerializer) + def test_default_serializer(self, valkey_config): + assert isinstance(ValkeyCache(config=valkey_config).serializer, JsonSerializer) @pytest.mark.parametrize( "path,expected", [("", {}), ("/", {}), ("/1", {"db": "1"}), ("/1/2/3", {"db": "1"})], ) - def test_parse_uri_path(self, path, expected, valkey_client): - assert ValkeyCache(client=valkey_client).parse_uri_path(path) == expected + def test_parse_uri_path(self, path, expected, valkey_config): + assert ValkeyCache(config=valkey_config).parse_uri_path(path) == expected @pytest.mark.parametrize( "namespace, expected", @@ -217,7 +221,9 @@ def test_parse_uri_path(self, path, expected, valkey_client): ["my_ns", "my_ns:" + ensure_key(Keys.KEY)], ), # noqa: B950 ) - def test_build_key_double_dot(self, set_test_namespace, valkey_cache, namespace, expected): + def test_build_key_double_dot( + self, set_test_namespace, valkey_cache, namespace, expected + ): assert valkey_cache.build_key(Keys.KEY, namespace) == expected def test_build_key_no_namespace(self, valkey_cache): From d67a25454b7c1b28e812d101f96237a6ec6b993e Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 13 Apr 2025 12:26:38 +0100 Subject: [PATCH 18/95] Fix import --- aiocache/backends/valkey.py | 7 ++++++- 
1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index bf704818c..a99f18c5f 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -1,6 +1,6 @@ import logging import time -from typing import Any, Callable, List, Optional, Self, TYPE_CHECKING +from typing import Any, Callable, List, Optional, TYPE_CHECKING from glide import ( ConditionalChange, @@ -19,6 +19,11 @@ if TYPE_CHECKING: # pragma: no cover from aiocache.serializers import BaseSerializer +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing import Any as Self + logger = logging.getLogger(__name__) From 6fb2dc4766e9a6e67c0ffa0d40088350e8deda32 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 13 Apr 2025 12:28:24 +0100 Subject: [PATCH 19/95] Update valkey.py --- aiocache/backends/valkey.py | 1 + 1 file changed, 1 insertion(+) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index a99f18c5f..f016f7840 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -1,5 +1,6 @@ import logging import time +import sys from typing import Any, Callable, List, Optional, TYPE_CHECKING from glide import ( From b85333a5adbbd33e38ff683c4d764698dd67122c Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 6 May 2025 08:31:01 +0330 Subject: [PATCH 20/95] update glide version --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 542eed684..8a7a8516a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,4 +8,4 @@ pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.1.1 pytest-mock==3.14.0 -valkey-glide==1.3.1 +valkey-glide==1.3.3 diff --git a/setup.py b/setup.py index b3c33ad8a..15ec1e222 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ packages=("aiocache",), install_requires=None, extras_require={ - "valkey": ["valkey-glide>=1.3.0"], + "valkey": ["valkey-glide>=1.3.3"], "memcached": ["aiomcache>=0.5.2"], "msgpack": ["msgpack>=0.5.5"], }, From 3edfe803390954e43b07701e468a951d462e3bd5 Mon Sep 17 00:00:00 2001 From: amirreza Date: Wed, 7 May 2025 20:15:57 +0330 Subject: [PATCH 21/95] formatter --- aiocache/backends/valkey.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index f016f7840..65163fa3e 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -117,9 +117,7 @@ async def _add(self, key, value, ttl=None, _conn=None): kwargs["expiry"] = ExpirySet(ExpiryType.SEC, ttl) was_set = await self.client.set(key, value, **kwargs) if was_set != "OK": - raise ValueError( - "Key {} already exists, use .set to update the value".format(key) - ) + raise ValueError("Key {} already exists, use .set to update the value".format(key)) return was_set async def _exists(self, key, _conn=None): From 0227ae183a0f60d9d14aa707a2a890bf7a8d0a1c Mon Sep 17 00:00:00 2001 From: amirreza Date: Wed, 7 May 2025 20:17:22 +0330 Subject: [PATCH 22/95] update tests to use the newly __eq__ implemented in `ExpirySet` --- tests/ut/backends/test_valkey.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 040c9fc19..514d825f4 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -53,7 +53,9 @@ async def test_set(self, valkey): valkey.client.set.assert_called_with(Keys.KEY, "value") await 
valkey._set(Keys.KEY, "value", ttl=1) - valkey.client.set.assert_called_once + valkey.client.set.assert_called_with( + Keys.KEY, "value", expiry=ExpirySet(ExpiryType.SEC, 1) + ) async def test_set_cas_token(self, mocker, valkey): mocker.patch.object(valkey, "_cas") @@ -108,15 +110,11 @@ async def test_add(self, valkey): ) await valkey._add(Keys.KEY, "value", 1) - # TODO: change this to `assert_called_with` once ExpirySet support `__eq__` - assert valkey.client.set.call_args.args[0] == Keys.KEY - assert ( - valkey.client.set.call_args.kwargs["conditional_set"] - == ConditionalChange.ONLY_IF_DOES_NOT_EXIST - ) - assert ( - valkey.client.set.call_args.kwargs["expiry"].get_cmd_args() - == ExpirySet(ExpiryType.SEC, 1).get_cmd_args() + valkey.client.set.assert_called_with( + Keys.KEY, + "value", + conditional_set=ConditionalChange.ONLY_IF_DOES_NOT_EXIST, + expiry=ExpirySet(ExpiryType.SEC, 1), ) async def test_add_existing(self, valkey): From 5f7473660fd14c609f22ef555eb1ce4692906d05 Mon Sep 17 00:00:00 2001 From: amirreza Date: Wed, 7 May 2025 20:18:07 +0330 Subject: [PATCH 23/95] add tests to call _set with cas token with ttl --- tests/ut/backends/test_valkey.py | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 514d825f4..4b2ea30a6 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -59,13 +59,29 @@ async def test_set(self, valkey): async def test_set_cas_token(self, mocker, valkey): mocker.patch.object(valkey, "_cas") - await valkey._set( - Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client - ) + await valkey._set(Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client) valkey._cas.assert_called_with( Keys.KEY, "value", "old_value", ttl=None, _conn=valkey.client ) + async def test_set_cas_token_ttl(self, mocker, valkey): + mocker.patch.object(valkey, "_cas") + await valkey._set(Keys.KEY, "value", ttl=1, _cas_token="old_value", _conn=valkey.client) + valkey._cas.assert_called_with( + Keys.KEY, "value", "old_value", ttl=ExpirySet(ExpiryType.SEC, 1), _conn=valkey.client + ) + + async def test_set_cas_token_float_ttl(self, mocker, valkey): + mocker.patch.object(valkey, "_cas") + await valkey._set(Keys.KEY, "value", ttl=1.1, _cas_token="old_value", _conn=valkey.client) + valkey._cas.assert_called_with( + Keys.KEY, + "value", + "old_value", + ttl=ExpirySet(ExpiryType.MILLSEC, 1100), + _conn=valkey.client, + ) + async def test_cas(self, mocker, valkey): mocker.spy(valkey, "_get") mocker.spy(valkey, "_cas") @@ -219,9 +235,7 @@ def test_parse_uri_path(self, path, expected, valkey_config): ["my_ns", "my_ns:" + ensure_key(Keys.KEY)], ), # noqa: B950 ) - def test_build_key_double_dot( - self, set_test_namespace, valkey_cache, namespace, expected - ): + def test_build_key_double_dot(self, set_test_namespace, valkey_cache, namespace, expected): assert valkey_cache.build_key(Keys.KEY, namespace) == expected def test_build_key_no_namespace(self, valkey_cache): From 21edbf82161286f01d3564031fb89b326f0c7877 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 12 May 2025 16:36:23 +0100 Subject: [PATCH 24/95] Update requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 8a7a8516a..c65a6e12a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,4 +8,4 @@ pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.1.1 pytest-mock==3.14.0 -valkey-glide==1.3.3 
+valkey-glide==1.3.5 From 0fcda4fcb18c51c2347a24731301cca7a13e3c9d Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 13 May 2025 19:10:13 +0330 Subject: [PATCH 25/95] adjust examples --- examples/multicached_decorator.py | 33 ++++++++++++-------------- examples/optimistic_lock.py | 39 +++++++++++++++++-------------- examples/python_object.py | 22 ++++++++++------- examples/redlock.py | 35 ++++++++++++++------------- examples/serializer_class.py | 31 ++++++++++++++---------- examples/serializer_function.py | 30 +++++++++++++----------- examples/simple_redis.py | 28 ---------------------- examples/simple_valkey.py | 30 ++++++++++++++++++++++++ 8 files changed, 135 insertions(+), 113 deletions(-) delete mode 100644 examples/simple_redis.py create mode 100644 examples/simple_valkey.py diff --git a/examples/multicached_decorator.py b/examples/multicached_decorator.py index 5fd834948..f9c79144e 100644 --- a/examples/multicached_decorator.py +++ b/examples/multicached_decorator.py @@ -1,18 +1,15 @@ import asyncio -import redis.asyncio as redis +from glide import GlideClientConfiguration, NodeAddress from aiocache import multi_cached -from aiocache import RedisCache +from aiocache import ValkeyCache -DICT = { - 'a': "Z", - 'b': "Y", - 'c': "X", - 'd': "W" -} +DICT = {"a": "Z", "b": "Y", "c": "X", "d": "W"} -cache = RedisCache(namespace="main", client=redis.Redis()) +addresses = [NodeAddress("localhost", 6379)] +config = GlideClientConfiguration(addresses=addresses, database_id=0) +cache = ValkeyCache(config=config, namespace="main") @multi_cached(cache, keys_from_attr="ids") @@ -30,16 +27,16 @@ async def test_multi_cached(): await multi_cached_ids(ids=("a", "c")) await multi_cached_keys(keys=("d",)) - assert await cache.exists("a") - assert await cache.exists("b") - assert await cache.exists("c") - assert await cache.exists("d") + async with ValkeyCache(config=config, namespace="main") as cache: + assert await cache.exists("a") + assert await cache.exists("b") + assert await cache.exists("c") + assert await cache.exists("d") - await cache.delete("a") - await cache.delete("b") - await cache.delete("c") - await cache.delete("d") - await cache.close() + await cache.delete("a") + await cache.delete("b") + await cache.delete("c") + await cache.delete("d") if __name__ == "__main__": diff --git a/examples/optimistic_lock.py b/examples/optimistic_lock.py index 8b62f9176..b8545657e 100644 --- a/examples/optimistic_lock.py +++ b/examples/optimistic_lock.py @@ -2,34 +2,39 @@ import logging import random -import redis.asyncio as redis -from aiocache import RedisCache +from glide import GlideClientConfiguration, NodeAddress + +from aiocache import ValkeyCache from aiocache.lock import OptimisticLock, OptimisticLockError logger = logging.getLogger(__name__) -cache = RedisCache(namespace="main", client=redis.Redis()) +addresses = [NodeAddress("localhost", 6379)] +config = GlideClientConfiguration(addresses=addresses, database_id=0) async def expensive_function(): - logger.warning('Expensive is being executed...') + logger.warning("Expensive is being executed...") await asyncio.sleep(random.uniform(0, 2)) - return 'result' + return "result" async def my_view(): - async with OptimisticLock(cache, 'key') as lock: - result = await expensive_function() - try: - await lock.cas(result) - except OptimisticLockError: - logger.warning( - 'I failed setting the value because it is different since the lock started!') - return result + async with ValkeyCache(config=config, namespace="main") as cache: + async with 
OptimisticLock(cache, "key") as lock: + result = await expensive_function() + try: + await lock.cas(result) + except OptimisticLockError: + logger.warning( + "I failed setting the value because it is different since the lock started!" + ) + return result async def concurrent(): - await cache.set('key', 'initial_value') + async with ValkeyCache(config=config, namespace="main") as cache: + await cache.set("key", "initial_value") # All three calls will read 'initial_value' as the value to check and only # the first one finishing will succeed because the others, when trying to set # the value, will see that the value is not the same as when the lock started @@ -38,9 +43,9 @@ async def concurrent(): async def test_redis(): await concurrent() - await cache.delete("key") - await cache.close() + async with ValkeyCache(config=config, namespace="main") as cache: + await cache.delete("key") -if __name__ == '__main__': +if __name__ == "__main__": asyncio.run(test_redis()) diff --git a/examples/python_object.py b/examples/python_object.py index 881b69c48..5cb83fa77 100644 --- a/examples/python_object.py +++ b/examples/python_object.py @@ -1,20 +1,23 @@ import asyncio - from collections import namedtuple -import redis.asyncio as redis +from glide import GlideClientConfiguration, NodeAddress -from aiocache import RedisCache +from aiocache import ValkeyCache from aiocache.serializers import PickleSerializer MyObject = namedtuple("MyObject", ["x", "y"]) -cache = RedisCache(serializer=PickleSerializer(), namespace="main", client=redis.Redis()) +addresses = [NodeAddress("localhost", 6379)] +config = GlideClientConfiguration(addresses=addresses, database_id=0) async def complex_object(): obj = MyObject(x=1, y=2) - await cache.set("key", obj) - my_object = await cache.get("key") + async with ValkeyCache( + config=config, namespace="main", serializer=PickleSerializer() + ) as cache: + await cache.set("key", obj) + my_object = await cache.get("key") assert my_object.x == 1 assert my_object.y == 2 @@ -22,8 +25,11 @@ async def complex_object(): async def test_python_object(): await complex_object() - await cache.delete("key") - await cache.close() + async with ValkeyCache( + config=config, namespace="main", serializer=PickleSerializer() + ) as cache: + await cache.delete("key") + await cache.close() if __name__ == "__main__": diff --git a/examples/redlock.py b/examples/redlock.py index 51cbbd73d..6577a6a36 100644 --- a/examples/redlock.py +++ b/examples/redlock.py @@ -1,32 +1,34 @@ import asyncio import logging -import redis.asyncio as redis +from glide import GlideClientConfiguration, NodeAddress -from aiocache import RedisCache +from aiocache import ValkeyCache from aiocache.lock import RedLock logger = logging.getLogger(__name__) -cache = RedisCache(namespace="main", client=redis.Redis()) +addresses = [NodeAddress("localhost", 6379)] +config = GlideClientConfiguration(addresses=addresses, database_id=0) async def expensive_function(): - logger.warning('Expensive is being executed...') + logger.warning("Expensive is being executed...") await asyncio.sleep(1) - return 'result' + return "result" async def my_view(): - async with RedLock(cache, 'key', lease=2): # Wait at most 2 seconds - result = await cache.get('key') - if result is not None: - logger.info('Found the value in the cache hurray!') - return result + async with ValkeyCache(config=config, namespace="main") as cache: + async with RedLock(cache, "key", lease=2): # Wait at most 2 seconds + result = await cache.get("key") + if result is not None: + 
logger.info("Found the value in the cache hurray!") + return result - result = await expensive_function() - await cache.set('key', result) - return result + result = await expensive_function() + await cache.set("key", result) + return result async def concurrent(): @@ -35,9 +37,10 @@ async def concurrent(): async def test_redis(): await concurrent() - await cache.delete("key") - await cache.close() + async with ValkeyCache(config=config, namespace="main") as cache: + await cache.delete("key") + await cache.close() -if __name__ == '__main__': +if __name__ == "__main__": asyncio.run(test_redis()) diff --git a/examples/serializer_class.py b/examples/serializer_class.py index 2c25ff60b..75ae3dc6d 100644 --- a/examples/serializer_class.py +++ b/examples/serializer_class.py @@ -1,11 +1,14 @@ import asyncio import zlib -import redis.asyncio as redis +from glide import GlideClientConfiguration, NodeAddress -from aiocache import RedisCache +from aiocache import ValkeyCache from aiocache.serializers import BaseSerializer +addresses = [NodeAddress("localhost", 6379)] +config = GlideClientConfiguration(addresses=addresses, database_id=0) + class CompressionSerializer(BaseSerializer): @@ -27,9 +30,6 @@ def loads(self, value): return decompressed -cache = RedisCache(serializer=CompressionSerializer(), namespace="main", client=redis.Redis()) - - async def serializer(): text = ( "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt" @@ -37,18 +37,25 @@ async def serializer(): "ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in" "reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur" "sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit" - "anim id est laborum.") - await cache.set("key", text) - print("-----------------------------------") - real_value = await cache.get("key") - compressed_value = await cache.raw("get", "main:key") + "anim id est laborum." 
+ ) + async with ValkeyCache( + config=config, namespace="main", serializer=CompressionSerializer() + ) as cache: + await cache.set("key", text) + print("-----------------------------------") + real_value = await cache.get("key") + compressed_value = await cache.raw("get", "main:key") assert len(compressed_value) < len(real_value.encode()) async def test_serializer(): await serializer() - await cache.delete("key") - await cache.close() + async with ValkeyCache( + config=config, namespace="main", serializer=CompressionSerializer() + ) as cache: + await cache.delete("key") + await cache.close() if __name__ == "__main__": diff --git a/examples/serializer_function.py b/examples/serializer_function.py index d85b3eb9e..64801e5b2 100644 --- a/examples/serializer_function.py +++ b/examples/serializer_function.py @@ -1,11 +1,15 @@ import asyncio import json -import redis.asyncio as redis +from glide import GlideClientConfiguration, NodeAddress from marshmallow import Schema, fields, post_load -from aiocache import RedisCache +from aiocache import ValkeyCache + + +addresses = [NodeAddress("localhost", 6379)] +config = GlideClientConfiguration(addresses=addresses, database_id=0) class MyType: @@ -20,7 +24,7 @@ class MyTypeSchema(Schema): @post_load def build_object(self, data, **kwargs): - return MyType(data['x'], data['y']) + return MyType(data["x"], data["y"]) def dumps(value): @@ -31,24 +35,22 @@ def loads(value): return MyTypeSchema().loads(value) -cache = RedisCache(namespace="main", client=redis.Redis()) - - async def serializer_function(): - await cache.set("key", MyType(1, 2), dumps_fn=dumps) + async with ValkeyCache(config=config, namespace="main") as cache: + await cache.set("key", MyType(1, 2), dumps_fn=dumps) - obj = await cache.get("key", loads_fn=loads) + obj = await cache.get("key", loads_fn=loads) - assert obj.x == 1 - assert obj.y == 2 - assert await cache.get("key") == json.loads(('{"y": 2.0, "x": 1.0}')) - assert json.loads(await cache.raw("get", "main:key")) == {"y": 2.0, "x": 1.0} + assert obj.x == 1 + assert obj.y == 2 + assert await cache.get("key") == json.loads(('{"y": 2.0, "x": 1.0}')) + assert json.loads(await cache.raw("get", "main:key")) == {"y": 2.0, "x": 1.0} async def test_serializer_function(): await serializer_function() - await cache.delete("key") - await cache.close() + async with ValkeyCache(config=config, namespace="main") as cache: + await cache.delete("key") if __name__ == "__main__": diff --git a/examples/simple_redis.py b/examples/simple_redis.py deleted file mode 100644 index 6d5553d8a..000000000 --- a/examples/simple_redis.py +++ /dev/null @@ -1,28 +0,0 @@ -import asyncio - - -import redis.asyncio as redis - -from aiocache import RedisCache - -cache = RedisCache(namespace="main", client=redis.Redis()) - - -async def redis(): - await cache.set("key", "value") - await cache.set("expire_me", "value", ttl=10) - - assert await cache.get("key") == "value" - assert await cache.get("expire_me") == "value" - assert await cache.raw("ttl", "main:expire_me") > 0 - - -async def test_redis(): - await redis() - await cache.delete("key") - await cache.delete("expire_me") - await cache.close() - - -if __name__ == "__main__": - asyncio.run(test_redis()) diff --git a/examples/simple_valkey.py b/examples/simple_valkey.py new file mode 100644 index 000000000..029135ba7 --- /dev/null +++ b/examples/simple_valkey.py @@ -0,0 +1,30 @@ +import asyncio + +from glide import GlideClientConfiguration, NodeAddress + +from aiocache import ValkeyCache + +addresses = 
[NodeAddress("localhost", 6379)] +config = GlideClientConfiguration(addresses=addresses, database_id=0) + + +async def valkey(): + async with ValkeyCache(config=config, namespace="main") as cache: + await cache.set("key", "value") + await cache.set("expire_me", "value", ttl=10) + + assert await cache.get("key") == "value" + assert await cache.get("expire_me") == "value" + assert await cache.raw("ttl", "main:expire_me") > 0 + + +async def test_valkey(): + await valkey() + async with ValkeyCache(config=config, namespace="main") as cache: + await cache.delete("key") + await cache.delete("expire_me") + await cache.close() + + +if __name__ == "__main__": + asyncio.run(test_valkey()) From b99ea48947380aac0d915fe2a2d4b2a76bcf76f8 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Tue, 13 May 2025 18:08:04 +0100 Subject: [PATCH 26/95] Update examples/multicached_decorator.py --- examples/multicached_decorator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/multicached_decorator.py b/examples/multicached_decorator.py index f9c79144e..053c5831d 100644 --- a/examples/multicached_decorator.py +++ b/examples/multicached_decorator.py @@ -27,7 +27,7 @@ async def test_multi_cached(): await multi_cached_ids(ids=("a", "c")) await multi_cached_keys(keys=("d",)) - async with ValkeyCache(config=config, namespace="main") as cache: + async with cache: assert await cache.exists("a") assert await cache.exists("b") assert await cache.exists("c") From 5a147a971ffe6cc860980ba455ff0230aa491b2c Mon Sep 17 00:00:00 2001 From: amirreza Date: Wed, 14 May 2025 06:53:39 +0330 Subject: [PATCH 27/95] adjust cache object --- examples/cached_decorator.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/examples/cached_decorator.py b/examples/cached_decorator.py index f5b9f02f7..4d8a619bf 100644 --- a/examples/cached_decorator.py +++ b/examples/cached_decorator.py @@ -1,15 +1,17 @@ import asyncio - from collections import namedtuple -import redis.asyncio as redis + +from glide import GlideClientConfiguration, NodeAddress from aiocache import cached -from aiocache import RedisCache +from aiocache import ValkeyCache from aiocache.serializers import PickleSerializer -Result = namedtuple('Result', "content, status") +Result = namedtuple("Result", "content, status") -cache = RedisCache(namespace="main", client=redis.Redis(), serializer=PickleSerializer()) +addresses = [NodeAddress("localhost", 6379)] +config = GlideClientConfiguration(addresses=addresses, database_id=0) +cache = ValkeyCache(config=config, namespace="main", serializer=PickleSerializer()) @cached(cache, ttl=10, key_builder=lambda *args, **kw: "key") From e38df8d407d51e9be1e207a0bfbb3842517343b1 Mon Sep 17 00:00:00 2001 From: amirreza Date: Wed, 14 May 2025 06:55:22 +0330 Subject: [PATCH 28/95] adjust context manager --- examples/multicached_decorator.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/multicached_decorator.py b/examples/multicached_decorator.py index 053c5831d..8b097a079 100644 --- a/examples/multicached_decorator.py +++ b/examples/multicached_decorator.py @@ -23,11 +23,11 @@ async def multi_cached_keys(keys=None): async def test_multi_cached(): - await multi_cached_ids(ids=("a", "b")) - await multi_cached_ids(ids=("a", "c")) - await multi_cached_keys(keys=("d",)) - async with cache: + await multi_cached_ids(ids=("a", "b")) + await multi_cached_ids(ids=("a", "c")) + await multi_cached_keys(keys=("d",)) + assert await cache.exists("a") assert await cache.exists("b") 
assert await cache.exists("c") From 6f850ffd1428cbe1185be39f3d0d98dbad22f7e5 Mon Sep 17 00:00:00 2001 From: amirreza Date: Wed, 14 May 2025 15:58:20 +0330 Subject: [PATCH 29/95] fix liniting --- Makefile | 3 +-- aiocache/backends/valkey.py | 7 ++++--- tests/performance/server.py | 3 +-- tests/performance/test_footprint.py | 1 - tests/ut/backends/test_valkey.py | 3 +-- 5 files changed, 7 insertions(+), 10 deletions(-) diff --git a/Makefile b/Makefile index 2eb6b4037..479dbe5d1 100644 --- a/Makefile +++ b/Makefile @@ -2,8 +2,7 @@ cov-report = true lint: - flake8 tests/ aiocache/ - + flake8 tests/ aiocache/ '--known-modules=valkey-glide:[glide]' install-dev: pip install -e .[valkey,memcached,msgpack,dev] diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 65163fa3e..7de72ae29 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -1,6 +1,6 @@ import logging -import time import sys +import time from typing import Any, Callable, List, Optional, TYPE_CHECKING from glide import ( @@ -14,7 +14,7 @@ ) from glide.exceptions import RequestError as IncrbyException -from aiocache.base import BaseCache, API +from aiocache.base import API, BaseCache from aiocache.serializers import JsonSerializer if TYPE_CHECKING: # pragma: no cover @@ -246,4 +246,5 @@ def parse_uri_path(cls, path): return options def __repr__(self): # pragma: no cover - return f"ValkeyCache ({self.client.config.addresses[0].host}:{self.client.config.addresses[0].port})" + return (f"ValkeyCache ({self.client.config.addresses[0].host}" + f":{self.client.config.addresses[0].port})") diff --git a/tests/performance/server.py b/tests/performance/server.py index c05cbd789..128b3f9b7 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -2,9 +2,8 @@ import logging import uuid -from glide import GlideClient, GlideClientConfiguration, NodeAddress - from aiohttp import web +from glide import GlideClient, GlideClientConfiguration, NodeAddress logging.getLogger("aiohttp.access").propagate = False diff --git a/tests/performance/test_footprint.py b/tests/performance/test_footprint.py index 1e4bd8efa..47aac3272 100644 --- a/tests/performance/test_footprint.py +++ b/tests/performance/test_footprint.py @@ -4,7 +4,6 @@ import aiomcache import pytest - from glide import GlideClient, GlideClientConfiguration, NodeAddress diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 4b2ea30a6..e34123e83 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -1,8 +1,7 @@ from unittest.mock import ANY, AsyncMock, patch import pytest - -from glide import ConditionalChange, ExpirySet, ExpiryType, Transaction, Script +from glide import ConditionalChange, ExpirySet, ExpiryType, Script, Transaction from glide.exceptions import RequestError from aiocache.backends.valkey import ValkeyBackend, ValkeyCache From a752f928f9469e447c68000b935d596a4a586ec6 Mon Sep 17 00:00:00 2001 From: amirreza Date: Wed, 14 May 2025 20:45:56 +0330 Subject: [PATCH 30/95] close connection after test --- tests/performance/test_footprint.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/performance/test_footprint.py b/tests/performance/test_footprint.py index 47aac3272..6f8f4cde2 100644 --- a/tests/performance/test_footprint.py +++ b/tests/performance/test_footprint.py @@ -15,6 +15,8 @@ async def valkey_client() -> AsyncIterator["GlideClient"]: yield client + await client.close() + @pytest.mark.skipif(platform.python_implementation() == "PyPy", 
reason="Too slow") class TestValkey: From 6b73efaaa50a60d9203e0f2544a81685a436204f Mon Sep 17 00:00:00 2001 From: amirreza Date: Thu, 15 May 2025 08:48:45 +0330 Subject: [PATCH 31/95] remove redundent code --- tests/conftest.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index e9c6c81c3..d67e1608d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,16 +1,6 @@ import pytest -@pytest.fixture() -def max_conns(): - return None - - -@pytest.fixture() -def decode_responses(): - return False - - @pytest.fixture def valkey_config(): from glide import GlideClientConfiguration, NodeAddress From eec6391728d20ae2f8e2454d8a1a9ac5dc76086f Mon Sep 17 00:00:00 2001 From: amirreza Date: Thu, 15 May 2025 08:49:11 +0330 Subject: [PATCH 32/95] test the context manager raises if no config is avilable --- tests/ut/backends/test_valkey.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index e34123e83..791b4278e 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -37,6 +37,12 @@ async def valkey(valkey_config): class TestValkeyBackend: + async def test_context_manager_raise_if_no_config(self): + msg = "Configuration must be provided for context manager" + with pytest.raises(AttributeError, match=msg): + async with ValkeyBackend() as _: + pass + async def test_get(self, valkey): valkey.client.get.return_value = b"value" assert await valkey._get(Keys.KEY) == "value" From ce3fb1c3f456504a5eed8b91d7dbee4c90228625 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 18 May 2025 21:28:01 +0100 Subject: [PATCH 33/95] Test if concurrency errors are fixed --- tests/performance/test_concurrency.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/performance/test_concurrency.py b/tests/performance/test_concurrency.py index 4f40f9490..7439b7d8c 100644 --- a/tests/performance/test_concurrency.py +++ b/tests/performance/test_concurrency.py @@ -20,9 +20,6 @@ def server(request): p.join(timeout=15) -@pytest.mark.xfail(reason="currently fails >85% of requests on GitHub runner, " - "requires several re-runs to pass", - strict=False) @pytest.mark.skipif(platform.python_implementation() == "PyPy", reason="Not working currently.") def test_concurrency_error_rates(server): """Test with Apache benchmark tool.""" From cb1b7b4a02f98693fc89d00523d197ae32e750fe Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 18 May 2025 21:40:43 +0100 Subject: [PATCH 34/95] Fix config --- tests/performance/server.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/tests/performance/server.py b/tests/performance/server.py index 128b3f9b7..e593a21f5 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -13,11 +13,7 @@ def __init__(self, backend: str): if backend == "valkey": from aiocache.backends.valkey import ValkeyCache - cache = ValkeyCache( - client=GlideClient.create( - GlideClientConfiguration(addresses=[NodeAddress()], database_id=0), - ) - ) + cache = ValkeyCache(GlideClientConfiguration(addresses=[NodeAddress()], database_id=0)) elif backend == "memcached": from aiocache.backends.memcached import MemcachedCache cache = MemcachedCache() From bd169f8b967efa6ad88c8b4a1784abc59d7c47ac Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 18 May 2025 21:43:13 +0100 Subject: [PATCH 35/95] Import --- tests/performance/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/performance/server.py 
b/tests/performance/server.py index e593a21f5..96a737336 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -3,7 +3,6 @@ import uuid from aiohttp import web -from glide import GlideClient, GlideClientConfiguration, NodeAddress logging.getLogger("aiohttp.access").propagate = False @@ -12,6 +11,7 @@ class CacheManager: def __init__(self, backend: str): if backend == "valkey": from aiocache.backends.valkey import ValkeyCache + from glide import GlideClientConfiguration, NodeAddress cache = ValkeyCache(GlideClientConfiguration(addresses=[NodeAddress()], database_id=0)) elif backend == "memcached": From 4e2677d3b56750cf06c1a0f453a90a119c5aac2a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 18 May 2025 21:59:09 +0100 Subject: [PATCH 36/95] Use context manager for CacheManager --- tests/performance/server.py | 31 +++++++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/tests/performance/server.py b/tests/performance/server.py index 96a737336..17b08b040 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -1,9 +1,15 @@ import asyncio import logging import uuid +import sys from aiohttp import web +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing import Any as Self + logging.getLogger("aiohttp.access").propagate = False @@ -30,8 +36,17 @@ async def get(self, key): async def set(self, key, value): return await self.cache.set(key, value, timeout=0.1) - async def close(self, *_): - await self.cache.close() + async def __aenter__(self) -> Self: + await self.cache.__aenter__() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self.cache.__aexit__(exc_type, exc, tb) cache_key = web.AppKey("cache_key", CacheManager) @@ -50,9 +65,17 @@ async def handler_get(req: web.Request) -> web.Response: return web.Response(text=str(data)) +def cache_manager_ctx(backend: str) -> Callable[[web.Application], None]: + async def ctx(app: web.Application) -> None: + async with CacheManager(backend) as cm + app[cache_key] = cm + yield + + return ctx + + def run_server(backend: str) -> None: app = web.Application() - app[cache_key] = CacheManager(backend) - app.on_shutdown.append(app[cache_key].close) + app.cleanup_ctx.append(cache_manager_ctx(backend)) app.router.add_route("GET", "/", handler_get) web.run_app(app) From 95f23d1bc95755b21102bb0f11b9e8f7ca61f554 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 18 May 2025 22:00:34 +0100 Subject: [PATCH 37/95] return type --- tests/performance/server.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/performance/server.py b/tests/performance/server.py index 17b08b040..e3926f239 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -2,6 +2,7 @@ import logging import uuid import sys +from typing import AsyncIterator from aiohttp import web @@ -65,8 +66,8 @@ async def handler_get(req: web.Request) -> web.Response: return web.Response(text=str(data)) -def cache_manager_ctx(backend: str) -> Callable[[web.Application], None]: - async def ctx(app: web.Application) -> None: +def cache_manager_ctx(backend: str) -> Callable[[web.Application], AsyncIterator[None]]: + async def ctx(app: web.Application) -> AsyncIterator[None]: async with CacheManager(backend) as cm app[cache_key] = cm yield From 30483b3a90bc00e7f8d8664ab94fe96ed1616f96 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 18 May 
2025 22:01:26 +0100 Subject: [PATCH 38/95] Update tests/performance/server.py --- tests/performance/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/performance/server.py b/tests/performance/server.py index e3926f239..d6359b000 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -68,7 +68,7 @@ async def handler_get(req: web.Request) -> web.Response: def cache_manager_ctx(backend: str) -> Callable[[web.Application], AsyncIterator[None]]: async def ctx(app: web.Application) -> AsyncIterator[None]: - async with CacheManager(backend) as cm + async with CacheManager(backend) as cm: app[cache_key] = cm yield From 8df1263b5ea68cbbae1a59c803c0a15e869f315c Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 18 May 2025 22:04:23 +0100 Subject: [PATCH 39/95] Update server.py --- tests/performance/server.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/performance/server.py b/tests/performance/server.py index d6359b000..31d61fd47 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -2,7 +2,8 @@ import logging import uuid import sys -from typing import AsyncIterator +from types import TracebackType +from typing import AsyncIterator, Callable, Optional from aiohttp import web @@ -43,7 +44,7 @@ async def __aenter__(self) -> Self: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> None: From 7276b3adbfcc461ffdb463ae037b16c56c92508e Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 18 May 2025 22:05:59 +0100 Subject: [PATCH 40/95] Update server.py --- tests/performance/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/performance/server.py b/tests/performance/server.py index 31d61fd47..1f135f778 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -1,7 +1,7 @@ import asyncio import logging -import uuid import sys +import uuid from types import TracebackType from typing import AsyncIterator, Callable, Optional From 8d254edcd9a978c4dc36d41d1c0ef67673046dd5 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 18 May 2025 22:11:50 +0100 Subject: [PATCH 41/95] Update server.py --- tests/performance/server.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/performance/server.py b/tests/performance/server.py index 1f135f778..ca4bf0cfd 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -21,7 +21,8 @@ def __init__(self, backend: str): from aiocache.backends.valkey import ValkeyCache from glide import GlideClientConfiguration, NodeAddress - cache = ValkeyCache(GlideClientConfiguration(addresses=[NodeAddress()], database_id=0)) + config = GlideClientConfiguration(addresses=[NodeAddress()], database_id=0) + cache = ValkeyCache(config=config) elif backend == "memcached": from aiocache.backends.memcached import MemcachedCache cache = MemcachedCache() From fa03b74dce077642b27901e8d81a293e7b4aed7a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Tue, 27 May 2025 16:15:53 +0100 Subject: [PATCH 42/95] Update test_concurrency.py --- tests/performance/test_concurrency.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/performance/test_concurrency.py b/tests/performance/test_concurrency.py index 7439b7d8c..fa70b8793 100644 --- a/tests/performance/test_concurrency.py +++ b/tests/performance/test_concurrency.py @@ -2,12 +2,14 @@ import re import subprocess import 
time -from multiprocessing import Process +from multiprocessing import Process, set_start_method import pytest from .server import run_server +set_start_method("spawn") + # TODO: Fix and readd "memcached" (currently fails >98% of requests) @pytest.fixture(params=("memory", "valkey")) From 4ca6c92f835ca9a264f3846fa6dd20761e7fadc4 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Tue, 27 May 2025 16:27:45 +0100 Subject: [PATCH 43/95] Update test_concurrency.py --- tests/performance/test_concurrency.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/performance/test_concurrency.py b/tests/performance/test_concurrency.py index fa70b8793..69b91d859 100644 --- a/tests/performance/test_concurrency.py +++ b/tests/performance/test_concurrency.py @@ -16,7 +16,7 @@ def server(request): p = Process(target=run_server, args=(request.param,)) p.start() - time.sleep(1) + time.sleep(2) yield p.terminate() p.join(timeout=15) From e571aac6a924eed0b8a6f71b6accd9fef17c2f21 Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 27 May 2025 20:27:10 +0330 Subject: [PATCH 44/95] exists only accepts a str as key for now --- aiocache/backends/valkey.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 7de72ae29..aeeb6dc51 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -117,12 +117,13 @@ async def _add(self, key, value, ttl=None, _conn=None): kwargs["expiry"] = ExpirySet(ExpiryType.SEC, ttl) was_set = await self.client.set(key, value, **kwargs) if was_set != "OK": - raise ValueError("Key {} already exists, use .set to update the value".format(key)) + raise ValueError( + "Key {} already exists, use .set to update the value".format(key) + ) return was_set async def _exists(self, key, _conn=None): - if isinstance(key, str): - key = [key] + key = [key] number = await self.client.exists(key) return bool(number) @@ -246,5 +247,7 @@ def parse_uri_path(cls, path): return options def __repr__(self): # pragma: no cover - return (f"ValkeyCache ({self.client.config.addresses[0].host}" - f":{self.client.config.addresses[0].port})") + return ( + f"ValkeyCache ({self.client.config.addresses[0].host}" + f":{self.client.config.addresses[0].port})" + ) From 38aba78df65cee192c910cf3375f4f2edb076c3b Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 27 May 2025 20:30:27 +0330 Subject: [PATCH 45/95] remove unused condition for raw operations --- aiocache/backends/valkey.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index aeeb6dc51..3dec1279d 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -184,8 +184,6 @@ async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs): if encoding is not None: if command == "get" and value is not None: value = value.decode(encoding) - elif command in {"keys", "mget"}: - value = [v if v is None else v.decode(encoding) for v in value] return value async def _redlock_release(self, key, value): From 274caca55ee33680bb51c709d14543d8488c3e79 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Tue, 27 May 2025 18:06:49 +0100 Subject: [PATCH 46/95] Update aiocache/backends/valkey.py --- aiocache/backends/valkey.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 3dec1279d..60c9a72bc 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -139,8 +139,7 @@ async def 
_expire(self, key, ttl, _conn=None): return await self.client.expire(key, ttl) async def _delete(self, key, _conn=None): - if isinstance(key, str): - key = [key] + key = [key] return await self.client.delete(key) async def _clear(self, namespace=None, _conn=None): From 18e120027db5f89e96952b1984fdb96664c450e2 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Tue, 27 May 2025 18:19:18 +0100 Subject: [PATCH 47/95] no cover --- tests/performance/server.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/performance/server.py b/tests/performance/server.py index ca4bf0cfd..24c0ad62f 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -60,7 +60,8 @@ async def handler_get(req: web.Request) -> web.Response: data = await req.app[cache_key].get("testkey") if data: return web.Response(text=data) - except asyncio.TimeoutError: + except asyncio.TimeoutError: # pragma: no cover + # This won't be reached if the concurrency tests achieve 100% success rates. return web.Response(status=404) data = str(uuid.uuid4()) From 97c6c7c54563afaee16ae092e3304e1713d2a0f7 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 8 Jun 2025 12:47:30 +0100 Subject: [PATCH 48/95] Apply suggestions from code review --- .gitignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitignore b/.gitignore index c548ea880..a7aba6889 100644 --- a/.gitignore +++ b/.gitignore @@ -80,7 +80,6 @@ celerybeat-schedule .env # virtualenv -.venv/ venv/ ENV/ From 838b8e44746a9a3fcc5be09fee3d279e468bc95b Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 8 Jun 2025 12:49:32 +0100 Subject: [PATCH 49/95] Update .flake8 --- .flake8 | 1 + 1 file changed, 1 insertion(+) diff --git a/.flake8 b/.flake8 index 50ce4aaa1..9d1c63c2d 100644 --- a/.flake8 +++ b/.flake8 @@ -19,4 +19,5 @@ import-order-style = pycharm # flake8-quotes inline-quotes = " # flake8-requirements +known-modules = valkey-glide:[glide] requirements-file = requirements-dev.txt From b7ec6754e2773f272f50c5f596b8a9dbb9eb1c90 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 8 Jun 2025 12:49:52 +0100 Subject: [PATCH 50/95] Update Makefile --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 479dbe5d1..8b8427a01 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ cov-report = true lint: - flake8 tests/ aiocache/ '--known-modules=valkey-glide:[glide]' + flake8 tests/ aiocache/ install-dev: pip install -e .[valkey,memcached,msgpack,dev] From ae8bef8724048b23753be61c1a6a40020edd0760 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 8 Jun 2025 12:50:25 +0100 Subject: [PATCH 51/95] Update Makefile --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index 8b8427a01..2eb6b4037 100644 --- a/Makefile +++ b/Makefile @@ -3,6 +3,7 @@ cov-report = true lint: flake8 tests/ aiocache/ + install-dev: pip install -e .[valkey,memcached,msgpack,dev] From 2026f78cb820f606f292546a93ed38fd4672a7fb Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 8 Jun 2025 14:19:03 +0100 Subject: [PATCH 52/95] Update valkey.py --- aiocache/backends/valkey.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 60c9a72bc..e661f77f4 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -17,7 +17,7 @@ from aiocache.base import API, BaseCache from aiocache.serializers import JsonSerializer -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from aiocache.serializers import BaseSerializer if 
sys.version_info >= (3, 11): From ae4852a45e80f45119cfc45d9b8f030f51d4da52 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 8 Jun 2025 14:20:04 +0100 Subject: [PATCH 53/95] Update .coveragerc --- .coveragerc | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.coveragerc b/.coveragerc index b9823a198..e1b64a65a 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,3 +1,10 @@ [run] branch = True -concurrency = multiprocessing +source = aiocache, tests + +[report] +exclude_also = + if TYPE_CHECKING + assert False + : \.\.\.(\s*#.*)?$ + ^ +\.\.\.$ From c342f2d02a23c84ffd131df5041165bc325a78d1 Mon Sep 17 00:00:00 2001 From: amirreza Date: Sun, 8 Jun 2025 20:33:06 +0330 Subject: [PATCH 54/95] make config parameter required --- aiocache/backends/valkey.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index e661f77f4..8b0f0391d 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -30,13 +30,11 @@ class ValkeyBackend(BaseCache[str]): - def __init__(self, config: GlideClientConfiguration = None, **kwargs): + def __init__(self, config: GlideClientConfiguration, **kwargs): self.config = config super().__init__(**kwargs) async def __aenter__(self) -> Self: - if not self.config: - raise AttributeError("Configuration must be provided for context manager") self.client = await self._connect(self.config) return self From 9270879674ae7f567a0ed613f59173daa74d670c Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 9 Jun 2025 01:58:55 +0330 Subject: [PATCH 55/95] fix docstring example of how to create a valkey connection --- aiocache/lock.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/aiocache/lock.py b/aiocache/lock.py index cd8a94f08..ab797caa1 100644 --- a/aiocache/lock.py +++ b/aiocache/lock.py @@ -50,9 +50,8 @@ class RedLock(Generic[CacheKeyType]): addresses = [NodeAddress("localhost", 6379)] conf = GlideClientConfiguration(addresses=addresses, database_id=0) - client = await GlideClient.create(conf) + cache = ValkeyCache(config=config) - cache = ValkeyCache(client) async with RedLock(cache, 'key', lease=1): # Calls will wait here result = await cache.get('key') if result is not None: @@ -68,7 +67,9 @@ class RedLock(Generic[CacheKeyType]): _EVENTS: Dict[str, asyncio.Event] = {} - def __init__(self, client: BaseCache[CacheKeyType], key: str, lease: Union[int, float]): + def __init__( + self, client: BaseCache[CacheKeyType], key: str, lease: Union[int, float] + ): self.client = client self.key = self.client.build_key(key + "-lock") self.lease = lease @@ -170,7 +171,9 @@ async def cas(self, value: Any, **kwargs: Any) -> bool: :raises: :class:`aiocache.lock.OptimisticLockError` """ - success = await self.client.set(self.key, value, _cas_token=self._token, **kwargs) + success = await self.client.set( + self.key, value, _cas_token=self._token, **kwargs + ) if not success: raise OptimisticLockError("Value has changed since the lock started") return True From 257e186948ef65f4fee51bfadc490adc7b04b948 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 9 Jun 2025 01:59:27 +0330 Subject: [PATCH 56/95] clean up code and remove unnecessary code --- aiocache/backends/valkey.py | 60 +++++-------------------------------- 1 file changed, 8 insertions(+), 52 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 8b0f0391d..fb7cb7251 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -1,7 +1,6 @@ import 
logging import sys -import time -from typing import Any, Callable, List, Optional, TYPE_CHECKING +from typing import Any, Callable, Optional from glide import ( ConditionalChange, @@ -9,16 +8,12 @@ ExpiryType, GlideClient, GlideClientConfiguration, - Script, Transaction, ) from glide.exceptions import RequestError as IncrbyException -from aiocache.base import API, BaseCache -from aiocache.serializers import JsonSerializer - -if TYPE_CHECKING: - from aiocache.serializers import BaseSerializer +from aiocache.base import BaseCache +from aiocache.serializers import BaseSerializer, JsonSerializer if sys.version_info >= (3, 11): from typing import Self @@ -53,9 +48,6 @@ async def _get(self, key, encoding="utf-8", _conn=None): return value return value.decode(encoding) - async def _gets(self, key, encoding="utf-8", _conn=None): - return await self._get(key, encoding=encoding, _conn=_conn) - async def _multi_get(self, keys, encoding="utf-8", _conn=None): values = await self.client.mget(keys) if encoding is None: @@ -63,8 +55,6 @@ async def _multi_get(self, keys, encoding="utf-8", _conn=None): return [v if v is None else v.decode(encoding) for v in values] async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None): - success_message = "OK" - if isinstance(ttl, float): ttl = ExpirySet(ExpiryType.MILLSEC, int(ttl * 1000)) elif ttl: @@ -73,10 +63,7 @@ async def _set(self, key, value, ttl=None, _cas_token=None, _conn=None): if _cas_token is not None: return await self._cas(key, value, _cas_token, ttl=ttl, _conn=_conn) - if ttl is None: - return await self.client.set(key, value) == success_message - - return await self.client.set(key, value, expiry=ttl) == success_message + return await self.client.set(key, value, expiry=ttl) == "OK" async def _cas(self, key, value, token, ttl=None, _conn=None): if await self._get(key) == token: @@ -84,9 +71,7 @@ async def _cas(self, key, value, token, ttl=None, _conn=None): return 0 async def _multi_set(self, pairs, ttl=None, _conn=None): - ttl = ttl or 0 - - values = {key: value for key, value in pairs} + values = dict(pairs) if ttl: await self.__multi_set_ttl(values, ttl) @@ -121,9 +106,7 @@ async def _add(self, key, value, ttl=None, _conn=None): return was_set async def _exists(self, key, _conn=None): - key = [key] - number = await self.client.exists(key) - return bool(number) + return bool(await self.client.exists([key])) async def _increment(self, key, delta, _conn=None): try: @@ -137,8 +120,7 @@ async def _expire(self, key, ttl, _conn=None): return await self.client.expire(key, ttl) async def _delete(self, key, _conn=None): - key = [key] - return await self.client.delete(key) + return await self.client.delete([key]) async def _clear(self, namespace=None, _conn=None): if namespace: @@ -150,32 +132,6 @@ async def _clear(self, namespace=None, _conn=None): return True - @API.register - @API.aiocache_enabled() - @API.timeout - @API.plugins - async def script(self, script: Script, keys: List, *args): - """ - Send the raw scripts to the underlying client. Note that by using this CMD you - will lose compatibility with other backends. - - Due to limitations with aiomcache client, args have to be provided as bytes. - For rest of backends, str. - - :param script: glide.Script object. 
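# The patch above drops the public ValkeyBackend.script()/_script() helpers. Callers
# that still need server-side scripts can go through the glide client the backend
# wraps. A hedged sketch only: it assumes `cache.client` is the GlideClient opened in
# __aenter__, and it reuses the server.call-style Script objects shown in the removed
# tests; the helper name is illustrative.
from glide import Script

GET_SCRIPT = Script("return server.call('get', KEYS[1])")


async def raw_get_via_script(cache, key: str):
    # invoke_script is the same call the removed _script() wrapper used to make.
    # usage: value = await raw_get_via_script(cache, "main:key")
    return await cache.client.invoke_script(GET_SCRIPT, keys=[key], args=[])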
- :param keys: list of keys of the script - :param args: arguments of the script - :returns: whatever the underlying client returns - :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout - """ - start = time.monotonic() - ret = await self._script(script, keys, *args) - logger.debug("%s (%.4f)s", script, time.monotonic() - start) - return ret - - async def _script(self, script, keys: List, *args): - return await self.client.invoke_script(script, keys=keys, args=args) - async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs): value = await getattr(self.client, command)(*args, **kwargs) if encoding is not None: @@ -213,7 +169,7 @@ class ValkeyCache(ValkeyBackend): def __init__( self, - serializer: Optional["BaseSerializer"] = None, + serializer: Optional[BaseSerializer] = None, namespace: str = "", key_builder: Callable[[str, str], str] = lambda k, ns: f"{ns}:{k}" if ns else k, **kwargs: Any, From 43f1222b8703cddcbb6234c0a28a23373a7f6192 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 9 Jun 2025 01:59:50 +0330 Subject: [PATCH 57/95] alter examples ti use one cache client --- examples/optimistic_lock.py | 33 ++++++++++++++++----------------- examples/python_object.py | 11 ++++------- 2 files changed, 20 insertions(+), 24 deletions(-) diff --git a/examples/optimistic_lock.py b/examples/optimistic_lock.py index b8545657e..b00cd5a38 100644 --- a/examples/optimistic_lock.py +++ b/examples/optimistic_lock.py @@ -10,6 +10,7 @@ logger = logging.getLogger(__name__) addresses = [NodeAddress("localhost", 6379)] config = GlideClientConfiguration(addresses=addresses, database_id=0) +cache = ValkeyCache(config=config, namespace="main") async def expensive_function(): @@ -18,27 +19,25 @@ async def expensive_function(): return "result" -async def my_view(): - - async with ValkeyCache(config=config, namespace="main") as cache: - async with OptimisticLock(cache, "key") as lock: - result = await expensive_function() - try: - await lock.cas(result) - except OptimisticLockError: - logger.warning( - "I failed setting the value because it is different since the lock started!" - ) - return result +async def my_view(cache): + async with OptimisticLock(cache, "key") as lock: + result = await expensive_function() + try: + await lock.cas(result) + except OptimisticLockError: + logger.warning( + "I failed setting the value because it is different since the lock started!" 
+ ) + return result async def concurrent(): - async with ValkeyCache(config=config, namespace="main") as cache: + async with cache: await cache.set("key", "initial_value") - # All three calls will read 'initial_value' as the value to check and only - # the first one finishing will succeed because the others, when trying to set - # the value, will see that the value is not the same as when the lock started - await asyncio.gather(my_view(), my_view(), my_view()) + # All three calls will read 'initial_value' as the value to check and only + # the first one finishing will succeed because the others, when trying to set + # the value, will see that the value is not the same as when the lock started + await asyncio.gather(my_view(cache), my_view(cache), my_view(cache)) async def test_redis(): diff --git a/examples/python_object.py b/examples/python_object.py index 5cb83fa77..d1ea76994 100644 --- a/examples/python_object.py +++ b/examples/python_object.py @@ -11,23 +11,20 @@ config = GlideClientConfiguration(addresses=addresses, database_id=0) -async def complex_object(): +async def complex_object(cache): obj = MyObject(x=1, y=2) - async with ValkeyCache( - config=config, namespace="main", serializer=PickleSerializer() - ) as cache: - await cache.set("key", obj) - my_object = await cache.get("key") + await cache.set("key", obj) + my_object = await cache.get("key") assert my_object.x == 1 assert my_object.y == 2 async def test_python_object(): - await complex_object() async with ValkeyCache( config=config, namespace="main", serializer=PickleSerializer() ) as cache: + await complex_object(cache) await cache.delete("key") await cache.close() From 0e370bf7b7e0ca7b7df788f5ecfdc6d530684d1e Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 9 Jun 2025 02:00:59 +0330 Subject: [PATCH 58/95] remove test of removed methods --- tests/acceptance/test_base.py | 18 ++++-------------- tests/ut/backends/test_valkey.py | 30 +++++++++++++++++++----------- 2 files changed, 23 insertions(+), 25 deletions(-) diff --git a/tests/acceptance/test_base.py b/tests/acceptance/test_base.py index a61a02c6f..d6b8adbf3 100644 --- a/tests/acceptance/test_base.py +++ b/tests/acceptance/test_base.py @@ -60,7 +60,10 @@ async def test_set_cancel_previous_ttl_handle(self, cache): async def test_multi_set(self, cache): pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] assert await cache.multi_set(pairs) is True - assert await cache.multi_get([Keys.KEY, Keys.KEY_1]) == ["value", "random_value"] + assert await cache.multi_get([Keys.KEY, Keys.KEY_1]) == [ + "value", + "random_value", + ] async def test_multi_set_with_ttl(self, cache): pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] @@ -241,19 +244,6 @@ async def test_raw(self, valkey_cache): # .raw() doesn't build key with namespace prefix, clear it manually await valkey_cache.raw("delete", "key") - async def test_script(self, valkey_cache): - from glide import Script - - set_script = Script("return server.call('set',KEYS[1], ARGV[1])") - get_script = Script("return server.call('get',KEYS[1])") - key_script = Script("return server.call('keys',KEYS[1])") - del_script = Script("server.call('del',KEYS[1])") - await valkey_cache.script(set_script, ["key"], "value") - assert await valkey_cache.script(get_script, keys=["key"]) == b"value" - assert await valkey_cache.script(key_script, keys=["k*"]) == [b"key"] - # .raw() doesn't build key with namespace prefix, clear it manually - await valkey_cache.script(del_script, "key") - async def test_clear_with_namespace_valkey(self, 
valkey_cache): await valkey_cache.set(Keys.KEY, "value", namespace="test") await valkey_cache.clear(namespace="test") diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 791b4278e..9a96b0695 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -1,7 +1,8 @@ +import re from unittest.mock import ANY, AsyncMock, patch import pytest -from glide import ConditionalChange, ExpirySet, ExpiryType, Script, Transaction +from glide import ConditionalChange, ExpirySet, ExpiryType, Transaction from glide.exceptions import RequestError from aiocache.backends.valkey import ValkeyBackend, ValkeyCache @@ -64,21 +65,31 @@ async def test_set(self, valkey): async def test_set_cas_token(self, mocker, valkey): mocker.patch.object(valkey, "_cas") - await valkey._set(Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client) + await valkey._set( + Keys.KEY, "value", _cas_token="old_value", _conn=valkey.client + ) valkey._cas.assert_called_with( Keys.KEY, "value", "old_value", ttl=None, _conn=valkey.client ) async def test_set_cas_token_ttl(self, mocker, valkey): mocker.patch.object(valkey, "_cas") - await valkey._set(Keys.KEY, "value", ttl=1, _cas_token="old_value", _conn=valkey.client) + await valkey._set( + Keys.KEY, "value", ttl=1, _cas_token="old_value", _conn=valkey.client + ) valkey._cas.assert_called_with( - Keys.KEY, "value", "old_value", ttl=ExpirySet(ExpiryType.SEC, 1), _conn=valkey.client + Keys.KEY, + "value", + "old_value", + ttl=ExpirySet(ExpiryType.SEC, 1), + _conn=valkey.client, ) async def test_set_cas_token_float_ttl(self, mocker, valkey): mocker.patch.object(valkey, "_cas") - await valkey._set(Keys.KEY, "value", ttl=1.1, _cas_token="old_value", _conn=valkey.client) + await valkey._set( + Keys.KEY, "value", ttl=1.1, _cas_token="old_value", _conn=valkey.client + ) valkey._cas.assert_called_with( Keys.KEY, "value", @@ -197,11 +208,6 @@ async def test_clear_no_namespace(self, valkey): await valkey._clear() assert valkey.client.flushdb.call_count == 1 - async def test_script(self, valkey): - script = Script("server.call('get', Keys[1]") - await valkey._script(script, Keys.KEY) - valkey.client.invoke_script.assert_called_with(script, Keys.KEY, ()) - async def test_redlock_release(self, mocker, valkey): mocker.patch.object(valkey, "_get", return_value="random") await valkey._redlock_release(Keys.KEY, "random") @@ -240,7 +246,9 @@ def test_parse_uri_path(self, path, expected, valkey_config): ["my_ns", "my_ns:" + ensure_key(Keys.KEY)], ), # noqa: B950 ) - def test_build_key_double_dot(self, set_test_namespace, valkey_cache, namespace, expected): + def test_build_key_double_dot( + self, set_test_namespace, valkey_cache, namespace, expected + ): assert valkey_cache.build_key(Keys.KEY, namespace) == expected def test_build_key_no_namespace(self, valkey_cache): From b860010f8bbf163e0cf6b89f29c73382687ab648 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 9 Jun 2025 02:01:19 +0330 Subject: [PATCH 59/95] adjust test with how config is handled now --- tests/ut/backends/test_valkey.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 9a96b0695..04c02bed2 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -38,9 +38,11 @@ async def valkey(valkey_config): class TestValkeyBackend: - async def test_context_manager_raise_if_no_config(self): - msg = "Configuration must be provided for context manager" - with 
pytest.raises(AttributeError, match=msg): + async def test_backend_raise_if_no_backend(self): + msg = re.escape( + "ValkeyBackend.__init__() missing 1 required positional argument: 'config'" + ) + with pytest.raises(TypeError, match=msg): async with ValkeyBackend() as _: pass From 1cc064abdb920df5cb0a4273b9ccb211417183d6 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 9 Jun 2025 02:01:35 +0330 Subject: [PATCH 60/95] adjust test with how set handles expiry now --- tests/ut/backends/test_valkey.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 04c02bed2..2ce70fa90 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -58,7 +58,7 @@ async def test_gets(self, mocker, valkey): async def test_set(self, valkey): await valkey._set(Keys.KEY, "value") - valkey.client.set.assert_called_with(Keys.KEY, "value") + valkey.client.set.assert_called_with(Keys.KEY, "value", expiry=None) await valkey._set(Keys.KEY, "value", ttl=1) valkey.client.set.assert_called_with( From 965cfa6d30719666198d0629c0f519560da04709 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 9 Jun 2025 02:16:56 +0330 Subject: [PATCH 61/95] assign _get to _gets since they do the same thing in valkey backend --- aiocache/backends/valkey.py | 2 ++ tests/ut/backends/test_valkey.py | 3 +-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index fb7cb7251..d0fadf22a 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -48,6 +48,8 @@ async def _get(self, key, encoding="utf-8", _conn=None): return value return value.decode(encoding) + _gets = _get + async def _multi_get(self, keys, encoding="utf-8", _conn=None): values = await self.client.mget(keys) if encoding is None: diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 2ce70fa90..96ad17ac5 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -52,9 +52,8 @@ async def test_get(self, valkey): valkey.client.get.assert_called_with(Keys.KEY) async def test_gets(self, mocker, valkey): - mocker.spy(valkey, "_get") await valkey._gets(Keys.KEY) - valkey._get.assert_called_with(Keys.KEY, encoding="utf-8", _conn=ANY) + valkey.client.get.assert_called_with(Keys.KEY) async def test_set(self, valkey): await valkey._set(Keys.KEY, "value") From 8d8bf74e96bd978c032c029d1d1aa0c5e64c8ef3 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 19:33:57 +0100 Subject: [PATCH 62/95] Update aiocache/backends/valkey.py --- aiocache/backends/valkey.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index d0fadf22a..7a32da970 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -30,16 +30,10 @@ def __init__(self, config: GlideClientConfiguration, **kwargs): super().__init__(**kwargs) async def __aenter__(self) -> Self: - self.client = await self._connect(self.config) + self.client = await GlideClient.create(self.config) return self async def __aexit__(self, *args, **kwargs) -> None: - await self._disconnect() - - async def _connect(self, config: GlideClientConfiguration) -> GlideClient: - return await GlideClient.create(config=config) - - async def _disconnect(self) -> None: await self.client.close() async def _get(self, key, encoding="utf-8", _conn=None): From 19eddcf1ec6ddd6c56dfba6edac712cfc68e2632 Mon Sep 17 
00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 19:37:01 +0100 Subject: [PATCH 63/95] Update aiocache/backends/valkey.py --- aiocache/backends/valkey.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 7a32da970..437af97f7 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -119,13 +119,13 @@ async def _delete(self, key, _conn=None): return await self.client.delete([key]) async def _clear(self, namespace=None, _conn=None): - if namespace: - _, keys = await self.client.scan(b"0", "{}:*".format(namespace)) - if keys: - return bool(await self.client.delete(keys)) - else: + if not namespace: return await self.client.flushdb() + _, keys = await self.client.scan(b"0", "{}:*".format(namespace)) + if keys: + return bool(await self.client.delete(keys)) + return True async def _raw(self, command, *args, encoding="utf-8", _conn=None, **kwargs): From f484d6d2aaff4455190d81c5399ba4c3f6267d78 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 19:43:48 +0100 Subject: [PATCH 64/95] Update aiocache/lock.py --- aiocache/lock.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiocache/lock.py b/aiocache/lock.py index ab797caa1..be245e75d 100644 --- a/aiocache/lock.py +++ b/aiocache/lock.py @@ -50,7 +50,7 @@ class RedLock(Generic[CacheKeyType]): addresses = [NodeAddress("localhost", 6379)] conf = GlideClientConfiguration(addresses=addresses, database_id=0) - cache = ValkeyCache(config=config) + cache = ValkeyCache(config=conf) async with RedLock(cache, 'key', lease=1): # Calls will wait here result = await cache.get('key') From 2b61084540d9630ebe66b345ce32ad38f6f02f03 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 19:45:00 +0100 Subject: [PATCH 65/95] Update lock.py --- aiocache/lock.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/aiocache/lock.py b/aiocache/lock.py index be245e75d..0632d8ee8 100644 --- a/aiocache/lock.py +++ b/aiocache/lock.py @@ -123,9 +123,7 @@ class OptimisticLock(Generic[CacheKeyType]): addresses = [NodeAddress("localhost", 6379)] conf = GlideClientConfiguration(addresses=addresses, database_id=0) - client = await GlideClient.create(conf) - - cache = ValkeyCache(client) + cache = ValkeyCache(conf) # The value stored in 'key' will be checked here async with OptimisticLock(cache, 'key') as lock: From 34aeaef095c16e40d0f67ba87cbda1306ecf056e Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 19:46:22 +0100 Subject: [PATCH 66/95] Update lock.py --- aiocache/lock.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/aiocache/lock.py b/aiocache/lock.py index 0632d8ee8..2037021f8 100644 --- a/aiocache/lock.py +++ b/aiocache/lock.py @@ -46,11 +46,11 @@ class RedLock(Generic[CacheKeyType]): from aiocache import ValkeyCache from aiocache.lock import RedLock - from glide import GlideClient, GlideClientConfiguration, NodeAddress + from glide import GlideClientConfiguration, NodeAddress addresses = [NodeAddress("localhost", 6379)] conf = GlideClientConfiguration(addresses=addresses, database_id=0) - cache = ValkeyCache(config=conf) + cache = ValkeyCache(conf) async with RedLock(cache, 'key', lease=1): # Calls will wait here result = await cache.get('key') @@ -119,7 +119,7 @@ class OptimisticLock(Generic[CacheKeyType]): Example usage:: from aiocache import ValkeyCache - from glide import GlideClient, GlideClientConfiguration, NodeAddress + from glide import GlideClientConfiguration, 
NodeAddress addresses = [NodeAddress("localhost", 6379)] conf = GlideClientConfiguration(addresses=addresses, database_id=0) From a34244c8442662bc717a7fa43c43379a0b731f8e Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 19:47:24 +0100 Subject: [PATCH 67/95] Apply suggestions from code review --- aiocache/lock.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/aiocache/lock.py b/aiocache/lock.py index 2037021f8..4e9912a70 100644 --- a/aiocache/lock.py +++ b/aiocache/lock.py @@ -45,7 +45,6 @@ class RedLock(Generic[CacheKeyType]): from aiocache import ValkeyCache from aiocache.lock import RedLock - from glide import GlideClientConfiguration, NodeAddress addresses = [NodeAddress("localhost", 6379)] @@ -118,7 +117,6 @@ class OptimisticLock(Generic[CacheKeyType]): Example usage:: from aiocache import ValkeyCache - from glide import GlideClientConfiguration, NodeAddress addresses = [NodeAddress("localhost", 6379)] From 4266c99cd6f1977f2e4ad10f92bd12b024191e63 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 19:55:13 +0100 Subject: [PATCH 68/95] Apply suggestions from code review --- examples/optimistic_lock.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/optimistic_lock.py b/examples/optimistic_lock.py index b00cd5a38..0b97a26e6 100644 --- a/examples/optimistic_lock.py +++ b/examples/optimistic_lock.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) addresses = [NodeAddress("localhost", 6379)] config = GlideClientConfiguration(addresses=addresses, database_id=0) -cache = ValkeyCache(config=config, namespace="main") +cache = ValkeyCache(config, namespace="main") async def expensive_function(): @@ -42,7 +42,7 @@ async def concurrent(): async def test_redis(): await concurrent() - async with ValkeyCache(config=config, namespace="main") as cache: + async with ValkeyCache(config, namespace="main") as cache: await cache.delete("key") From 5eac3b5b5af68d71330d7039e2314d73a5c56037 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 19:56:25 +0100 Subject: [PATCH 69/95] Fix --- examples/optimistic_lock.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/examples/optimistic_lock.py b/examples/optimistic_lock.py index 0b97a26e6..96044ed6e 100644 --- a/examples/optimistic_lock.py +++ b/examples/optimistic_lock.py @@ -10,7 +10,6 @@ logger = logging.getLogger(__name__) addresses = [NodeAddress("localhost", 6379)] config = GlideClientConfiguration(addresses=addresses, database_id=0) -cache = ValkeyCache(config, namespace="main") async def expensive_function(): @@ -31,7 +30,7 @@ async def my_view(cache): return result -async def concurrent(): +async def concurrent(cache): async with cache: await cache.set("key", "initial_value") # All three calls will read 'initial_value' as the value to check and only @@ -41,8 +40,8 @@ async def concurrent(): async def test_redis(): - await concurrent() async with ValkeyCache(config, namespace="main") as cache: + await concurrent(cache) await cache.delete("key") From 08b219678427ed33d3791332bed5d2a776f0f0d4 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 9 Jun 2025 22:27:33 +0330 Subject: [PATCH 70/95] remove unused import --- tests/ut/backends/test_valkey.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 96ad17ac5..918365410 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -1,5 +1,5 @@ import re -from unittest.mock import ANY, AsyncMock, patch 
+from unittest.mock import AsyncMock, patch import pytest from glide import ConditionalChange, ExpirySet, ExpiryType, Transaction From 9c1258d1834db84c868df82cfe7c6a237c1df48c Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:05:24 +0100 Subject: [PATCH 71/95] Update python_object.py --- examples/python_object.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/python_object.py b/examples/python_object.py index d1ea76994..64ae884d6 100644 --- a/examples/python_object.py +++ b/examples/python_object.py @@ -22,7 +22,7 @@ async def complex_object(cache): async def test_python_object(): async with ValkeyCache( - config=config, namespace="main", serializer=PickleSerializer() + config, namespace="main", serializer=PickleSerializer() ) as cache: await complex_object(cache) await cache.delete("key") From d95921afdbe3f2e058f8ce0a9698865a95e82a4d Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:11:10 +0100 Subject: [PATCH 72/95] Update redlock.py --- examples/redlock.py | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/examples/redlock.py b/examples/redlock.py index 6577a6a36..f4c782548 100644 --- a/examples/redlock.py +++ b/examples/redlock.py @@ -17,27 +17,25 @@ async def expensive_function(): return "result" -async def my_view(): - - async with ValkeyCache(config=config, namespace="main") as cache: - async with RedLock(cache, "key", lease=2): # Wait at most 2 seconds - result = await cache.get("key") - if result is not None: - logger.info("Found the value in the cache hurray!") - return result - - result = await expensive_function() - await cache.set("key", result) +async def my_view(cache): + async with RedLock(cache, "key", lease=2): # Wait at most 2 seconds + result = await cache.get("key") + if result is not None: + logger.info("Found the value in the cache hurray!") return result + result = await expensive_function() + await cache.set("key", result) + return result + -async def concurrent(): - await asyncio.gather(my_view(), my_view(), my_view()) +async def concurrent(cache): + await asyncio.gather(my_view(cache), my_view(cache), my_view(cache)) async def test_redis(): - await concurrent() - async with ValkeyCache(config=config, namespace="main") as cache: + async with ValkeyCache(config, namespace="main") as cache: + await concurrent(cache) await cache.delete("key") await cache.close() From 6e110c1051efcb7fbf6dd4153514a1b063041cb2 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:12:44 +0100 Subject: [PATCH 73/95] Update serializer_class.py --- examples/serializer_class.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/examples/serializer_class.py b/examples/serializer_class.py index 75ae3dc6d..57dfeb940 100644 --- a/examples/serializer_class.py +++ b/examples/serializer_class.py @@ -30,7 +30,7 @@ def loads(self, value): return decompressed -async def serializer(): +async def serializer(cache): text = ( "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt" "ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation" @@ -39,21 +39,18 @@ async def serializer(): "sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit" "anim id est laborum." 
) - async with ValkeyCache( - config=config, namespace="main", serializer=CompressionSerializer() - ) as cache: - await cache.set("key", text) - print("-----------------------------------") - real_value = await cache.get("key") - compressed_value = await cache.raw("get", "main:key") + await cache.set("key", text) + print("-----------------------------------") + real_value = await cache.get("key") + compressed_value = await cache.raw("get", "main:key") assert len(compressed_value) < len(real_value.encode()) async def test_serializer(): - await serializer() async with ValkeyCache( - config=config, namespace="main", serializer=CompressionSerializer() + config, namespace="main", serializer=CompressionSerializer() ) as cache: + await serializer(cache) await cache.delete("key") await cache.close() From 1169de064023a3348af0c8582f52e8c4c36935d3 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:15:17 +0100 Subject: [PATCH 74/95] Update serializer_function.py --- examples/serializer_function.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/examples/serializer_function.py b/examples/serializer_function.py index 64801e5b2..741dc54ec 100644 --- a/examples/serializer_function.py +++ b/examples/serializer_function.py @@ -35,21 +35,20 @@ def loads(value): return MyTypeSchema().loads(value) -async def serializer_function(): - async with ValkeyCache(config=config, namespace="main") as cache: - await cache.set("key", MyType(1, 2), dumps_fn=dumps) +async def serializer_function(cache): + await cache.set("key", MyType(1, 2), dumps_fn=dumps) - obj = await cache.get("key", loads_fn=loads) + obj = await cache.get("key", loads_fn=loads) - assert obj.x == 1 - assert obj.y == 2 - assert await cache.get("key") == json.loads(('{"y": 2.0, "x": 1.0}')) - assert json.loads(await cache.raw("get", "main:key")) == {"y": 2.0, "x": 1.0} + assert obj.x == 1 + assert obj.y == 2 + assert await cache.get("key") == json.loads(('{"y": 2.0, "x": 1.0}')) + assert json.loads(await cache.raw("get", "main:key")) == {"y": 2.0, "x": 1.0} async def test_serializer_function(): - await serializer_function() - async with ValkeyCache(config=config, namespace="main") as cache: + async with ValkeyCache(config, namespace="main") as cache: + await serializer_function(cache) await cache.delete("key") From 1dbe66a6bc4029ba487b2db79a04d160f0f00b75 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:17:45 +0100 Subject: [PATCH 75/95] Update simple_valkey.py --- examples/simple_valkey.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/examples/simple_valkey.py b/examples/simple_valkey.py index 029135ba7..7a7c0b909 100644 --- a/examples/simple_valkey.py +++ b/examples/simple_valkey.py @@ -8,19 +8,18 @@ config = GlideClientConfiguration(addresses=addresses, database_id=0) -async def valkey(): - async with ValkeyCache(config=config, namespace="main") as cache: - await cache.set("key", "value") - await cache.set("expire_me", "value", ttl=10) +async def valkey(cache): + await cache.set("key", "value") + await cache.set("expire_me", "value", ttl=10) - assert await cache.get("key") == "value" - assert await cache.get("expire_me") == "value" - assert await cache.raw("ttl", "main:expire_me") > 0 + assert await cache.get("key") == "value" + assert await cache.get("expire_me") == "value" + assert await cache.raw("ttl", "main:expire_me") > 0 async def test_valkey(): - await valkey() - async with ValkeyCache(config=config, namespace="main") as cache: + async 
with ValkeyCache(config, namespace="main") as cache: + await valkey(cache) await cache.delete("key") await cache.delete("expire_me") await cache.close() From c41496d10589df5798a67b0e19c749e497c7c242 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:19:06 +0100 Subject: [PATCH 76/95] Update conftest.py --- tests/acceptance/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/acceptance/conftest.py b/tests/acceptance/conftest.py index ce3192b49..e692cbcd3 100644 --- a/tests/acceptance/conftest.py +++ b/tests/acceptance/conftest.py @@ -9,7 +9,7 @@ async def valkey_cache(valkey_config): from aiocache.backends.valkey import ValkeyCache - async with ValkeyCache(namespace="test", config=valkey_config) as cache: + async with ValkeyCache(valkey_config, namespace="test") as cache: yield cache await asyncio.gather(*(cache.delete(k) for k in (*Keys, KEY_LOCK))) From 32ecde5a7c9592946e8e32db40ba5997bc1dff9a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:20:50 +0100 Subject: [PATCH 77/95] Update test_base.py --- tests/acceptance/test_base.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/acceptance/test_base.py b/tests/acceptance/test_base.py index d6b8adbf3..93e6d6594 100644 --- a/tests/acceptance/test_base.py +++ b/tests/acceptance/test_base.py @@ -60,10 +60,7 @@ async def test_set_cancel_previous_ttl_handle(self, cache): async def test_multi_set(self, cache): pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] assert await cache.multi_set(pairs) is True - assert await cache.multi_get([Keys.KEY, Keys.KEY_1]) == [ - "value", - "random_value", - ] + assert await cache.multi_get([Keys.KEY, Keys.KEY_1]) == ["value", "random_value"] async def test_multi_set_with_ttl(self, cache): pairs = [(Keys.KEY, "value"), [Keys.KEY_1, "random_value"]] From d6f1c358c306ebacf3e5b8088178da21e39de96a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:23:36 +0100 Subject: [PATCH 78/95] Update conftest.py --- tests/conftest.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index d67e1608d..6b4ce5fa0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,6 +6,4 @@ def valkey_config(): from glide import GlideClientConfiguration, NodeAddress addresses = [NodeAddress("localhost", 6379)] - conf = GlideClientConfiguration(addresses=addresses, database_id=0) - - yield conf + return GlideClientConfiguration(addresses=addresses, database_id=0) From e0334d6f1a51bd2657c947b97ad65d6ef67c8bf7 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:24:48 +0100 Subject: [PATCH 79/95] Update conftest.py --- tests/performance/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/performance/conftest.py b/tests/performance/conftest.py index 02a44a37b..d47dd1ffb 100644 --- a/tests/performance/conftest.py +++ b/tests/performance/conftest.py @@ -7,7 +7,7 @@ async def valkey_cache(valkey_config): # when exceeding max pool size. 
from aiocache.backends.valkey import ValkeyCache - async with ValkeyCache(namespace="test", config=valkey_config) as cache: + async with ValkeyCache(valkey_config, namespace="test") as cache: yield cache From f7d1ece784a79ce4b86f16ce38e6ce03f8076c05 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:30:17 +0100 Subject: [PATCH 80/95] Update server.py --- tests/performance/server.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/tests/performance/server.py b/tests/performance/server.py index 24c0ad62f..3c4959ca3 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -2,6 +2,7 @@ import logging import sys import uuid +from functools import partial from types import TracebackType from typing import AsyncIterator, Callable, Optional @@ -22,7 +23,7 @@ def __init__(self, backend: str): from glide import GlideClientConfiguration, NodeAddress config = GlideClientConfiguration(addresses=[NodeAddress()], database_id=0) - cache = ValkeyCache(config=config) + cache = ValkeyCache(config) elif backend == "memcached": from aiocache.backends.memcached import MemcachedCache cache = MemcachedCache() @@ -69,17 +70,14 @@ async def handler_get(req: web.Request) -> web.Response: return web.Response(text=str(data)) -def cache_manager_ctx(backend: str) -> Callable[[web.Application], AsyncIterator[None]]: - async def ctx(app: web.Application) -> AsyncIterator[None]: - async with CacheManager(backend) as cm: - app[cache_key] = cm - yield - - return ctx +async def ctx(app: web.Application, backend: str) -> AsyncIterator[None]: + async with CacheManager(backend) as cm: + app[cache_key] = cm + yield def run_server(backend: str) -> None: app = web.Application() - app.cleanup_ctx.append(cache_manager_ctx(backend)) + app.cleanup_ctx.append(partial(ctx, backend=backend)) app.router.add_route("GET", "/", handler_get) web.run_app(app) From 8cd8ce629efd3ef450b73b4e9a9740c49f692f8d Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:33:23 +0100 Subject: [PATCH 81/95] Update test_concurrency.py --- tests/performance/test_concurrency.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/performance/test_concurrency.py b/tests/performance/test_concurrency.py index 69b91d859..71ebc8f84 100644 --- a/tests/performance/test_concurrency.py +++ b/tests/performance/test_concurrency.py @@ -8,6 +8,7 @@ from .server import run_server +# Spawn is needed to avoid potential segfaults in forked processes. 
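# The cleanup_ctx refactor above relies on aiohttp calling each cleanup-context
# callable with just the Application: everything before the `yield` runs at startup,
# everything after it runs at shutdown. functools.partial pre-binds the extra
# `backend` argument so ctx() still matches that one-argument signature. A
# stripped-down sketch of the same wiring (resource and key names are illustrative):
from functools import partial
from typing import AsyncIterator

from aiohttp import web

resource_key = web.AppKey("resource_key", str)


async def resource_ctx(app: web.Application, backend: str) -> AsyncIterator[None]:
    conn = f"connected-to-{backend}"  # stand-in for opening a real client
    app[resource_key] = conn          # startup: before the yield
    yield
    # shutdown: code after the yield runs during cleanup; close the client here


async def handler(request: web.Request) -> web.Response:
    return web.Response(text=request.app[resource_key])


def make_app(backend: str) -> web.Application:
    app = web.Application()
    app.cleanup_ctx.append(partial(resource_ctx, backend=backend))
    app.router.add_get("/", handler)
    return app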
set_start_method("spawn") From 7b4b2c6a0c72c7418c82740407960b6a1dff9480 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:38:57 +0100 Subject: [PATCH 82/95] Update tests/performance/test_footprint.py --- tests/performance/test_footprint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/performance/test_footprint.py b/tests/performance/test_footprint.py index 6f8f4cde2..fe1af2a8e 100644 --- a/tests/performance/test_footprint.py +++ b/tests/performance/test_footprint.py @@ -8,7 +8,7 @@ @pytest.fixture -async def valkey_client() -> AsyncIterator["GlideClient"]: +async def valkey_client() -> AsyncIterator[GlideClient]: addresses = [NodeAddress("localhost", 6379)] conf = GlideClientConfiguration(addresses=addresses) client = await GlideClient.create(conf) From 1bee5cadbe54abbfdbe446ce6d6ec90a063d1a13 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:49:14 +0100 Subject: [PATCH 83/95] Update test_memcached.py --- tests/ut/backends/test_memcached.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/tests/ut/backends/test_memcached.py b/tests/ut/backends/test_memcached.py index 77c28a3a8..f0de04cb4 100644 --- a/tests/ut/backends/test_memcached.py +++ b/tests/ut/backends/test_memcached.py @@ -249,11 +249,7 @@ def test_parse_uri_path(self): @pytest.mark.parametrize( "namespace, expected", - ( - [None, "test" + ensure_key(Keys.KEY)], - ["", ensure_key(Keys.KEY)], - ["my_ns", "my_ns" + ensure_key(Keys.KEY)], - ), # noqa: B950 + ([None, "test" + ensure_key(Keys.KEY)], ["", ensure_key(Keys.KEY)], ["my_ns", "my_ns" + ensure_key(Keys.KEY)]), # noqa: B950 ) def test_build_key_bytes(self, set_test_namespace, memcached_cache, namespace, expected): assert memcached_cache.build_key(Keys.KEY, namespace) == expected.encode() From d0f516dfc2696134c3f045c533e156fa12da0d9b Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:50:07 +0100 Subject: [PATCH 84/95] Update conftest.py --- tests/ut/conftest.py | 21 +++------------------ 1 file changed, 3 insertions(+), 18 deletions(-) diff --git a/tests/ut/conftest.py b/tests/ut/conftest.py index 1d1fbf0c4..cd445e293 100644 --- a/tests/ut/conftest.py +++ b/tests/ut/conftest.py @@ -17,24 +17,9 @@ def mock_base_cache(): """Return BaseCache instance with unimplemented methods mocked out.""" plugin = create_autospec(BasePlugin, instance=True) cache = ConcreteBaseCache(timeout=0.002, plugins=(plugin,)) - methods = ( - "_add", - "_get", - "_gets", - "_set", - "_multi_get", - "_multi_set", - "_delete", - "_exists", - "_increment", - "_expire", - "_clear", - "_raw", - "_close", - "_redlock_release", - "acquire_conn", - "release_conn", - ) + methods = ("_add", "_get", "_gets", "_set", "_multi_get", "_multi_set", "_delete", + "_exists", "_increment", "_expire", "_clear", "_raw", "_close", + "_redlock_release", "acquire_conn", "release_conn") with ExitStack() as stack: for f in methods: stack.enter_context(patch.object(cache, f, autospec=True)) From 85f9eb056e7d324600e33a4098166aed2e7bc4b7 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:50:44 +0100 Subject: [PATCH 85/95] Update tests/ut/conftest.py --- tests/ut/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ut/conftest.py b/tests/ut/conftest.py index cd445e293..5196f8005 100644 --- a/tests/ut/conftest.py +++ b/tests/ut/conftest.py @@ -43,7 +43,7 @@ def base_cache(): async def valkey_cache(valkey_config): from aiocache.backends.valkey import ValkeyCache - async with 
ValkeyCache(config=valkey_config) as cache: + async with ValkeyCache(valkey_config) as cache: yield cache From f1ddde38d1eb9fd153d40962563d033689075938 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:55:03 +0100 Subject: [PATCH 86/95] Update server.py --- tests/performance/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/performance/server.py b/tests/performance/server.py index 3c4959ca3..41d8bc943 100644 --- a/tests/performance/server.py +++ b/tests/performance/server.py @@ -4,7 +4,7 @@ import uuid from functools import partial from types import TracebackType -from typing import AsyncIterator, Callable, Optional +from typing import AsyncIterator, Optional from aiohttp import web From 5e3db2ad698cc82f491075cc9880c0bfa15b02b3 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 9 Jun 2025 20:56:14 +0100 Subject: [PATCH 87/95] Update valkey.py --- aiocache/backends/valkey.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/aiocache/backends/valkey.py b/aiocache/backends/valkey.py index 437af97f7..b8560acf7 100644 --- a/aiocache/backends/valkey.py +++ b/aiocache/backends/valkey.py @@ -165,12 +165,14 @@ class ValkeyCache(ValkeyBackend): def __init__( self, + config: GlideClientConfiguration, serializer: Optional[BaseSerializer] = None, namespace: str = "", key_builder: Callable[[str, str], str] = lambda k, ns: f"{ns}:{k}" if ns else k, **kwargs: Any, ): super().__init__( + config, serializer=serializer or JsonSerializer(), namespace=namespace, key_builder=key_builder, From 9909434e399c5e85a91a5bdb669ee2c5ea689c3c Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Wed, 11 Jun 2025 17:38:52 +0100 Subject: [PATCH 88/95] Apply suggestions from code review --- tests/ut/backends/test_valkey.py | 62 ++++++++++++++------------------ 1 file changed, 26 insertions(+), 36 deletions(-) diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 918365410..b7d5eff35 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -13,45 +13,35 @@ @pytest.fixture async def valkey(valkey_config): - valkey = await ValkeyBackend(config=valkey_config).__aenter__() - with patch.object(valkey, "client", autospec=True) as m: - # These methods actually return an awaitable. - for method in ( - "eval", - "expire", - "get", - "execute_command", - "exists", - "incrby", - "persist", - "delete", - "scan", - "flushdb", - ): - setattr(m, method, AsyncMock(return_value=None, spec_set=())) - m.mget = AsyncMock(return_value=[None], spec_set=()) - m.set = AsyncMock(return_value="OK", spec_set=()) - - yield valkey - - await valkey.__aexit__() + async with ValkeyBackend(config=valkey_config) as valkey: + with patch.object(valkey, "client", autospec=True) as m: + # These methods actually return an awaitable. 
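# ---- Hedged aside (not part of the patch): the fixture below replaces the Glide
# client's coroutine methods with AsyncMock instances so the backend can still await
# them. A minimal illustration of that pattern; FakeClient is a stand-in, not the real
# GlideClient:

from unittest.mock import AsyncMock, patch


class FakeClient:
    async def get(self, key):
        return None


async def asyncmock_example() -> None:
    client = FakeClient()
    with patch.object(client, "get", AsyncMock(return_value=b"value")):
        assert await client.get("key") == b"value"  # the patched method is still awaitable

# ---- end of aside ---------------------------------------------------------------------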
+ for method in ( + "eval", + "expire", + "get", + "execute_command", + "exists", + "incrby", + "persist", + "delete", + "scan", + "flushdb", + ): + setattr(m, method, AsyncMock(return_value=None, spec_set=())) + m.mget = AsyncMock(return_value=[None], spec_set=()) + m.set = AsyncMock(return_value="OK", spec_set=()) + + yield valkey class TestValkeyBackend: - async def test_backend_raise_if_no_backend(self): - msg = re.escape( - "ValkeyBackend.__init__() missing 1 required positional argument: 'config'" - ) - with pytest.raises(TypeError, match=msg): - async with ValkeyBackend() as _: - pass - async def test_get(self, valkey): valkey.client.get.return_value = b"value" assert await valkey._get(Keys.KEY) == "value" valkey.client.get.assert_called_with(Keys.KEY) - async def test_gets(self, mocker, valkey): + async def test_gets(self, valkey): await valkey._gets(Keys.KEY) valkey.client.get.assert_called_with(Keys.KEY) @@ -234,7 +224,7 @@ def test_default_serializer(self, valkey_config): @pytest.mark.parametrize( "path,expected", - [("", {}), ("/", {}), ("/1", {"db": "1"}), ("/1/2/3", {"db": "1"})], + (("", {}), ("/", {}), ("/1", {"db": "1"}), ("/1/2/3", {"db": "1"})), ) def test_parse_uri_path(self, path, expected, valkey_config): assert ValkeyCache(config=valkey_config).parse_uri_path(path) == expected @@ -242,10 +232,10 @@ def test_parse_uri_path(self, path, expected, valkey_config): @pytest.mark.parametrize( "namespace, expected", ( - [None, "test:" + ensure_key(Keys.KEY)], - ["", ensure_key(Keys.KEY)], - ["my_ns", "my_ns:" + ensure_key(Keys.KEY)], - ), # noqa: B950 + (None, "test:" + ensure_key(Keys.KEY)), + ("", ensure_key(Keys.KEY)), + ("my_ns", "my_ns:" + ensure_key(Keys.KEY)), + ), ) def test_build_key_double_dot( self, set_test_namespace, valkey_cache, namespace, expected From 5b5e687f39ca5a30a3b43dc79c68c15bbda52d2c Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Wed, 11 Jun 2025 17:40:30 +0100 Subject: [PATCH 89/95] Update tests/ut/backends/test_valkey.py --- tests/ut/backends/test_valkey.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index b7d5eff35..085e7752c 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -31,7 +31,6 @@ async def valkey(valkey_config): setattr(m, method, AsyncMock(return_value=None, spec_set=())) m.mget = AsyncMock(return_value=[None], spec_set=()) m.set = AsyncMock(return_value="OK", spec_set=()) - yield valkey From 2356c9d3df8ce6460623fedc9b9e39f52390bebc Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Wed, 11 Jun 2025 17:41:43 +0100 Subject: [PATCH 90/95] Update tests/ut/backends/test_valkey.py --- tests/ut/backends/test_valkey.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/ut/backends/test_valkey.py b/tests/ut/backends/test_valkey.py index 085e7752c..a6bb11154 100644 --- a/tests/ut/backends/test_valkey.py +++ b/tests/ut/backends/test_valkey.py @@ -1,4 +1,3 @@ -import re from unittest.mock import AsyncMock, patch import pytest From 4c2cc30d512420ced74af7113746191d7639273b Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Wed, 11 Jun 2025 18:05:32 +0100 Subject: [PATCH 91/95] Update examples/optimistic_lock.py --- examples/optimistic_lock.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/examples/optimistic_lock.py b/examples/optimistic_lock.py index 96044ed6e..fdcc41085 100644 --- a/examples/optimistic_lock.py +++ b/examples/optimistic_lock.py @@ -31,12 +31,11 @@ async def my_view(cache): async def 
concurrent(cache): - async with cache: - await cache.set("key", "initial_value") - # All three calls will read 'initial_value' as the value to check and only - # the first one finishing will succeed because the others, when trying to set - # the value, will see that the value is not the same as when the lock started - await asyncio.gather(my_view(cache), my_view(cache), my_view(cache)) + await cache.set("key", "initial_value") + # All three calls will read 'initial_value' as the value to check and only + # the first one finishing will succeed because the others, when trying to set + # the value, will see that the value is not the same as when the lock started + await asyncio.gather(my_view(cache), my_view(cache), my_view(cache)) async def test_redis(): From 4f60a889430dd255a6910fde9237ebc3a33e35bd Mon Sep 17 00:00:00 2001 From: amirreza Date: Thu, 12 Jun 2025 04:22:59 +0330 Subject: [PATCH 92/95] test _raw with encoding=None --- tests/acceptance/test_base.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/acceptance/test_base.py b/tests/acceptance/test_base.py index 93e6d6594..f6627f6ad 100644 --- a/tests/acceptance/test_base.py +++ b/tests/acceptance/test_base.py @@ -241,6 +241,13 @@ async def test_raw(self, valkey_cache): # .raw() doesn't build key with namespace prefix, clear it manually await valkey_cache.raw("delete", "key") + async def test_raw_no_encoding(self, valkey_cache): + await valkey_cache.set(Keys.KEY, "value") + # you can't pass `encoding` to raw() + assert ( + await valkey_cache._raw("get", Keys.KEY, encoding=None) == "value".encode() + ) + async def test_clear_with_namespace_valkey(self, valkey_cache): await valkey_cache.set(Keys.KEY, "value", namespace="test") await valkey_cache.clear(namespace="test") From ce6cc6a450de850546720b5c4c27b21a50ccd709 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Tue, 17 Jun 2025 13:15:41 +0100 Subject: [PATCH 93/95] Update tests/acceptance/test_base.py --- tests/acceptance/test_base.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/tests/acceptance/test_base.py b/tests/acceptance/test_base.py index f6627f6ad..33fc27cd1 100644 --- a/tests/acceptance/test_base.py +++ b/tests/acceptance/test_base.py @@ -241,12 +241,14 @@ async def test_raw(self, valkey_cache): # .raw() doesn't build key with namespace prefix, clear it manually await valkey_cache.raw("delete", "key") - async def test_raw_no_encoding(self, valkey_cache): - await valkey_cache.set(Keys.KEY, "value") - # you can't pass `encoding` to raw() - assert ( - await valkey_cache._raw("get", Keys.KEY, encoding=None) == "value".encode() - ) + async def test_raw_no_encoding(self, valkey_config): + serializer = NullSerializer(encoding=None) + async with ValkeyCache(valkey_config, namespace="test", serializer=serializer) as cache: + await valkey_cache.set(Keys.KEY, "value") + + assert await valkey_cache.raw("get", Keys.KEY) == b"value" + + await valkey_cache.delete(Key.KEY) async def test_clear_with_namespace_valkey(self, valkey_cache): await valkey_cache.set(Keys.KEY, "value", namespace="test") From 809c10101cc649008e9b0d3d43094ea0e692dea4 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Tue, 17 Jun 2025 13:19:10 +0100 Subject: [PATCH 94/95] Update test_base.py --- tests/acceptance/test_base.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/acceptance/test_base.py b/tests/acceptance/test_base.py index 33fc27cd1..15cdd3333 100644 --- a/tests/acceptance/test_base.py +++ b/tests/acceptance/test_base.py @@ -2,8 +2,10 @@ 
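# ---- Hedged aside (not part of the patch): patches 92-95 settle on exercising raw()
# without decoding; with NullSerializer(encoding=None) the backend skips utf-8 decoding
# and raw("get", ...) yields bytes. The same flow outside the test class, assuming a
# local Valkey server and an illustrative helper name:

from glide import GlideClientConfiguration, NodeAddress

from aiocache.backends.valkey import ValkeyCache
from aiocache.serializers import NullSerializer


async def raw_bytes_example() -> None:
    config = GlideClientConfiguration(addresses=[NodeAddress("localhost", 6379)])
    async with ValkeyCache(config, serializer=NullSerializer(encoding=None)) as cache:
        await cache.set("key", "value")
        # raw() does not apply the namespace prefix; with no namespace the bare key matches.
        assert await cache.raw("get", "key") == b"value"
        await cache.delete("key")

# ---- end of aside ---------------------------------------------------------------------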
import pytest +from aiocache import ValkeyCache from aiocache.backends.memory import SimpleMemoryCache from aiocache.base import _Conn +from aiocache.serializers import NullSerializer from ..utils import Keys @@ -244,11 +246,11 @@ async def test_raw(self, valkey_cache): async def test_raw_no_encoding(self, valkey_config): serializer = NullSerializer(encoding=None) async with ValkeyCache(valkey_config, namespace="test", serializer=serializer) as cache: - await valkey_cache.set(Keys.KEY, "value") + await cache.set(Keys.KEY, "value") - assert await valkey_cache.raw("get", Keys.KEY) == b"value" + assert await cache.raw("get", Keys.KEY) == b"value" - await valkey_cache.delete(Key.KEY) + await cache.delete(Keys.KEY) async def test_clear_with_namespace_valkey(self, valkey_cache): await valkey_cache.set(Keys.KEY, "value", namespace="test") From 7a5c3a12271c7586314e8b01132c9b60fff13134 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Tue, 17 Jun 2025 13:23:59 +0100 Subject: [PATCH 95/95] Update test_base.py --- tests/acceptance/test_base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/acceptance/test_base.py b/tests/acceptance/test_base.py index 15cdd3333..614bb0b0b 100644 --- a/tests/acceptance/test_base.py +++ b/tests/acceptance/test_base.py @@ -2,7 +2,6 @@ import pytest -from aiocache import ValkeyCache from aiocache.backends.memory import SimpleMemoryCache from aiocache.base import _Conn from aiocache.serializers import NullSerializer @@ -244,6 +243,8 @@ async def test_raw(self, valkey_cache): await valkey_cache.raw("delete", "key") async def test_raw_no_encoding(self, valkey_config): + from aiocache.backends.valkey import ValkeyCache + serializer = NullSerializer(encoding=None) async with ValkeyCache(valkey_config, namespace="test", serializer=serializer) as cache: await cache.set(Keys.KEY, "value")
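# ---- Hedged closing aside (not part of any patch): pulling the series together, the
# resulting ValkeyCache API as exercised by the acceptance tests reads roughly like
# this; the local address, the "demo" namespace and the main() wrapper are assumptions
# made for the sketch:

import asyncio

from glide import GlideClientConfiguration, NodeAddress

from aiocache.backends.valkey import ValkeyCache


async def main() -> None:
    config = GlideClientConfiguration(addresses=[NodeAddress("localhost", 6379)], database_id=0)
    async with ValkeyCache(config, namespace="demo") as cache:
        await cache.set("key", "value", ttl=10)
        assert await cache.multi_set([("a", 1), ("b", 2)]) is True
        assert await cache.multi_get(["a", "b"]) == [1, 2]
        await asyncio.gather(*(cache.delete(k) for k in ("key", "a", "b")))


if __name__ == "__main__":
    asyncio.run(main())

# ---- end of aside ---------------------------------------------------------------------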