10 changes: 5 additions & 5 deletions langgraph/checkpoint/redis/aio.py
@@ -1063,7 +1063,7 @@ async def aput(

if self.cluster_mode:
# For cluster mode, execute operation directly
await self._redis.json().set(
await self._redis.json().set( # type: ignore[misc]
checkpoint_key, "$", checkpoint_data
)
else:
@@ -1146,7 +1146,7 @@ async def aput_writes(
)

# Redis JSON.SET is an UPSERT by default
await self._redis.json().set(key, "$", cast(Any, write_obj))
await self._redis.json().set(key, "$", cast(Any, write_obj)) # type: ignore[misc]
created_keys.append(key)

# Apply TTL to newly created keys
@@ -1304,14 +1304,14 @@ async def aput_writes(
# Update has_writes flag separately for older Redis
if checkpoint_key:
try:
checkpoint_data = await self._redis.json().get(
checkpoint_data = await self._redis.json().get( # type: ignore[misc]
checkpoint_key
)
if isinstance(
checkpoint_data, dict
) and not checkpoint_data.get("has_writes"):
checkpoint_data["has_writes"] = True
await self._redis.json().set(
await self._redis.json().set( # type: ignore[misc]
checkpoint_key, "$", checkpoint_data
)
except Exception:
@@ -1479,7 +1479,7 @@ async def aget_channel_values(
)

# Single JSON.GET operation to retrieve checkpoint with inline channel_values
checkpoint_data = await self._redis.json().get(checkpoint_key, "$.checkpoint")
checkpoint_data = await self._redis.json().get(checkpoint_key, "$.checkpoint") # type: ignore[misc]

if not checkpoint_data:
return {}
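For context, the suppression pattern added throughout aio.py reduces to the sketch below. It assumes an async Redis client with the RedisJSON module available; the function and key names are illustrative, not part of the PR. The narrow ignore[misc] silences only the error mypy reports for this one call (redis-py's stubs appear to describe the async return type imprecisely), so everything else on the line is still checked.

from redis.asyncio import Redis

async def save_checkpoint(client: Redis, key: str, data: dict) -> None:
    # JSON.SET at the root path "$" upserts the whole document; the
    # type: ignore[misc] quiets only mypy's complaint about this call,
    # not any other error on the line.
    await client.json().set(key, "$", data)  # type: ignore[misc]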
4 changes: 2 additions & 2 deletions langgraph/checkpoint/redis/ashallow.py
@@ -365,7 +365,7 @@ async def aget_tuple(self, config: RunnableConfig) -> Optional[CheckpointTuple]:
)

# Single fetch gets everything inline - matching sync implementation
full_checkpoint_data = await self._redis.json().get(checkpoint_key)
full_checkpoint_data = await self._redis.json().get(checkpoint_key) # type: ignore[misc]
if not full_checkpoint_data or not isinstance(full_checkpoint_data, dict):
return None

@@ -544,7 +544,7 @@ async def aget_channel_values(
)

# Single JSON.GET operation to retrieve checkpoint with inline channel_values
checkpoint_data = await self._redis.json().get(checkpoint_key, "$.checkpoint")
checkpoint_data = await self._redis.json().get(checkpoint_key, "$.checkpoint") # type: ignore[misc]

if not checkpoint_data:
return {}
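The read side follows the same shape; a minimal sketch under the same assumptions (names illustrative only). JSON.GET with no path returns the full stored document, and because redis-py's annotation does not narrow the result, the value is guarded at runtime before use, mirroring the isinstance check in the diff above.

from typing import Optional

from redis.asyncio import Redis

async def load_checkpoint(client: Redis, key: str) -> Optional[dict]:
    # Fetch the whole document in one round trip; guard its type rather
    # than trusting the stub's return annotation.
    doc = await client.json().get(key)  # type: ignore[misc]
    if not doc or not isinstance(doc, dict):
        return None
    return doc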
3 changes: 2 additions & 1 deletion langgraph/checkpoint/redis/base.py
@@ -558,7 +558,8 @@ def _load_writes_from_redis(self, write_key: str) -> List[Tuple[str, str, Any]]:
return []

# Get the full JSON document
result = self._redis.json().get(write_key)
# Cast needed: redis-py types json().get() as List[JsonType] but returns dict
result = cast(Optional[Dict[str, Any]], self._redis.json().get(write_key))
if not result:
return []
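The synchronous path in base.py takes the other route visible in this diff: a cast instead of an ignore comment. A minimal sketch, assuming a sync Redis client; the helper name is illustrative:

from typing import Any, Dict, Optional, cast

from redis import Redis

def load_writes_doc(client: Redis, write_key: str) -> Dict[str, Any]:
    # redis-py annotates json().get() more loosely than the single JSON
    # object this code stores, so the result is cast to the expected
    # shape; warn_redundant_casts (still enabled in pyproject.toml below)
    # keeps stale casts from lingering.
    result = cast(Optional[Dict[str, Any]], client.json().get(write_key))
    return result or {}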

3 changes: 2 additions & 1 deletion langgraph/store/redis/__init__.py
@@ -540,8 +540,9 @@ def _batch_search_ops(
score = (1.0 - float(dist)) if dist is not None else 0.0
if not isinstance(store_doc, dict):
try:
# Cast needed: redis-py types json().get() incorrectly
store_doc = json.loads(
store_doc
cast(str, store_doc)
) # Attempt to parse if it's a JSON string
except (json.JSONDecodeError, TypeError):
logger.error(f"Failed to parse store_doc: {store_doc}")
2 changes: 1 addition & 1 deletion langgraph/store/redis/aio.py
@@ -781,7 +781,7 @@ async def _batch_search_ops(
)
result_map[store_key] = doc
# Fetch individually in cluster mode
store_doc_item = await self._redis.json().get(store_key)
store_doc_item = await self._redis.json().get(store_key) # type: ignore[misc]
store_docs.append(store_doc_item)
store_docs_raw = store_docs
else:
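For the cluster branch above, the fetch loop reduces to something like this (names illustrative; the surrounding result bookkeeping is omitted). Each document is retrieved with its own JSON.GET rather than batched, and each call carries the same narrow ignore.

from typing import Any, List

from redis.asyncio import Redis

async def fetch_store_docs(client: Redis, store_keys: List[str]) -> List[Any]:
    store_docs: List[Any] = []
    for store_key in store_keys:
        # One JSON.GET per key in cluster mode instead of a batched read.
        store_doc_item = await client.json().get(store_key)  # type: ignore[misc]
        store_docs.append(store_doc_item)
    return store_docs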
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langgraph-checkpoint-redis"
version = "0.3.0"
version = "0.3.1"
description = "Redis implementation of the LangGraph agent checkpoint saver and store."
authors = ["Redis Inc. <[email protected]>", "Brian Sam-Bodden <[email protected]>"]
license = "MIT"
@@ -104,7 +104,7 @@ exclude = '''
disallow_untyped_defs = true
explicit_package_bases = true
warn_no_return = false
warn_unused_ignores = true
warn_unused_ignores = false
warn_redundant_casts = true
allow_redefinition = true
ignore_missing_imports = true
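Presumably, warn_unused_ignores is switched off because the new type: ignore[misc] comments are only needed with some redis-py releases; on versions whose stubs already type these calls correctly, mypy would otherwise fail by flagging the same comments as unused. warn_redundant_casts stays enabled, so the casts added elsewhere in this change are still verified.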