# --- src/supermemory_adk/__init__.py ---
"""Supermemory ADK - Memory-enhanced AI agents with Google Agent Development Kit.

This package provides seamless integration between Supermemory and Google's Agent
Development Kit (ADK), enabling persistent memory and context enhancement for AI agents.

Example:
    ```python
    from google.adk.agents import Agent
    from supermemory_adk import supermemory_tools

    # Create agent with Supermemory tools
    root_agent = Agent(
        model='gemini-2.5-flash',
        tools=supermemory_tools(
            api_key="your-api-key",
            container_tags=["user-123"]
        ),
        instruction="Use memory tools when needed"
    )
    ```
"""
# NOTE(review): the previous docstring also showed a "Wrapper Mode" example using
# `with_supermemory` and `MemoryMode`, but this package does not define or export
# those names anywhere, so the example was removed to avoid misleading users.

from .exceptions import (
    SupermemoryADKError,
    SupermemoryAPIError,
    SupermemoryConfigurationError,
    SupermemoryMemoryOperationError,
    SupermemoryNetworkError,
    SupermemoryTimeoutError,
    SupermemoryToolError,
)
from .tools import supermemory_tools
from .utils import (
    DeduplicatedMemories,
    Logger,
    create_logger,
    deduplicate_memories,
    format_memories_to_markdown,
    format_memories_to_text,
)

__version__ = "0.1.0"

# Explicit public API of the package.
__all__ = [
    # Version
    "__version__",
    # Exceptions
    "SupermemoryADKError",
    "SupermemoryConfigurationError",
    "SupermemoryAPIError",
    "SupermemoryMemoryOperationError",
    "SupermemoryNetworkError",
    "SupermemoryTimeoutError",
    "SupermemoryToolError",
    # Utils
    "Logger",
    "create_logger",
    "DeduplicatedMemories",
    "deduplicate_memories",
    "format_memories_to_markdown",
    "format_memories_to_text",
    # Tools
    "supermemory_tools",
]


# --- src/supermemory_adk/exceptions.py ---
"""Custom exceptions for Supermemory ADK integration."""

from typing import Optional


class SupermemoryADKError(Exception):
    """Base exception for all Supermemory ADK errors.

    Attributes:
        message: Human-readable error description.
        original_error: Underlying exception that caused this error, if any.
    """

    def __init__(self, message: str, original_error: Optional[Exception] = None):
        super().__init__(message)
        self.message = message
        self.original_error = original_error

    def __str__(self) -> str:
        # Surface the captured root cause alongside the message when present.
        if self.original_error:
            return f"{self.message}: {self.original_error}"
        return self.message


class SupermemoryConfigurationError(SupermemoryADKError):
    """Raised when there are configuration issues (e.g., missing API key, invalid params)."""


class SupermemoryAPIError(SupermemoryADKError):
    """Raised when Supermemory API requests fail.

    Attributes:
        status_code: HTTP status code of the failed request, if known.
        response_text: Raw response body of the failed request, if captured.
    """

    def __init__(
        self,
        message: str,
        status_code: Optional[int] = None,
        response_text: Optional[str] = None,
        original_error: Optional[Exception] = None,
    ):
        super().__init__(message, original_error)
        self.status_code = status_code
        self.response_text = response_text

    def __str__(self) -> str:
        # Join every piece of diagnostic context that was captured.
        parts = [self.message]
        if self.status_code:
            parts.append(f"Status: {self.status_code}")
        if self.response_text:
            parts.append(f"Response: {self.response_text}")
        if self.original_error:
            parts.append(f"Cause: {self.original_error}")
        return " | ".join(parts)


class SupermemoryMemoryOperationError(SupermemoryADKError):
    """Raised when memory operations (search, add) fail."""
class SupermemoryTimeoutError(SupermemoryADKError):
    """Raised when operations time out."""


class SupermemoryNetworkError(SupermemoryADKError):
    """Raised when network operations fail."""


class SupermemoryToolError(SupermemoryADKError):
    """Raised when ADK tool execution fails."""


# --- src/supermemory_adk/tools.py ---
"""Supermemory tools for ADK function calling.

This module provides memory tools that integrate with Google's Agent Development Kit (ADK).
Simple one-function API: just call supermemory_tools() and get a list of ready-to-use tools.
"""

import os
from typing import Any, Optional

try:
    import supermemory
except ImportError:
    supermemory = None  # type: ignore

from .exceptions import SupermemoryConfigurationError
from .utils import Logger, create_logger


def supermemory_tools(
    api_key: Optional[str] = None,
    container_tags: Optional[list[str]] = None,
    project_id: Optional[str] = None,
    base_url: Optional[str] = None,
    verbose: bool = False,
) -> list:
    """Create Supermemory tools for ADK agents.

    Returns a list of async tool functions ready to use with an ADK Agent.

    Args:
        api_key: Supermemory API key (falls back to SUPERMEMORY_API_KEY env var)
        container_tags: Container tags for memory scoping
        project_id: Project ID for memory scoping (alternative to container_tags)
        base_url: Optional custom base URL
        verbose: Enable verbose logging

    Returns:
        List of tool functions [search_memories, add_memory, get_memory_profile]

    Raises:
        SupermemoryConfigurationError: If API key is missing or configuration is invalid

    Example:
        ```python
        from google.adk.agents import Agent
        from supermemory_adk import supermemory_tools

        root_agent = Agent(
            model='gemini-2.5-flash',
            tools=supermemory_tools(
                api_key="your-api-key",
                container_tags=["user-123"]
            ),
            instruction="Use memory tools when needed"
        )
        ```
    """
    # Resolve API key: explicit argument wins over the environment variable.
    resolved_api_key = api_key or os.getenv("SUPERMEMORY_API_KEY")
    if not resolved_api_key:
        raise SupermemoryConfigurationError(
            "API key is required. Provide api_key parameter or set SUPERMEMORY_API_KEY environment variable."
        )

    # project_id and container_tags are mutually exclusive scoping mechanisms.
    if project_id and container_tags:
        raise SupermemoryConfigurationError(
            "Cannot specify both project_id and container_tags. Choose one."
        )

    # Derive the container tags used to scope every memory operation below.
    if project_id:
        resolved_container_tags = [f"sm_project_{project_id}"]
    elif container_tags:
        resolved_container_tags = container_tags
    else:
        resolved_container_tags = ["sm_project_default"]

    if supermemory is None:
        raise SupermemoryConfigurationError(
            "supermemory package is required but not found. "
            "Install with: pip install supermemory"
        )

    try:
        client_kwargs: dict[str, Any] = {"api_key": resolved_api_key}
        if base_url:
            client_kwargs["base_url"] = base_url

        # BUG FIX: every tool coroutine below `await`s its client call, so the
        # asynchronous client is required — awaiting the synchronous
        # `Supermemory` client fails at call time. Fall back to the sync
        # client only for SDK versions that predate AsyncSupermemory.
        client_cls = getattr(supermemory, "AsyncSupermemory", supermemory.Supermemory)
        client = client_cls(**client_kwargs)
        logger: Logger = create_logger(verbose)
        logger.info("Supermemory tools initialized", {"container_tags": resolved_container_tags})
    except Exception as e:
        # Chain the cause explicitly so tracebacks show the original failure.
        raise SupermemoryConfigurationError(
            f"Failed to initialize Supermemory client: {e}", e
        ) from e

    # Tool functions close over `client`, `logger`, and the resolved tags.
    async def search_memories(
        information_to_get: str,
        include_full_docs: bool = True,
        limit: int = 10,
    ) -> dict[str, Any]:
        """Search for memories using semantic search.

        Args:
            information_to_get: Search query for retrieving memories
            include_full_docs: Whether to include full document content
            limit: Maximum number of results to return

        Returns:
            Dictionary with search results: {success, results, count} or {success, error}
        """
        try:
            logger.info(
                "Searching memories",
                {
                    # Truncate the query so verbose logs stay readable.
                    "query": information_to_get[:100],
                    "limit": limit,
                    "container_tags": resolved_container_tags,
                },
            )

            response = await client.search.execute(
                q=information_to_get,
                container_tags=resolved_container_tags,
                limit=limit,
                chunk_threshold=0.6,
                include_full_docs=include_full_docs,
            )

            # Guard against both a missing attribute and an explicit None.
            results = getattr(response, "results", None) or []
            logger.info("Memory search completed", {"count": len(results)})

            return {
                "success": True,
                "results": results,
                "count": len(results),
            }

        except OSError as network_error:
            # ConnectionError is a subclass of OSError, so one clause covers both.
            error_msg = f"Network error: {network_error}"
            logger.error("Network error during memory search", {"error": str(network_error)})
            return {"success": False, "error": error_msg}

        except Exception as error:
            error_msg = f"Memory search failed: {error}"
            logger.error("Memory search failed", {"error": str(error)})
            return {"success": False, "error": error_msg}

    async def add_memory(memory: str) -> dict[str, Any]:
        """Add a new memory to Supermemory.

        Args:
            memory: The memory content to add (should be a single fact or short paragraph)

        Returns:
            Dictionary with add result: {success, memory_id} or {success, error}
        """
        try:
            logger.info(
                "Adding memory",
                {
                    "content_length": len(memory),
                    "container_tags": resolved_container_tags,
                },
            )

            response = await client.add(
                content=memory,
                container_tags=resolved_container_tags,
            )
            # Fall back to "unknown" when the response carries no id.
            memory_id = getattr(response, "id", None) or "unknown"

            logger.info("Memory added successfully", {"memory_id": memory_id})

            return {
                "success": True,
                "memory_id": memory_id,
            }

        except OSError as network_error:
            # ConnectionError is a subclass of OSError, so one clause covers both.
            error_msg = f"Network error: {network_error}"
            logger.error("Network error during memory add", {"error": str(network_error)})
            return {"success": False, "error": error_msg}

        except Exception as error:
            error_msg = f"Memory add failed: {error}"
            logger.error("Memory add failed", {"error": str(error)})
            return {"success": False, "error": error_msg}

    async def get_memory_profile(query: Optional[str] = None) -> dict[str, Any]:
        """Get user's memory profile (static and dynamic memories).

        Args:
            query: Optional search query for retrieving relevant memories

        Returns:
            Dictionary with profile data: {success, profile, search_results} or {success, error}
        """
        try:
            logger.info(
                "Fetching memory profile",
                {
                    "has_query": query is not None,
                    "container_tags": resolved_container_tags,
                },
            )

            # The profile endpoint scopes by a single container tag.
            kwargs: dict[str, Any] = {"container_tag": resolved_container_tags[0]}

            if query:
                kwargs["q"] = query
                kwargs["threshold"] = 0.1
                kwargs["extra_body"] = {"limit": 10}

            response = await client.profile(**kwargs)

            # Guard every attribute access: profile or its fields may be absent/None.
            profile_obj = getattr(response, "profile", None)
            profile = {
                "static": getattr(profile_obj, "static", None) or [],
                "dynamic": getattr(profile_obj, "dynamic", None) or [],
            }

            search_obj = getattr(response, "search_results", None)
            search_results = getattr(search_obj, "results", None) or [] if search_obj else []

            logger.info(
                "Memory profile retrieved",
                {
                    "static_count": len(profile["static"]),
                    "dynamic_count": len(profile["dynamic"]),
                    "search_count": len(search_results),
                },
            )

            return {
                "success": True,
                "profile": profile,
                "search_results": search_results,
            }

        except OSError as network_error:
            # ConnectionError is a subclass of OSError, so one clause covers both.
            error_msg = f"Network error: {network_error}"
            logger.error("Network error during profile fetch", {"error": str(network_error)})
            return {"success": False, "error": error_msg}

        except Exception as error:
            error_msg = f"Profile fetch failed: {error}"
            logger.error("Profile fetch failed", {"error": str(error)})
            return {"success": False, "error": error_msg}

    # Override docstrings with concise tool descriptions for the ADK model.
    search_memories.__doc__ = "Search (recall) memories/details/information about the user or other facts. Use when explicitly asked or when context about user's past choices would be helpful."
    add_memory.__doc__ = "Add (remember) memories/details/information about the user or other facts. Use when user shares information that should be remembered for future conversations."
    get_memory_profile.__doc__ = "Get user's memory profile including static facts and dynamic context. Use at conversation start or when needing full user context."

    # Return list of tool functions
    return [search_memories, add_memory, get_memory_profile]


# --- src/supermemory_adk/utils.py ---
"""Utility functions for Supermemory ADK integration."""

import json
from typing import Any, Optional, Protocol


class Logger(Protocol):
    """Structural logging interface accepted throughout the package."""

    def debug(self, message: str, data: Optional[dict[str, Any]] = None) -> None:
        """Log debug message."""
        ...

    def info(self, message: str, data: Optional[dict[str, Any]] = None) -> None:
        """Log info message."""
        ...

    def warn(self, message: str, data: Optional[dict[str, Any]] = None) -> None:
        """Log warning message."""
        ...

    def error(self, message: str, data: Optional[dict[str, Any]] = None) -> None:
        """Log error message."""
        ...
class SimpleLogger:
    """Minimal stdout logger; emits nothing unless ``verbose`` is True."""

    def __init__(self, verbose: bool = False):
        # When False, every log call is a silent no-op.
        self.verbose: bool = verbose

    def _log(self, level: str, message: str, data: Optional[dict[str, Any]] = None) -> None:
        """Format and print a single log record (internal)."""
        if not self.verbose:
            return

        log_message = f"[supermemory-adk] {message}"
        if data:
            # BUG FIX: default=str keeps logging from raising TypeError when
            # `data` contains non-JSON-serializable values (e.g. SDK response
            # objects); such values are stringified instead of crashing the
            # code path being logged.
            log_message += f" {json.dumps(data, indent=2, default=str)}"

        if level == "error":
            print(f"ERROR: {log_message}", flush=True)
        elif level == "warn":
            print(f"WARN: {log_message}", flush=True)
        else:
            print(log_message, flush=True)

    def debug(self, message: str, data: Optional[dict[str, Any]] = None) -> None:
        """Log debug message."""
        self._log("debug", message, data)

    def info(self, message: str, data: Optional[dict[str, Any]] = None) -> None:
        """Log info message."""
        self._log("info", message, data)

    def warn(self, message: str, data: Optional[dict[str, Any]] = None) -> None:
        """Log warning message."""
        self._log("warn", message, data)

    def error(self, message: str, data: Optional[dict[str, Any]] = None) -> None:
        """Log error message."""
        self._log("error", message, data)


def create_logger(verbose: bool) -> "Logger":
    """Create a logger instance.

    Args:
        verbose: Whether to enable verbose logging

    Returns:
        Logger instance (a SimpleLogger satisfying the Logger protocol)
    """
    return SimpleLogger(verbose)


class DeduplicatedMemories:
    """Deduplicated memory strings organized by source.

    Attributes:
        static: Memories from the persistent (static) profile.
        dynamic: Memories from the recent (dynamic) profile.
        search_results: Memories from semantic search results.
    """

    def __init__(self, static: list[str], dynamic: list[str], search_results: list[str]):
        self.static = static
        self.dynamic = dynamic
        self.search_results = search_results

    def __repr__(self) -> str:
        # Debug-friendly representation; attribute order mirrors __init__.
        return (
            f"{type(self).__name__}(static={self.static!r}, "
            f"dynamic={self.dynamic!r}, search_results={self.search_results!r})"
        )


def deduplicate_memories(
    static: Optional[list[Any]] = None,
    dynamic: Optional[list[Any]] = None,
    search_results: Optional[list[Any]] = None,
) -> DeduplicatedMemories:
    """Deduplicate memory items across sources.

    Priority: Static > Dynamic > Search Results.
    Same memory appearing in multiple sources is kept only in the highest-priority source.

    Args:
        static: Static profile memories (strings or dicts with a "memory" key)
        dynamic: Dynamic profile memories (same formats)
        search_results: Search result memories (same formats)

    Returns:
        DeduplicatedMemories with deduplicated lists
    """

    def extract_memory_text(item: Any) -> Optional[str]:
        """Normalize one item to stripped memory text, or None if unusable."""
        if item is None:
            return None
        if isinstance(item, dict):
            memory = item.get("memory")
            if isinstance(memory, str):
                trimmed = memory.strip()
                return trimmed if trimmed else None
            return None
        if isinstance(item, str):
            trimmed = item.strip()
            return trimmed if trimmed else None
        return None

    seen_memories: set[str] = set()

    def collect(items: Optional[list[Any]]) -> list[str]:
        """Extract texts in order, skipping anything already seen."""
        collected: list[str] = []
        for item in items or []:
            memory = extract_memory_text(item)
            if memory is not None and memory not in seen_memories:
                collected.append(memory)
                seen_memories.add(memory)
        return collected

    # Collection order implements the Static > Dynamic > Search priority.
    static_memories = collect(static)
    dynamic_memories = collect(dynamic)
    search_memories = collect(search_results)

    return DeduplicatedMemories(
        static=static_memories,
        dynamic=dynamic_memories,
        search_results=search_memories,
    )


def format_memories_to_markdown(
    memories: DeduplicatedMemories,
    include_static: bool = True,
    include_dynamic: bool = True,
    include_search: bool = True,
) -> str:
    """Format deduplicated memories into markdown.

    Args:
        memories: Deduplicated memories
        include_static: Whether to include static profile memories
        include_dynamic: Whether to include dynamic profile memories
        include_search: Whether to include search result memories

    Returns:
        Markdown formatted string; empty string when nothing is included.

    Example:
        ```python
        memories = DeduplicatedMemories(
            static=["User prefers Python"],
            dynamic=["Recently asked about AI"],
            search_results=["Likes coffee"]
        )

        markdown = format_memories_to_markdown(memories)
        # Returns formatted markdown with sections
        ```
    """
    sections = []

    # Each section contributes a header entry and a bullet-list entry; the
    # final "\n\n" join therefore puts a blank line after each header too.
    if include_static and memories.static:
        sections.append("## User Profile (Persistent)")
        sections.append("\n".join(f"- {item}" for item in memories.static))

    if include_dynamic and memories.dynamic:
        sections.append("## Recent Context")
        sections.append("\n".join(f"- {item}" for item in memories.dynamic))

    if include_search and memories.search_results:
        sections.append("## Relevant Memories")
        sections.append("\n".join(f"- {item}" for item in memories.search_results))

    if not sections:
        return ""

    return "\n\n".join(sections)


def format_memories_to_text(
    memories: DeduplicatedMemories,
    system_prompt: str = "Based on previous conversations, I recall:\n\n",
    include_static: bool = True,
    include_dynamic: bool = True,
    include_search: bool = True,
) -> str:
    """Format deduplicated memories into text with system prompt.

    Args:
        memories: Deduplicated memories
        system_prompt: Prefix text for memory context
        include_static: Whether to include static profile memories
        include_dynamic: Whether to include dynamic profile memories
        include_search: Whether to include search result memories

    Returns:
        Formatted text string with system prompt prefix; empty string when
        there is no memory content at all (no bare prompt is emitted).
    """
    markdown = format_memories_to_markdown(
        memories,
        include_static=include_static,
        include_dynamic=include_dynamic,
        include_search=include_search,
    )

    if not markdown:
        return ""

    return f"{system_prompt}{markdown}"