diff --git a/agent-memory-client/agent_memory_client/__init__.py b/agent-memory-client/agent_memory_client/__init__.py index eede607..d00ff58 100644 --- a/agent-memory-client/agent_memory_client/__init__.py +++ b/agent-memory-client/agent_memory_client/__init__.py @@ -18,12 +18,16 @@ # Re-export essential models for convenience ModelNameLiteral, ) +from .tool_schema import ToolSchema, ToolSchemaCollection __all__ = [ # Client classes "MemoryAPIClient", "MemoryClientConfig", "create_memory_client", + # Tool schema classes + "ToolSchema", + "ToolSchemaCollection", # Exceptions "MemoryClientError", "MemoryValidationError", diff --git a/agent-memory-client/agent_memory_client/client.py b/agent-memory-client/agent_memory_client/client.py index 8e43295..1431f0f 100644 --- a/agent-memory-client/agent_memory_client/client.py +++ b/agent-memory-client/agent_memory_client/client.py @@ -48,6 +48,7 @@ WorkingMemory, WorkingMemoryResponse, ) +from .tool_schema import ToolSchema, ToolSchemaCollection # === Tool Call Type Definitions === @@ -1044,7 +1045,7 @@ async def search_memory_tool( } @classmethod - def get_memory_search_tool_schema(cls) -> dict[str, Any]: + def get_memory_search_tool_schema(cls) -> ToolSchema: """ Get OpenAI-compatible tool schema for memory search. @@ -1053,19 +1054,20 @@ def get_memory_search_tool_schema(cls) -> dict[str, Any]: memory search as a tool that LLMs can call. 
Returns: - Tool schema dictionary compatible with OpenAI tool calling format + ToolSchema object with customizable description and parameters Example: ```python # Register with OpenAI import openai - tools = [MemoryAPIClient.get_memory_search_tool_schema()] + schema = MemoryAPIClient.get_memory_search_tool_schema() + schema.set_description("Custom search description") response = await openai.chat.completions.create( model="gpt-4", messages=[{"role": "user", "content": "What did I say about my preferences?"}], - tools=tools, + tools=[schema.to_dict()], tool_choice="auto" ) ``` @@ -1081,66 +1083,69 @@ async def handle_tool_calls(client, tool_calls): yield result ``` """ - return { - "type": "function", - "function": { - "name": "search_memory", - "description": "Search long-term memory for relevant information using semantic vector search. Use this when you need to find previously stored information about the user, such as their preferences, past conversations, or important facts. Examples: 'Find information about user food preferences', 'What did they say about their job?', 'Look for travel preferences'. This searches only long-term memory, not current working memory - use get_working_memory for current session info. 
IMPORTANT: The result includes 'memories' with an 'id' field; use these IDs when calling edit_long_term_memory or delete_long_term_memories.", - "parameters": { - "type": "object", - "properties": { - "query": { - "type": "string", - "description": "The query for vector search describing what information you're looking for", - }, - "topics": { - "type": "array", - "items": {"type": "string"}, - "description": "Optional list of topics to filter by (e.g., ['preferences', 'work', 'personal'])", - }, - "entities": { - "type": "array", - "items": {"type": "string"}, - "description": "Optional list of entities to filter by (e.g., ['John', 'project_alpha', 'meetings'])", - }, - "memory_type": { - "type": "string", - "enum": ["episodic", "semantic", "message"], - "description": "Optional filter by memory type: 'episodic' (events/experiences), 'semantic' (facts/knowledge), 'message' (conversation history)", - }, - "max_results": { - "type": "integer", - "minimum": 1, - "maximum": 20, - "default": 10, - "description": "Maximum number of results to return", - }, - "offset": { - "type": "integer", - "minimum": 0, - "default": 0, - "description": "Offset for pagination (default: 0)", - }, - "min_relevance": { - "type": "number", - "minimum": 0.0, - "maximum": 1.0, - "description": "Optional minimum relevance score (0.0-1.0, higher = more relevant)", - }, - "user_id": { - "type": "string", - "description": "Optional user ID to filter memories by (e.g., 'user123')", - }, - "optimize_query": { - "type": "boolean", - "default": False, - "description": "Whether to optimize the query for vector search (default: False - LLMs typically provide already optimized queries)", + return ToolSchema( + { + "type": "function", + "function": { + "name": "search_memory", + "description": "Search long-term memory for relevant information using semantic vector search. 
Use this when you need to find previously stored information about the user, such as their preferences, past conversations, or important facts. Examples: 'Find information about user food preferences', 'What did they say about their job?', 'Look for travel preferences'. This searches only long-term memory, not current working memory - use get_working_memory for current session info. IMPORTANT: The result includes 'memories' with an 'id' field; use these IDs when calling edit_long_term_memory or delete_long_term_memories.", + "parameters": { + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "The query for vector search describing what information you're looking for", + }, + "topics": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional list of topics to filter by (e.g., ['preferences', 'work', 'personal'])", + }, + "entities": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional list of entities to filter by (e.g., ['John', 'project_alpha', 'meetings'])", + }, + "memory_type": { + "type": "string", + "enum": ["episodic", "semantic", "message"], + "description": "Optional filter by memory type: 'episodic' (events/experiences), 'semantic' (facts/knowledge), 'message' (conversation history)", + }, + "max_results": { + "type": "integer", + "minimum": 1, + "maximum": 20, + "default": 10, + "description": "Maximum number of results to return", + }, + "offset": { + "type": "integer", + "minimum": 0, + "default": 0, + "description": "Offset for pagination (default: 0)", + }, + "min_relevance": { + "type": "number", + "minimum": 0.0, + "maximum": 1.0, + "description": "Optional minimum relevance score (0.0-1.0, higher = more relevant)", + }, + "user_id": { + "type": "string", + "description": "Optional user ID to filter memories by (e.g., 'user123')", + }, + "optimize_query": { + "type": "boolean", + "default": False, + "description": "Whether to optimize the query for vector search 
(default: False - LLMs typically provide already optimized queries)", + }, }, + "required": ["query"], }, - "required": ["query"], }, }, - } + schema_format="openai", + ) # === Working Memory Tool Integration === @@ -1465,441 +1470,484 @@ async def update_memory_data_tool( } @classmethod - def get_working_memory_tool_schema(cls) -> dict[str, Any]: + def get_working_memory_tool_schema(cls) -> ToolSchema: """ Get OpenAI-compatible tool schema for reading working memory. Returns: - Tool schema dictionary compatible with OpenAI tool calling format + ToolSchema object with customizable description and parameters """ - return { - "type": "function", - "function": { - "name": "get_or_create_working_memory", - "description": "Get the current working memory state including recent messages, temporarily stored memories, and session-specific data. Creates a new session if one doesn't exist. Returns information about whether the session was created or found existing. Use this to check what's already in the current conversation context before deciding whether to search long-term memory or add new information. Examples: Check if user preferences are already loaded in this session, review recent conversation context, see what structured data has been stored for this session.", - "parameters": { - "type": "object", - "properties": {}, - "required": [], + return ToolSchema( + { + "type": "function", + "function": { + "name": "get_or_create_working_memory", + "description": "Get the current working memory state including recent messages, temporarily stored memories, and session-specific data. Creates a new session if one doesn't exist. Returns information about whether the session was created or found existing. Use this to check what's already in the current conversation context before deciding whether to search long-term memory or add new information. 
Examples: Check if user preferences are already loaded in this session, review recent conversation context, see what structured data has been stored for this session.", + "parameters": { + "type": "object", + "properties": {}, + "required": [], + }, }, }, - } + schema_format="openai", + ) @classmethod - def get_add_memory_tool_schema(cls) -> dict[str, Any]: + def get_add_memory_tool_schema(cls) -> ToolSchema: """ Get OpenAI-compatible tool schema for adding memories to working memory. Returns: - Tool schema dictionary compatible with OpenAI tool calling format + ToolSchema object with customizable description and parameters """ - return { - "type": "function", - "function": { - "name": "add_memory_to_working_memory", - "description": ( - "Store new important information as a structured memory. Use this when users share preferences, facts, or important details that should be remembered for future conversations. " - "Examples: 'User is vegetarian', 'Lives in Seattle', 'Works as a software engineer', 'Prefers morning meetings'. The system automatically promotes important memories to long-term storage. " - "For time-bound (episodic) information, include a grounded date phrase in the text (e.g., 'on August 14, 2025') and call get_current_datetime to resolve relative expressions like 'today'/'yesterday'; the backend will set the structured event_date during extraction/promotion. " - "Always check if similar information already exists before creating new memories." 
- ), - "parameters": { - "type": "object", - "properties": { - "text": { - "type": "string", - "description": "The memory content to store", - }, - "memory_type": { - "type": "string", - "enum": ["episodic", "semantic"], - "description": "Type of memory: 'episodic' (events/experiences), 'semantic' (facts/preferences)", - }, - "topics": { - "type": "array", - "items": {"type": "string"}, - "description": "Optional topics for categorization (e.g., ['preferences', 'budget', 'destinations'])", - }, - "entities": { - "type": "array", - "items": {"type": "string"}, - "description": "Optional entities mentioned (e.g., ['Paris', 'hotel', 'vegetarian'])", + return ToolSchema( + { + "type": "function", + "function": { + "name": "add_memory_to_working_memory", + "description": ( + "Store new important information as a structured memory. Use this when users share preferences, facts, or important details that should be remembered for future conversations. " + "Examples: 'User is vegetarian', 'Lives in Seattle', 'Works as a software engineer', 'Prefers morning meetings'. The system automatically promotes important memories to long-term storage. " + "For time-bound (episodic) information, include a grounded date phrase in the text (e.g., 'on August 14, 2025') and call get_current_datetime to resolve relative expressions like 'today'/'yesterday'; the backend will set the structured event_date during extraction/promotion. " + "Always check if similar information already exists before creating new memories." 
+ ), + "parameters": { + "type": "object", + "properties": { + "text": { + "type": "string", + "description": "The memory content to store", + }, + "memory_type": { + "type": "string", + "enum": ["episodic", "semantic"], + "description": "Type of memory: 'episodic' (events/experiences), 'semantic' (facts/preferences)", + }, + "topics": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional topics for categorization (e.g., ['preferences', 'budget', 'destinations'])", + }, + "entities": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional entities mentioned (e.g., ['Paris', 'hotel', 'vegetarian'])", + }, }, + "required": ["text", "memory_type"], }, - "required": ["text", "memory_type"], }, }, - } + schema_format="openai", + ) @classmethod - def get_update_memory_data_tool_schema(cls) -> dict[str, Any]: + def get_update_memory_data_tool_schema(cls) -> ToolSchema: """ Get OpenAI-compatible tool schema for updating working memory data. Returns: - Tool schema dictionary compatible with OpenAI tool calling format + ToolSchema object with customizable description and parameters """ - return { - "type": "function", - "function": { - "name": "update_working_memory_data", - "description": "Store or update structured session data (JSON objects) in working memory. Use this for complex session-specific information that needs to be accessed and modified during the conversation. Examples: Travel itinerary {'destination': 'Paris', 'dates': ['2024-03-15', '2024-03-20']}, project details {'name': 'Website Redesign', 'deadline': '2024-04-01', 'status': 'in_progress'}. 
Different from add_memory_to_working_memory which stores simple text facts.", - "parameters": { - "type": "object", - "properties": { - "data": { - "type": "object", - "description": "JSON data to store or update in working memory", - }, - "merge_strategy": { - "type": "string", - "enum": ["replace", "merge", "deep_merge"], - "default": "merge", - "description": "How to handle existing data: 'replace' (overwrite), 'merge' (shallow merge), 'deep_merge' (recursive merge)", + return ToolSchema( + { + "type": "function", + "function": { + "name": "update_working_memory_data", + "description": "Store or update structured session data (JSON objects) in working memory. Use this for complex session-specific information that needs to be accessed and modified during the conversation. Examples: Travel itinerary {'destination': 'Paris', 'dates': ['2024-03-15', '2024-03-20']}, project details {'name': 'Website Redesign', 'deadline': '2024-04-01', 'status': 'in_progress'}. Different from add_memory_to_working_memory which stores simple text facts.", + "parameters": { + "type": "object", + "properties": { + "data": { + "type": "object", + "description": "JSON data to store or update in working memory", + }, + "merge_strategy": { + "type": "string", + "enum": ["replace", "merge", "deep_merge"], + "default": "merge", + "description": "How to handle existing data: 'replace' (overwrite), 'merge' (shallow merge), 'deep_merge' (recursive merge)", + }, }, + "required": ["data"], }, - "required": ["data"], }, }, - } + schema_format="openai", + ) @classmethod - def get_long_term_memory_tool_schema(cls) -> dict[str, Any]: + def get_long_term_memory_tool_schema(cls) -> ToolSchema: """ Get OpenAI-compatible tool schema for retrieving a long-term memory by ID. 
Returns: - Tool schema dictionary compatible with OpenAI tool calling format + ToolSchema object with customizable description and parameters """ - return { - "type": "function", - "function": { - "name": "get_long_term_memory", - "description": "Retrieve a specific long-term memory by its unique ID to see full details. Use this when you have a memory ID from search_memory results and need complete information before editing or to show detailed memory content to the user. Example: After search_memory('job information') returns memories with IDs, call get_long_term_memory(memory_id=) to inspect before editing. Always obtain the memory_id from search_memory.", - "parameters": { - "type": "object", - "properties": { - "memory_id": { - "type": "string", - "description": "The unique ID of the memory to retrieve", + return ToolSchema( + { + "type": "function", + "function": { + "name": "get_long_term_memory", + "description": "Retrieve a specific long-term memory by its unique ID to see full details. Use this when you have a memory ID from search_memory results and need complete information before editing or to show detailed memory content to the user. Example: After search_memory('job information') returns memories with IDs, call get_long_term_memory(memory_id=) to inspect before editing. Always obtain the memory_id from search_memory.", + "parameters": { + "type": "object", + "properties": { + "memory_id": { + "type": "string", + "description": "The unique ID of the memory to retrieve", + }, }, + "required": ["memory_id"], }, - "required": ["memory_id"], }, }, - } + schema_format="openai", + ) @classmethod - def edit_long_term_memory_tool_schema(cls) -> dict[str, Any]: + def edit_long_term_memory_tool_schema(cls) -> ToolSchema: """ Get OpenAI-compatible tool schema for editing a long-term memory. 
Returns: - Tool schema dictionary compatible with OpenAI tool calling format + ToolSchema object with customizable description and parameters """ - return { - "type": "function", - "function": { - "name": "edit_long_term_memory", - "description": ( - "Update an existing long-term memory with new or corrected information. Use this when users provide corrections ('Actually, I work at Microsoft, not Google'), updates ('I got promoted to Senior Engineer'), or additional details. Only specify the fields you want to change - other fields remain unchanged. " - "Examples: Update job title from 'Engineer' to 'Senior Engineer', change location from 'New York' to 'Seattle', correct food preference from 'coffee' to 'tea'. " - "For time-bound (episodic) updates, ALWAYS set event_date (ISO 8601 UTC) and include a grounded, human-readable date in the text. Use get_current_datetime to resolve 'today'/'yesterday'/'last week' before setting event_date. " - "IMPORTANT: First call search_memory to get candidate memories; then pass the chosen memory's 'id' as memory_id." 
- ), - "parameters": { - "type": "object", - "properties": { - "memory_id": { - "type": "string", - "description": "The unique ID of the memory to edit (required)", - }, - "text": { - "type": "string", - "description": "Updated text content for the memory", - }, - "topics": { - "type": "array", - "items": {"type": "string"}, - "description": "Updated list of topics for the memory", - }, - "entities": { - "type": "array", - "items": {"type": "string"}, - "description": "Updated list of entities mentioned in the memory", - }, - "memory_type": { - "type": "string", - "enum": ["episodic", "semantic"], - "description": "Updated memory type: 'episodic' (events/experiences), 'semantic' (facts/preferences)", - }, - "namespace": { - "type": "string", - "description": "Updated namespace for organizing the memory", - }, - "user_id": { - "type": "string", - "description": "Updated user ID associated with the memory", - }, - "session_id": { - "type": "string", - "description": "Updated session ID where the memory originated", - }, - "event_date": { - "type": "string", - "description": "Updated event date for episodic memories (ISO 8601 format: '2024-01-15T14:30:00Z')", + return ToolSchema( + { + "type": "function", + "function": { + "name": "edit_long_term_memory", + "description": ( + "Update an existing long-term memory with new or corrected information. Use this when users provide corrections ('Actually, I work at Microsoft, not Google'), updates ('I got promoted to Senior Engineer'), or additional details. Only specify the fields you want to change - other fields remain unchanged. " + "Examples: Update job title from 'Engineer' to 'Senior Engineer', change location from 'New York' to 'Seattle', correct food preference from 'coffee' to 'tea'. " + "For time-bound (episodic) updates, ALWAYS set event_date (ISO 8601 UTC) and include a grounded, human-readable date in the text. Use get_current_datetime to resolve 'today'/'yesterday'/'last week' before setting event_date. 
" + "IMPORTANT: First call search_memory to get candidate memories; then pass the chosen memory's 'id' as memory_id." + ), + "parameters": { + "type": "object", + "properties": { + "memory_id": { + "type": "string", + "description": "The unique ID of the memory to edit (required)", + }, + "text": { + "type": "string", + "description": "Updated text content for the memory", + }, + "topics": { + "type": "array", + "items": {"type": "string"}, + "description": "Updated list of topics for the memory", + }, + "entities": { + "type": "array", + "items": {"type": "string"}, + "description": "Updated list of entities mentioned in the memory", + }, + "memory_type": { + "type": "string", + "enum": ["episodic", "semantic"], + "description": "Updated memory type: 'episodic' (events/experiences), 'semantic' (facts/preferences)", + }, + "namespace": { + "type": "string", + "description": "Updated namespace for organizing the memory", + }, + "user_id": { + "type": "string", + "description": "Updated user ID associated with the memory", + }, + "session_id": { + "type": "string", + "description": "Updated session ID where the memory originated", + }, + "event_date": { + "type": "string", + "description": "Updated event date for episodic memories (ISO 8601 format: '2024-01-15T14:30:00Z')", + }, }, + "required": ["memory_id"], }, - "required": ["memory_id"], }, }, - } + schema_format="openai", + ) @classmethod - def create_long_term_memory_tool_schema(cls) -> dict[str, Any]: + def create_long_term_memory_tool_schema(cls) -> ToolSchema: """ Get OpenAI-compatible tool schema for creating long-term memories directly. Returns: - Tool schema dictionary compatible with OpenAI tool calling format + ToolSchema object with customizable description and parameters """ - return { - "type": "function", - "function": { - "name": "create_long_term_memory", - "description": ( - "Create long-term memories directly for immediate storage and retrieval. 
" - "Use this for important information that should be permanently stored without going through working memory. " - "This is the 'eager' approach - memories are created immediately in long-term storage. " - "Examples: User preferences, important facts, key events that need to be searchable right away. " - "For episodic memories, include event_date in ISO format." - ), - "parameters": { - "type": "object", - "properties": { - "memories": { - "type": "array", - "items": { - "type": "object", - "properties": { - "text": { - "type": "string", - "description": "The memory content to store", - }, - "memory_type": { - "type": "string", - "enum": ["episodic", "semantic"], - "description": "Type of memory: 'episodic' (events/experiences), 'semantic' (facts/preferences)", - }, - "topics": { - "type": "array", - "items": {"type": "string"}, - "description": "Optional topics for categorization", - }, - "entities": { - "type": "array", - "items": {"type": "string"}, - "description": "Optional entities mentioned in the memory", - }, - "event_date": { - "type": "string", - "description": "Optional event date for episodic memories (ISO 8601 format: '2024-01-15T14:30:00Z')", + return ToolSchema( + { + "type": "function", + "function": { + "name": "create_long_term_memory", + "description": ( + "Create long-term memories directly for immediate storage and retrieval. " + "Use this for important information that should be permanently stored without going through working memory. " + "This is the 'eager' approach - memories are created immediately in long-term storage. " + "Examples: User preferences, important facts, key events that need to be searchable right away. " + "For episodic memories, include event_date in ISO format." 
+ ), + "parameters": { + "type": "object", + "properties": { + "memories": { + "type": "array", + "items": { + "type": "object", + "properties": { + "text": { + "type": "string", + "description": "The memory content to store", + }, + "memory_type": { + "type": "string", + "enum": ["episodic", "semantic"], + "description": "Type of memory: 'episodic' (events/experiences), 'semantic' (facts/preferences)", + }, + "topics": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional topics for categorization", + }, + "entities": { + "type": "array", + "items": {"type": "string"}, + "description": "Optional entities mentioned in the memory", + }, + "event_date": { + "type": "string", + "description": "Optional event date for episodic memories (ISO 8601 format: '2024-01-15T14:30:00Z')", + }, }, + "required": ["text", "memory_type"], }, - "required": ["text", "memory_type"], + "description": "List of memories to create", }, - "description": "List of memories to create", }, + "required": ["memories"], }, - "required": ["memories"], }, }, - } + schema_format="openai", + ) @classmethod - def delete_long_term_memories_tool_schema(cls) -> dict[str, Any]: + def delete_long_term_memories_tool_schema(cls) -> ToolSchema: """ Get OpenAI-compatible tool schema for deleting long-term memories. Returns: - Tool schema dictionary compatible with OpenAI tool calling format + ToolSchema object with customizable description and parameters """ - return { - "type": "function", - "function": { - "name": "delete_long_term_memories", - "description": "Permanently delete long-term memories that are outdated, incorrect, or no longer needed. Use this when users explicitly request information removal ('Delete that old job information'), when you find duplicate memories that should be consolidated, or when memories contain outdated information that might confuse future conversations. 
Examples: Remove old job info after user changes careers, delete duplicate food preferences, remove outdated contact information. IMPORTANT: First call search_memory to get candidate memories; then pass the selected memories' 'id' values as memory_ids. This action cannot be undone.", - "parameters": { - "type": "object", - "properties": { - "memory_ids": { - "type": "array", - "items": {"type": "string"}, - "description": "List of memory IDs to delete", + return ToolSchema( + { + "type": "function", + "function": { + "name": "delete_long_term_memories", + "description": "Permanently delete long-term memories that are outdated, incorrect, or no longer needed. Use this when users explicitly request information removal ('Delete that old job information'), when you find duplicate memories that should be consolidated, or when memories contain outdated information that might confuse future conversations. Examples: Remove old job info after user changes careers, delete duplicate food preferences, remove outdated contact information. IMPORTANT: First call search_memory to get candidate memories; then pass the selected memories' 'id' values as memory_ids. This action cannot be undone.", + "parameters": { + "type": "object", + "properties": { + "memory_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of memory IDs to delete", + }, }, + "required": ["memory_ids"], }, - "required": ["memory_ids"], }, }, - } + schema_format="openai", + ) @classmethod - def get_all_memory_tool_schemas(cls) -> Sequence[dict[str, Any]]: + def get_all_memory_tool_schemas(cls) -> ToolSchemaCollection: """ Get all memory-related tool schemas for easy LLM integration. 
Returns: - List of all memory tool schemas + ToolSchemaCollection with all memory tool schemas Example: ```python # Get all memory tools for OpenAI tools = MemoryAPIClient.get_all_memory_tool_schemas() + # Customize specific tools + tools.set_description("search_memory", "Custom search description") + response = await openai.chat.completions.create( model="gpt-4", messages=messages, - tools=tools, + tools=tools.to_list(), tool_choice="auto" ) ``` """ - return [ - cls.get_memory_search_tool_schema(), - cls.get_working_memory_tool_schema(), - cls.get_add_memory_tool_schema(), - cls.get_update_memory_data_tool_schema(), - cls.get_long_term_memory_tool_schema(), - cls.create_long_term_memory_tool_schema(), - cls.edit_long_term_memory_tool_schema(), - cls.delete_long_term_memories_tool_schema(), - cls.get_current_datetime_tool_schema(), - ] + return ToolSchemaCollection( + [ + cls.get_memory_search_tool_schema(), + cls.get_working_memory_tool_schema(), + cls.get_add_memory_tool_schema(), + cls.get_update_memory_data_tool_schema(), + cls.get_long_term_memory_tool_schema(), + cls.create_long_term_memory_tool_schema(), + cls.edit_long_term_memory_tool_schema(), + cls.delete_long_term_memories_tool_schema(), + cls.get_current_datetime_tool_schema(), + ] + ) @classmethod - def get_all_memory_tool_schemas_anthropic(cls) -> Sequence[dict[str, Any]]: + def get_all_memory_tool_schemas_anthropic(cls) -> ToolSchemaCollection: """ Get all memory-related tool schemas in Anthropic format. 
Returns: - List of all memory tool schemas formatted for Anthropic API + ToolSchemaCollection with all memory tool schemas formatted for Anthropic API Example: ```python # Get all memory tools for Anthropic tools = MemoryAPIClient.get_all_memory_tool_schemas_anthropic() + # Customize specific tools + tools.set_description("search_memory", "Custom search description") + response = anthropic.messages.create( model="claude-3-opus-20240229", messages=messages, - tools=tools, + tools=tools.to_list(), max_tokens=1024 ) ``` """ - return [ - cls.get_memory_search_tool_schema_anthropic(), - cls.get_working_memory_tool_schema_anthropic(), - cls.get_add_memory_tool_schema_anthropic(), - cls.get_update_memory_data_tool_schema_anthropic(), - cls.get_long_term_memory_tool_schema_anthropic(), - cls.create_long_term_memory_tool_schema_anthropic(), - cls.edit_long_term_memory_tool_schema_anthropic(), - cls.delete_long_term_memories_tool_schema_anthropic(), - cls.get_current_datetime_tool_schema_anthropic(), - ] + return ToolSchemaCollection( + [ + cls.get_memory_search_tool_schema_anthropic(), + cls.get_working_memory_tool_schema_anthropic(), + cls.get_add_memory_tool_schema_anthropic(), + cls.get_update_memory_data_tool_schema_anthropic(), + cls.get_long_term_memory_tool_schema_anthropic(), + cls.create_long_term_memory_tool_schema_anthropic(), + cls.edit_long_term_memory_tool_schema_anthropic(), + cls.delete_long_term_memories_tool_schema_anthropic(), + cls.get_current_datetime_tool_schema_anthropic(), + ] + ) @classmethod - def get_current_datetime_tool_schema(cls) -> dict[str, Any]: + def get_current_datetime_tool_schema(cls) -> ToolSchema: """OpenAI-compatible tool schema for current UTC datetime.""" - return { - "type": "function", - "function": { - "name": "get_current_datetime", - "description": ( - "Return the current datetime in UTC to ground relative time expressions. 
" - "Use this before setting `event_date` or including a human-readable date in text when the user says " - "'today', 'yesterday', 'last week', etc." - ), - "parameters": {"type": "object", "properties": {}, "required": []}, + return ToolSchema( + { + "type": "function", + "function": { + "name": "get_current_datetime", + "description": ( + "Return the current datetime in UTC to ground relative time expressions. " + "Use this before setting `event_date` or including a human-readable date in text when the user says " + "'today', 'yesterday', 'last week', etc." + ), + "parameters": {"type": "object", "properties": {}, "required": []}, + }, }, - } + schema_format="openai", + ) @classmethod - def get_current_datetime_tool_schema_anthropic(cls) -> dict[str, Any]: + def get_current_datetime_tool_schema_anthropic(cls) -> ToolSchema: """Anthropic-compatible tool schema for current UTC datetime.""" return cls._convert_openai_to_anthropic_schema( cls.get_current_datetime_tool_schema() ) @classmethod - def get_memory_search_tool_schema_anthropic(cls) -> dict[str, Any]: + def get_memory_search_tool_schema_anthropic(cls) -> ToolSchema: """Get memory search tool schema in Anthropic format.""" openai_schema = cls.get_memory_search_tool_schema() return cls._convert_openai_to_anthropic_schema(openai_schema) @classmethod - def get_working_memory_tool_schema_anthropic(cls) -> dict[str, Any]: + def get_working_memory_tool_schema_anthropic(cls) -> ToolSchema: """Get working memory tool schema in Anthropic format.""" openai_schema = cls.get_working_memory_tool_schema() return cls._convert_openai_to_anthropic_schema(openai_schema) @classmethod - def get_add_memory_tool_schema_anthropic(cls) -> dict[str, Any]: + def get_add_memory_tool_schema_anthropic(cls) -> ToolSchema: """Get add memory tool schema in Anthropic format.""" openai_schema = cls.get_add_memory_tool_schema() return cls._convert_openai_to_anthropic_schema(openai_schema) @classmethod - def 
get_update_memory_data_tool_schema_anthropic(cls) -> dict[str, Any]: + def get_update_memory_data_tool_schema_anthropic(cls) -> ToolSchema: """Get update memory data tool schema in Anthropic format.""" openai_schema = cls.get_update_memory_data_tool_schema() return cls._convert_openai_to_anthropic_schema(openai_schema) @classmethod - def get_long_term_memory_tool_schema_anthropic(cls) -> dict[str, Any]: + def get_long_term_memory_tool_schema_anthropic(cls) -> ToolSchema: """Get long-term memory tool schema in Anthropic format.""" openai_schema = cls.get_long_term_memory_tool_schema() return cls._convert_openai_to_anthropic_schema(openai_schema) @classmethod - def create_long_term_memory_tool_schema_anthropic(cls) -> dict[str, Any]: + def create_long_term_memory_tool_schema_anthropic(cls) -> ToolSchema: """Get create long-term memory tool schema in Anthropic format.""" openai_schema = cls.create_long_term_memory_tool_schema() return cls._convert_openai_to_anthropic_schema(openai_schema) @classmethod - def edit_long_term_memory_tool_schema_anthropic(cls) -> dict[str, Any]: + def edit_long_term_memory_tool_schema_anthropic(cls) -> ToolSchema: """Get edit long-term memory tool schema in Anthropic format.""" openai_schema = cls.edit_long_term_memory_tool_schema() return cls._convert_openai_to_anthropic_schema(openai_schema) @classmethod - def delete_long_term_memories_tool_schema_anthropic(cls) -> dict[str, Any]: + def delete_long_term_memories_tool_schema_anthropic(cls) -> ToolSchema: """Get delete long-term memories tool schema in Anthropic format.""" openai_schema = cls.delete_long_term_memories_tool_schema() return cls._convert_openai_to_anthropic_schema(openai_schema) @staticmethod def _convert_openai_to_anthropic_schema( - openai_schema: dict[str, Any], - ) -> dict[str, Any]: + openai_schema: ToolSchema | dict[str, Any], + ) -> ToolSchema: """ Convert OpenAI tool schema to Anthropic format. 
"""
Tool schema classes for customizing memory tool descriptions.

This module provides wrapper classes that allow users to customize
tool descriptions and other properties before passing them to LLMs.
"""

from __future__ import annotations

import copy  # hoisted to module level: avoids re-running import machinery per call
from collections.abc import Iterator
from typing import TYPE_CHECKING, Any, Literal

if TYPE_CHECKING:
    from typing_extensions import Self


class ToolSchema:
    """
    Wrapper for tool schema dictionaries that provides a fluent API
    for customizing tool descriptions and other properties.

    Supports both OpenAI and Anthropic schema formats.

    Example:
        ```python
        schema = MemoryAPIClient.get_memory_search_tool_schema()
        schema.set_description("Custom description for my use case")
        schema.set_name("my_custom_search_tool")

        # Pass to LLM
        tools = [schema.to_dict()]
        ```
    """

    def __init__(
        self,
        schema: dict[str, Any],
        schema_format: Literal["openai", "anthropic"] = "openai",
    ):
        """
        Initialize a ToolSchema wrapper.

        Args:
            schema: The raw schema dictionary
            schema_format: The schema format ("openai" or "anthropic")
        """
        self._schema = schema
        self._format = schema_format

    @property
    def format(self) -> Literal["openai", "anthropic"]:
        """Get the schema format."""
        return self._format

    def _properties(self) -> dict[str, Any]:
        """
        Return the parameter properties mapping for the current format.

        OpenAI schemas nest properties under function.parameters.properties;
        Anthropic schemas use input_schema.properties.

        Raises:
            ValueError: If the schema structure is malformed
        """
        try:
            if self._format == "openai":
                return self._schema["function"]["parameters"]["properties"]
            return self._schema["input_schema"]["properties"]
        except (KeyError, TypeError) as e:
            raise ValueError(
                f"Malformed schema structure for {self._format} format. "
                f"Expected {'function.parameters.properties' if self._format == 'openai' else 'input_schema.properties'} path. "
                f"Original error: {e}"
            ) from e

    def set_description(self, description: str) -> Self:
        """
        Set a custom description for the tool.

        Args:
            description: The new description text

        Returns:
            Self for method chaining

        Raises:
            ValueError: If the schema structure is malformed
        """
        try:
            if self._format == "openai":
                self._schema["function"]["description"] = description
            else:  # anthropic
                self._schema["description"] = description
        except (KeyError, TypeError) as e:
            raise ValueError(
                f"Malformed schema structure for {self._format} format. "
                f"Expected {'function.description' if self._format == 'openai' else 'description'} path. "
                f"Original error: {e}"
            ) from e
        return self

    def set_name(self, name: str) -> Self:
        """
        Set a custom name for the tool.

        Args:
            name: The new tool name

        Returns:
            Self for method chaining

        Raises:
            ValueError: If the schema structure is malformed
        """
        try:
            if self._format == "openai":
                self._schema["function"]["name"] = name
            else:  # anthropic
                self._schema["name"] = name
        except (KeyError, TypeError) as e:
            raise ValueError(
                f"Malformed schema structure for {self._format} format. "
                f"Expected {'function.name' if self._format == 'openai' else 'name'} path. "
                f"Original error: {e}"
            ) from e
        return self

    def set_parameter_description(self, param_name: str, description: str) -> Self:
        """
        Set a custom description for a specific parameter.

        Args:
            param_name: The name of the parameter to update
            description: The new description for the parameter

        Returns:
            Self for method chaining

        Raises:
            KeyError: If the parameter does not exist in the schema
            ValueError: If the schema structure is malformed
        """
        props = self._properties()

        if param_name in props:
            props[param_name]["description"] = description
        else:
            raise KeyError(
                f"Parameter '{param_name}' does not exist in the schema properties: {list(props.keys())}"
            )
        return self

    def get_description(self) -> str:
        """
        Get the current tool description.

        Raises:
            ValueError: If the schema structure is malformed
        """
        try:
            if self._format == "openai":
                return self._schema["function"]["description"]
            return self._schema["description"]
        except (KeyError, TypeError) as e:
            raise ValueError(
                f"Malformed schema structure for {self._format} format. "
                f"Expected {'function.description' if self._format == 'openai' else 'description'} path. "
                f"Original error: {e}"
            ) from e

    def get_name(self) -> str:
        """
        Get the current tool name.

        Raises:
            ValueError: If the schema structure is malformed
        """
        try:
            if self._format == "openai":
                return self._schema["function"]["name"]
            return self._schema["name"]
        except (KeyError, TypeError) as e:
            raise ValueError(
                f"Malformed schema structure for {self._format} format. "
                f"Expected {'function.name' if self._format == 'openai' else 'name'} path. "
                f"Original error: {e}"
            ) from e

    def get_parameter_description(self, param_name: str) -> str | None:
        """
        Get the description for a specific parameter.

        Args:
            param_name: The name of the parameter

        Returns:
            The parameter description, or None if not found

        Raises:
            ValueError: If the schema structure is malformed
        """
        props = self._properties()

        if param_name in props:
            return props[param_name].get("description")
        return None

    def to_dict(self) -> dict[str, Any]:
        """Return the schema as a dictionary for LLM consumption.

        A deep copy is returned so callers cannot mutate this wrapper's
        internal state through the result.
        """
        return copy.deepcopy(self._schema)

    def copy(self) -> ToolSchema:
        """Create an independent copy of this schema."""
        return ToolSchema(copy.deepcopy(self._schema), self._format)

    # Dict-like access for backwards compatibility
    def __getitem__(self, key: str) -> Any:
        return self._schema[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._schema[key] = value

    def __contains__(self, key: str) -> bool:
        return key in self._schema

    def __repr__(self) -> str:
        return f"ToolSchema(name={self.get_name()!r}, format={self._format!r})"


class ToolSchemaCollection:
    """
    Collection of tool schemas with bulk customization support.

    Example:
        ```python
        all_tools = MemoryAPIClient.get_all_memory_tool_schemas()
        all_tools.set_description("search_memory", "Custom search description")

        # Get specific tool
        search_tool = all_tools.get_by_name("search_memory")
        search_tool.set_parameter_description("query", "Custom query description")

        # Use all tools
        tools = all_tools.to_list()
        ```
    """

    def __init__(self, schemas: list[ToolSchema]):
        """
        Initialize a ToolSchemaCollection.

        Args:
            schemas: List of ToolSchema objects
        """
        self._schemas = schemas

    def get_by_name(self, name: str) -> ToolSchema | None:
        """
        Get a specific tool schema by name.

        Args:
            name: The tool name to find

        Returns:
            The ToolSchema if found, None otherwise
        """
        for schema in self._schemas:
            if schema.get_name() == name:
                return schema
        return None

    def set_description(self, name: str, description: str) -> Self:
        """
        Set description for a specific tool by name.

        Args:
            name: The tool name to update
            description: The new description

        Returns:
            Self for method chaining

        Raises:
            KeyError: If no tool with the given name exists in the collection
        """
        schema = self.get_by_name(name)
        if schema is None:
            raise KeyError(
                f"Tool '{name}' not found in collection. "
                f"Available tools: {self.names()}"
            )
        schema.set_description(description)
        return self

    def set_name(self, old_name: str, new_name: str) -> Self:
        """
        Rename a specific tool.

        Args:
            old_name: The current tool name
            new_name: The new tool name

        Returns:
            Self for method chaining

        Raises:
            KeyError: If no tool with the given name exists in the collection
        """
        schema = self.get_by_name(old_name)
        if schema is None:
            raise KeyError(
                f"Tool '{old_name}' not found in collection. "
                f"Available tools: {self.names()}"
            )
        schema.set_name(new_name)
        return self

    def to_list(self) -> list[dict[str, Any]]:
        """Return all schemas as a list of dictionaries."""
        return [s.to_dict() for s in self._schemas]

    def copy(self) -> ToolSchemaCollection:
        """Create an independent copy of this collection."""
        return ToolSchemaCollection([s.copy() for s in self._schemas])

    def names(self) -> list[str]:
        """Get all tool names in the collection."""
        return [s.get_name() for s in self._schemas]

    def __iter__(self) -> Iterator[ToolSchema]:
        return iter(self._schemas)

    def __len__(self) -> int:
        return len(self._schemas)

    def __getitem__(self, index: int) -> ToolSchema:
        return self._schemas[index]

    def __repr__(self) -> str:
        names = [s.get_name() for s in self._schemas]
        return f"ToolSchemaCollection({names!r})"
""" -from agent_memory_client import MemoryAPIClient +from agent_memory_client import MemoryAPIClient, ToolSchema, ToolSchemaCollection class TestToolSchemaStructure: @@ -310,3 +310,193 @@ def test_anthropic_schemas_exclude_message_type_for_creation(self): assert ( "message" not in memory_type_prop["enum"] ), f"Anthropic creation/editing tool '{function_name}' should not expose 'message' memory type in nested properties" + + +class TestToolSchemaCustomization: + """Tests for ToolSchema customization methods.""" + + def test_tool_schema_set_description(self): + """Test setting custom description on a tool schema.""" + schema = MemoryAPIClient.get_memory_search_tool_schema() + assert isinstance(schema, ToolSchema) + + original_desc = schema.get_description() + custom_desc = "My custom search description for LLM" + + # Test fluent API returns self + result = schema.set_description(custom_desc) + assert result is schema + + # Test description was updated + assert schema.get_description() == custom_desc + assert schema["function"]["description"] == custom_desc + assert original_desc != custom_desc + + def test_tool_schema_set_name(self): + """Test setting custom name on a tool schema.""" + schema = MemoryAPIClient.get_memory_search_tool_schema() + + original_name = schema.get_name() + custom_name = "my_custom_search" + + result = schema.set_name(custom_name) + assert result is schema + + assert schema.get_name() == custom_name + assert schema["function"]["name"] == custom_name + assert original_name != custom_name + + def test_tool_schema_set_parameter_description(self): + """Test setting custom parameter description.""" + schema = MemoryAPIClient.get_memory_search_tool_schema() + + custom_param_desc = "The search query to find relevant memories" + result = schema.set_parameter_description("query", custom_param_desc) + assert result is schema + + params = schema["function"]["parameters"]["properties"] + assert params["query"]["description"] == custom_param_desc + + def 
test_tool_schema_to_dict(self): + """Test converting ToolSchema to dict.""" + schema = MemoryAPIClient.get_memory_search_tool_schema() + schema.set_description("Custom description") + + result = schema.to_dict() + assert isinstance(result, dict) + assert result["function"]["description"] == "Custom description" + # Ensure it's a new dict, not the same reference + assert result is not schema._schema + + def test_tool_schema_copy(self): + """Test copying a ToolSchema.""" + schema = MemoryAPIClient.get_memory_search_tool_schema() + original_desc = schema.get_description() + + copy = schema.copy() + assert isinstance(copy, ToolSchema) + + # Modify copy + copy.set_description("Modified copy") + + # Original should be unchanged + assert schema.get_description() == original_desc + assert copy.get_description() == "Modified copy" + + def test_tool_schema_backwards_compatibility(self): + """Test that dict-like access still works for backwards compatibility.""" + schema = MemoryAPIClient.get_memory_search_tool_schema() + + # Test __getitem__ + assert schema["type"] == "function" + assert schema["function"]["name"] == "search_memory" + + # Test nested access + assert "query" in schema["function"]["parameters"]["properties"] + + def test_anthropic_schema_customization(self): + """Test customization works for Anthropic schemas.""" + schema = MemoryAPIClient.get_memory_search_tool_schema_anthropic() + assert isinstance(schema, ToolSchema) + + custom_desc = "Custom Anthropic search description" + schema.set_description(custom_desc) + + # Anthropic format has description at top level + assert schema.get_description() == custom_desc + assert schema["description"] == custom_desc + + +class TestToolSchemaCollectionCustomization: + """Tests for ToolSchemaCollection customization methods.""" + + def test_collection_returns_tool_schema_collection(self): + """Test that get_all_memory_tool_schemas returns ToolSchemaCollection.""" + collection = 
MemoryAPIClient.get_all_memory_tool_schemas() + assert isinstance(collection, ToolSchemaCollection) + + def test_collection_get_by_name(self): + """Test getting a specific tool by name from collection.""" + collection = MemoryAPIClient.get_all_memory_tool_schemas() + + search_tool = collection.get_by_name("search_memory") + assert search_tool is not None + assert isinstance(search_tool, ToolSchema) + assert search_tool.get_name() == "search_memory" + + # Test non-existent tool + non_existent = collection.get_by_name("non_existent_tool") + assert non_existent is None + + def test_collection_set_description(self): + """Test setting description on a tool in the collection.""" + collection = MemoryAPIClient.get_all_memory_tool_schemas() + + custom_desc = "Bulk customized search description" + result = collection.set_description("search_memory", custom_desc) + assert result is collection + + search_tool = collection.get_by_name("search_memory") + assert search_tool.get_description() == custom_desc + + def test_collection_to_list(self): + """Test converting collection to list of dicts.""" + collection = MemoryAPIClient.get_all_memory_tool_schemas() + collection.set_description("search_memory", "Custom desc") + + result = collection.to_list() + assert isinstance(result, list) + assert all(isinstance(item, dict) for item in result) + + # Find search_memory in list + search_dict = next( + (item for item in result if item["function"]["name"] == "search_memory"), + None, + ) + assert search_dict is not None + assert search_dict["function"]["description"] == "Custom desc" + + def test_collection_iteration(self): + """Test iterating over collection.""" + collection = MemoryAPIClient.get_all_memory_tool_schemas() + + count = 0 + for schema in collection: + assert isinstance(schema, ToolSchema) + count += 1 + + assert count == len(collection) + assert count > 0 + + def test_collection_len(self): + """Test length of collection.""" + collection = 
MemoryAPIClient.get_all_memory_tool_schemas() + assert len(collection) == 9 # All 9 memory tools + + def test_anthropic_collection_customization(self): + """Test customization works for Anthropic collection.""" + collection = MemoryAPIClient.get_all_memory_tool_schemas_anthropic() + assert isinstance(collection, ToolSchemaCollection) + + custom_desc = "Custom Anthropic collection desc" + collection.set_description("search_memory", custom_desc) + + search_tool = collection.get_by_name("search_memory") + assert search_tool.get_description() == custom_desc + + def test_collection_indexing(self): + """Test indexing into collection.""" + collection = MemoryAPIClient.get_all_memory_tool_schemas() + + first_tool = collection[0] + assert isinstance(first_tool, ToolSchema) + + def test_collection_names(self): + """Test getting all tool names from collection.""" + collection = MemoryAPIClient.get_all_memory_tool_schemas() + + names = collection.names() + assert isinstance(names, list) + assert "search_memory" in names + assert "create_long_term_memory" in names + assert len(names) == 9 diff --git a/docs/python-sdk.md b/docs/python-sdk.md index e110066..0d529c4 100644 --- a/docs/python-sdk.md +++ b/docs/python-sdk.md @@ -227,6 +227,136 @@ The SDK provides these tools for LLM integration: - `create_long_term_memories` (deprecated) → use `eagerly_create_long_term_memory` - `add_memory_to_working_memory` (deprecated) → use `lazily_create_long_term_memory` +### Customizing Tool Descriptions + +The SDK provides `ToolSchema` and `ToolSchemaCollection` wrapper classes that allow you to customize tool descriptions, names, and parameter descriptions before passing them to LLMs. 
This is useful for: + +- Adjusting descriptions to match your application's tone or domain +- Renaming tools to avoid conflicts with other tools +- Adding context-specific information to parameter descriptions + +#### Basic Customization + +```python +from agent_memory_client import MemoryAPIClient + +# Get a tool schema and customize it +schema = MemoryAPIClient.get_memory_search_tool_schema() +schema.set_description("Search through the user's personal knowledge base") +schema.set_name("search_knowledge_base") + +# Customize parameter descriptions +schema.set_parameter_description("query", "Natural language search query") + +# Use with LLM +response = await openai_client.chat.completions.create( + model="gpt-4o", + messages=messages, + tools=[schema.to_dict()] +) +``` + +#### Method Chaining + +All setter methods return `self` for fluent method chaining: + +```python +schema = (MemoryAPIClient.get_memory_search_tool_schema() + .set_description("Find relevant information from memory") + .set_name("find_info") + .set_parameter_description("query", "What to search for")) +``` + +#### Bulk Customization with Collections + +When working with all tools, use `ToolSchemaCollection` for bulk operations: + +```python +# Get all tools as a collection +all_tools = MemoryAPIClient.get_all_memory_tool_schemas() + +# Customize specific tools by name +all_tools.set_description("search_memory", "Find relevant memories") +all_tools.set_name("search_memory", "find_memories") + +# Get a specific tool for detailed customization +search_tool = all_tools.get_by_name("find_memories") +if search_tool: + search_tool.set_parameter_description("max_results", "Max results to return") + +# List all tool names +print(all_tools.names()) # ['find_memories', 'get_or_create_working_memory', ...] 
+ +# Convert to list for LLM consumption +response = await openai_client.chat.completions.create( + model="gpt-4o", + messages=messages, + tools=all_tools.to_list() +) +``` + +#### Creating Independent Copies + +Use `copy()` to create independent copies that won't affect the original: + +```python +# Create a copy for customization +custom_schema = MemoryAPIClient.get_memory_search_tool_schema().copy() +custom_schema.set_description("Custom description") + +# Original is unchanged +original = MemoryAPIClient.get_memory_search_tool_schema() +assert original.get_description() != custom_schema.get_description() +``` + +#### Anthropic Format + +The same customization API works for Anthropic tool schemas: + +```python +# Anthropic format +schema = MemoryAPIClient.get_memory_search_tool_schema_anthropic() +schema.set_description("Custom Anthropic description") + +# Check the format +print(schema.format) # "anthropic" + +# Use with Anthropic +response = await anthropic_client.messages.create( + model="claude-3-5-sonnet-20241022", + messages=messages, + tools=[schema.to_dict()] +) +``` + +#### ToolSchema API Reference + +| Method | Description | +|--------|-------------| +| `set_description(text)` | Set the tool description | +| `set_name(name)` | Set the tool name | +| `set_parameter_description(param, text)` | Set a parameter's description | +| `get_description()` | Get the current description | +| `get_name()` | Get the current name | +| `get_parameter_description(param)` | Get a parameter's description | +| `to_dict()` | Convert to dict (returns deep copy) | +| `copy()` | Create an independent copy | +| `format` | Property: "openai" or "anthropic" | + +#### ToolSchemaCollection API Reference + +| Method | Description | +|--------|-------------| +| `get_by_name(name)` | Get a specific tool by name | +| `set_description(name, text)` | Set description for a tool by name | +| `set_name(old_name, new_name)` | Rename a tool | +| `names()` | Get list of all tool names | +| 
`to_list()` | Convert to list of dicts | +| `copy()` | Create an independent copy | +| `len(collection)` | Get number of tools | +| `collection[index]` | Access tool by index | +| `for tool in collection` | Iterate over tools | + ## Memory Operations ### Creating Memories