Mirror of https://github.com/crewAIInc/crewAI.git, synced 2026-01-10 16:48:30 +00:00
fix: add type annotations to CacheHandler methods
- Replace Optional with union syntax
- Rename the input parameter to input_data to avoid shadowing the built-in
- Add JSON serialization for dict cache keys
- Add thread-safety TODO note
src/crewai/agents/cache/cache_handler.py (vendored, 49 lines changed)
@@ -1,15 +1,50 @@
-from typing import Any, Dict, Optional
+"""Cache handler for storing and retrieving tool execution results.
+
+This module provides a caching mechanism for tool outputs in the CrewAI framework,
+allowing agents to reuse previous tool execution results when the same tool is
+called with identical arguments.
+
+Classes:
+    CacheHandler: Manages the caching of tool execution results using an in-memory
+        dictionary with serialized tool arguments as keys.
+"""
+
+import json
+from typing import Any
 
 from pydantic import BaseModel, PrivateAttr
 
 
 class CacheHandler(BaseModel):
-    """Callback handler for tool usage."""
+    """Callback handler for tool usage.
 
-    _cache: Dict[str, Any] = PrivateAttr(default_factory=dict)
-
-    def add(self, tool, input, output):
-        self._cache[f"{tool}-{input}"] = output
+    Notes:
+        TODO: Make thread-safe, currently not thread-safe.
+    """
 
-    def read(self, tool, input) -> Optional[str]:
-        return self._cache.get(f"{tool}-{input}")
+    _cache: dict[str, Any] = PrivateAttr(default_factory=dict)
+
+    def add(self, tool: str, input_data: dict[str, Any] | None, output: str) -> None:
+        """Add a tool execution result to the cache.
+
+        Args:
+            tool: The name of the tool.
+            input_data: The input arguments for the tool.
+            output: The output from the tool execution.
+        """
+        cache_key = json.dumps(input_data, sort_keys=True) if input_data else ""
+        self._cache[f"{tool}-{cache_key}"] = output
+
+    def read(self, tool: str, input_data: dict[str, Any] | None) -> str | None:
+        """Read a tool execution result from the cache.
+
+        Args:
+            tool: The name of the tool.
+            input_data: The input arguments for the tool.
+
+        Returns:
+            The cached output if found, None otherwise.
+        """
+        cache_key = json.dumps(input_data, sort_keys=True) if input_data else ""
+        return self._cache.get(f"{tool}-{cache_key}")
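
For context, a minimal sketch (not part of the commit) of why the JSON-based key helps: json.dumps(..., sort_keys=True) yields the same key regardless of argument order, whereas the old f"{tool}-{input}" key depended on the dict's insertion order and repr. The tool name "search" below is just a placeholder.

import json

# Hypothetical tool arguments; same content, different insertion order.
args_a = {"query": "weather", "city": "Paris"}
args_b = {"city": "Paris", "query": "weather"}

# Old key scheme: the dict repr preserves insertion order, so the keys differ
# and the second call misses the cache.
print(f"search-{args_a}" == f"search-{args_b}")  # False

# New key scheme: sort_keys=True normalizes the ordering, so both calls
# map to the same cache entry.
key_a = f"search-{json.dumps(args_a, sort_keys=True)}"
key_b = f"search-{json.dumps(args_b, sort_keys=True)}"
print(key_a == key_b)  # True
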
@@ -1,8 +1,8 @@
 """Tools handler for managing tool execution and caching."""
 
-from crewai.agents.cache.cache_handler import CacheHandler
 from crewai.tools.cache_tools.cache_tools import CacheTools
 from crewai.tools.tool_calling import InstructorToolCalling, ToolCalling
+from crewai.agents.cache.cache_handler import CacheHandler
 
 
 class ToolsHandler:
@@ -39,6 +39,6 @@ class ToolsHandler:
         if self.cache and should_cache and calling.tool_name != CacheTools().name:
             self.cache.add(
                 tool=calling.tool_name,
-                input=calling.arguments,
+                input_data=calling.arguments,
                 output=output,
             )
@@ -189,7 +189,7 @@ class ToolUsage:
 
         if self.tools_handler and self.tools_handler.cache:
             result = self.tools_handler.cache.read(
-                tool=calling.tool_name, input=calling.arguments
+                tool=calling.tool_name, input_data=calling.arguments
             )  # type: ignore
             from_cache = result is not None
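
The new class docstring flags that the cache is not yet thread-safe. As a rough illustration of that TODO (not part of this commit; the class name below is hypothetical), the same key scheme could be guarded by a lock:

import json
import threading
from typing import Any


class ThreadSafeToolCache:
    """Hypothetical sketch: CacheHandler's key scheme behind a lock."""

    def __init__(self) -> None:
        self._cache: dict[str, Any] = {}
        self._lock = threading.Lock()

    def _key(self, tool: str, input_data: dict[str, Any] | None) -> str:
        # Same key scheme as CacheHandler: tool name plus JSON-serialized arguments.
        cache_key = json.dumps(input_data, sort_keys=True) if input_data else ""
        return f"{tool}-{cache_key}"

    def add(self, tool: str, input_data: dict[str, Any] | None, output: str) -> None:
        with self._lock:
            self._cache[self._key(tool, input_data)] = output

    def read(self, tool: str, input_data: dict[str, Any] | None) -> str | None:
        with self._lock:
            return self._cache.get(self._key(tool, input_data))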