diff --git a/docs/concepts/memory.mdx b/docs/concepts/memory.mdx index 751b6dd2e..613398646 100644 --- a/docs/concepts/memory.mdx +++ b/docs/concepts/memory.mdx @@ -185,7 +185,12 @@ my_crew = Crew( process=Process.sequential, memory=True, verbose=True, - embedder=OpenAIEmbeddingFunction(api_key=os.getenv("OPENAI_API_KEY"), model_name="text-embedding-3-small"), + embedder={ + "provider": "openai", + "config": { + "model": 'text-embedding-3-small' + } + } ) ``` @@ -242,13 +247,16 @@ my_crew = Crew( process=Process.sequential, memory=True, verbose=True, - embedder=OpenAIEmbeddingFunction( - api_key="YOUR_API_KEY", - api_base="YOUR_API_BASE_PATH", - api_type="azure", - api_version="YOUR_API_VERSION", - model_name="text-embedding-3-small" - ) + embedder={ + "provider": "openai", + "config": { + "api_key": "YOUR_API_KEY", + "api_base": "YOUR_API_BASE_PATH", + "api_type": "azure", + "api_version": "YOUR_API_VERSION", + "model_name": 'text-embedding-3-small' + } + } ) ``` @@ -264,12 +272,15 @@ my_crew = Crew( process=Process.sequential, memory=True, verbose=True, - embedder=GoogleVertexEmbeddingFunction( - project_id="YOUR_PROJECT_ID", - region="YOUR_REGION", - api_key="YOUR_API_KEY", - model_name="textembedding-gecko" - ) + embedder={ + "provider": "vertexai", + "config": { + "project_id": "YOUR_PROJECT_ID", + "region": "YOUR_REGION", + "api_key": "YOUR_API_KEY", + "model_name": "textembedding-gecko" + } + } ) ``` diff --git a/src/crewai/agent.py b/src/crewai/agent.py index dec0effd7..22f08284e 100644 --- a/src/crewai/agent.py +++ b/src/crewai/agent.py @@ -1,6 +1,6 @@ import shutil import subprocess -from typing import Any, Dict, List, Literal, Optional, Union +from typing import Any, Dict, List, Literal, Optional, Sequence, Union from pydantic import Field, InstanceOf, PrivateAttr, model_validator @@ -54,7 +54,6 @@ class Agent(BaseAgent): llm: The language model that will run the agent. 
function_calling_llm: The language model that will handle the tool calling for this agent, it overrides the crew function_calling_llm. max_iter: Maximum number of iterations for an agent to execute a task. - memory: Whether the agent should have memory or not. max_rpm: Maximum number of requests per minute for the agent execution to be respected. verbose: Whether the agent execution should be in verbose mode. allow_delegation: Whether the agent is allowed to delegate tasks to other agents. @@ -71,9 +70,7 @@ class Agent(BaseAgent): ) agent_ops_agent_name: str = None # type: ignore # Incompatible types in assignment (expression has type "None", variable has type "str") agent_ops_agent_id: str = None # type: ignore # Incompatible types in assignment (expression has type "None", variable has type "str") - cache_handler: InstanceOf[CacheHandler] = Field( - default=None, description="An instance of the CacheHandler class." - ) + step_callback: Optional[Any] = Field( default=None, description="Callback to be executed after each step of the agent execution.", @@ -107,10 +104,6 @@ class Agent(BaseAgent): default=True, description="Keep messages under the context window size by summarizing content.", ) - max_iter: int = Field( - default=20, - description="Maximum number of iterations for an agent to execute a task before giving it's best answer", - ) max_retry_limit: int = Field( default=2, description="Maximum number of retries for an agent to execute a task when an error occurs.", @@ -195,13 +188,15 @@ class Agent(BaseAgent): if task.output_json: # schema = json.dumps(task.output_json, indent=2) schema = generate_model_description(task.output_json) + task_prompt += "\n" + self.i18n.slice( + "formatted_task_instructions" + ).format(output_format=schema) elif task.output_pydantic: schema = generate_model_description(task.output_pydantic) - - task_prompt += "\n" + self.i18n.slice("formatted_task_instructions").format( - output_format=schema - ) + task_prompt += "\n" + 
self.i18n.slice( + "formatted_task_instructions" + ).format(output_format=schema) if context: task_prompt = self.i18n.slice("task_with_context").format( @@ -329,14 +324,14 @@ class Agent(BaseAgent): tools = agent_tools.tools() return tools - def get_multimodal_tools(self) -> List[Tool]: + def get_multimodal_tools(self) -> Sequence[BaseTool]: from crewai.tools.agent_tools.add_image_tool import AddImageTool return [AddImageTool()] def get_code_execution_tools(self): try: - from crewai_tools import CodeInterpreterTool + from crewai_tools import CodeInterpreterTool # type: ignore # Set the unsafe_mode based on the code_execution_mode attribute unsafe_mode = self.code_execution_mode == "unsafe" diff --git a/src/crewai/agents/agent_builder/base_agent.py b/src/crewai/agents/agent_builder/base_agent.py index a8c829a2a..e602e42a9 100644 --- a/src/crewai/agents/agent_builder/base_agent.py +++ b/src/crewai/agents/agent_builder/base_agent.py @@ -24,6 +24,7 @@ from crewai.tools import BaseTool from crewai.tools.base_tool import Tool from crewai.utilities import I18N, Logger, RPMController from crewai.utilities.config import process_config +from crewai.utilities.converter import Converter T = TypeVar("T", bound="BaseAgent") @@ -42,7 +43,7 @@ class BaseAgent(ABC, BaseModel): max_rpm (Optional[int]): Maximum number of requests per minute for the agent execution. allow_delegation (bool): Allow delegation of tasks to agents. tools (Optional[List[Any]]): Tools at the agent's disposal. - max_iter (Optional[int]): Maximum iterations for an agent to execute a task. + max_iter (int): Maximum iterations for an agent to execute a task. agent_executor (InstanceOf): An instance of the CrewAgentExecutor class. llm (Any): Language model that will run the agent. crew (Any): Crew to which the agent belongs. 
@@ -114,7 +115,7 @@ class BaseAgent(ABC, BaseModel): tools: Optional[List[Any]] = Field( default_factory=list, description="Tools at agents' disposal" ) - max_iter: Optional[int] = Field( + max_iter: int = Field( default=25, description="Maximum iterations for an agent to execute a task" ) agent_executor: InstanceOf = Field( @@ -125,11 +126,12 @@ class BaseAgent(ABC, BaseModel): ) crew: Any = Field(default=None, description="Crew to which the agent belongs.") i18n: I18N = Field(default=I18N(), description="Internationalization settings.") - cache_handler: InstanceOf[CacheHandler] = Field( + cache_handler: Optional[InstanceOf[CacheHandler]] = Field( default=None, description="An instance of the CacheHandler class." ) tools_handler: InstanceOf[ToolsHandler] = Field( - default=None, description="An instance of the ToolsHandler class." + default_factory=ToolsHandler, + description="An instance of the ToolsHandler class.", ) max_tokens: Optional[int] = Field( default=None, description="Maximum number of tokens for the agent's execution." 
@@ -254,7 +256,7 @@ class BaseAgent(ABC, BaseModel): @abstractmethod def get_output_converter( self, llm: Any, text: str, model: type[BaseModel] | None, instructions: str - ): + ) -> Converter: """Get the converter class for the agent to create json/pydantic outputs.""" pass diff --git a/src/crewai/memory/entity/entity_memory.py b/src/crewai/memory/entity/entity_memory.py index 67c72e927..fd4150d2c 100644 --- a/src/crewai/memory/entity/entity_memory.py +++ b/src/crewai/memory/entity/entity_memory.py @@ -11,7 +11,7 @@ class EntityMemory(Memory): """ def __init__(self, crew=None, embedder_config=None, storage=None, path=None): - if hasattr(crew, "memory_config") and crew.memory_config is not None: + if crew and hasattr(crew, "memory_config") and crew.memory_config is not None: self.memory_provider = crew.memory_config.get("provider") else: self.memory_provider = None diff --git a/src/crewai/memory/memory.py b/src/crewai/memory/memory.py index 46af2c04d..752a683b4 100644 --- a/src/crewai/memory/memory.py +++ b/src/crewai/memory/memory.py @@ -1,14 +1,16 @@ -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union + +from pydantic import BaseModel from crewai.memory.storage.rag_storage import RAGStorage -class Memory: +class Memory(BaseModel): """ Base class for memory, now supporting agent tags and generic metadata. 
""" - def __init__(self, storage: RAGStorage): + def __init__(self, storage: Union[RAGStorage, Any]): self.storage = storage def save( diff --git a/src/crewai/tools/agent_tools/add_image_tool.py b/src/crewai/tools/agent_tools/add_image_tool.py index 06bdfcf5b..939dff2df 100644 --- a/src/crewai/tools/agent_tools/add_image_tool.py +++ b/src/crewai/tools/agent_tools/add_image_tool.py @@ -7,11 +7,11 @@ from crewai.utilities import I18N i18n = I18N() + class AddImageToolSchema(BaseModel): image_url: str = Field(..., description="The URL or path of the image to add") action: Optional[str] = Field( - default=None, - description="Optional context or question about the image" + default=None, description="Optional context or question about the image" ) @@ -36,10 +36,7 @@ class AddImageTool(BaseTool): "image_url": { "url": image_url, }, - } + }, ] - return { - "role": "user", - "content": content - } + return {"role": "user", "content": content}