Mirror of https://github.com/crewAIInc/crewAI.git (synced 2026-01-08 07:38:29 +00:00)

Compare commits: devin/1742 ... devin/1742 (7 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 806b780cd6 | |
| | 60da4b35d3 | |
| | ef0b8e6913 | |
| | 05c66405cf | |
| | fe0813e831 | |
| | 33cebea15b | |
| | e723e5ca3f | |
@@ -115,6 +115,7 @@
 "concepts/testing",
 "concepts/cli",
 "concepts/tools",
+"concepts/event-listener",
 "concepts/langchain-tools",
 "concepts/llamaindex-tools"
 ]
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "crewai"
|
||||
version = "0.105.0"
|
||||
version = "0.108.0"
|
||||
description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks."
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10,<3.13"
|
||||
|
||||
@@ -14,7 +14,7 @@ warnings.filterwarnings(
|
||||
category=UserWarning,
|
||||
module="pydantic.main",
|
||||
)
|
||||
__version__ = "0.105.0"
|
||||
__version__ = "0.108.0"
|
||||
__all__ = [
|
||||
"Agent",
|
||||
"Crew",
|
||||
|
||||
@@ -134,73 +134,25 @@ class Agent(BaseAgent):
|
||||
self.cache_handler = CacheHandler()
|
||||
self.set_cache_handler(self.cache_handler)
|
||||
|
||||
def set_knowledge(
|
||||
self,
|
||||
knowledge_sources: Optional[List[BaseKnowledgeSource]] = None,
|
||||
embedder_config: Optional[Dict[str, Any]] = None
|
||||
) -> None:
|
||||
"""Set knowledge sources for the agent with optional embedder configuration.
|
||||
|
||||
This method allows agents to integrate external knowledge sources for enhanced
|
||||
contextual understanding and information retrieval during task execution.
|
||||
|
||||
Args:
|
||||
knowledge_sources: List of knowledge sources to integrate. These can include
|
||||
various data types such as text files, PDFs, CSV files, JSON files,
|
||||
web pages, YouTube videos, and documentation websites.
|
||||
embedder_config: Configuration for embedding generation. If not provided,
|
||||
a default configuration will be used.
|
||||
|
||||
Raises:
|
||||
ValueError: If the provided knowledge sources are invalid.
|
||||
TypeError: If knowledge_sources is not a list or None.
|
||||
ValueError: If embedder_config is missing required keys.
|
||||
|
||||
Example:
|
||||
```python
|
||||
from crewai.knowledge.source import StringKnowledgeSource
|
||||
|
||||
content = "The capital of France is Paris."
|
||||
source = StringKnowledgeSource(content=content)
|
||||
|
||||
agent.set_knowledge(
|
||||
knowledge_sources=[source],
|
||||
embedder_config={"provider": "openai", "model": "text-embedding-3-small"}
|
||||
)
|
||||
```
|
||||
"""
|
||||
def set_knowledge(self, crew_embedder: Optional[Dict[str, Any]] = None):
|
||||
try:
|
||||
# Handle backward compatibility with crew_embedder
|
||||
if embedder_config and self.embedder is None:
|
||||
self.embedder = embedder_config
|
||||
|
||||
# Validate knowledge sources
|
||||
if knowledge_sources is not None:
|
||||
if not isinstance(knowledge_sources, list):
|
||||
raise TypeError("knowledge_sources must be a list or None")
|
||||
|
||||
if not all(isinstance(k, BaseKnowledgeSource) for k in knowledge_sources):
|
||||
raise ValueError("All knowledge sources must be instances of BaseKnowledgeSource")
|
||||
|
||||
self.knowledge_sources = knowledge_sources
|
||||
if self.embedder is None and crew_embedder:
|
||||
self.embedder = crew_embedder
|
||||
|
||||
# Create knowledge object if knowledge sources are provided
|
||||
if self.knowledge_sources:
|
||||
full_pattern = re.compile(r"[^a-zA-Z0-9\-_\r\n]|(\.\.)")
|
||||
# Create a unique collection name based on agent role and id
|
||||
knowledge_agent_name = f"{re.sub(full_pattern, '_', self.role)}_{id(self)}"
|
||||
self.knowledge = Knowledge(
|
||||
sources=self.knowledge_sources,
|
||||
embedder=self.embedder,
|
||||
collection_name=knowledge_agent_name,
|
||||
storage=self.knowledge_storage or None,
|
||||
)
|
||||
except TypeError as e:
|
||||
raise TypeError(f"Invalid Knowledge Configuration Type: {str(e)}")
|
||||
except ValueError as e:
|
||||
raise ValueError(f"Invalid Knowledge Configuration Value: {str(e)}")
|
||||
except Exception as e:
|
||||
raise ValueError(f"Error setting knowledge: {str(e)}")
|
||||
knowledge_agent_name = f"{re.sub(full_pattern, '_', self.role)}"
|
||||
if isinstance(self.knowledge_sources, list) and all(
|
||||
isinstance(k, BaseKnowledgeSource) for k in self.knowledge_sources
|
||||
):
|
||||
self.knowledge = Knowledge(
|
||||
sources=self.knowledge_sources,
|
||||
embedder=self.embedder,
|
||||
collection_name=knowledge_agent_name,
|
||||
storage=self.knowledge_storage or None,
|
||||
)
|
||||
except (TypeError, ValueError) as e:
|
||||
raise ValueError(f"Invalid Knowledge Configuration: {str(e)}")
|
||||
|
||||
def execute_task(
|
||||
self,
|
||||
|
||||
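Net effect of the hunk above: knowledge is no longer passed through set_knowledge(); the agent carries knowledge_sources and embedder as fields, and set_knowledge() only receives the crew-level embedder as a fallback. A minimal usage sketch under that reading (constructor field names follow the diff; the StringKnowledgeSource import path and the embedder dict shape are assumptions to verify against the 0.108.0 API):

```python
from crewai import Agent, Crew, Task
from crewai.knowledge.source.string_knowledge_source import StringKnowledgeSource

# Knowledge sources are attached at construction time, not via set_knowledge().
source = StringKnowledgeSource(content="The capital of France is Paris.")

agent = Agent(
    role="Geography Tutor",
    goal="Answer geography questions accurately",
    backstory="A concise, factual tutor.",
    knowledge_sources=[source],  # stored on the agent (see self.knowledge_sources above)
    embedder={"provider": "openai", "config": {"model": "text-embedding-3-small"}},
)

task = Task(
    description="What is the capital of France?",
    expected_output="A single city name",
    agent=agent,
)

crew = Crew(agents=[agent], tasks=[task])
# During kickoff the crew calls agent.set_knowledge(crew_embedder=self.embedder);
# because this agent already has an embedder, the crew-level one is ignored.
result = crew.kickoff()
```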
@@ -2,7 +2,7 @@ import uuid
 from abc import ABC, abstractmethod
 from copy import copy as shallow_copy
 from hashlib import md5
-from typing import Any, Dict, List, Optional, TypeVar, Union, cast
+from typing import Any, Dict, List, Optional, TypeVar

 from pydantic import (
     UUID4,

@@ -148,10 +148,6 @@ class BaseAgent(ABC, BaseModel):
         default=None,
         description="Custom knowledge storage for the agent.",
     )
-    embedder_config: Optional[Dict[str, Any]] = Field(
-        default=None,
-        description="Configuration for embedding generation.",
-    )
     security_config: SecurityConfig = Field(
         default_factory=SecurityConfig,
         description="Security configuration for the agent, including fingerprinting.",
@@ -366,74 +362,5 @@ class BaseAgent(ABC, BaseModel):
|
||||
self._rpm_controller = rpm_controller
|
||||
self.create_agent_executor()
|
||||
|
||||
def set_knowledge(
|
||||
self,
|
||||
knowledge_sources: Optional[List[BaseKnowledgeSource]] = None,
|
||||
embedder_config: Optional[Dict[str, Any]] = None
|
||||
) -> None:
|
||||
"""Set knowledge sources for the agent with optional embedder configuration.
|
||||
|
||||
This method allows agents to integrate external knowledge sources for enhanced
|
||||
contextual understanding and information retrieval during task execution.
|
||||
|
||||
Args:
|
||||
knowledge_sources: List of knowledge sources to integrate. These can include
|
||||
various data types such as text files, PDFs, CSV files, JSON files,
|
||||
web pages, YouTube videos, and documentation websites.
|
||||
embedder_config: Configuration for embedding generation. If not provided,
|
||||
a default configuration will be used.
|
||||
|
||||
Raises:
|
||||
ValueError: If the provided knowledge sources are invalid.
|
||||
TypeError: If knowledge_sources is not a list or None.
|
||||
ValueError: If embedder_config is missing required keys.
|
||||
|
||||
Example:
|
||||
```python
|
||||
from crewai.knowledge.source import StringKnowledgeSource
|
||||
|
||||
content = "The capital of France is Paris."
|
||||
source = StringKnowledgeSource(content=content)
|
||||
|
||||
agent.set_knowledge(
|
||||
knowledge_sources=[source],
|
||||
embedder_config={"provider": "openai", "model": "text-embedding-3-small"}
|
||||
)
|
||||
```
|
||||
"""
|
||||
try:
|
||||
# Validate knowledge sources first
|
||||
if knowledge_sources is not None:
|
||||
if not isinstance(knowledge_sources, list):
|
||||
raise TypeError("knowledge_sources must be a list or None")
|
||||
|
||||
if not all(isinstance(k, BaseKnowledgeSource) for k in knowledge_sources):
|
||||
raise ValueError("All knowledge sources must be instances of BaseKnowledgeSource")
|
||||
|
||||
self.knowledge_sources = knowledge_sources
|
||||
|
||||
# Validate embedder configuration
|
||||
if embedder_config is not None:
|
||||
if not isinstance(embedder_config, dict):
|
||||
raise TypeError("embedder_config must be a dictionary or None")
|
||||
|
||||
if "provider" not in embedder_config:
|
||||
raise ValueError("embedder_config must contain a 'provider' key")
|
||||
|
||||
self.embedder_config = embedder_config
|
||||
|
||||
# Create knowledge object if knowledge sources are provided
|
||||
if self.knowledge_sources:
|
||||
# Create a unique collection name based on agent role and id
|
||||
knowledge_agent_name = f"{self.role.replace(' ', '_')}_{id(self)}"
|
||||
self.knowledge = Knowledge(
|
||||
sources=self.knowledge_sources,
|
||||
embedder_config=self.embedder_config,
|
||||
collection_name=knowledge_agent_name,
|
||||
)
|
||||
except TypeError as e:
|
||||
raise TypeError(f"Invalid Knowledge Configuration Type: {str(e)}")
|
||||
except ValueError as e:
|
||||
raise ValueError(f"Invalid Knowledge Configuration Value: {str(e)}")
|
||||
except Exception as e:
|
||||
raise ValueError(f"Error setting knowledge: {str(e)}")
|
||||
def set_knowledge(self, crew_embedder: Optional[Dict[str, Any]] = None):
|
||||
pass
|
||||
|
||||
@@ -5,7 +5,7 @@ description = "{{name}} using crewAI"
|
||||
authors = [{ name = "Your Name", email = "you@example.com" }]
|
||||
requires-python = ">=3.10,<3.13"
|
||||
dependencies = [
|
||||
"crewai[tools]>=0.105.0,<1.0.0"
|
||||
"crewai[tools]>=0.108.0,<1.0.0"
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
|
||||
@@ -5,7 +5,7 @@ description = "{{name}} using crewAI"
|
||||
authors = [{ name = "Your Name", email = "you@example.com" }]
|
||||
requires-python = ">=3.10,<3.13"
|
||||
dependencies = [
|
||||
"crewai[tools]>=0.105.0,<1.0.0",
|
||||
"crewai[tools]>=0.108.0,<1.0.0",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
|
||||
@@ -5,7 +5,7 @@ description = "Power up your crews with {{folder_name}}"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10,<3.13"
|
||||
dependencies = [
|
||||
"crewai[tools]>=0.105.0"
|
||||
"crewai[tools]>=0.108.0"
|
||||
]
|
||||
|
||||
[tool.crewai]
|
||||
|
||||
@@ -621,7 +621,7 @@ class Crew(BaseModel):
|
||||
agent.i18n = i18n
|
||||
# type: ignore[attr-defined] # Argument 1 to "_interpolate_inputs" of "Crew" has incompatible type "dict[str, Any] | None"; expected "dict[str, Any]"
|
||||
agent.crew = self # type: ignore[attr-defined]
|
||||
agent.set_knowledge(embedder_config=self.embedder)
|
||||
agent.set_knowledge(crew_embedder=self.embedder)
|
||||
# TODO: Create an AgentFunctionCalling protocol for future refactoring
|
||||
if not agent.function_calling_llm: # type: ignore # "BaseAgent" has no attribute "function_calling_llm"
|
||||
agent.function_calling_llm = self.function_calling_llm # type: ignore # "BaseAgent" has no attribute "function_calling_llm"
|
||||
|
||||
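For downstream code that called this hook directly (uncommon; the crew normally calls it during kickoff, as the hunk above shows), only the keyword changes. A hedged before/after sketch, with the embedder dict purely illustrative:

```python
# Previously on this branch: the embedder was passed as embedder_config
agent.set_knowledge(embedder_config={"provider": "openai"})

# After syncing to 0.108.0: the crew-level embedder is passed as crew_embedder,
# and the knowledge sources themselves come from agent.knowledge_sources
agent.set_knowledge(crew_embedder={"provider": "openai"})
```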
@@ -9,6 +9,29 @@ from crewai.memory.storage.interface import Storage
|
||||
class Mem0Storage(Storage):
|
||||
"""
|
||||
Extends Storage to handle embedding and searching across entities using Mem0.
|
||||
|
||||
Supports configuring Redis as a vector store through the memory_config:
|
||||
|
||||
```python
|
||||
crew = Crew(
|
||||
memory=True,
|
||||
memory_config={
|
||||
"provider": "mem0",
|
||||
"config": {
|
||||
"user_id": "your-user-id",
|
||||
"api_key": os.getenv("MEM0_API_KEY"), # Use environment variable
|
||||
"vector_store": {
|
||||
"provider": "redis",
|
||||
"config": {
|
||||
"collection_name": "collection_name",
|
||||
"embedding_model_dims": 1536,
|
||||
"redis_url": "redis://redis-host:6379/0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
```
|
||||
"""
|
||||
|
||||
def __init__(self, type, crew=None):
|
||||
@@ -26,19 +49,49 @@ class Mem0Storage(Storage):
|
||||
if type == "user" and not user_id:
|
||||
raise ValueError("User ID is required for user memory type")
|
||||
|
||||
# API key in memory config overrides the environment variable
|
||||
# Get configuration from memory_config
|
||||
config = self.memory_config.get("config", {})
|
||||
mem0_api_key = config.get("api_key") or os.getenv("MEM0_API_KEY")
|
||||
mem0_org_id = config.get("org_id")
|
||||
mem0_project_id = config.get("project_id")
|
||||
vector_store_config = config.get("vector_store")
|
||||
|
||||
# Initialize MemoryClient with available parameters
|
||||
if mem0_org_id and mem0_project_id:
|
||||
self.memory = MemoryClient(
|
||||
api_key=mem0_api_key, org_id=mem0_org_id, project_id=mem0_project_id
|
||||
)
|
||||
# If vector store configuration is provided, use Memory.from_config
|
||||
if vector_store_config:
|
||||
try:
|
||||
from mem0.memory.main import Memory
|
||||
|
||||
# Prepare memory config with vector store configuration
|
||||
memory_config = {
|
||||
"vector_store": vector_store_config
|
||||
}
|
||||
|
||||
# Add API key if provided
|
||||
if mem0_api_key:
|
||||
memory_config["api_key"] = mem0_api_key
|
||||
|
||||
# Add org_id and project_id if provided
|
||||
if mem0_org_id:
|
||||
memory_config["org_id"] = mem0_org_id
|
||||
if mem0_project_id:
|
||||
memory_config["project_id"] = mem0_project_id
|
||||
|
||||
# Initialize Memory with configuration
|
||||
self.memory = Memory.from_config(memory_config)
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
"Mem0 is not installed. Please install it with `pip install mem0ai`."
|
||||
)
|
||||
except Exception as e:
|
||||
raise ValueError(f"Failed to initialize Memory with vector store configuration: {e}")
|
||||
else:
|
||||
self.memory = MemoryClient(api_key=mem0_api_key)
|
||||
# Fall back to default MemoryClient initialization
|
||||
if mem0_org_id and mem0_project_id:
|
||||
self.memory = MemoryClient(
|
||||
api_key=mem0_api_key, org_id=mem0_org_id, project_id=mem0_project_id
|
||||
)
|
||||
else:
|
||||
self.memory = MemoryClient(api_key=mem0_api_key)
|
||||
|
||||
def _sanitize_role(self, role: str) -> str:
|
||||
"""
|
||||
|
||||
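For context, the new branch above ultimately delegates to mem0's own `Memory.from_config`. Roughly what that direct call looks like with the same Redis vector-store dict (the import path and `from_config` call mirror the diff; the `add`/`search` calls and config values are illustrative and assume the required provider credentials and a reachable Redis instance):

```python
from mem0.memory.main import Memory  # same import the new code path uses

# Same shape Mem0Storage builds before calling Memory.from_config()
memory_config = {
    "vector_store": {
        "provider": "redis",
        "config": {
            "collection_name": "crew_memories",
            "embedding_model_dims": 1536,
            "redis_url": "redis://localhost:6379/0",
        },
    },
}

memory = Memory.from_config(memory_config)

# Typical mem0 usage once configured (requires embedding/LLM credentials):
memory.add("User prefers concise answers.", user_id="your-user-id")
print(memory.search("answer style", user_id="your-user-id"))
```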
@@ -1,3 +1,4 @@
+import os
 from typing import Any, Dict, Optional

 from crewai.memory.memory import Memory

@@ -9,6 +10,29 @@ class UserMemory(Memory):
     Inherits from the Memory class and utilizes an instance of a class that
     adheres to the Storage for data storage, specifically working with
     MemoryItem instances.
+
+    To configure with Redis as a vector store, provide a memory_config to the Crew:
+
+    ```python
+    crew = Crew(
+        memory=True,
+        memory_config={
+            "provider": "mem0",
+            "config": {
+                "user_id": "your-user-id",
+                "api_key": os.getenv("MEM0_API_KEY"),  # Use environment variable
+                "vector_store": {
+                    "provider": "redis",
+                    "config": {
+                        "collection_name": "collection_name",
+                        "embedding_model_dims": 1536,
+                        "redis_url": "redis://redis-host:6379/0"
+                    }
+                }
+            }
+        }
+    )
+    ```
     """

     def __init__(self, crew=None):
@@ -1,6 +1,6 @@
|
||||
from typing import Any, Dict, Optional, Union
|
||||
|
||||
from pydantic import BaseModel
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
from .base_events import CrewEvent
|
||||
|
||||
@@ -52,9 +52,11 @@ class MethodExecutionFailedEvent(FlowEvent):
|
||||
|
||||
flow_name: str
|
||||
method_name: str
|
||||
error: Any
|
||||
error: Exception
|
||||
type: str = "method_execution_failed"
|
||||
|
||||
model_config = ConfigDict(arbitrary_types_allowed=True)
|
||||
|
||||
|
||||
class FlowFinishedEvent(FlowEvent):
|
||||
"""Event emitted when a flow completes execution"""
|
||||
|
||||
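The `ConfigDict` addition is what makes the `error: Exception` annotation legal: pydantic v2 cannot generate a core schema for arbitrary classes such as `Exception` and raises a schema-generation error at class-definition time unless `arbitrary_types_allowed=True` is set. A minimal standalone sketch (class and field names here are illustrative, not taken from the diff):

```python
from pydantic import BaseModel, ConfigDict


class FailedEvent(BaseModel):
    # Without this config, defining the class raises a schema-generation error
    # because Exception is an "arbitrary" (non-schema) type for pydantic v2.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    method_name: str
    error: Exception


event = FailedEvent(method_name="run", error=ValueError("boom"))
print(type(event.error).__name__, event.error)  # ValueError boom
```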
@@ -1586,76 +1586,6 @@ def test_agent_execute_task_with_ollama():
|
||||
assert "AI" in result or "artificial intelligence" in result.lower()
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_base_agent_set_knowledge():
|
||||
"""Test that set_knowledge correctly sets knowledge sources and creates a Knowledge object."""
|
||||
from crewai.agents.agent_builder.base_agent import BaseAgent
|
||||
from crewai.knowledge.knowledge import Knowledge
|
||||
|
||||
# Create a test implementation of BaseAgent
|
||||
class TestAgent(BaseAgent):
|
||||
def execute_task(self, task, context=None, tools=None):
|
||||
return "Test execution"
|
||||
|
||||
def create_agent_executor(self, tools=None):
|
||||
pass
|
||||
|
||||
def _parse_tools(self, tools):
|
||||
return tools
|
||||
|
||||
def get_delegation_tools(self, agents):
|
||||
return []
|
||||
|
||||
def get_output_converter(self, llm, text, model, instructions):
|
||||
return None
|
||||
|
||||
# Create a knowledge source with some content
|
||||
content = "The capital of France is Paris."
|
||||
string_source = StringKnowledgeSource(content=content)
|
||||
|
||||
# Create an agent
|
||||
agent = TestAgent(
|
||||
role="Test Agent",
|
||||
goal="Test Goal",
|
||||
backstory="Test Backstory",
|
||||
)
|
||||
|
||||
# Mock the Knowledge class to avoid API calls
|
||||
with patch("crewai.agents.agent_builder.base_agent.Knowledge") as MockKnowledge:
|
||||
mock_knowledge_instance = MockKnowledge.return_value
|
||||
mock_knowledge_instance.sources = [string_source]
|
||||
|
||||
# Test setting knowledge
|
||||
agent.set_knowledge(knowledge_sources=[string_source])
|
||||
|
||||
# Verify that knowledge was set correctly
|
||||
assert agent.knowledge_sources == [string_source]
|
||||
assert agent.knowledge is not None
|
||||
assert MockKnowledge.called
|
||||
# Check that collection name starts with the agent role (now includes unique ID)
|
||||
assert MockKnowledge.call_args[1]["collection_name"].startswith("Test_Agent_")
|
||||
|
||||
# Test with embedder config
|
||||
embedder_config = {
|
||||
"provider": "openai",
|
||||
"model": "text-embedding-3-small"
|
||||
}
|
||||
|
||||
agent.set_knowledge(
|
||||
knowledge_sources=[string_source],
|
||||
embedder_config=embedder_config
|
||||
)
|
||||
|
||||
assert agent.embedder_config == embedder_config
|
||||
assert MockKnowledge.call_args[1]["embedder_config"] == embedder_config
|
||||
|
||||
# Test with invalid knowledge source - we need to directly test the validation logic
|
||||
# rather than relying on the Knowledge class to raise an error
|
||||
with pytest.raises(ValueError):
|
||||
# This will trigger the validation check in set_knowledge
|
||||
agent.set_knowledge(knowledge_sources=["invalid source"])
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_agent_with_knowledge_sources():
|
||||
# Create a knowledge source with some content
|
||||
|
||||
@@ -1,77 +0,0 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"input": ["The capital of France is Paris."], "model": "text-embedding-3-small",
|
||||
"encoding_format": "base64"}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '110'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.61.0
|
||||
x-stainless-arch:
|
||||
- x64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- Linux
|
||||
x-stainless-package-version:
|
||||
- 1.61.0
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/embeddings
|
||||
response:
|
||||
content: "{\n \"error\": {\n \"message\": \"Incorrect API key provided:
|
||||
sk-proj-********************************************************************************************************************************************************sLcA.
|
||||
You can find your API key at https://platform.openai.com/account/api-keys.\",\n
|
||||
\ \"type\": \"invalid_request_error\",\n \"param\": null,\n \"code\":
|
||||
\"invalid_api_key\"\n }\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- 9219d2095edc680f-SEA
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Length:
|
||||
- '414'
|
||||
Content-Type:
|
||||
- application/json; charset=utf-8
|
||||
Date:
|
||||
- Mon, 17 Mar 2025 04:41:52 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=VL2xCt_SZgziztqb6CFL0smPGPhRpbELZKUdSHPmfLQ-1742186512-1.0.1.1-UFayToBt3jFkEkjQwZJ7A4KLy0.uZK9sqwbNqpMQ75dMEz2hycNU3NwtXor0NmM7k7XsdxtcXPfv.JcVjYatku_yE3I6qMEMGsgoog.guDU;
|
||||
path=/; expires=Mon, 17-Mar-25 05:11:52 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=O4ymzjmuwsEutsmbHpzKDz4uyyZNA1tSUX0M.FNCjro-1742186512991-0.0.1.1-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
X-Content-Type-Options:
|
||||
- nosniff
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains; preload
|
||||
vary:
|
||||
- Origin
|
||||
x-request-id:
|
||||
- req_62700144d22a58e93c0464aa643af3ec
|
||||
http_version: HTTP/1.1
|
||||
status_code: 401
|
||||
version: 1
|
||||
tests/memory/test_redis_mem0_storage.py (new file, 68 lines)
@@ -0,0 +1,68 @@
+import os
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+from crewai.memory.storage.mem0_storage import Mem0Storage
+
+
+class TestMem0RedisIntegration:
+    @pytest.fixture
+    def mock_memory(self):
+        with patch("mem0.memory.main.Memory") as mock_memory:
+            mock_memory_instance = MagicMock()
+            mock_memory.from_config.return_value = mock_memory_instance
+            yield mock_memory
+
+    def test_mem0_with_redis_config(self, mock_memory):
+        # Create a mock crew with Redis vector store configuration
+        mock_crew = MagicMock()
+        mock_crew.memory_config = {
+            "provider": "mem0",
+            "config": {
+                "user_id": "test-user",
+                "api_key": "test-api-key",
+                "vector_store": {
+                    "provider": "redis",
+                    "config": {
+                        "collection_name": "test_collection",
+                        "embedding_model_dims": 1536,
+                        "redis_url": "redis://localhost:6379/0"
+                    }
+                }
+            }
+        }
+
+        # Create Mem0Storage instance
+        with patch("crewai.memory.storage.mem0_storage.MemoryClient"):
+            storage = Mem0Storage(type="user", crew=mock_crew)
+
+            # Check that Memory.from_config was called with correct parameters
+            mock_memory.from_config.assert_called_once()
+            config_arg = mock_memory.from_config.call_args[0][0]
+            assert "vector_store" in config_arg
+            assert config_arg["vector_store"]["provider"] == "redis"
+            assert config_arg["vector_store"]["config"]["redis_url"] == "redis://localhost:6379/0"
+
+    def test_fallback_to_memory_client(self):
+        # Create a mock crew without vector store configuration
+        mock_crew = MagicMock()
+        mock_crew.memory_config = {
+            "provider": "mem0",
+            "config": {
+                "user_id": "test-user",
+                "api_key": "test-api-key"
+            }
+        }
+
+        # Mock MemoryClient
+        with patch("crewai.memory.storage.mem0_storage.MemoryClient") as mock_client:
+            mock_client_instance = MagicMock()
+            mock_client.return_value = mock_client_instance
+
+            # Create Mem0Storage instance
+            storage = Mem0Storage(type="user", crew=mock_crew)
+
+            # Check that MemoryClient was called (fallback path)
+            mock_client.assert_called_once()
+            assert mock_client.call_args[1]["api_key"] == "test-api-key"