mirror of https://github.com/crewAIInc/crewAI.git
synced 2026-01-10 16:48:30 +00:00

Integrate Mem0
@@ -25,6 +25,7 @@ The `Crew` class has been enriched with several attributes to support advanced f

- **Crew Sharing (`share_crew`)**: Enables sharing of crew information with CrewAI for continuous improvement and training models. The privacy implications and benefits of this feature, including how it contributes to model improvement, should be outlined.
- **Usage Metrics (`usage_metrics`)**: Stores all metrics for the language model (LLM) usage during all tasks' execution, providing insights into operational efficiency and areas for improvement. Detailed information on accessing and interpreting these metrics for performance analysis should be provided.
- **Memory Usage (`memory`)**: Indicates whether the crew should use memory to store memories of its execution, enhancing task execution and agent learning.
- **Memory Provider (`memory_provider`)**: Specifies the memory provider to be used by the crew for storing memories.
- **Embedder Configuration (`embedder`)**: Specifies the configuration for the embedder to be used by the crew for understanding and generating language. This attribute supports customization of the language model provider.
- **Cache Management (`cache`)**: Determines whether the crew should use a cache to store the results of tool executions, optimizing performance.
- **Output Logging (`output_log_file`)**: Specifies the file path for logging the output of the crew execution.
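For illustration only (not part of this commit), a minimal sketch of how these attributes might be set together on a `Crew`; the agent, task, and log-file path below are placeholders:

```python
from crewai import Agent, Crew, Process, Task

# Hypothetical agent and task, just to make the sketch self-contained.
researcher = Agent(role="Researcher", goal="Find relevant facts", backstory="Tech analyst")
research_task = Task(
    description="Collect recent findings on a topic.",
    expected_output="A short list of findings.",
    agent=researcher,
)

crew = Crew(
    agents=[researcher],
    tasks=[research_task],
    process=Process.sequential,
    memory=True,                      # enable execution memories
    memory_provider="mem0",           # optional; omit to use the built-in RAG storage
    embedder={"provider": "openai"},  # embedder config, mostly used by memory
    cache=True,                       # cache tool execution results
    output_log_file="crew_log.txt",   # hypothetical path for execution logs
)
```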
@@ -22,6 +22,7 @@ A crew in crewAI represents a collaborative group of agents working together to

| **Language** _(optional)_ | `language` | Language used for the crew, defaults to English. |
| **Language File** _(optional)_ | `language_file` | Path to the language file to be used for the crew. |
| **Memory** _(optional)_ | `memory` | Utilized for storing execution memories (short-term, long-term, entity memory). |
| **Memory Provider** _(optional)_ | `memory_provider` | Specifies the memory provider to be used by the crew for storing memories. |
| **Cache** _(optional)_ | `cache` | Specifies whether to use a cache for storing the results of tools' execution. |
| **Embedder** _(optional)_ | `embedder` | Configuration for the embedder to be used by the crew. Mostly used by memory for now. |
| **Full Output** _(optional)_ | `full_output` | Whether the crew should return the full output with all tasks outputs or just the final output. |
@@ -197,6 +197,25 @@ crewai reset_memories [OPTIONS]

- **Type:** Flag (boolean)
- **Default:** False
## Use Mem0 to store memories

[Mem0](https://www.mem0.ai/) enhances AI assistants and agents with an intelligent memory layer, enabling personalized AI interactions. Mem0 remembers user preferences, adapts to individual needs, and continuously improves over time, making it ideal for customer support chatbots, AI assistants, and autonomous systems.

To use Mem0 as the memory provider for your crew, set the `MEM0_API_KEY` environment variable. Refer to the [Mem0 documentation](https://docs.mem0.ai/platform/quickstart) for more information on how to obtain an API key.

### Example: Using Mem0 as the Memory Provider

```python
from crewai import Crew, Agent, Task, Process

my_crew = Crew(
    agents=[...],
    tasks=[...],
    process=Process.sequential,
    memory=True,
    verbose=True,
    memory_provider="mem0"
)
```
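If you prefer setting the key from Python rather than the shell, a minimal sketch (the key value below is a placeholder) that reuses `my_crew` from the example above:

```python
import os

# Placeholder value; use your real Mem0 API key from the Mem0 dashboard.
os.environ.setdefault("MEM0_API_KEY", "m0-...")

result = my_crew.kickoff()
print(result)
```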
## Benefits of Using crewAI's Memory System

- **Adaptive Learning:** Crews become more efficient over time, adapting to new information and refining their approach to tasks.
- **Enhanced Personalization:** Memory enables agents to remember user preferences and historical interactions, leading to personalized experiences.
poetry.lock (generated, 19 lines changed)
@@ -1005,13 +1005,13 @@ files = [

[[package]]
name = "embedchain"
version = "0.1.120"
version = "0.1.121"
description = "Simplest open source retrieval (RAG) framework"
optional = false
python-versions = "<=3.13,>=3.9"
files = [
    {file = "embedchain-0.1.120-py3-none-any.whl", hash = "sha256:9eaa946f8a7b394080c56067849d7852a78361dd5e7b099ebf42989c07a1814d"},
    {file = "embedchain-0.1.120.tar.gz", hash = "sha256:6061c261a054d677e5b9c4062146d45e04e8572c67152120913d61aee4c22ae3"},
    {file = "embedchain-0.1.121-py3-none-any.whl", hash = "sha256:c756e8750fb9e3431b6d2a0b0dfbb0dfebeae2d7669d3dd6894311a632abfe77"},
    {file = "embedchain-0.1.121.tar.gz", hash = "sha256:1427a43fd92b0e5303d0d733ebcd5310df14da8bd8dba0b08818d0d3658e7c3e"},
]

[package.dependencies]

@@ -1025,7 +1025,7 @@ langchain = ">0.2,<=0.3"

langchain-cohere = ">=0.1.4,<0.2.0"
langchain-community = ">=0.2.6,<0.3.0"
langchain-openai = ">=0.1.7,<0.2.0"
mem0ai = ">=0.0.9,<0.0.10"
mem0ai = ">=0.0.20,<0.0.21"
openai = ">=1.1.1"
posthog = ">=3.0.2,<4.0.0"
pypdf = ">=4.0.1,<5.0.0"
@@ -2707,20 +2707,22 @@ files = [

[[package]]
name = "mem0ai"
version = "0.0.9"
version = "0.0.20"
description = "Long-term memory for AI Agents"
optional = false
python-versions = "<4.0,>=3.8"
files = [
    {file = "mem0ai-0.0.9-py3-none-any.whl", hash = "sha256:d4de435729af4fd3d597d022ffb2af89a0630d6c3b4769792bbe27d2ce816858"},
    {file = "mem0ai-0.0.9.tar.gz", hash = "sha256:e4374d5d04aa3f543cd3325f700e4b62f5358ae1c6fa5c44b2ff790c10c4e5f1"},
    {file = "mem0ai-0.0.20-py3-none-any.whl", hash = "sha256:c19b2082173c818f3516279f0924bfd763e2d18175560332c94e415e5131fd3b"},
    {file = "mem0ai-0.0.20.tar.gz", hash = "sha256:459b96850156c8e51e321e3ab4e5f86fb00d75532c16ad41a3eb09578e0ce00a"},
]

[package.dependencies]
openai = ">=1.33.0,<2.0.0"
posthog = ">=3.5.0,<4.0.0"
pydantic = ">=2.7.3,<3.0.0"
pytz = ">=2024.1,<2025.0"
qdrant-client = ">=1.9.1,<2.0.0"
sqlalchemy = ">=2.0.31,<3.0.0"

[[package]]
name = "mergedeep"
@@ -5572,6 +5574,7 @@ description = "Automatically mock your HTTP interactions to simplify and speed u

optional = false
python-versions = ">=3.8"
files = [
    {file = "vcrpy-6.0.1-py2.py3-none-any.whl", hash = "sha256:621c3fb2d6bd8aa9f87532c688e4575bcbbde0c0afeb5ebdb7e14cac409edfdd"},
    {file = "vcrpy-6.0.1.tar.gz", hash = "sha256:9e023fee7f892baa0bbda2f7da7c8ac51165c1c6e38ff8688683a12a4bde9278"},
]
@@ -6073,4 +6076,4 @@ tools = ["crewai-tools"]

[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<=3.13"
content-hash = "91ba982ea96ca7be017d536784223d4ef83e86de05d11eb1c3ce0fc1b726f283"
content-hash = "a69ccfe611edf0c3857cdf1dbcbdbd443b0130bff418713dce3993025cc14311"
@@ -27,7 +27,7 @@ python-dotenv = "^1.0.0"

appdirs = "^1.4.4"
jsonref = "^1.1.0"
agentops = { version = "^0.3.0", optional = true }
embedchain = "^0.1.114"
embedchain = "^0.1.121"
json-repair = "^0.25.2"

[tool.poetry.extras]
@@ -176,9 +176,11 @@ class Agent(BaseAgent):

        if self.crew and self.crew.memory:
            contextual_memory = ContextualMemory(
                self.crew.memory_provider,
                self.crew._short_term_memory,
                self.crew._long_term_memory,
                self.crew._entity_memory,
                self.crew._user_memory,
            )
            memory = contextual_memory.build_context_for_task(task, context)
            if memory.strip() != "":
@@ -26,6 +26,7 @@ from crewai.crews.crew_output import CrewOutput

from crewai.memory.entity.entity_memory import EntityMemory
from crewai.memory.long_term.long_term_memory import LongTermMemory
from crewai.memory.short_term.short_term_memory import ShortTermMemory
from crewai.memory.user.user_memory import UserMemory
from crewai.process import Process
from crewai.task import Task
from crewai.tasks.conditional_task import ConditionalTask
@@ -92,6 +93,7 @@ class Crew(BaseModel):

    _short_term_memory: Optional[InstanceOf[ShortTermMemory]] = PrivateAttr()
    _long_term_memory: Optional[InstanceOf[LongTermMemory]] = PrivateAttr()
    _entity_memory: Optional[InstanceOf[EntityMemory]] = PrivateAttr()
    _user_memory: Optional[InstanceOf[UserMemory]] = PrivateAttr()
    _train: Optional[bool] = PrivateAttr(default=False)
    _train_iteration: Optional[int] = PrivateAttr()
    _inputs: Optional[Dict[str, Any]] = PrivateAttr(default=None)
@@ -112,6 +114,10 @@ class Crew(BaseModel):

        default=False,
        description="Whether the crew should use memory to store memories of it's execution",
    )
    memory_provider: Optional[str] = Field(
        default=None,
        description="The memory provider to be used for the crew.",
    )
    embedder: Optional[dict] = Field(
        default={"provider": "openai"},
        description="Configuration for the embedder to be used for the crew.",
@@ -215,9 +221,16 @@ class Crew(BaseModel):

        if self.memory:
            self._long_term_memory = LongTermMemory()
            self._short_term_memory = ShortTermMemory(
                crew=self, embedder_config=self.embedder
                memory_provider=self.memory_provider,
                crew=self,
                embedder_config=self.embedder,
            )
            self._entity_memory = EntityMemory(crew=self, embedder_config=self.embedder)
            self._entity_memory = EntityMemory(
                memory_provider=self.memory_provider,
                crew=self,
                embedder_config=self.embedder,
            )
            self._user_memory = UserMemory(crew=self)
        return self

    @model_validator(mode="after")
@@ -1,3 +1,4 @@

from .entity.entity_memory import EntityMemory
from .long_term.long_term_memory import LongTermMemory
from .short_term.short_term_memory import ShortTermMemory
from .user.user_memory import UserMemory
@@ -1,13 +1,22 @@

from typing import Optional

from crewai.memory import EntityMemory, LongTermMemory, ShortTermMemory
from crewai.memory import EntityMemory, LongTermMemory, ShortTermMemory, UserMemory


class ContextualMemory:
    def __init__(self, stm: ShortTermMemory, ltm: LongTermMemory, em: EntityMemory):
    def __init__(
        self,
        memory_provider: str,
        stm: ShortTermMemory,
        ltm: LongTermMemory,
        em: EntityMemory,
        um: UserMemory,
    ):
        self.memory_provider = memory_provider
        self.stm = stm
        self.ltm = ltm
        self.em = em
        self.um = um

    def build_context_for_task(self, task, context) -> str:
        """

@@ -23,6 +32,8 @@ class ContextualMemory:

        context.append(self._fetch_ltm_context(task.description))
        context.append(self._fetch_stm_context(query))
        context.append(self._fetch_entity_context(query))
        if self.memory_provider == "mem0":
            context.append(self._fetch_user_memory(query))
        return "\n".join(filter(None, context))

    def _fetch_stm_context(self, query) -> str:

@@ -60,6 +71,19 @@ class ContextualMemory:

        """
        em_results = self.em.search(query)
        formatted_results = "\n".join(
            [f"- {result['context']}" for result in em_results]  # type: ignore # Invalid index type "str" for "str"; expected type "SupportsIndex | slice"
            [
                f"- {result['memory'] if self.memory_provider == 'mem0' else result['context']}"
                for result in em_results
            ]  # type: ignore # Invalid index type "str" for "str"; expected type "SupportsIndex | slice"
        )
        return f"Entities:\n{formatted_results}" if em_results else ""

    def _fetch_user_memory(self, query) -> str:
        """
        Fetches relevant user memory information from User Memory related to the task's description and expected_output,
        """
        um_results = self.um.search(query)
        formatted_results = "\n".join(
            [f"- {result['memory']}" for result in um_results]
        )
        return f"User memories/preferences:\n{formatted_results}" if um_results else ""
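For orientation only (not part of the diff), this is roughly how the updated constructor is exercised at the call site shown in the `class Agent(BaseAgent)` hunk above; `crew`, `task`, and `context` stand for objects the caller already has, and the import path is assumed to be the module this hunk belongs to:

```python
from crewai.memory.contextual.contextual_memory import ContextualMemory

# Build the aggregated memory context the agent prepends to the task prompt.
contextual_memory = ContextualMemory(
    crew.memory_provider,      # e.g. "mem0", or None for the default RAG storage
    crew._short_term_memory,
    crew._long_term_memory,
    crew._entity_memory,
    crew._user_memory,
)
memory_context = contextual_memory.build_context_for_task(task, context)
if memory_context.strip() != "":
    # With "mem0", this can include a "User memories/preferences:" block.
    print(memory_context)
```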
@@ -1,6 +1,7 @@

from crewai.memory.entity.entity_memory_item import EntityMemoryItem
from crewai.memory.memory import Memory
from crewai.memory.storage.rag_storage import RAGStorage
from crewai.memory.storage.mem0_storage import Mem0Storage


class EntityMemory(Memory):
@@ -10,18 +11,33 @@ class EntityMemory(Memory):

    Inherits from the Memory class.
    """

    def __init__(self, crew=None, embedder_config=None):
        storage = RAGStorage(
            type="entities",
            allow_reset=False,
            embedder_config=embedder_config,
            crew=crew,
        )
    def __init__(self, memory_provider, crew=None, embedder_config=None):
        self.memory_provider = memory_provider
        if self.memory_provider == "mem0":
            storage = Mem0Storage(
                type="entities",
                crew=crew,
            )
        else:
            storage = RAGStorage(
                type="entities",
                allow_reset=False,
                embedder_config=embedder_config,
                crew=crew,
            )
        super().__init__(storage)

    def save(self, item: EntityMemoryItem) -> None:  # type: ignore # BUG?: Signature of "save" incompatible with supertype "Memory"
        """Saves an entity item into the SQLite storage."""
        data = f"{item.name}({item.type}): {item.description}"
        if self.memory_provider == "mem0":
            data = f"""
            Remember details about the following entity:
            Name: {item.name}
            Type: {item.type}
            Entity Description: {item.description}
            """
        else:
            data = f"{item.name}({item.type}): {item.description}"
        super().save(data, item.metadata)

    def reset(self) -> None:
@@ -2,6 +2,7 @@ from typing import Any, Dict, Optional

from crewai.memory.memory import Memory
from crewai.memory.short_term.short_term_memory_item import ShortTermMemoryItem
from crewai.memory.storage.rag_storage import RAGStorage
from crewai.memory.storage.mem0_storage import Mem0Storage


class ShortTermMemory(Memory):

@@ -13,10 +14,14 @@ class ShortTermMemory(Memory):

    MemoryItem instances.
    """

    def __init__(self, crew=None, embedder_config=None):
        storage = RAGStorage(
            type="short_term", embedder_config=embedder_config, crew=crew
        )
    def __init__(self, memory_provider=None, crew=None, embedder_config=None):
        self.memory_provider = memory_provider
        if self.memory_provider == "mem0":
            storage = Mem0Storage(type="short_term", crew=crew)
        else:
            storage = RAGStorage(
                type="short_term", embedder_config=embedder_config, crew=crew
            )
        super().__init__(storage)

    def save(
@@ -26,6 +31,8 @@ class ShortTermMemory(Memory):

        agent: Optional[str] = None,
    ) -> None:
        item = ShortTermMemoryItem(data=value, metadata=metadata, agent=agent)
        if self.memory_provider == "mem0":
            item.data = f"Remember the following insights from Agent run: {item.data}"

        super().save(value=item.data, metadata=item.metadata, agent=item.agent)
src/crewai/memory/storage/mem0_storage.py (new file, 42 lines)
@@ -0,0 +1,42 @@

import os
from typing import Any, Dict, List, Optional

from mem0 import MemoryClient
from crewai.memory.storage.interface import Storage


class Mem0Storage(Storage):
    """
    Extends Storage to handle embedding and searching across entities using Mem0.
    """

    def __init__(self, type, crew=None):
        super().__init__()
        if (
            not os.getenv("OPENAI_API_KEY")
            and not os.getenv("OPENAI_BASE_URL") == "https://api.openai.com/v1"
        ):
            os.environ["OPENAI_API_KEY"] = "fake"

        agents = crew.agents if crew else []
        agents = [agent.role for agent in agents]
        agents = "_".join(agents)

        self.app_id = agents
        self.memory = MemoryClient(api_key=os.getenv("MEM0_API_KEY"))

    def save(self, value: Any, metadata: Dict[str, Any]) -> None:
        self.memory.add(value, metadata=metadata, app_id=self.app_id)

    def search(
        self,
        query: str,
        limit: int = 3,
        filters: Optional[dict] = None,
        score_threshold: float = 0.35,
    ) -> List[Any]:
        params = {"query": query, "limit": limit, "app_id": self.app_id}
        if filters:
            params["filters"] = filters
        results = self.memory.search(**params)
        return [r for r in results if r["score"] >= score_threshold]
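For a rough sense of how this storage behaves on its own (again, not part of the commit), a sketch assuming `MEM0_API_KEY` is exported and `my_crew` is an existing `Crew`:

```python
# Sketch only; the app_id is derived from the crew's agent roles.
storage = Mem0Storage(type="short_term", crew=my_crew)

storage.save(
    "Remember the following insights from Agent run: the user prefers concise answers",
    metadata={"agent": "Researcher", "task": "summarize"},
)

# search() returns only hits at or above the score threshold.
for hit in storage.search("concise answers", limit=3, score_threshold=0.35):
    print(hit["memory"], hit["score"])
```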
src/crewai/memory/user/__init__.py (new file, empty)
src/crewai/memory/user/user_memory.py (new file, 23 lines)
@@ -0,0 +1,23 @@

from crewai.memory.memory import Memory
from crewai.memory.user.user_memory_item import UserMemoryItem
from crewai.memory.storage.mem0_storage import Mem0Storage


class UserMemory(Memory):
    """
    UserMemory class for handling user memory storage and retrieval.
    Inherits from the Memory class and utilizes an instance of a class that
    adheres to the Storage for data storage, specifically working with
    MemoryItem instances.
    """

    def __init__(self, crew=None):
        storage = Mem0Storage(type="user", crew=crew)
        super().__init__(storage)

    def save(self, item: UserMemoryItem) -> None:
        data = f"Remember the details about the user: {item.data}"
        super().save(data, item.metadata, user=item.user)

    def search(self, query: str, score_threshold: float = 0.35):
        return self.storage.search(query=query, score_threshold=score_threshold)
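A small usage sketch (not in the diff): querying user memory directly. It assumes `MEM0_API_KEY` is set, `my_crew` is an existing `Crew`, and relevant user memories already exist in Mem0 under this crew's `app_id`:

```python
user_memory = UserMemory(crew=my_crew)

# Mirrors ContextualMemory._fetch_user_memory: each hit exposes a "memory" field.
for hit in user_memory.search("user preferences", score_threshold=0.35):
    print(f"- {hit['memory']}")
```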
src/crewai/memory/user/user_memory_item.py (new file, 8 lines)
@@ -0,0 +1,8 @@

from typing import Any, Dict, Optional


class UserMemoryItem:
    def __init__(self, data: Any, user: str, metadata: Optional[Dict[str, Any]] = None):
        self.data = data
        self.user = user
        self.metadata = metadata if metadata is not None else {}
tests/memory/cassettes/test_save_and_search_with_provider.yaml (new file, 270 lines)
@@ -0,0 +1,270 @@

interactions:
- request:
    body: ''
    headers:
      accept:
      - '*/*'
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      host:
      - api.mem0.ai
      user-agent:
      - python-httpx/0.27.0
    method: GET
    uri: https://api.mem0.ai/v1/memories/?user_id=test
  response:
    body:
      string: '[]'
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8b477138bad847b9-BOM
      Connection:
      - keep-alive
      Content-Length:
      - '2'
      Content-Type:
      - application/json
      Date:
      - Sat, 17 Aug 2024 06:00:11 GMT
      NEL:
      - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}'
      Report-To:
      - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=uuyH2foMJVDpV%2FH52g1q%2FnvXKe3dBKVzvsK0mqmSNezkiszNR9OgrEJfVqmkX%2FlPFRP2sH4zrOuzGo6k%2FjzsjYJczqSWJUZHN2pPujiwnr1E9W%2BdLGKmG6%2FqPrGYAy2SBRWkkJVWsTO3OQ%3D%3D"}],"group":"cf-nel","max_age":604800}'
      Server:
      - cloudflare
      allow:
      - GET, POST, DELETE, OPTIONS
      alt-svc:
      - h3=":443"; ma=86400
      cross-origin-opener-policy:
      - same-origin
      referrer-policy:
      - same-origin
      vary:
      - Accept, origin, Cookie
      x-content-type-options:
      - nosniff
      x-frame-options:
      - DENY
    status:
      code: 200
      message: OK
- request:
    body: '{"batch": [{"properties": {"python_version": "3.12.4 (v3.12.4:8e8a4baf65,
      Jun 6 2024, 17:33:18) [Clang 13.0.0 (clang-1300.0.29.30)]", "os": "darwin",
      "os_version": "Darwin Kernel Version 23.4.0: Wed Feb 21 21:44:54 PST 2024; root:xnu-10063.101.15~2/RELEASE_ARM64_T6030",
      "os_release": "23.4.0", "processor": "arm", "machine": "arm64", "function":
      "mem0.client.main.MemoryClient", "$lib": "posthog-python", "$lib_version": "3.5.0",
      "$geoip_disable": true}, "timestamp": "2024-08-17T06:00:11.526640+00:00", "context":
      {}, "distinct_id": "fd411bd3-99a2-42d6-acd7-9fca8ad09580", "event": "client.init"}],
      "historical_migration": false, "sentAt": "2024-08-17T06:00:11.701621+00:00",
      "api_key": "phc_hgJkUVJFYtmaJqrvf6CYN67TIQ8yhXAkWzUn9AMU4yX"}'
    headers:
      Accept:
      - '*/*'
      Accept-Encoding:
      - gzip, deflate
      Connection:
      - keep-alive
      Content-Length:
      - '740'
      Content-Type:
      - application/json
      User-Agent:
      - posthog-python/3.5.0
    method: POST
    uri: https://us.i.posthog.com/batch/
  response:
    body:
      string: '{"status":"Ok"}'
    headers:
      Connection:
      - keep-alive
      Content-Length:
      - '15'
      Content-Type:
      - application/json
      Date:
      - Sat, 17 Aug 2024 06:00:12 GMT
      access-control-allow-credentials:
      - 'true'
      server:
      - envoy
      vary:
      - origin, access-control-request-method, access-control-request-headers
      x-envoy-upstream-service-time:
      - '69'
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "user", "content": "Remember the following insights
      from Agent run: test value with provider"}], "metadata": {"task": "test_task_provider",
      "agent": "test_agent_provider"}, "app_id": "Researcher"}'
    headers:
      accept:
      - '*/*'
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '219'
      content-type:
      - application/json
      host:
      - api.mem0.ai
      user-agent:
      - python-httpx/0.27.0
    method: POST
    uri: https://api.mem0.ai/v1/memories/
  response:
    body:
      string: '{"message":"ok"}'
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8b477140282547b9-BOM
      Connection:
      - keep-alive
      Content-Length:
      - '16'
      Content-Type:
      - application/json
      Date:
      - Sat, 17 Aug 2024 06:00:13 GMT
      NEL:
      - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}'
      Report-To:
      - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=FRjJKSk3YxVj03wA7S05H8ts35KnWfqS3wb6Rfy4kVZ4BgXfw7nJbm92wI6vEv5fWcAcHVnOlkJDggs11B01BMuB2k3a9RqlBi0dJNiMuk%2Bgm5xE%2BODMPWJctYNRwQMjNVbteUpS%2Fad8YA%3D%3D"}],"group":"cf-nel","max_age":604800}'
      Server:
      - cloudflare
      allow:
      - GET, POST, DELETE, OPTIONS
      alt-svc:
      - h3=":443"; ma=86400
      cross-origin-opener-policy:
      - same-origin
      referrer-policy:
      - same-origin
      vary:
      - Accept, origin, Cookie
      x-content-type-options:
      - nosniff
      x-frame-options:
      - DENY
    status:
      code: 200
      message: OK
- request:
    body: '{"query": "test value with provider", "limit": 3, "app_id": "Researcher"}'
    headers:
      accept:
      - '*/*'
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '73'
      content-type:
      - application/json
      host:
      - api.mem0.ai
      user-agent:
      - python-httpx/0.27.0
    method: POST
    uri: https://api.mem0.ai/v1/memories/search/
  response:
    body:
      string: '[]'
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8b47714d083b47b9-BOM
      Connection:
      - keep-alive
      Content-Length:
      - '2'
      Content-Type:
      - application/json
      Date:
      - Sat, 17 Aug 2024 06:00:14 GMT
      NEL:
      - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}'
      Report-To:
      - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=2DRWL1cdKdMvnE8vx1fPUGeTITOgSGl3N5g84PS6w30GRqpfz79BtSx6REhpnOiFV8kM6KGqln0iCZ5yoHc2jBVVJXhPJhQ5t0uerD9JFnkphjISrJOU1MJjZWneT9PlNABddxvVNCmluA%3D%3D"}],"group":"cf-nel","max_age":604800}'
      Server:
      - cloudflare
      allow:
      - POST, OPTIONS
      alt-svc:
      - h3=":443"; ma=86400
      cross-origin-opener-policy:
      - same-origin
      referrer-policy:
      - same-origin
      vary:
      - Accept, origin, Cookie
      x-content-type-options:
      - nosniff
      x-frame-options:
      - DENY
    status:
      code: 200
      message: OK
- request:
    body: '{"batch": [{"properties": {"python_version": "3.12.4 (v3.12.4:8e8a4baf65,
      Jun 6 2024, 17:33:18) [Clang 13.0.0 (clang-1300.0.29.30)]", "os": "darwin",
      "os_version": "Darwin Kernel Version 23.4.0: Wed Feb 21 21:44:54 PST 2024; root:xnu-10063.101.15~2/RELEASE_ARM64_T6030",
      "os_release": "23.4.0", "processor": "arm", "machine": "arm64", "function":
      "mem0.client.main.MemoryClient", "$lib": "posthog-python", "$lib_version": "3.5.0",
      "$geoip_disable": true}, "timestamp": "2024-08-17T06:00:13.593952+00:00", "context":
      {}, "distinct_id": "fd411bd3-99a2-42d6-acd7-9fca8ad09580", "event": "client.add"}],
      "historical_migration": false, "sentAt": "2024-08-17T06:00:13.858277+00:00",
      "api_key": "phc_hgJkUVJFYtmaJqrvf6CYN67TIQ8yhXAkWzUn9AMU4yX"}'
    headers:
      Accept:
      - '*/*'
      Accept-Encoding:
      - gzip, deflate
      Connection:
      - keep-alive
      Content-Length:
      - '739'
      Content-Type:
      - application/json
      User-Agent:
      - posthog-python/3.5.0
    method: POST
    uri: https://us.i.posthog.com/batch/
  response:
    body:
      string: '{"status":"Ok"}'
    headers:
      Connection:
      - keep-alive
      Content-Length:
      - '15'
      Content-Type:
      - application/json
      Date:
      - Sat, 17 Aug 2024 06:00:13 GMT
      access-control-allow-credentials:
      - 'true'
      server:
      - envoy
      vary:
      - origin, access-control-request-method, access-control-request-headers
      x-envoy-upstream-service-time:
      - '33'
    status:
      code: 200
      message: OK
version: 1
@@ -44,3 +44,46 @@ def test_save_and_search(short_term_memory):

    find = short_term_memory.search("test value", score_threshold=0.01)[0]
    assert find["context"] == memory.data, "Data value mismatch."
    assert find["metadata"]["agent"] == "test_agent", "Agent value mismatch."


@pytest.fixture
def short_term_memory_with_provider():
    """Fixture to create a ShortTermMemory instance with a specific memory provider"""
    agent = Agent(
        role="Researcher",
        goal="Search relevant data and provide results",
        backstory="You are a researcher at a leading tech think tank.",
        tools=[],
        verbose=True,
    )

    task = Task(
        description="Perform a search on specific topics.",
        expected_output="A list of relevant URLs based on the search query.",
        agent=agent,
    )
    return ShortTermMemory(
        crew=Crew(agents=[agent], tasks=[task]), memory_provider="mem0"
    )


def test_save_and_search_with_provider(short_term_memory_with_provider):
    memory = ShortTermMemoryItem(
        data="Loves to do research on the latest technologies.",
        agent="test_agent_provider",
        metadata={"task": "test_task_provider"},
    )
    short_term_memory_with_provider.save(
        value=memory.data,
        metadata=memory.metadata,
        agent=memory.agent,
    )

    find = short_term_memory_with_provider.search(
        "Loves to do research on the latest technologies.", score_threshold=0.01
    )[0]
    assert find["memory"] in memory.data, "Data value mismatch."
    assert find["metadata"]["agent"] == "test_agent_provider", "Agent value mismatch."
    assert (
        short_term_memory_with_provider.memory_provider == "mem0"
    ), "Memory provider mismatch."