Compare commits

...

5 Commits

Author SHA1 Message Date
Lucas Gomide
aa4a54f059 Merge branch 'main' into feature-isolated-memory 2025-04-14 16:37:08 -03:00
Lucas Gomide
e346064c36 feat: warn when long-term memory is defined but entity memory is not 2025-04-14 14:15:07 -03:00
Lucas Gomide
4fcca4c486 docs: enhance memory documentation 2025-04-14 14:15:07 -03:00
Lucas Gomide
4e9e190661 feat: add tests to ensure we are able to use contextual memory by setting individual memories 2025-04-14 14:15:07 -03:00
Lucas Gomide
9eeed380a8 feat: support defining any memory in an isolated way
This change makes it easier to use a specific memory type without unintentionally enabling all others.

Previously, setting memory=True would implicitly configure all available memories (like LTM and STM), which might not be ideal in all cases. For example, when building a chatbot that only needs an external memory, users were forced to also configure LTM and STM (which rely on the default OpenAI embeddings) even if they weren't needed.

With this update, users can now define a single memory in isolation, making the configuration process simpler and more flexible.
2025-04-14 14:15:07 -03:00
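
Illustrative only: a minimal sketch of the isolated setup this commit describes, based on the docs example further down this page (the agent/task strings and the Mem0 `user_id` are placeholders, not part of the change itself):

```python
import os

from crewai import Agent, Crew, Process, Task
from crewai.memory.external.external_memory import ExternalMemory

os.environ["MEM0_API_KEY"] = "YOUR-API-KEY"

# Placeholder agent/task, mirroring the docs example shown below.
agent = Agent(
    role="You are a helpful assistant",
    goal="Plan a vacation for the user",
    backstory="You plan vacations and remember user preferences.",
)
task = Task(
    description="Plan a short vacation for the user.",
    expected_output="A vacation plan.",
    agent=agent,
)

# Only external memory is configured; without memory=True, LTM/STM (and the
# default OpenAI embeddings they rely on) are never set up.
crew = Crew(
    agents=[agent],
    tasks=[task],
    process=Process.sequential,
    external_memory=ExternalMemory(
        embedder_config={"provider": "mem0", "config": {"user_id": "U-123"}}
    ),
)
```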
17 changed files with 5018 additions and 1528 deletions

View File

@@ -145,6 +145,7 @@ from crewai.memory import LongTermMemory
# Simple memory configuration
crew = Crew(memory=True) # Uses default storage locations
```
Note that External Memory won't be defined when `memory=True` is set, as we can't infer which external memory provider would be suitable for your case.
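If you do want external memory on top of the defaults, it has to be passed explicitly; a hedged sketch reusing the Mem0 setup from the External Memory section below (the config values are placeholders):
```python
from crewai import Crew
from crewai.memory.external.external_memory import ExternalMemory

# memory=True still enables the default long-term, short-term, and entity
# memories; external memory must be provided explicitly alongside it.
crew = Crew(
    memory=True,
    external_memory=ExternalMemory(
        embedder_config={"provider": "mem0", "config": {"user_id": "U-123"}}
    ),
)
```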
### Custom Storage Configuration
```python
@@ -278,15 +279,19 @@ crew = Crew(
### Using External Memory
External Memory is a powerful feature that allows you to integrate external memory systems with your CrewAI applications. This is particularly useful when you want to use specialized memory providers or maintain memory across different applications.
Since it's an external memory, we're not able to provide a default value for it, unlike Long Term and Short Term memory.
#### Basic Usage with Mem0
The most common way to use External Memory is with Mem0 as the provider:
```python
import os
from crewai import Agent, Crew, Process, Task
from crewai.memory.external.external_memory import ExternalMemory
os.environ["MEM0_API_KEY"] = "YOUR-API-KEY"
agent = Agent(
role="You are a helpful assistant",
goal="Plan a vacation for the user",
@@ -304,7 +309,6 @@ crew = Crew(
tasks=[task],
verbose=True,
process=Process.sequential,
memory=True,
external_memory=ExternalMemory(
embedder_config={"provider": "mem0", "config": {"user_id": "U-123"}} # you can provide an entire Mem0 configuration
),
@@ -363,7 +367,6 @@ crew = Crew(
tasks=[task],
verbose=True,
process=Process.sequential,
memory=True,
external_memory=external_memory,
)

View File

@@ -156,6 +156,23 @@ class Agent(BaseAgent):
except (TypeError, ValueError) as e:
raise ValueError(f"Invalid Knowledge Configuration: {str(e)}")
def _is_any_available_memory(self) -> bool:
"""Check if any memory is available."""
if not self.crew:
return False
memory_attributes = [
"memory",
"memory_config",
"_short_term_memory",
"_long_term_memory",
"_entity_memory",
"_user_memory",
"_external_memory",
]
return any(getattr(self.crew, attr) for attr in memory_attributes)
def execute_task(
self,
task: Task,
@@ -200,7 +217,7 @@ class Agent(BaseAgent):
task=task_prompt, context=context
)
if self.crew and self.crew.memory:
if self._is_any_available_memory():
contextual_memory = ContextualMemory(
self.crew.memory_config,
self.crew._short_term_memory,

View File

@@ -72,7 +72,6 @@ class CrewAgentExecutorMixin:
"""Create and save long-term and entity memory items based on evaluation."""
if (
self.crew
and self.crew.memory
and self.crew._long_term_memory
and self.crew._entity_memory
and self.task
@@ -114,6 +113,15 @@ class CrewAgentExecutorMixin:
except Exception as e:
print(f"Failed to add to long term memory: {e}")
pass
elif (
self.crew
and self.crew._long_term_memory
and self.crew._entity_memory is None
):
self._printer.print(
content="Long term memory is enabled, but entity memory is not enabled. Please configure entity memory or set memory=True to automatically enable it.",
color="bold_yellow",
)
def _ask_human_input(self, final_answer: str) -> str:
"""Prompt human input with mode-appropriate messaging."""

View File

@@ -275,46 +275,51 @@ class Crew(BaseModel):
return self
def _initialize_user_memory(self):
if (
self.memory_config
and "user_memory" in self.memory_config
and self.memory_config.get("provider") == "mem0"
): # Check for user_memory in config
user_memory_config = self.memory_config["user_memory"]
if isinstance(
user_memory_config, dict
): # Check if it's a configuration dict
self._user_memory = UserMemory(crew=self)
else:
raise TypeError("user_memory must be a configuration dictionary")
def _initialize_default_memories(self):
self._long_term_memory = self._long_term_memory or LongTermMemory()
self._short_term_memory = self._short_term_memory or ShortTermMemory(
crew=self,
embedder_config=self.embedder,
)
self._entity_memory = self.entity_memory or EntityMemory(
crew=self, embedder_config=self.embedder
)
@model_validator(mode="after")
def create_crew_memory(self) -> "Crew":
"""Set private attributes."""
"""Initialize private memory attributes."""
self._external_memory = (
# External memory doesn't support a default value since it was designed to be managed entirely externally
self.external_memory.set_crew(self)
if self.external_memory
else None
)
self._long_term_memory = self.long_term_memory
self._short_term_memory = self.short_term_memory
self._entity_memory = self.entity_memory
# UserMemory is going to be deprecated in the future, but we have to initialize a default value for now
self._user_memory = None
if self.memory:
self._long_term_memory = (
self.long_term_memory if self.long_term_memory else LongTermMemory()
)
self._short_term_memory = (
self.short_term_memory
if self.short_term_memory
else ShortTermMemory(
crew=self,
embedder_config=self.embedder,
)
)
self._entity_memory = (
self.entity_memory
if self.entity_memory
else EntityMemory(crew=self, embedder_config=self.embedder)
)
self._external_memory = (
# External memory doesn't support a default value since it was designed to be managed entirely externally
self.external_memory.set_crew(self)
if self.external_memory
else None
)
if (
self.memory_config
and "user_memory" in self.memory_config
and self.memory_config.get("provider") == "mem0"
): # Check for user_memory in config
user_memory_config = self.memory_config["user_memory"]
if isinstance(
user_memory_config, dict
): # Check if it's a configuration dict
self._user_memory = UserMemory(crew=self)
else:
raise TypeError("user_memory must be a configuration dictionary")
else:
self._user_memory = None # No user memory if not in config
self._initialize_default_memories()
self._initialize_user_memory()
return self
@model_validator(mode="after")
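
For reference, a hedged sketch of a configuration that exercises the `_initialize_user_memory` path above (the empty `user_memory` dict is a placeholder; any non-dict value raises the TypeError shown):

```python
from crewai import Crew

# memory_config must name the "mem0" provider and carry a "user_memory"
# dict for UserMemory to be initialized by the validator above.
crew = Crew(
    memory=True,
    memory_config={
        "provider": "mem0",
        "user_memory": {},  # must be a configuration dict
    },
)
```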

View File

@@ -53,6 +53,10 @@ class ContextualMemory:
Fetches recent relevant insights from STM related to the task's description and expected_output,
formatted as bullet points.
"""
if self.stm is None:
return ""
stm_results = self.stm.search(query)
formatted_results = "\n".join(
[
@@ -67,6 +71,10 @@ class ContextualMemory:
Fetches historical data or insights from LTM that are relevant to the task's description and expected_output,
formatted as bullet points.
"""
if self.ltm is None:
return ""
ltm_results = self.ltm.search(task, latest_n=2)
if not ltm_results:
return None
@@ -86,6 +94,9 @@ class ContextualMemory:
Fetches relevant entity information from Entity Memory related to the task's description and expected_output,
formatted as bullet points.
"""
if self.em is None:
return ""
em_results = self.em.search(query)
formatted_results = "\n".join(
[

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,209 @@
interactions:
- request:
body: !!binary |
CuAMCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkStwwKEgoQY3Jld2FpLnRl
bGVtZXRyeRKdCAoQe1SuF2c2xWX4juAv74oXphII/LGj/b5w49QqDENyZXcgQ3JlYXRlZDABOcCZ
B6F1rTUYQRhzEqF1rTUYShsKDmNyZXdhaV92ZXJzaW9uEgkKBzAuMTE0LjBKGgoOcHl0aG9uX3Zl
cnNpb24SCAoGMy4xMi45Si4KCGNyZXdfa2V5EiIKIGM5N2I1ZmViNWQxYjY2YmI1OTAwNmFhYTAx
YTI5Y2Q2SjEKB2NyZXdfaWQSJgokMDU1YWZhNGQtNWU5MS00YWU1LTg4ZTQtMGQ3N2I2OTZiODJl
ShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShEKC2NyZXdfbWVtb3J5EgIQAEoaChRjcmV3
X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUo6ChBjcmV3
X2ZpbmdlcnByaW50EiYKJGI3NzY4MjJlLTU4YzItNDg5Ni05NmVhLTlmNDQzNjc4NThjNko7Chtj
cmV3X2ZpbmdlcnByaW50X2NyZWF0ZWRfYXQSHAoaMjAyNS0wNC0xMlQxNzo1MjozMS4zOTkzMTdK
0QIKC2NyZXdfYWdlbnRzEsECCr4CW3sia2V5IjogIjA3ZDk5YjYzMDQxMWQzNWZkOTA0N2E1MzJk
NTNkZGE3IiwgImlkIjogIjI5MmZlMjI4LTNlYzEtNDE4Zi05NzQzLTFkNTI3ZGY5M2QwYyIsICJy
b2xlIjogIlJlc2VhcmNoZXIiLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMjUsICJt
YXhfcnBtIjogbnVsbCwgImZ1bmN0aW9uX2NhbGxpbmdfbGxtIjogIiIsICJsbG0iOiAiZ3B0LTRv
LW1pbmkiLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAiYWxsb3dfY29kZV9leGVjdXRp
b24/IjogZmFsc2UsICJtYXhfcmV0cnlfbGltaXQiOiAyLCAidG9vbHNfbmFtZXMiOiBbXX1dSv8B
CgpjcmV3X3Rhc2tzEvABCu0BW3sia2V5IjogIjYzOTk2NTE3ZjNmM2YxYzk0ZDZiYjYxN2FhMGIx
YzRmIiwgImlkIjogIjhlY2E1NTQzLTc3MDEtNDhjMy1hODM1LWI4YWE2YmE3YTMzZSIsICJhc3lu
Y19leGVjdXRpb24/IjogZmFsc2UsICJodW1hbl9pbnB1dD8iOiBmYWxzZSwgImFnZW50X3JvbGUi
OiAiUmVzZWFyY2hlciIsICJhZ2VudF9rZXkiOiAiMDdkOTliNjMwNDExZDM1ZmQ5MDQ3YTUzMmQ1
M2RkYTciLCAidG9vbHNfbmFtZXMiOiBbXX1degIYAYUBAAEAABKABAoQmqGVrPp33uFfE2WlsNm/
phIIx0mZ95NGSyIqDFRhc2sgQ3JlYXRlZDABObBlHqF1rTUYQbi3HqF1rTUYSi4KCGNyZXdfa2V5
EiIKIGM5N2I1ZmViNWQxYjY2YmI1OTAwNmFhYTAxYTI5Y2Q2SjEKB2NyZXdfaWQSJgokMDU1YWZh
NGQtNWU5MS00YWU1LTg4ZTQtMGQ3N2I2OTZiODJlSi4KCHRhc2tfa2V5EiIKIDYzOTk2NTE3ZjNm
M2YxYzk0ZDZiYjYxN2FhMGIxYzRmSjEKB3Rhc2tfaWQSJgokOGVjYTU1NDMtNzcwMS00OGMzLWE4
MzUtYjhhYTZiYTdhMzNlSjoKEGNyZXdfZmluZ2VycHJpbnQSJgokYjc3NjgyMmUtNThjMi00ODk2
LTk2ZWEtOWY0NDM2Nzg1OGM2SjoKEHRhc2tfZmluZ2VycHJpbnQSJgokYTk5NjE4ZTYtODFhZC00
N2YyLWE4ZGEtOTc1NjkzN2YxYmIwSjsKG3Rhc2tfZmluZ2VycHJpbnRfY3JlYXRlZF9hdBIcChoy
MDI1LTA0LTEyVDE3OjUyOjMxLjM5ODIxNEo7ChFhZ2VudF9maW5nZXJwcmludBImCiRlZjkxZGYx
NS0zNmNiLTQ0MDQtOWFkMi05MmM1OTQ1NGU2ZTZ6AhgBhQEAAQAA
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '1635'
Content-Type:
- application/x-protobuf
User-Agent:
- OTel-OTLP-Exporter-Python/1.31.1
method: POST
uri: https://telemetry.crewai.com:4319/v1/traces
response:
body:
string: "\n\0"
headers:
Content-Length:
- '2'
Content-Type:
- application/x-protobuf
Date:
- Sat, 12 Apr 2025 20:52:35 GMT
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "system", "content": "You are Researcher. You''re
an expert in research and you love to learn new things.\nYour personal goal
is: You research about math.\nTo give my best complete final answer to the task
respond using the exact following format:\n\nThought: I now can give a great
answer\nFinal Answer: Your final answer must be the great and the most complete
as possible, it must be outcome described.\n\nI MUST use these formats, my job
depends on it!"}, {"role": "user", "content": "\nCurrent Task: Research a topic
to teach a kid aged 6 about math.\n\nThis is the expected criteria for your
final answer: A topic, explanation, angle, and examples.\nyou MUST return the
actual complete content as the final answer, not a summary.\n\n# Useful context:
\n<MagicMock name=''build_context_for_task()'' id=''13586043616''>\n\nBegin!
This is VERY important to you, use the tools available and give your best Final
Answer, your job depends on it!\n\nThought:"}], "model": "gpt-4o-mini", "stop":
["\nObservation:"]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '1031'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.68.2
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.68.2
x-stainless-raw-response:
- 'true'
x-stainless-read-timeout:
- '600.0'
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.9
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
content: "{\n \"id\": \"chatcmpl-BLc7zvaXoFAArOIDK9TrMtdq8kKY0\",\n \"object\":
\"chat.completion\",\n \"created\": 1744491151,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"I now can give a great answer. \\nFinal
Answer: \\n\\n**Topic: Introduction to Addition with Fun Objects**\\n\\n**Explanation:**
\ \\nAt its core, addition is all about combining two or more groups of things
to find out how many there are in total. We can think of addition as a way to
bring friends together! Imagine if you have some apples and your friend brings
some more apples; together, you want to find out how many apples there are altogether.
This is the foundation of addition, and it can be made fun and engaging for
a 6-year-old.\\n\\n**Angle:** \\nTo make this relatable and enjoyable, we can
use everyday objects that kids love, such as toys, fruits, or even drawings.
We can create little stories or scenarios that involve addition, turning it
into a game where they get to count and add things together. By using real items,
children can see and feel what addition means, making it easier to grasp the
concept.\\n\\n**Examples:** \\n1. **Using Fruits:** \\n Let's say you have
3 oranges. You can say, \\\"I have 3 oranges.\\\" Then, if your friend brings
you 2 more oranges, you can introduce the addition by saying, \\\"Now, how many
do we have all together?\\\" \\n - So you would show it as: 3 (oranges you
have) + 2 (oranges your friend brought) = ? \\n To find the answer, you can
count all the oranges together: 1, 2, 3 (your oranges) and 4, 5 (your friend's
oranges). \\n - The answer is 5 oranges in total!\\n\\n2. **Using Toys:**
\ \\n If a child has 4 toy cars and finds 3 more under the couch, we can ask,
\\\"How many cars do you have now?\\\" \\n - Write it down: 4 (toy cars)
+ 3 (found cars) = ? \\n Then, count the toy cars together: 1, 2, 3, 4 (original
cars), 5, 6, 7. \\n - The answer is 7 toy cars!\\n\\n3. **Story Scenario:**
\ \\n Create an engaging story: \\\"Once upon a time, there were 2 friendly
puppies. One day, 3 more puppies came to play. How many puppies are playing
now?\\\" \\n - Present it as: 2 (original puppies) + 3 (new puppies) = ?
\ \\n Count the puppies: 1, 2 (the first two) and then 3, 4, 5 (the new ones).
\ \\n - The answer is 5 puppies playing!\\n\\nBy presenting addition through
fun scenarios and interactive counting, a 6-year-old can learn and understand
addition while enjoying the process. They can even use crayons to draw the items
or fruit to count in a playful, hands-on approach. This makes math not just
a subject, but also a delightful adventure!\",\n \"refusal\": null,\n
\ \"annotations\": []\n },\n \"logprobs\": null,\n \"finish_reason\":
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 206,\n \"completion_tokens\":
609,\n \"total_tokens\": 815,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": {\n
\ \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
\"default\",\n \"system_fingerprint\": \"fp_44added55e\"\n}\n"
headers:
CF-RAY:
- 92f59ba1fa19572a-GRU
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Sat, 12 Apr 2025 20:52:44 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=C7ejNhC7vNTBO9VtBqzN_ce__mP2Dz8noDo2lIcNBn0-1744491164-1.0.1.1-kQgWk4d54JIGxg_yCJ.7uV9HkU8JXrhpfIth0WHDdqf9ESzAsQyDu0xKVLYnga.xswBnm5kePpuFCcnIqGKgyag31cEyuiFFf6JHTvQcvWI;
path=/; expires=Sat, 12-Apr-25 21:22:44 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=QuvcyYK0MZfY9dNclglrzesXcplWfoZN.rd4J57.xtY-1744491164641-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
cf-cache-status:
- DYNAMIC
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '12806'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '30000'
x-ratelimit-limit-tokens:
- '150000000'
x-ratelimit-remaining-requests:
- '29999'
x-ratelimit-remaining-tokens:
- '149999777'
x-ratelimit-reset-requests:
- 2ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_49f3c203229149ce08c0813ac4071355
http_version: HTTP/1.1
status_code: 200
version: 1

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,205 @@
interactions:
- request:
body: !!binary |
CuAMCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkStwwKEgoQY3Jld2FpLnRl
bGVtZXRyeRKdCAoQE1JYPHUcNy20EEB8E7lQKRIIeom6mAik9I0qDENyZXcgQ3JlYXRlZDABOdhP
ANFPrzUYQWCwCNFPrzUYShsKDmNyZXdhaV92ZXJzaW9uEgkKBzAuMTE0LjBKGgoOcHl0aG9uX3Zl
cnNpb24SCAoGMy4xMi45Si4KCGNyZXdfa2V5EiIKIGM5N2I1ZmViNWQxYjY2YmI1OTAwNmFhYTAx
YTI5Y2Q2SjEKB2NyZXdfaWQSJgokMjNmZDllZTktMWRiZC00M2FjLTlhZGYtNTQ5YWFhZTNkMTNj
ShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShEKC2NyZXdfbWVtb3J5EgIQAEoaChRjcmV3
X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUo6ChBjcmV3
X2ZpbmdlcnByaW50EiYKJDk2M2UyNDA4LTI3MzktNGU3ZS04ZTAzLTIxOGUzZjhmMTFhZEo7Chtj
cmV3X2ZpbmdlcnByaW50X2NyZWF0ZWRfYXQSHAoaMjAyNS0wNC0xMlQxODoyNjoyOC4wMTg1MzVK
0QIKC2NyZXdfYWdlbnRzEsECCr4CW3sia2V5IjogIjA3ZDk5YjYzMDQxMWQzNWZkOTA0N2E1MzJk
NTNkZGE3IiwgImlkIjogIjA3ZWIyOWYzLWE2OWQtNGQ1MC1iZGJiLTAwNjEzN2UzYjU4MiIsICJy
b2xlIjogIlJlc2VhcmNoZXIiLCAidmVyYm9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMjUsICJt
YXhfcnBtIjogbnVsbCwgImZ1bmN0aW9uX2NhbGxpbmdfbGxtIjogIiIsICJsbG0iOiAiZ3B0LTRv
LW1pbmkiLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAiYWxsb3dfY29kZV9leGVjdXRp
b24/IjogZmFsc2UsICJtYXhfcmV0cnlfbGltaXQiOiAyLCAidG9vbHNfbmFtZXMiOiBbXX1dSv8B
CgpjcmV3X3Rhc2tzEvABCu0BW3sia2V5IjogIjYzOTk2NTE3ZjNmM2YxYzk0ZDZiYjYxN2FhMGIx
YzRmIiwgImlkIjogImUwOWIzMzg1LThmNTAtNDIxYy1hYzE0LTdhZDU5NTU4YmY4NiIsICJhc3lu
Y19leGVjdXRpb24/IjogZmFsc2UsICJodW1hbl9pbnB1dD8iOiBmYWxzZSwgImFnZW50X3JvbGUi
OiAiUmVzZWFyY2hlciIsICJhZ2VudF9rZXkiOiAiMDdkOTliNjMwNDExZDM1ZmQ5MDQ3YTUzMmQ1
M2RkYTciLCAidG9vbHNfbmFtZXMiOiBbXX1degIYAYUBAAEAABKABAoQ/KSXqXcsLoGmHCaEWYIa
9xII/Ucae2PMp18qDFRhc2sgQ3JlYXRlZDABObAfF9FPrzUYQeCUF9FPrzUYSi4KCGNyZXdfa2V5
EiIKIGM5N2I1ZmViNWQxYjY2YmI1OTAwNmFhYTAxYTI5Y2Q2SjEKB2NyZXdfaWQSJgokMjNmZDll
ZTktMWRiZC00M2FjLTlhZGYtNTQ5YWFhZTNkMTNjSi4KCHRhc2tfa2V5EiIKIDYzOTk2NTE3ZjNm
M2YxYzk0ZDZiYjYxN2FhMGIxYzRmSjEKB3Rhc2tfaWQSJgokZTA5YjMzODUtOGY1MC00MjFjLWFj
MTQtN2FkNTk1NThiZjg2SjoKEGNyZXdfZmluZ2VycHJpbnQSJgokOTYzZTI0MDgtMjczOS00ZTdl
LThlMDMtMjE4ZTNmOGYxMWFkSjoKEHRhc2tfZmluZ2VycHJpbnQSJgokN2FhMTE0NDAtYjNkYi00
Y2VmLTgzYjUtNTk3ZTMwMTIxZGZhSjsKG3Rhc2tfZmluZ2VycHJpbnRfY3JlYXRlZF9hdBIcChoy
MDI1LTA0LTEyVDE4OjI2OjI4LjAxNzMyNEo7ChFhZ2VudF9maW5nZXJwcmludBImCiQ0MDczMjdk
NC1hMzRjLTQyNTUtYWIxYy1iM2I1OTNiMmM4MTJ6AhgBhQEAAQAA
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '1635'
Content-Type:
- application/x-protobuf
User-Agent:
- OTel-OTLP-Exporter-Python/1.31.1
method: POST
uri: https://telemetry.crewai.com:4319/v1/traces
response:
body:
string: "\n\0"
headers:
Content-Length:
- '2'
Content-Type:
- application/x-protobuf
Date:
- Sat, 12 Apr 2025 21:26:32 GMT
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "system", "content": "You are Researcher. You''re
an expert in research and you love to learn new things.\nYour personal goal
is: You research about math.\nTo give my best complete final answer to the task
respond using the exact following format:\n\nThought: I now can give a great
answer\nFinal Answer: Your final answer must be the great and the most complete
as possible, it must be outcome described.\n\nI MUST use these formats, my job
depends on it!"}, {"role": "user", "content": "\nCurrent Task: Research a topic
to teach a kid aged 6 about math.\n\nThis is the expected criteria for your
final answer: A topic, explanation, angle, and examples.\nyou MUST return the
actual complete content as the final answer, not a summary.\n\nBegin! This is
VERY important to you, use the tools available and give your best Final Answer,
your job depends on it!\n\nThought:"}], "model": "gpt-4o-mini", "stop": ["\nObservation:"]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '947'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.68.2
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.68.2
x-stainless-raw-response:
- 'true'
x-stainless-read-timeout:
- '600.0'
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.9
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
content: "{\n \"id\": \"chatcmpl-BLceqFO97kLaTEPUSKGHkGlckpxLe\",\n \"object\":
\"chat.completion\",\n \"created\": 1744493188,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"I now can give a great answer \\nFinal
Answer: \\n\\n**Topic:** Introduction to Addition\\n\\n**Explanation:** \\nAddition
is a fundamental concept in math that means putting together two or more numbers
to find out how many there are in total. When we add, we combine quantities
to see the total amount we have. The symbol for addition is \\\"+\\\". \\n\\nLet's
break it down so it's easy to understand. If you have a small group of apples
and then you get more apples, to find out how many apples you have altogether,
you add them up! \\n\\n**Angle:** \\nTo teach this concept to a 6-year-old,
we can use tangible objects they can relate to, such as fruits, toys, or stickers.
Kids learn best through play and visual representation, so using real-life examples
will make the concept of addition exciting and engaging!\\n\\n**Examples:**
\ \\n1. **Using Fruits:** \\n - Start with 2 apples. \\n\\n \U0001F34F\U0001F34F
(2 apples)\\n\\n - Then, you receive 3 more apples. \\n\\n \U0001F34F\U0001F34F\U0001F34F
(3 apples)\\n\\n - To find out how many apples you have now, we add them together:
\\n\\n 2 + 3 = 5 \\n\\n - Show them the total by counting all the apples
together: \\n\\n \U0001F34F\U0001F34F\U0001F34F\U0001F34F\U0001F34F (5 apples)\\n\\n2.
**Using Toys:** \\n - Let\u2019s say there are 4 toy cars. \\n\\n \U0001F697\U0001F697\U0001F697\U0001F697
(4 toy cars)\\n\\n - If you get 2 more toy cars. \\n\\n \U0001F697\U0001F697
(2 toy cars)\\n\\n - How many do we have in total? \\n\\n 4 + 2 = 6 \\n\\n
\ - Count them all together: \\n\\n \U0001F697\U0001F697\U0001F697\U0001F697\U0001F697\U0001F697
(6 toy cars)\\n\\n3. **Using Stickers:** \\n - You have 5 stickers. \\n\\n
\ \U0001F31F\U0001F31F\U0001F31F\U0001F31F\U0001F31F (5 stickers)\\n\\n -
Your friend gives you 4 more stickers. \\n\\n \U0001F31F\U0001F31F\U0001F31F\U0001F31F
(4 stickers)\\n\\n - Now, let\u2019s see how many stickers you have in total:
\\n\\n 5 + 4 = 9 \\n\\n - Count them together: \\n\\n \U0001F31F\U0001F31F\U0001F31F\U0001F31F\U0001F31F\U0001F31F\U0001F31F\U0001F31F\U0001F31F
(9 stickers)\\n\\n**Conclusion:** \\nTry to make addition fun! Use snacks or
play time to practice addition. Ask questions during snack time, such as \u201CIf
you eat one of your 5 cookies, how many will you have left?\u201D This approach
makes learning relatable and enjoyable, enhancing their understanding of math
in everyday situations. Happy adding!\",\n \"refusal\": null,\n \"annotations\":
[]\n },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n
\ }\n ],\n \"usage\": {\n \"prompt_tokens\": 182,\n \"completion_tokens\":
561,\n \"total_tokens\": 743,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": {\n
\ \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
\"default\",\n \"system_fingerprint\": \"fp_44added55e\"\n}\n"
headers:
CF-RAY:
- 92f5cd5a19257e0f-GRU
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Sat, 12 Apr 2025 21:26:36 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=RJADJOyAKqFS8232yM1dbM71E3ODRyiAty_s9rGvM0Y-1744493196-1.0.1.1-f4yxtdxM2DD78r7TOvv1J75SF6jkKDecDiDNH3cGysXRR3R.QycZfAzjKzWFkncqaQY4jeqGFYZlVR06qIdq2Gw178QxYpOC6MrJT1eqduw;
path=/; expires=Sat, 12-Apr-25 21:56:36 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=l0OvqELD24_KHHDhiAwih_bsqFrop1327mHak9Y_Ovk-1744493196966-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
cf-cache-status:
- DYNAMIC
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '8640'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '30000'
x-ratelimit-limit-tokens:
- '150000000'
x-ratelimit-remaining-requests:
- '29999'
x-ratelimit-remaining-tokens:
- '149999797'
x-ratelimit-reset-requests:
- 2ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_dda2c2217b856a9012403aeb7378a9e2
http_version: HTTP/1.1
status_code: 200
version: 1

View File

@@ -20,6 +20,7 @@ from crewai.crews.crew_output import CrewOutput
from crewai.knowledge.source.string_knowledge_source import StringKnowledgeSource
from crewai.llm import LLM
from crewai.memory.contextual.contextual_memory import ContextualMemory
from crewai.memory.long_term.long_term_memory import LongTermMemory
from crewai.memory.short_term.short_term_memory import ShortTermMemory
from crewai.process import Process
from crewai.task import Task
@@ -2407,6 +2408,136 @@ def test_using_contextual_memory():
contextual_mem.assert_called_once()
@pytest.mark.vcr(filter_headers=["authorization"])
def test_using_contextual_memory_with_long_term_memory():
from unittest.mock import patch
math_researcher = Agent(
role="Researcher",
goal="You research about math.",
backstory="You're an expert in research and you love to learn new things.",
allow_delegation=False,
)
task1 = Task(
description="Research a topic to teach a kid aged 6 about math.",
expected_output="A topic, explanation, angle, and examples.",
agent=math_researcher,
)
crew = Crew(
agents=[math_researcher],
tasks=[task1],
long_term_memory=LongTermMemory(),
)
with patch.object(ContextualMemory, "build_context_for_task") as contextual_mem:
crew.kickoff()
contextual_mem.assert_called_once()
assert crew.memory is False
@pytest.mark.vcr(filter_headers=["authorization"])
def test_warning_long_term_memory_without_entity_memory():
from unittest.mock import patch
math_researcher = Agent(
role="Researcher",
goal="You research about math.",
backstory="You're an expert in research and you love to learn new things.",
allow_delegation=False,
)
task1 = Task(
description="Research a topic to teach a kid aged 6 about math.",
expected_output="A topic, explanation, angle, and examples.",
agent=math_researcher,
)
crew = Crew(
agents=[math_researcher],
tasks=[task1],
long_term_memory=LongTermMemory(),
)
with (
patch("crewai.utilities.printer.Printer.print") as mock_print,
patch(
"crewai.memory.long_term.long_term_memory.LongTermMemory.save"
) as save_memory,
):
crew.kickoff()
mock_print.assert_called_with(
content="Long term memory is enabled, but entity memory is not enabled. Please configure entity memory or set memory=True to automatically enable it.",
color="bold_yellow",
)
save_memory.assert_not_called()
@pytest.mark.vcr(filter_headers=["authorization"])
def test_long_term_memory_with_memory_flag():
from unittest.mock import patch
math_researcher = Agent(
role="Researcher",
goal="You research about math.",
backstory="You're an expert in research and you love to learn new things.",
allow_delegation=False,
)
task1 = Task(
description="Research a topic to teach a kid aged 6 about math.",
expected_output="A topic, explanation, angle, and examples.",
agent=math_researcher,
)
crew = Crew(
agents=[math_researcher],
tasks=[task1],
memory=True,
long_term_memory=LongTermMemory(),
)
with (
patch("crewai.utilities.printer.Printer.print") as mock_print,
patch(
"crewai.memory.long_term.long_term_memory.LongTermMemory.save"
) as save_memory,
):
crew.kickoff()
mock_print.assert_not_called()
save_memory.assert_called_once()
@pytest.mark.vcr(filter_headers=["authorization"])
def test_using_contextual_memory_with_short_term_memory():
from unittest.mock import patch
math_researcher = Agent(
role="Researcher",
goal="You research about math.",
backstory="You're an expert in research and you love to learn new things.",
allow_delegation=False,
)
task1 = Task(
description="Research a topic to teach a kid aged 6 about math.",
expected_output="A topic, explanation, angle, and examples.",
agent=math_researcher,
)
crew = Crew(
agents=[math_researcher],
tasks=[task1],
short_term_memory=ShortTermMemory(),
)
with patch.object(ContextualMemory, "build_context_for_task") as contextual_mem:
crew.kickoff()
contextual_mem.assert_called_once()
assert crew.memory is False
@pytest.mark.vcr(filter_headers=["authorization"])
def test_disabled_memory_using_contextual_memory():
from unittest.mock import patch

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,182 @@
interactions:
- request:
body: !!binary |
Ct8MCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkStgwKEgoQY3Jld2FpLnRl
bGVtZXRyeRKcCAoQfm0pqVSMD2d8x7Z0oecKIRIIgWppMg8y3GoqDENyZXcgQ3JlYXRlZDABORAN
KtAWrDUYQagXMtAWrDUYShsKDmNyZXdhaV92ZXJzaW9uEgkKBzAuMTE0LjBKGgoOcHl0aG9uX3Zl
cnNpb24SCAoGMy4xMi45Si4KCGNyZXdfa2V5EiIKIDA3YTcxNzY4Y2M0YzkzZWFiM2IzMWUzYzhk
MjgzMmM2SjEKB2NyZXdfaWQSJgokNGY1NjNkN2MtYmYyOC00ZWM2LTgzNzQtMDZlMjZiYzA1NWU0
ShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShEKC2NyZXdfbWVtb3J5EgIQAEoaChRjcmV3
X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUo6ChBjcmV3
X2ZpbmdlcnByaW50EiYKJGU4MGY4MDFmLWViZmQtNDlkOS1iNTEwLTM0NmVjN2VlNzAzZko7Chtj
cmV3X2ZpbmdlcnByaW50X2NyZWF0ZWRfYXQSHAoaMjAyNS0wNC0xMlQxNzoyNzoyNC42NTU4NzhK
0AIKC2NyZXdfYWdlbnRzEsACCr0CW3sia2V5IjogIjAyZGYxM2UzNjcxMmFiZjUxZDIzOGZlZWJh
YjFjYTI2IiwgImlkIjogIjg5MjdlNzQ1LWNkNWQtNDJkMy1hMjA2LTEyYTUxOWRlMDY1OCIsICJy
b2xlIjogIlJlc2VhcmNoZXIiLCAidmVyYm9zZT8iOiB0cnVlLCAibWF4X2l0ZXIiOiAyNSwgIm1h
eF9ycG0iOiBudWxsLCAiZnVuY3Rpb25fY2FsbGluZ19sbG0iOiAiIiwgImxsbSI6ICJncHQtNG8t
bWluaSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJhbGxvd19jb2RlX2V4ZWN1dGlv
bj8iOiBmYWxzZSwgIm1heF9yZXRyeV9saW1pdCI6IDIsICJ0b29sc19uYW1lcyI6IFtdfV1K/wEK
CmNyZXdfdGFza3MS8AEK7QFbeyJrZXkiOiAiN2I0MmRmM2MzYzc0YzIxYzg5NDgwZTBjMDcwNTM4
NWYiLCAiaWQiOiAiNDM0MDgzNDYtMjA5OC00M2I1LWE0NWUtMmU2MWY4ZmYxZTliIiwgImFzeW5j
X2V4ZWN1dGlvbj8iOiBmYWxzZSwgImh1bWFuX2lucHV0PyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6
ICJSZXNlYXJjaGVyIiwgImFnZW50X2tleSI6ICIwMmRmMTNlMzY3MTJhYmY1MWQyMzhmZWViYWIx
Y2EyNiIsICJ0b29sc19uYW1lcyI6IFtdfV16AhgBhQEAAQAAEoAEChDv1iM8ejIY7tezTF4KBssA
Eghn2bJnw2f60SoMVGFzayBDcmVhdGVkMAE58LFA0BasNRhB4AdB0BasNRhKLgoIY3Jld19rZXkS
IgogMDdhNzE3NjhjYzRjOTNlYWIzYjMxZTNjOGQyODMyYzZKMQoHY3Jld19pZBImCiQ0ZjU2M2Q3
Yy1iZjI4LTRlYzYtODM3NC0wNmUyNmJjMDU1ZTRKLgoIdGFza19rZXkSIgogN2I0MmRmM2MzYzc0
YzIxYzg5NDgwZTBjMDcwNTM4NWZKMQoHdGFza19pZBImCiQ0MzQwODM0Ni0yMDk4LTQzYjUtYTQ1
ZS0yZTYxZjhmZjFlOWJKOgoQY3Jld19maW5nZXJwcmludBImCiRlODBmODAxZi1lYmZkLTQ5ZDkt
YjUxMC0zNDZlYzdlZTcwM2ZKOgoQdGFza19maW5nZXJwcmludBImCiQ2YjgzODVkYS0yYWJjLTRm
NWEtOTk3NC0xNjhiMzVhNDBlOTlKOwobdGFza19maW5nZXJwcmludF9jcmVhdGVkX2F0EhwKGjIw
MjUtMDQtMTJUMTc6Mjc6MjQuNjU1ODQ4SjsKEWFnZW50X2ZpbmdlcnByaW50EiYKJDIyNTI4NDRl
LWNlMTYtNDYyZi04NDI4LTYwYzZmMWYyNGE3N3oCGAGFAQABAAA=
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '1634'
Content-Type:
- application/x-protobuf
User-Agent:
- OTel-OTLP-Exporter-Python/1.31.1
method: POST
uri: https://telemetry.crewai.com:4319/v1/traces
response:
body:
string: "\n\0"
headers:
Content-Length:
- '2'
Content-Type:
- application/x-protobuf
Date:
- Sat, 12 Apr 2025 20:27:26 GMT
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "system", "content": "You are Researcher. You are
a researcher at a leading tech think tank.\nYour personal goal is: Search relevant
data and provide results\nTo give my best complete final answer to the task
respond using the exact following format:\n\nThought: I now can give a great
answer\nFinal Answer: Your final answer must be the great and the most complete
as possible, it must be outcome described.\n\nI MUST use these formats, my job
depends on it!"}, {"role": "user", "content": "\nCurrent Task: Perform a search
on specific topics.\n\nThis is the expected criteria for your final answer:
A list of relevant URLs based on the search query.\nyou MUST return the actual
complete content as the final answer, not a summary.\n\n# Useful context: \nExternal
memories:\n\n\nBegin! This is VERY important to you, use the tools available
and give your best Final Answer, your job depends on it!\n\nThought:"}], "model":
"gpt-4o-mini", "stop": ["\nObservation:"]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '989'
content-type:
- application/json
cookie:
- __cf_bm=nSje5Zn_Lk69BDG85XIauC2hrZjGl0pR2sel9__KWGw-1744489610-1.0.1.1-CPlAgcgTAE30uWrbi_2wiCWrbRDRWiaa.YuQMgST42DLDVg_wdNlJMDQT3Lsqk.g.BO68A66TTirWA0blQaQw.9xdBbPwKO609_ftjdwi5U;
_cfuvid=XLC52GLAWCOeWn2vI379CnSGKjPa7f.qr2vSAQ_R66M-1744489610542-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.68.2
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.68.2
x-stainless-raw-response:
- 'true'
x-stainless-read-timeout:
- '600.0'
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.9
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
content: "{\n \"id\": \"chatcmpl-BLbjg0OfADWdrLsZxrjKHEeVVbWle\",\n \"object\":
\"chat.completion\",\n \"created\": 1744489644,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"I now can give a great answer \\nFinal
Answer: Here is a list of relevant URLs based on the specified search query:
\ \\n\\n1. https://www.forbes.com/technology/ \\n2. https://www.sciencedirect.com/
\ \\n3. https://www.techcrunch.com/ \\n4. https://www.wired.com/ \\n5. https://www.researchgate.net/
\ \\n6. https://www.springer.com/ \\n7. https://www.jstor.org/ \\n8. https://www.statista.com/
\ \\n9. https://www.pwc.com/gx/en/services/consulting/technology.html \\n10.
https://www.gartner.com/en/information-technology \\n\\nThese URLs provide
access to a wealth of information on various technology-related topics, including
articles, research papers, and analytics.\",\n \"refusal\": null,\n \"annotations\":
[]\n },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n
\ }\n ],\n \"usage\": {\n \"prompt_tokens\": 185,\n \"completion_tokens\":
169,\n \"total_tokens\": 354,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": {\n
\ \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
\"default\",\n \"system_fingerprint\": \"fp_80cf447eee\"\n}\n"
headers:
CF-RAY:
- 92f576d83a447e05-GRU
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Sat, 12 Apr 2025 20:27:27 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
cf-cache-status:
- DYNAMIC
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '2273'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '30000'
x-ratelimit-limit-tokens:
- '150000000'
x-ratelimit-remaining-requests:
- '29999'
x-ratelimit-remaining-tokens:
- '149999788'
x-ratelimit-reset-requests:
- 2ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_38f6879956c29e6c61c844d1906fa2e8
http_version: HTTP/1.1
status_code: 200
version: 1

View File

@@ -0,0 +1,190 @@
interactions:
- request:
body: !!binary |
Ct8MCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkStgwKEgoQY3Jld2FpLnRl
bGVtZXRyeRKcCAoQjin/Su47zAwLq3Hv6yv8GhIImRMfAPs+FOMqDENyZXcgQ3JlYXRlZDABOYCY
xbgUrDUYQVie07gUrDUYShsKDmNyZXdhaV92ZXJzaW9uEgkKBzAuMTE0LjBKGgoOcHl0aG9uX3Zl
cnNpb24SCAoGMy4xMi45Si4KCGNyZXdfa2V5EiIKIDA3YTcxNzY4Y2M0YzkzZWFiM2IzMWUzYzhk
MjgzMmM2SjEKB2NyZXdfaWQSJgokY2UyMGFlNWYtZmMyNy00YWJhLWExYWMtNzUwY2ZhZmMwMTE4
ShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShEKC2NyZXdfbWVtb3J5EgIQAEoaChRjcmV3
X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIYAUo6ChBjcmV3
X2ZpbmdlcnByaW50EiYKJDQ4NGFmZDhjLTczMmEtNGM1Ni1hZjk2LTU2MzkwMjNmYjhjOUo7Chtj
cmV3X2ZpbmdlcnByaW50X2NyZWF0ZWRfYXQSHAoaMjAyNS0wNC0xMlQxNzoyNzoxNS42NzMyMjNK
0AIKC2NyZXdfYWdlbnRzEsACCr0CW3sia2V5IjogIjAyZGYxM2UzNjcxMmFiZjUxZDIzOGZlZWJh
YjFjYTI2IiwgImlkIjogImYyYjZkYTU1LTNiMGItNDZiNy05Mzk5LWE5NDJmYjQ4YzU2OSIsICJy
b2xlIjogIlJlc2VhcmNoZXIiLCAidmVyYm9zZT8iOiB0cnVlLCAibWF4X2l0ZXIiOiAyNSwgIm1h
eF9ycG0iOiBudWxsLCAiZnVuY3Rpb25fY2FsbGluZ19sbG0iOiAiIiwgImxsbSI6ICJncHQtNG8t
bWluaSIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJhbGxvd19jb2RlX2V4ZWN1dGlv
bj8iOiBmYWxzZSwgIm1heF9yZXRyeV9saW1pdCI6IDIsICJ0b29sc19uYW1lcyI6IFtdfV1K/wEK
CmNyZXdfdGFza3MS8AEK7QFbeyJrZXkiOiAiN2I0MmRmM2MzYzc0YzIxYzg5NDgwZTBjMDcwNTM4
NWYiLCAiaWQiOiAiYmE1MjFjNDgtYzcwNS00MDRlLWE5MDktMjkwZGM0NTlkOThkIiwgImFzeW5j
X2V4ZWN1dGlvbj8iOiBmYWxzZSwgImh1bWFuX2lucHV0PyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6
ICJSZXNlYXJjaGVyIiwgImFnZW50X2tleSI6ICIwMmRmMTNlMzY3MTJhYmY1MWQyMzhmZWViYWIx
Y2EyNiIsICJ0b29sc19uYW1lcyI6IFtdfV16AhgBhQEAAQAAEoAEChAmCOpHN6fX3l0shQvTLjrB
EgjLTyt4A1p7wyoMVGFzayBDcmVhdGVkMAE5gN7juBSsNRhBmFfkuBSsNRhKLgoIY3Jld19rZXkS
IgogMDdhNzE3NjhjYzRjOTNlYWIzYjMxZTNjOGQyODMyYzZKMQoHY3Jld19pZBImCiRjZTIwYWU1
Zi1mYzI3LTRhYmEtYTFhYy03NTBjZmFmYzAxMThKLgoIdGFza19rZXkSIgogN2I0MmRmM2MzYzc0
YzIxYzg5NDgwZTBjMDcwNTM4NWZKMQoHdGFza19pZBImCiRiYTUyMWM0OC1jNzA1LTQwNGUtYTkw
OS0yOTBkYzQ1OWQ5OGRKOgoQY3Jld19maW5nZXJwcmludBImCiQ0ODRhZmQ4Yy03MzJhLTRjNTYt
YWY5Ni01NjM5MDIzZmI4YzlKOgoQdGFza19maW5nZXJwcmludBImCiRhMDcyNjgwNC05ZjIwLTQw
ODgtYWFmOC1iNzhkYTUyNmM3NjlKOwobdGFza19maW5nZXJwcmludF9jcmVhdGVkX2F0EhwKGjIw
MjUtMDQtMTJUMTc6Mjc6MTUuNjczMTgxSjsKEWFnZW50X2ZpbmdlcnByaW50EiYKJDNiZDE2MmNm
LWNmMWQtNGUwZi04ZmIzLTk3MDljMDkyNmM4ZHoCGAGFAQABAAA=
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '1634'
Content-Type:
- application/x-protobuf
User-Agent:
- OTel-OTLP-Exporter-Python/1.31.1
method: POST
uri: https://telemetry.crewai.com:4319/v1/traces
response:
body:
string: "\n\0"
headers:
Content-Length:
- '2'
Content-Type:
- application/x-protobuf
Date:
- Sat, 12 Apr 2025 20:27:16 GMT
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "system", "content": "You are Researcher. You are
a researcher at a leading tech think tank.\nYour personal goal is: Search relevant
data and provide results\nTo give my best complete final answer to the task
respond using the exact following format:\n\nThought: I now can give a great
answer\nFinal Answer: Your final answer must be the great and the most complete
as possible, it must be outcome described.\n\nI MUST use these formats, my job
depends on it!"}, {"role": "user", "content": "\nCurrent Task: Perform a search
on specific topics.\n\nThis is the expected criteria for your final answer:
A list of relevant URLs based on the search query.\nyou MUST return the actual
complete content as the final answer, not a summary.\n\n# Useful context: \nExternal
memories:\n\n\nBegin! This is VERY important to you, use the tools available
and give your best Final Answer, your job depends on it!\n\nThought:"}], "model":
"gpt-4o-mini", "stop": ["\nObservation:"]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '989'
content-type:
- application/json
cookie:
- __cf_bm=nSje5Zn_Lk69BDG85XIauC2hrZjGl0pR2sel9__KWGw-1744489610-1.0.1.1-CPlAgcgTAE30uWrbi_2wiCWrbRDRWiaa.YuQMgST42DLDVg_wdNlJMDQT3Lsqk.g.BO68A66TTirWA0blQaQw.9xdBbPwKO609_ftjdwi5U;
_cfuvid=XLC52GLAWCOeWn2vI379CnSGKjPa7f.qr2vSAQ_R66M-1744489610542-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.68.2
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.68.2
x-stainless-raw-response:
- 'true'
x-stainless-read-timeout:
- '600.0'
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.9
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
content: "{\n \"id\": \"chatcmpl-BLbjXyMvmR8ctf0sqhp7F1ePskveM\",\n \"object\":
\"chat.completion\",\n \"created\": 1744489635,\n \"model\": \"gpt-4o-mini-2024-07-18\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"I now can give a great answer \\nFinal
Answer: Here is a list of relevant URLs based on the search query:\\n\\n1. **Artificial
Intelligence in Healthcare**\\n - https://www.healthit.gov/topic/scientific-initiatives/ai-healthcare\\n
\ - https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7317789/\\n - https://www.forbes.com/sites/bernardmarr/2021/10/18/the-top-5-ways-ai-is-being-used-in-healthcare/?sh=3edf5df51c9c\\n\\n2.
**Blockchain Technology in Supply Chain Management**\\n - https://www.ibm.com/blockchain/supply-chain\\n
\ - https://www.gartner.com/en/newsroom/press-releases/2021-06-23-gartner-says-three-use-cases-for-blockchain-in-supply-chain-are-scaling\\n
\ - https://www2.deloitte.com/us/en/insights/industry/retail-distribution/blockchain-in-supply-chain.html\\n\\n3.
**Renewable Energy Innovations**\\n - https://www.irena.org/publications/2020/Sep/Renewable-Power-Generation-Costs-in-2020\\n
\ - https://www.nrel.gov/docs/fy20osti/77021.pdf\\n - https://www.cnbc.com/2021/11/03/renewable-energy-could-get-its-first-taste-of-markets-in-2021.html\\n\\n4.
**7G Technology Developments**\\n - https://www.sciencedirect.com/science/article/pii/S1389128619308189\\n
\ - https://www.forbes.com/sites/bernardmarr/2021/11/01/what-is-7g-technology-a-beginners-guide-to-the-future-of-mobile-communications/?sh=51b8a7e1464a\\n
\ - https://www.ericsson.com/en/reports-and-research/reports/7g-networks-a-powerful-future-for-connected-society\\n\\n5.
**Impact of Quantum Computing on Cybersecurity**\\n - https://www.ibm.com/blogs/research/2021/09/quantum-computing-cybersecurity/\\n
\ - https://www.sciencedirect.com/science/article/pii/S0167739X21000072\\n
\ - https://www.techrepublic.com/article/how-quantum-computing-will-change-cybersecurity/\\n\\nThese
URLs should provide comprehensive information on the topics searched, providing
valuable insights and data for your research needs.\",\n \"refusal\":
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n \"finish_reason\":
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 185,\n \"completion_tokens\":
534,\n \"total_tokens\": 719,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\": {\n
\ \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
\"default\",\n \"system_fingerprint\": \"fp_80cf447eee\"\n}\n"
headers:
CF-RAY:
- 92f576a01d3b7e05-GRU
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Sat, 12 Apr 2025 20:27:24 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
cf-cache-status:
- DYNAMIC
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '8805'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '30000'
x-ratelimit-limit-tokens:
- '150000000'
x-ratelimit-remaining-requests:
- '29999'
x-ratelimit-remaining-tokens:
- '149999788'
x-ratelimit-reset-requests:
- 2ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_7c2d313d0b5997e903553a782b2afa25
http_version: HTTP/1.1
status_code: 200
version: 1

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -61,6 +61,35 @@ def crew_with_external_memory(external_memory_with_mocked_config, patch_configur
return crew
@pytest.fixture
def crew_with_external_memory_without_memory_flag(
external_memory_with_mocked_config, patch_configure_mem0
):
agent = Agent(
role="Researcher",
goal="Search relevant data and provide results",
backstory="You are a researcher at a leading tech think tank.",
tools=[],
verbose=True,
)
task = Task(
description="Perform a search on specific topics.",
expected_output="A list of relevant URLs based on the search query.",
agent=agent,
)
crew = Crew(
agents=[agent],
tasks=[task],
verbose=True,
process=Process.sequential,
external_memory=external_memory_with_mocked_config,
)
return crew
def test_external_memory_initialization(external_memory_with_mocked_config):
assert external_memory_with_mocked_config is not None
assert isinstance(external_memory_with_mocked_config, ExternalMemory)
@@ -137,7 +166,9 @@ def test_crew_external_memory_reset(mem_type, crew_with_external_memory):
@pytest.mark.parametrize("mem_method", ["search", "save"])
@pytest.mark.vcr(filter_headers=["authorization"])
def test_crew_external_memory_save(mem_method, crew_with_external_memory):
def test_crew_external_memory_save_with_memory_flag(
mem_method, crew_with_external_memory
):
with patch(
f"crewai.memory.external.external_memory.ExternalMemory.{mem_method}"
) as mock_method:
@@ -145,6 +176,18 @@ def test_crew_external_memory_save(mem_method, crew_with_external_memory):
assert mock_method.call_count > 0
@pytest.mark.parametrize("mem_method", ["search", "save"])
@pytest.mark.vcr(filter_headers=["authorization"])
def test_crew_external_memory_save_using_crew_without_memory_flag(
mem_method, crew_with_external_memory_without_memory_flag
):
with patch(
f"crewai.memory.external.external_memory.ExternalMemory.{mem_method}"
) as mock_method:
crew_with_external_memory_without_memory_flag.kickoff()
assert mock_method.call_count > 0
def test_external_memory_custom_storage(crew_with_external_memory):
class CustomStorage(Storage):
def __init__(self):