Unblock LLM(stream=True) to work with tools (#2582)

* feat: unblock LLM(stream=True) to work with tools

* feat: replace pytest-vcr with pytest-recording

1. pytest-vcr does not support httpx, which LiteLLM uses for streaming responses.
2. pytest-vcr is no longer maintained; its last commit was 6 years ago.
3. pytest-recording supports modern request libraries (including httpx) and is actively maintained.

* refactor: remove @skip_streaming_in_ci

Since the streaming response issue is fixed, we can remove the @skip_streaming_in_ci marker. The sketch below shows the streaming + tools flow that now works under VCR.
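
A minimal sketch of the pattern this change enables, condensed from the tests added in this commit (the `vcr_config` fixture, tool schema, and expected response are taken from the new test file; this is an illustration, not a drop-in replacement for it). pytest-recording picks up the module-scoped `vcr_config` fixture to locate cassettes, replays the streamed tool-call chunks, and `LLM.call` assembles the arguments and executes the matching entry in `available_functions`:

```python
import pytest

from crewai.llm import LLM


@pytest.fixture(scope="module")
def vcr_config(request) -> dict:
    # Tells pytest-recording where the recorded cassettes live.
    return {"cassette_library_dir": "tests/cassettes"}


get_weather_tool = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get the current weather in a given location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state, e.g. San Francisco, CA",
                }
            },
            "required": ["location"],
        },
    },
}


@pytest.mark.vcr(filter_headers=["authorization"])
def test_streaming_tool_call():
    # Streaming is no longer blocked when tools are passed.
    llm = LLM(model="openai/gpt-4o", stream=True)
    response = llm.call(
        messages=[{"role": "user", "content": "What is the weather in New York?"}],
        tools=[get_weather_tool],
        available_functions={
            "get_weather": lambda location: f"The weather in {location} is sunny"
        },
    )
    assert response == "The weather in New York, NY is sunny"
```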

---------

Co-authored-by: Lorenze Jay <63378463+lorenzejay@users.noreply.github.com>
Lucas Gomide
2025-04-17 12:58:52 -03:00
committed by GitHub
parent 8e555149f7
commit ced3c8f0e0
21 changed files with 995 additions and 402 deletions

View File

@@ -0,0 +1,133 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "What is the weather in New York?"}],
"model": "gpt-4o", "stop": [], "stream": true, "stream_options": {"include_usage":
true}, "tools": [{"type": "function", "function": {"name": "get_weather", "description":
"Get the current weather in a given location", "parameters": {"type": "object",
"properties": {"location": {"type": "string", "description": "The city and state,
e.g. San Francisco, CA"}}, "required": ["location"]}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '470'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.74.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.74.0
x-stainless-raw-response:
- 'true'
x-stainless-read-timeout:
- '600.0'
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.12
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_bCEixqN8Y40SUyius8ZfVErH","type":"function","function":{"name":"get_weather","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"location"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"New"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
York"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":","}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
NY"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}
data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[],"usage":{"prompt_tokens":68,"completion_tokens":18,"total_tokens":86,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
data: [DONE]
'
headers:
CF-RAY:
- 93147723ecc1f237-GRU
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Wed, 16 Apr 2025 14:45:16 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
cf-cache-status:
- DYNAMIC
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '620'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '30000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '29999989'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_0a08d5f042ef769aeb2c941e398f65f4
status:
code: 200
message: OK
version: 1

View File

@@ -0,0 +1,133 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "What is the weather in New York?"}],
"model": "gpt-4o", "stop": [], "stream": true, "stream_options": {"include_usage":
true}, "tools": [{"type": "function", "function": {"name": "get_weather", "description":
"Get the current weather in a given location", "parameters": {"type": "object",
"properties": {"location": {"type": "string", "description": "The city and state,
e.g. San Francisco, CA"}}, "required": ["location"]}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '470'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.74.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.74.0
x-stainless-raw-response:
- 'true'
x-stainless-read-timeout:
- '600.0'
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.12
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_ccog5nyDCLYpoWzksuCkGEqY","type":"function","function":{"name":"get_weather","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"location"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"New"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
York"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":","}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
NY"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}
data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[],"usage":{"prompt_tokens":68,"completion_tokens":18,"total_tokens":86,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
data: [DONE]
'
headers:
CF-RAY:
- 931461bbddc27df9-GRU
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Wed, 16 Apr 2025 14:30:39 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
cf-cache-status:
- DYNAMIC
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '445'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '30000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '29999989'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_1bb5862de2891623d44c012aba597c5e
status:
code: 200
message: OK
version: 1

View File

@@ -0,0 +1,279 @@
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "What is the weather in New York?"}],
"model": "gpt-4o", "stop": [], "stream": true, "stream_options": {"include_usage":
true}}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '169'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.74.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.74.0
x-stainless-raw-response:
- 'true'
x-stainless-read-timeout:
- '600.0'
x-stainless-retry-count:
- '0'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.12
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"I''m"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
unable"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
provide"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
real"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"-time"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
information"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
or"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
current"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
updates"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
For"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
latest"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
information"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
in"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
New"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
York"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
recommend"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
checking"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
reliable"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
website"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
or"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
app"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
such"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
as"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
National"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
Weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
Service"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
Weather"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":".com"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
or"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
similar"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
service"},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null}
data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[],"usage":{"prompt_tokens":15,"completion_tokens":47,"total_tokens":62,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
data: [DONE]
'
headers:
CF-RAY:
- 931461c25bb47df9-GRU
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Wed, 16 Apr 2025 14:30:40 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
cf-cache-status:
- DYNAMIC
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '298'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '30000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '29999989'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_89971fd68e5a59c9fa50e04106228b0a
status:
code: 200
message: OK
version: 1

View File

@@ -8,6 +8,7 @@ from dotenv import load_dotenv
load_result = load_dotenv(override=True)
@pytest.fixture(autouse=True)
def setup_test_environment():
"""Set up test environment with a temporary directory for SQLite storage."""
@@ -15,11 +16,13 @@ def setup_test_environment():
# Create the directory with proper permissions
storage_dir = Path(temp_dir) / "crewai_test_storage"
storage_dir.mkdir(parents=True, exist_ok=True)
# Validate that the directory was created successfully
if not storage_dir.exists() or not storage_dir.is_dir():
raise RuntimeError(f"Failed to create test storage directory: {storage_dir}")
raise RuntimeError(
f"Failed to create test storage directory: {storage_dir}"
)
# Verify directory permissions
try:
# Try to create a test file to verify write permissions
@@ -27,11 +30,20 @@ def setup_test_environment():
test_file.touch()
test_file.unlink()
except (OSError, IOError) as e:
raise RuntimeError(f"Test storage directory {storage_dir} is not writable: {e}")
raise RuntimeError(
f"Test storage directory {storage_dir} is not writable: {e}"
)
# Set environment variable to point to the test storage directory
os.environ["CREWAI_STORAGE_DIR"] = str(storage_dir)
yield
# Cleanup is handled automatically when tempfile context exits
@pytest.fixture(scope="module")
def vcr_config(request) -> dict:
return {
"cassette_library_dir": "tests/cassettes",
}

View File

@@ -42,11 +42,6 @@ from crewai.utilities.events.event_listener import EventListener
from crewai.utilities.rpm_controller import RPMController
from crewai.utilities.task_output_storage_handler import TaskOutputStorageHandler
# Skip streaming tests when running in CI/CD environments
skip_streaming_in_ci = pytest.mark.skipif(
os.getenv("CI") is not None, reason="Skipping streaming tests in CI/CD environments"
)
ceo = Agent(
role="CEO",
goal="Make sure the writers in your company produce amazing content.",
@@ -963,7 +958,6 @@ def test_api_calls_throttling(capsys):
moveon.assert_called()
@skip_streaming_in_ci
@pytest.mark.vcr(filter_headers=["authorization"])
def test_crew_kickoff_usage_metrics():
inputs = [
@@ -999,7 +993,6 @@ def test_crew_kickoff_usage_metrics():
assert result.token_usage.cached_prompt_tokens == 0
@skip_streaming_in_ci
@pytest.mark.vcr(filter_headers=["authorization"])
def test_crew_kickoff_streaming_usage_metrics():
inputs = [
@@ -4256,53 +4249,93 @@ def test_crew_kickoff_for_each_works_with_manager_agent_copy():
assert crew_copy.manager_agent.role == crew.manager_agent.role
assert crew_copy.manager_agent.goal == crew.manager_agent.goal
def test_crew_copy_with_memory():
"""Test that copying a crew with memory enabled does not raise validation errors and copies memory correctly."""
agent = Agent(role="Test Agent", goal="Test Goal", backstory="Test Backstory")
task = Task(description="Test Task", expected_output="Test Output", agent=agent)
crew = Crew(agents=[agent], tasks=[task], memory=True)
original_short_term_id = id(crew._short_term_memory) if crew._short_term_memory else None
original_long_term_id = id(crew._long_term_memory) if crew._long_term_memory else None
original_short_term_id = (
id(crew._short_term_memory) if crew._short_term_memory else None
)
original_long_term_id = (
id(crew._long_term_memory) if crew._long_term_memory else None
)
original_entity_id = id(crew._entity_memory) if crew._entity_memory else None
original_external_id = id(crew._external_memory) if crew._external_memory else None
original_user_id = id(crew._user_memory) if crew._user_memory else None
try:
crew_copy = crew.copy()
assert hasattr(crew_copy, "_short_term_memory"), "Copied crew should have _short_term_memory"
assert crew_copy._short_term_memory is not None, "Copied _short_term_memory should not be None"
assert id(crew_copy._short_term_memory) != original_short_term_id, "Copied _short_term_memory should be a new object"
assert hasattr(
crew_copy, "_short_term_memory"
), "Copied crew should have _short_term_memory"
assert (
crew_copy._short_term_memory is not None
), "Copied _short_term_memory should not be None"
assert (
id(crew_copy._short_term_memory) != original_short_term_id
), "Copied _short_term_memory should be a new object"
assert hasattr(crew_copy, "_long_term_memory"), "Copied crew should have _long_term_memory"
assert crew_copy._long_term_memory is not None, "Copied _long_term_memory should not be None"
assert id(crew_copy._long_term_memory) != original_long_term_id, "Copied _long_term_memory should be a new object"
assert hasattr(
crew_copy, "_long_term_memory"
), "Copied crew should have _long_term_memory"
assert (
crew_copy._long_term_memory is not None
), "Copied _long_term_memory should not be None"
assert (
id(crew_copy._long_term_memory) != original_long_term_id
), "Copied _long_term_memory should be a new object"
assert hasattr(crew_copy, "_entity_memory"), "Copied crew should have _entity_memory"
assert crew_copy._entity_memory is not None, "Copied _entity_memory should not be None"
assert id(crew_copy._entity_memory) != original_entity_id, "Copied _entity_memory should be a new object"
assert hasattr(
crew_copy, "_entity_memory"
), "Copied crew should have _entity_memory"
assert (
crew_copy._entity_memory is not None
), "Copied _entity_memory should not be None"
assert (
id(crew_copy._entity_memory) != original_entity_id
), "Copied _entity_memory should be a new object"
if original_external_id:
assert hasattr(crew_copy, "_external_memory"), "Copied crew should have _external_memory"
assert crew_copy._external_memory is not None, "Copied _external_memory should not be None"
assert id(crew_copy._external_memory) != original_external_id, "Copied _external_memory should be a new object"
assert hasattr(
crew_copy, "_external_memory"
), "Copied crew should have _external_memory"
assert (
crew_copy._external_memory is not None
), "Copied _external_memory should not be None"
assert (
id(crew_copy._external_memory) != original_external_id
), "Copied _external_memory should be a new object"
else:
assert not hasattr(crew_copy, "_external_memory") or crew_copy._external_memory is None, "Copied _external_memory should be None if not originally present"
assert (
not hasattr(crew_copy, "_external_memory")
or crew_copy._external_memory is None
), "Copied _external_memory should be None if not originally present"
if original_user_id:
assert hasattr(crew_copy, "_user_memory"), "Copied crew should have _user_memory"
assert crew_copy._user_memory is not None, "Copied _user_memory should not be None"
assert id(crew_copy._user_memory) != original_user_id, "Copied _user_memory should be a new object"
assert hasattr(
crew_copy, "_user_memory"
), "Copied crew should have _user_memory"
assert (
crew_copy._user_memory is not None
), "Copied _user_memory should not be None"
assert (
id(crew_copy._user_memory) != original_user_id
), "Copied _user_memory should be a new object"
else:
assert not hasattr(crew_copy, "_user_memory") or crew_copy._user_memory is None, "Copied _user_memory should be None if not originally present"
assert (
not hasattr(crew_copy, "_user_memory") or crew_copy._user_memory is None
), "Copied _user_memory should be None if not originally present"
except pydantic_core.ValidationError as e:
if "Input should be an instance of" in str(e) and ("Memory" in str(e)):
pytest.fail(f"Copying with memory raised Pydantic ValidationError, likely due to incorrect memory copy: {e}")
else:
raise e # Re-raise other validation errors
if "Input should be an instance of" in str(e) and ("Memory" in str(e)):
pytest.fail(
f"Copying with memory raised Pydantic ValidationError, likely due to incorrect memory copy: {e}"
)
else:
raise e # Re-raise other validation errors
except Exception as e:
pytest.fail(f"Copying crew raised an unexpected exception: {e}")

View File

@@ -2,13 +2,16 @@ import os
from time import sleep
from unittest.mock import MagicMock, patch
import litellm
import pytest
from pydantic import BaseModel
from crewai.agents.agent_builder.utilities.base_token_process import TokenProcess
from crewai.llm import CONTEXT_WINDOW_USAGE_RATIO, LLM
from crewai.utilities.events import crewai_event_bus
from crewai.utilities.events.tool_usage_events import ToolExecutionErrorEvent
from crewai.utilities.events import (
LLMCallCompletedEvent,
LLMStreamChunkEvent,
)
from crewai.utilities.token_counter_callback import TokenCalcHandler
@@ -304,6 +307,27 @@ def test_context_window_validation():
assert "must be between 1024 and 2097152" in str(excinfo.value)
@pytest.fixture
def get_weather_tool_schema():
return {
"type": "function",
"function": {
"name": "get_weather",
"description": "Get the current weather in a given location",
"parameters": {
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA",
}
},
"required": ["location"],
},
},
}
@pytest.mark.vcr(filter_headers=["authorization"])
@pytest.fixture
def anthropic_llm():
@@ -395,3 +419,117 @@ def test_deepseek_r1_with_open_router():
result = llm.call("What is the capital of France?")
assert isinstance(result, str)
assert "Paris" in result
def assert_event_count(
mock_emit,
expected_completed_tool_call: int = 0,
expected_stream_chunk: int = 0,
expected_completed_llm_call: int = 0,
expected_final_chunk_result: str = "",
):
event_count = {
"completed_tool_call": 0,
"stream_chunk": 0,
"completed_llm_call": 0,
}
final_chunk_result = ""
for _call in mock_emit.call_args_list:
event = _call[1]["event"]
if (
isinstance(event, LLMCallCompletedEvent)
and event.call_type.value == "tool_call"
):
event_count["completed_tool_call"] += 1
elif isinstance(event, LLMStreamChunkEvent):
event_count["stream_chunk"] += 1
final_chunk_result += event.chunk
elif (
isinstance(event, LLMCallCompletedEvent)
and event.call_type.value == "llm_call"
):
event_count["completed_llm_call"] += 1
else:
continue
assert event_count["completed_tool_call"] == expected_completed_tool_call
assert event_count["stream_chunk"] == expected_stream_chunk
assert event_count["completed_llm_call"] == expected_completed_llm_call
assert final_chunk_result == expected_final_chunk_result
@pytest.fixture
def mock_emit() -> MagicMock:
from crewai.utilities.events.crewai_event_bus import CrewAIEventsBus
with patch.object(CrewAIEventsBus, "emit") as mock_emit:
yield mock_emit
@pytest.mark.vcr(filter_headers=["authorization"])
def test_handle_streaming_tool_calls(get_weather_tool_schema, mock_emit):
llm = LLM(model="openai/gpt-4o", stream=True)
response = llm.call(
messages=[
{"role": "user", "content": "What is the weather in New York?"},
],
tools=[get_weather_tool_schema],
available_functions={
"get_weather": lambda location: f"The weather in {location} is sunny"
},
)
assert response == "The weather in New York, NY is sunny"
expected_final_chunk_result = (
'{"location":"New York, NY"}The weather in New York, NY is sunny'
)
assert_event_count(
mock_emit=mock_emit,
expected_completed_tool_call=1,
expected_stream_chunk=10,
expected_completed_llm_call=1,
expected_final_chunk_result=expected_final_chunk_result,
)
@pytest.mark.vcr(filter_headers=["authorization"])
def test_handle_streaming_tool_calls_no_available_functions(
get_weather_tool_schema, mock_emit
):
llm = LLM(model="openai/gpt-4o", stream=True)
response = llm.call(
messages=[
{"role": "user", "content": "What is the weather in New York?"},
],
tools=[get_weather_tool_schema],
)
assert response == ""
assert_event_count(
mock_emit=mock_emit,
expected_stream_chunk=9,
expected_completed_llm_call=1,
expected_final_chunk_result='{"location":"New York, NY"}',
)
@pytest.mark.vcr(filter_headers=["authorization"])
def test_handle_streaming_tool_calls_no_tools(mock_emit):
llm = LLM(model="openai/gpt-4o", stream=True)
response = llm.call(
messages=[
{"role": "user", "content": "What is the weather in New York?"},
],
)
assert (
response
== "I'm unable to provide real-time information or current weather updates. For the latest weather information in New York, I recommend checking a reliable weather website or app, such as the National Weather Service, Weather.com, or a similar service."
)
assert_event_count(
mock_emit=mock_emit,
expected_stream_chunk=46,
expected_completed_llm_call=1,
expected_final_chunk_result=response,
)

View File

@@ -1,270 +0,0 @@
interactions:
- request:
body: ''
headers:
accept:
- '*/*'
accept-encoding:
- gzip, deflate
connection:
- keep-alive
host:
- api.mem0.ai
user-agent:
- python-httpx/0.27.0
method: GET
uri: https://api.mem0.ai/v1/memories/?user_id=test
response:
body:
string: '[]'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8b477138bad847b9-BOM
Connection:
- keep-alive
Content-Length:
- '2'
Content-Type:
- application/json
Date:
- Sat, 17 Aug 2024 06:00:11 GMT
NEL:
- '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}'
Report-To:
- '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=uuyH2foMJVDpV%2FH52g1q%2FnvXKe3dBKVzvsK0mqmSNezkiszNR9OgrEJfVqmkX%2FlPFRP2sH4zrOuzGo6k%2FjzsjYJczqSWJUZHN2pPujiwnr1E9W%2BdLGKmG6%2FqPrGYAy2SBRWkkJVWsTO3OQ%3D%3D"}],"group":"cf-nel","max_age":604800}'
Server:
- cloudflare
allow:
- GET, POST, DELETE, OPTIONS
alt-svc:
- h3=":443"; ma=86400
cross-origin-opener-policy:
- same-origin
referrer-policy:
- same-origin
vary:
- Accept, origin, Cookie
x-content-type-options:
- nosniff
x-frame-options:
- DENY
status:
code: 200
message: OK
- request:
body: '{"batch": [{"properties": {"python_version": "3.12.4 (v3.12.4:8e8a4baf65,
Jun 6 2024, 17:33:18) [Clang 13.0.0 (clang-1300.0.29.30)]", "os": "darwin",
"os_version": "Darwin Kernel Version 23.4.0: Wed Feb 21 21:44:54 PST 2024; root:xnu-10063.101.15~2/RELEASE_ARM64_T6030",
"os_release": "23.4.0", "processor": "arm", "machine": "arm64", "function":
"mem0.client.main.MemoryClient", "$lib": "posthog-python", "$lib_version": "3.5.0",
"$geoip_disable": true}, "timestamp": "2024-08-17T06:00:11.526640+00:00", "context":
{}, "distinct_id": "fd411bd3-99a2-42d6-acd7-9fca8ad09580", "event": "client.init"}],
"historical_migration": false, "sentAt": "2024-08-17T06:00:11.701621+00:00",
"api_key": "phc_hgJkUVJFYtmaJqrvf6CYN67TIQ8yhXAkWzUn9AMU4yX"}'
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '740'
Content-Type:
- application/json
User-Agent:
- posthog-python/3.5.0
method: POST
uri: https://us.i.posthog.com/batch/
response:
body:
string: '{"status":"Ok"}'
headers:
Connection:
- keep-alive
Content-Length:
- '15'
Content-Type:
- application/json
Date:
- Sat, 17 Aug 2024 06:00:12 GMT
access-control-allow-credentials:
- 'true'
server:
- envoy
vary:
- origin, access-control-request-method, access-control-request-headers
x-envoy-upstream-service-time:
- '69'
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "Remember the following insights
from Agent run: test value with provider"}], "metadata": {"task": "test_task_provider",
"agent": "test_agent_provider"}, "app_id": "Researcher"}'
headers:
accept:
- '*/*'
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '219'
content-type:
- application/json
host:
- api.mem0.ai
user-agent:
- python-httpx/0.27.0
method: POST
uri: https://api.mem0.ai/v1/memories/
response:
body:
string: '{"message":"ok"}'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8b477140282547b9-BOM
Connection:
- keep-alive
Content-Length:
- '16'
Content-Type:
- application/json
Date:
- Sat, 17 Aug 2024 06:00:13 GMT
NEL:
- '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}'
Report-To:
- '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=FRjJKSk3YxVj03wA7S05H8ts35KnWfqS3wb6Rfy4kVZ4BgXfw7nJbm92wI6vEv5fWcAcHVnOlkJDggs11B01BMuB2k3a9RqlBi0dJNiMuk%2Bgm5xE%2BODMPWJctYNRwQMjNVbteUpS%2Fad8YA%3D%3D"}],"group":"cf-nel","max_age":604800}'
Server:
- cloudflare
allow:
- GET, POST, DELETE, OPTIONS
alt-svc:
- h3=":443"; ma=86400
cross-origin-opener-policy:
- same-origin
referrer-policy:
- same-origin
vary:
- Accept, origin, Cookie
x-content-type-options:
- nosniff
x-frame-options:
- DENY
status:
code: 200
message: OK
- request:
body: '{"query": "test value with provider", "limit": 3, "app_id": "Researcher"}'
headers:
accept:
- '*/*'
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '73'
content-type:
- application/json
host:
- api.mem0.ai
user-agent:
- python-httpx/0.27.0
method: POST
uri: https://api.mem0.ai/v1/memories/search/
response:
body:
string: '[]'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8b47714d083b47b9-BOM
Connection:
- keep-alive
Content-Length:
- '2'
Content-Type:
- application/json
Date:
- Sat, 17 Aug 2024 06:00:14 GMT
NEL:
- '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}'
Report-To:
- '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=2DRWL1cdKdMvnE8vx1fPUGeTITOgSGl3N5g84PS6w30GRqpfz79BtSx6REhpnOiFV8kM6KGqln0iCZ5yoHc2jBVVJXhPJhQ5t0uerD9JFnkphjISrJOU1MJjZWneT9PlNABddxvVNCmluA%3D%3D"}],"group":"cf-nel","max_age":604800}'
Server:
- cloudflare
allow:
- POST, OPTIONS
alt-svc:
- h3=":443"; ma=86400
cross-origin-opener-policy:
- same-origin
referrer-policy:
- same-origin
vary:
- Accept, origin, Cookie
x-content-type-options:
- nosniff
x-frame-options:
- DENY
status:
code: 200
message: OK
- request:
body: '{"batch": [{"properties": {"python_version": "3.12.4 (v3.12.4:8e8a4baf65,
Jun 6 2024, 17:33:18) [Clang 13.0.0 (clang-1300.0.29.30)]", "os": "darwin",
"os_version": "Darwin Kernel Version 23.4.0: Wed Feb 21 21:44:54 PST 2024; root:xnu-10063.101.15~2/RELEASE_ARM64_T6030",
"os_release": "23.4.0", "processor": "arm", "machine": "arm64", "function":
"mem0.client.main.MemoryClient", "$lib": "posthog-python", "$lib_version": "3.5.0",
"$geoip_disable": true}, "timestamp": "2024-08-17T06:00:13.593952+00:00", "context":
{}, "distinct_id": "fd411bd3-99a2-42d6-acd7-9fca8ad09580", "event": "client.add"}],
"historical_migration": false, "sentAt": "2024-08-17T06:00:13.858277+00:00",
"api_key": "phc_hgJkUVJFYtmaJqrvf6CYN67TIQ8yhXAkWzUn9AMU4yX"}'
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '739'
Content-Type:
- application/json
User-Agent:
- posthog-python/3.5.0
method: POST
uri: https://us.i.posthog.com/batch/
response:
body:
string: '{"status":"Ok"}'
headers:
Connection:
- keep-alive
Content-Length:
- '15'
Content-Type:
- application/json
Date:
- Sat, 17 Aug 2024 06:00:13 GMT
access-control-allow-credentials:
- 'true'
server:
- envoy
vary:
- origin, access-control-request-method, access-control-request-headers
x-envoy-upstream-service-time:
- '33'
status:
code: 200
message: OK
version: 1

View File

@@ -16,6 +16,13 @@ delegate_tool = tools[0]
ask_tool = tools[1]
@pytest.fixture(scope="module")
def vcr_config(request) -> dict:
return {
"cassette_library_dir": "tests/tools/agent_tools/cassettes",
}
@pytest.mark.vcr(filter_headers=["authorization"])
def test_delegate_work():
result = delegate_tool.run(

View File

@@ -21,6 +21,13 @@ from crewai.utilities.converter import (
from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser
@pytest.fixture(scope="module")
def vcr_config(request) -> dict:
return {
"cassette_library_dir": "tests/utilities/cassettes",
}
# Sample Pydantic models for testing
class EmailResponse(BaseModel):
previous_message_content: str

View File

@@ -50,10 +50,13 @@ from crewai.utilities.events.tool_usage_events import (
ToolUsageErrorEvent,
)
# Skip streaming tests when running in CI/CD environments
skip_streaming_in_ci = pytest.mark.skipif(
os.getenv("CI") is not None, reason="Skipping streaming tests in CI/CD environments"
)
@pytest.fixture(scope="module")
def vcr_config(request) -> dict:
return {
"cassette_library_dir": "tests/utilities/cassettes",
}
base_agent = Agent(
role="base_agent",
@@ -625,7 +628,6 @@ def test_llm_emits_call_failed_event():
assert received_events[0].error == error_message
@skip_streaming_in_ci
@pytest.mark.vcr(filter_headers=["authorization"])
def test_llm_emits_stream_chunk_events():
"""Test that LLM emits stream chunk events when streaming is enabled."""
@@ -650,7 +652,6 @@ def test_llm_emits_stream_chunk_events():
assert "".join(received_chunks) == response
@skip_streaming_in_ci
@pytest.mark.vcr(filter_headers=["authorization"])
def test_llm_no_stream_chunks_when_streaming_disabled():
"""Test that LLM doesn't emit stream chunk events when streaming is disabled."""