Mirror of https://github.com/crewAIInc/crewAI.git (synced 2026-01-27 00:58:13 +00:00)

Commit: adjust tests
@@ -18,6 +18,7 @@ from crewai.tools.tool_calling import InstructorToolCalling
 from crewai.tools.tool_usage import ToolUsage
 from crewai.utilities import RPMController
 from crewai.utilities.events import crewai_event_bus
+from crewai.utilities.events.llm_events import LLMStreamChunkEvent
 from crewai.utilities.events.tool_usage_events import ToolUsageFinishedEvent
 
 
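The newly imported LLMStreamChunkEvent is what the streaming test added further down listens for. For reference, a scoped listener on its own looks roughly like this; it is a minimal sketch that reuses only the event-bus APIs already visible in this diff, and the surrounding variable names are illustrative:

from crewai.utilities.events import crewai_event_bus
from crewai.utilities.events.llm_events import LLMStreamChunkEvent

received_chunks = []

with crewai_event_bus.scoped_handlers():

    @crewai_event_bus.on(LLMStreamChunkEvent)
    def handle_stream_chunk(source, event):
        # Each streamed delta is delivered as event.chunk.
        received_chunks.append(event.chunk)

    # Any LLM configured with stream=True and called inside this block
    # causes chunks to accumulate in received_chunks.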
@@ -259,9 +260,7 @@ def test_cache_hitting():
     def handle_tool_end(source, event):
         received_events.append(event)
 
-    with (
-        patch.object(CacheHandler, "read") as read,
-    ):
+    with (patch.object(CacheHandler, "read") as read,):
        read.return_value = "0"
         task = Task(
             description="What is 2 times 6? Ignore correctness and just return the result of the multiplication tool, you must use the tool.",
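The hunk above collapses the multi-line parenthesized with block into a single line with one context manager; both spellings behave identically on Python 3.10+, as in this illustrative sketch (the CacheHandler class here is a stand-in defined for the example, not the real crewai cache handler):

from unittest.mock import patch


class CacheHandler:  # stand-in class for illustration only
    def read(self, key):
        return None


# Multi-line parenthesized form (the style removed by this commit):
with (
    patch.object(CacheHandler, "read") as read,
):
    read.return_value = "0"

# Single-line form (the style added by this commit), identical in behavior:
with (patch.object(CacheHandler, "read") as read,):
    read.return_value = "0"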
@@ -1798,3 +1797,66 @@ def test_litellm_anthropic_error_handling():
 
         # Verify the LLM call was only made once (no retries)
         mock_llm_call.assert_called_once()
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_agent_streaming_with_tool_calling():
+    """Test that streaming works correctly with tool calling."""
+    received_chunks = []
+    tool_called = False
+
+    # Set up event listener to capture streaming chunks
+    with crewai_event_bus.scoped_handlers():
+
+        @crewai_event_bus.on(LLMStreamChunkEvent)
+        def handle_stream_chunk(source, event):
+            received_chunks.append(event.chunk)
+
+        # Define a tool that will be called
+        @tool
+        def calculator(expression: str) -> str:
+            """Calculate the result of a mathematical expression."""
+            nonlocal tool_called
+            tool_called = True
+            try:
+                result = eval(expression)
+                return f"The result of {expression} is {result}"
+            except Exception as e:
+                return f"Error calculating {expression}: {str(e)}"
+
+        # Create an agent with streaming enabled
+        agent = Agent(
+            role="Math Assistant",
+            goal="Help users with mathematical calculations",
+            backstory="I am an AI assistant specialized in mathematics.",
+            verbose=True,
+            llm=LLM(model="gpt-4o", stream=True),  # Enable streaming
+        )
+
+        # Create a task that will require tool calling
+        task = Task(
+            description="Calculate 23 * 45 using the calculator tool.",
+            agent=agent,
+            expected_output="The result of the calculation.",
+        )
+
+        # Execute the task with the calculator tool
+        output = agent.execute_task(task=task, tools=[calculator])
+
+        # Verify that streaming chunks were received
+        assert len(received_chunks) > 0, "No streaming chunks were received"
+
+        # Verify that the tool was called
+        assert tool_called, "Calculator tool was not called"
+
+        # Verify that the output contains the correct result
+        assert "1035" in output, f"Output does not contain the correct result: {output}"
+
+        # Verify that the streaming chunks form a coherent response
+        combined_chunks = "".join(received_chunks)
+        assert len(combined_chunks) > 0, "Combined chunks are empty"
+
+        # Log for debugging
+        print(f"Received {len(received_chunks)} chunks")
+        print(f"First few chunks: {received_chunks[:3]}")
+        print(f"Output: {output}")
tests/cassettes/test_agent_streaming_with_tool_calling.yaml (new file, 420 lines)
@@ -0,0 +1,420 @@
interactions:
- request:
    body: '{"messages": [{"role": "system", "content": "You are Math Assistant. I
      am an AI assistant specialized in mathematics.\nYour personal goal is: Help
      users with mathematical calculations\nYou ONLY have access to the following
      tools, and should NEVER make up tools that are not listed here:\n\nTool Name:
      calculator\nTool Arguments: {''expression'': {''description'': None, ''type'':
      ''str''}}\nTool Description: Calculate the result of a mathematical expression.\n\nIMPORTANT:
      Use the following format in your response:\n\n```\nThought: you should always
      think about what to do\nAction: the action to take, only one name of [calculator],
      just the name, exactly as it''s written.\nAction Input: the input to the action,
      just a simple JSON object, enclosed in curly braces, using \" to wrap keys and
      values.\nObservation: the result of the action\n```\n\nOnce all necessary information
      is gathered, return the following format:\n\n```\nThought: I now know the final
      answer\nFinal Answer: the final answer to the original input question\n```"},
      {"role": "user", "content": "\nCurrent Task: Calculate 23 * 45 using the calculator
      tool.\n\nThis is the expected criteria for your final answer: The result of
      the calculation.\nyou MUST return the actual complete content as the final answer,
      not a summary.\n\nBegin! This is VERY important to you, use the tools available
      and give your best Final Answer, your job depends on it!\n\nThought:"}], "model":
      "gpt-4o", "stop": ["\nObservation:"], "stream": true}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate, zstd
      connection:
      - keep-alive
      content-length:
      - '1488'
      content-type:
      - application/json
      cookie:
      - _cfuvid=xecEkmr_qTiKn7EKC7aeGN5bpsbPM9ofyIsipL4VCYM-1734033219265-0.0.1.1-604800000
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.65.1
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.65.1
      x-stainless-raw-response:
      - 'true'
      x-stainless-read-timeout:
      - '600.0'
      x-stainless-retry-count:
      - '0'
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.12.8
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: 'data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        I"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        need"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        to"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        multiply"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        the"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        numbers"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        "},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"23"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        and"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        "},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"45"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        to"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        find"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        the"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        result"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":".\n\n"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        calculator"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        Input"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        {\""},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"expression"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"\":"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        \""},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"23"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        *"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        "},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"45"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"\"}"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKiYUn06lpARj6daDhhdznCpDGu","object":"chat.completion.chunk","created":1741120752,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}


        data: [DONE]


        '
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 91b42e7a3a8cdd24-ATL
      Connection:
      - keep-alive
      Content-Type:
      - text/event-stream; charset=utf-8
      Date:
      - Tue, 04 Mar 2025 20:39:12 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - __cf_bm=U9sEwgKRGGwBFmR3cSvnCyPI_mv0PJ_LwaX4QGIl9d4-1741120752-1.0.1.1-pu1RGWDKEtePCn25B7Grmk8ZBGtG6IU93jtF7Tb.PnV25f4F6KkELvq9dq2d.Zrj8IzHmbYdUKA.fH0S3DmzjOUcPY3nMV.lL87ajtmnZ9Q;
        path=/; expires=Tue, 04-Mar-25 21:09:12 GMT; domain=.api.openai.com; HttpOnly;
        Secure; SameSite=None
      - _cfuvid=VCFvsyb9HY4ITGtseTxBGiADWlhpqt0w9jrNxOrz9WU-1741120752688-0.0.1.1-604800000;
        path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '341'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=31536000; includeSubDomains; preload
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '30000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '29999654'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_3d5518f5f530457596ce4da2038100a6
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "system", "content": "You are Math Assistant. I
      am an AI assistant specialized in mathematics.\nYour personal goal is: Help
      users with mathematical calculations\nYou ONLY have access to the following
      tools, and should NEVER make up tools that are not listed here:\n\nTool Name:
      calculator\nTool Arguments: {''expression'': {''description'': None, ''type'':
      ''str''}}\nTool Description: Calculate the result of a mathematical expression.\n\nIMPORTANT:
      Use the following format in your response:\n\n```\nThought: you should always
      think about what to do\nAction: the action to take, only one name of [calculator],
      just the name, exactly as it''s written.\nAction Input: the input to the action,
      just a simple JSON object, enclosed in curly braces, using \" to wrap keys and
      values.\nObservation: the result of the action\n```\n\nOnce all necessary information
      is gathered, return the following format:\n\n```\nThought: I now know the final
      answer\nFinal Answer: the final answer to the original input question\n```"},
      {"role": "user", "content": "\nCurrent Task: Calculate 23 * 45 using the calculator
      tool.\n\nThis is the expected criteria for your final answer: The result of
      the calculation.\nyou MUST return the actual complete content as the final answer,
      not a summary.\n\nBegin! This is VERY important to you, use the tools available
      and give your best Final Answer, your job depends on it!\n\nThought:"}, {"role":
      "assistant", "content": "Thought: I need to multiply the numbers 23 and 45 to
      find the result.\n\nAction: calculator\nAction Input: {\"expression\": \"23
      * 45\"}\nObservation: The result of 23 * 45 is 1035"}], "model": "gpt-4o", "stop":
      ["\nObservation:"], "stream": true}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate, zstd
      connection:
      - keep-alive
      content-length:
      - '1706'
      content-type:
      - application/json
      cookie:
      - _cfuvid=VCFvsyb9HY4ITGtseTxBGiADWlhpqt0w9jrNxOrz9WU-1741120752688-0.0.1.1-604800000;
        __cf_bm=U9sEwgKRGGwBFmR3cSvnCyPI_mv0PJ_LwaX4QGIl9d4-1741120752-1.0.1.1-pu1RGWDKEtePCn25B7Grmk8ZBGtG6IU93jtF7Tb.PnV25f4F6KkELvq9dq2d.Zrj8IzHmbYdUKA.fH0S3DmzjOUcPY3nMV.lL87ajtmnZ9Q
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.65.1
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.65.1
      x-stainless-raw-response:
      - 'true'
      x-stainless-read-timeout:
      - '600.0'
      x-stainless-retry-count:
      - '0'
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.12.8
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: 'data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        I"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        now"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        know"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        the"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        final"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        answer"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        Answer"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"
        "},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"103"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{"content":"5"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B7TKjNZ9dx1zBap5Gvu9XaUlHID73","object":"chat.completion.chunk","created":1741120753,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_fc9f1d7035","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}


        data: [DONE]


        '
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 91b42e840ccbdd24-ATL
      Connection:
      - keep-alive
      Content-Type:
      - text/event-stream; charset=utf-8
      Date:
      - Tue, 04 Mar 2025 20:39:13 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '234'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=31536000; includeSubDomains; preload
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '30000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '29999609'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_adcd6994207d958e980ae9c2460361f8
    status:
      code: 200
      message: OK
version: 1
@@ -1,144 +0,0 @@
interactions:
- request:
    body: '{"messages": [{"role": "user", "content": "Process this text with the sample
      tool: ''Hello, world!''"}], "model": "gpt-4", "stop": [], "stream": true, "tools":
      [{"type": "function", "function": {"name": "sample_tool", "description": "A
      sample tool that processes text", "parameters": {"type": "object", "properties":
      {"text": {"type": "string", "description": "The text to process"}}, "required":
      ["text"]}}}]}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate, zstd
      connection:
      - keep-alive
      content-length:
      - '408'
      content-type:
      - application/json
      cookie:
      - _cfuvid=5wzaJSCvT1p1Eazad55wDvp1JsgxrlghhmmU9tx0fMs-1741025614868-0.0.1.1-604800000;
        __cf_bm=Jydtg8l0yjWRI2vKmejdq.C1W.sasIwEbTrV2rUt6V0-1741025614-1.0.1.1-Af3gmq.j2ecn9QEa3aCVY09QU4VqoW2GTk9AjvzPA.jyAZlwhJd4paniSt3kSusH0tryW03iC8uaX826hb2xzapgcfSm6Jdh_eWh_BMCh_8
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.65.1
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.65.1
      x-stainless-raw-response:
      - 'true'
      x-stainless-read-timeout:
      - '600.0'
      x-stainless-retry-count:
      - '0'
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.12.8
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: 'data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_CvJXBBDO8uDFZ9bdZkZfsmLb","type":"function","function":{"name":"sample_tool","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\n"}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
        "}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
        \""}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"text"}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":"}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
        \""}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Hello"}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":","}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
        world"}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"!\"\n"}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"}"}}]},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-B74aFEEmqXpcnfB7af27OFRQZ5mpq","object":"chat.completion.chunk","created":1741025615,"model":"gpt-4-0613","service_tier":"default","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]}


        data: [DONE]


        '
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 91ab1bd2d868bcda-ATL
      Connection:
      - keep-alive
      Content-Type:
      - text/event-stream; charset=utf-8
      Date:
      - Mon, 03 Mar 2025 18:13:36 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '552'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=31536000; includeSubDomains; preload
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '1000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '999968'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 1ms
      x-request-id:
      - req_541a74020ec89786d0bef89a013fdc78
    status:
      code: 200
      message: OK
version: 1
@@ -1,109 +0,0 @@
interactions:
- request:
    body: '{"messages": [{"role": "user", "content": "Process this text with the sample
      tool: ''Hello, world!''"}], "model": "gpt-4", "stop": [], "stream": false, "tools":
      [{"type": "function", "function": {"name": "sample_tool", "description": "A
      sample tool that processes text", "parameters": {"type": "object", "properties":
      {"text": {"type": "string", "description": "The text to process"}}, "required":
      ["text"]}}}]}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate, zstd
      connection:
      - keep-alive
      content-length:
      - '409'
      content-type:
      - application/json
      cookie:
      - _cfuvid=5wzaJSCvT1p1Eazad55wDvp1JsgxrlghhmmU9tx0fMs-1741025614868-0.0.1.1-604800000;
        __cf_bm=Jydtg8l0yjWRI2vKmejdq.C1W.sasIwEbTrV2rUt6V0-1741025614-1.0.1.1-Af3gmq.j2ecn9QEa3aCVY09QU4VqoW2GTk9AjvzPA.jyAZlwhJd4paniSt3kSusH0tryW03iC8uaX826hb2xzapgcfSm6Jdh_eWh_BMCh_8
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.65.1
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.65.1
      x-stainless-raw-response:
      - 'true'
      x-stainless-read-timeout:
      - '600.0'
      x-stainless-retry-count:
      - '0'
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.12.8
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAAwAAAP//jFPLbtswELzrK9g9y4WV+AXdEqCF+0KBtmkOVSHQ5EpiQpEMSaVJDP97
        QcmWZMcFqoNA7OzM7s6S24gQEBxSAqyintVGTq6XM7p+9+FivvrG17ebq2LJ+YP++qVSn6oNxIGh
        N3fI/IH1lunaSPRCqw5mFqnHoJosZ8n0Yr5Ili1Qa44y0ErjJ7PJdJFc7hmVFgwdpORXRAgh2/Yf
        elMcnyAl0/gQqdE5WiKkfRIhYLUMEaDOCeep8hAPINPKowrtqkbKEeC1ljmjUg6Fu287Og8GUSnz
        HzN18/Dokrtb+n46/3jNmp/fX14+34/qddLPpm2oaBTrjRnhfTw9KUYIKFq3XEeDrXno8oROCFBb
        NjUqH1qHbaYIycDjk88gJRmsUUodkz/aSv4mg0zt4EhgF507/x55Y7FoHJV70/bxXb8FqUtj9cad
        mAqFUMJVuUXq2uHGHkeHam0daI7WCMbq2vjc63tUQXYx70RhuF0DmKz2oNeeyiG+uozPqOUcPRXt
        lvuLxSirkA/M4YLRhgs9AqLR5K+bOafdTS9U+T/yA8AYGo88Nxa5YMcDD2kWw9v7V1rvcdswOLSP
        gmHuBdqwDY4FbWT3OsA9O481XghVojVW9E8k2kV/AQAA//8DAGu8z/YZBAAA
    headers:
      CF-RAY:
      - 91ab1bda9b68bcda-ATL
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Mon, 03 Mar 2025 18:13:37 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      cf-cache-status:
      - DYNAMIC
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '875'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=31536000; includeSubDomains; preload
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '1000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '999968'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 1ms
      x-request-id:
      - req_291d5b012b356f9fba6cbff30f52e6aa
    status:
      code: 200
      message: OK
version: 1
@@ -630,7 +630,7 @@ def test_llm_emits_stream_chunk_events():
             received_chunks.append(event.chunk)
 
         # Create an LLM with streaming enabled
-        llm = LLM(model="gpt-3.5-turbo", stream=True)
+        llm = LLM(model="gpt-4o", stream=True)
 
         # Call the LLM with a simple message
         response = llm.call("Tell me a short joke")
@@ -654,7 +654,7 @@ def test_llm_no_stream_chunks_when_streaming_disabled():
             received_chunks.append(event.chunk)
 
         # Create an LLM with streaming disabled
-        llm = LLM(model="gpt-3.5-turbo", stream=False)
+        llm = LLM(model="gpt-4o", stream=False)
 
         # Call the LLM with a simple message
         response = llm.call("Tell me a short joke")
@@ -666,89 +666,6 @@ def test_llm_no_stream_chunks_when_streaming_disabled():
         assert response and isinstance(response, str)
 
 
-@pytest.mark.vcr(filter_headers=["authorization"])
-def test_llm_tool_calling_with_streaming():
-    """Test that tool calling works correctly with streaming enabled."""
-    received_chunks = []
-    tool_called = False
-
-    def sample_tool(text: str) -> str:
-        nonlocal tool_called
-        tool_called = True
-        return f"Tool processed: {text}"
-
-    available_functions = {"sample_tool": sample_tool}
-
-    tools = [
-        {
-            "type": "function",
-            "function": {
-                "name": "sample_tool",
-                "description": "A sample tool that processes text",
-                "parameters": {
-                    "type": "object",
-                    "properties": {
-                        "text": {"type": "string", "description": "The text to process"}
-                    },
-                    "required": ["text"],
-                },
-            },
-        }
-    ]
-
-    with crewai_event_bus.scoped_handlers():
-
-        @crewai_event_bus.on(LLMStreamChunkEvent)
-        def handle_stream_chunk(source, event):
-            received_chunks.append(event.chunk)
-
-        # Create an LLM with streaming enabled
-        llm = LLM(model="gpt-4", stream=True)
-
-        # Store original methods
-        original_call = llm.call
-        original_handle_tool_call = llm._handle_tool_call
-
-        # Create a mock call method that simulates streaming and tool calling
-        def mock_call(messages, tools=None, callbacks=None, available_functions=None):
-            # Emit some chunks first
-            crewai_event_bus.emit(llm, event=LLMStreamChunkEvent(chunk="I'll process "))
-            crewai_event_bus.emit(llm, event=LLMStreamChunkEvent(chunk="that text "))
-            crewai_event_bus.emit(llm, event=LLMStreamChunkEvent(chunk="for you."))
-
-            # Call the tool
-            if available_functions and "sample_tool" in available_functions:
-                result = available_functions["sample_tool"]("Hello, world!")
-                return result
-
-            return "No tool was called"
-
-        # Replace the methods with our mocks
-        llm.call = mock_call
-
-        try:
-            # Call the LLM with a message that should trigger tool use
-            response = llm.call(
-                "Process this text with the sample tool: 'Hello, world!'",
-                tools=tools,
-                available_functions=available_functions,
-            )
-
-            # Verify that we received chunks
-            assert len(received_chunks) == 3
-            assert "".join(received_chunks) == "I'll process that text for you."
-
-            # Verify that the tool was called
-            assert tool_called
-
-            # Verify the response contains the tool's output
-            assert response == "Tool processed: Hello, world!"
-
-        finally:
-            # Restore the original methods
-            llm.call = original_call
-
-
 @pytest.mark.vcr(filter_headers=["authorization"])
 def test_streaming_fallback_to_non_streaming():
     """Test that streaming falls back to non-streaming when there's an error."""
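The removed mock-based test above is effectively superseded by the cassette-backed test_agent_streaming_with_tool_calling added earlier in this commit. For reference, the minimal shape of a VCR-backed streaming assertion looks like this; it is a sketch reusing only constructs that appear elsewhere in this diff, and the test name and the top-level LLM import path are assumptions, not part of the commit:

import pytest

from crewai import LLM  # assumed top-level import path
from crewai.utilities.events import crewai_event_bus
from crewai.utilities.events.llm_events import LLMStreamChunkEvent


@pytest.mark.vcr(filter_headers=["authorization"])
def test_streaming_chunks_replayed_from_cassette():  # hypothetical test name
    received_chunks = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(LLMStreamChunkEvent)
        def handle_stream_chunk(source, event):
            received_chunks.append(event.chunk)

        # With stream=True the recorded SSE response is replayed chunk by chunk.
        llm = LLM(model="gpt-4o", stream=True)
        response = llm.call("Tell me a short joke")

    assert received_chunks, "expected at least one streamed chunk"
    assert response and isinstance(response, str)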
@@ -762,7 +679,7 @@ def test_streaming_fallback_to_non_streaming():
             received_chunks.append(event.chunk)
 
         # Create an LLM with streaming enabled
-        llm = LLM(model="gpt-3.5-turbo", stream=True)
+        llm = LLM(model="gpt-4o", stream=True)
 
         # Store original methods
         original_call = llm.call