Mirror of https://github.com/crewAIInc/crewAI.git (synced 2026-01-27 17:18:13 +00:00)
feat: unblock LLM(stream=True) to work with tools
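The change lets a streaming LLM accumulate tool-call chunks and execute the matching function. A minimal usage sketch based on the tests added in this commit (the get_weather schema and the lambda are the test fixtures from the diff below, not a public API):

from crewai.llm import LLM

# Tool schema copied from the new test fixture in this commit.
get_weather_tool_schema = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get the current weather in a given location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state, e.g. San Francisco, CA",
                }
            },
            "required": ["location"],
        },
    },
}

# With stream=True, tool-call argument fragments are accumulated per chunk and
# the matching available_function is invoked once the arguments parse as JSON.
llm = LLM(model="openai/gpt-4o", stream=True)
response = llm.call(
    messages=[{"role": "user", "content": "What is the weather in New York?"}],
    tools=[get_weather_tool_schema],
    available_functions={
        "get_weather": lambda location: f"The weather in {location} is sunny"
    },
)
print(response)  # e.g. "The weather in New York, NY is sunny"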
@@ -4,9 +4,12 @@ import os
 import sys
 import threading
 import warnings
+from collections import defaultdict
 from contextlib import contextmanager
+from types import SimpleNamespace
 from typing import (
     Any,
+    DefaultDict,
     Dict,
     List,
     Literal,
@@ -18,7 +21,8 @@ from typing import (
 )

 from dotenv import load_dotenv
-from pydantic import BaseModel
+from litellm.types.utils import ChatCompletionDeltaToolCall
+from pydantic import BaseModel, Field

 from crewai.utilities.events.llm_events import (
     LLMCallCompletedEvent,
@@ -219,6 +223,15 @@ class StreamingChoices(TypedDict):
     finish_reason: Optional[str]


+class FunctionArgs(BaseModel):
+    name: str = ""
+    arguments: str = ""
+
+
+class AccumulatedToolArgs(BaseModel):
+    function: FunctionArgs = Field(default_factory=FunctionArgs)
+
+
 class LLM(BaseLLM):
     def __init__(
         self,
@@ -371,6 +384,11 @@ class LLM(BaseLLM):
         last_chunk = None
         chunk_count = 0
         usage_info = None
+        tool_calls = None
+
+        accumulated_tool_args: DefaultDict[int, AccumulatedToolArgs] = defaultdict(
+            AccumulatedToolArgs
+        )

         # --- 2) Make sure stream is set to True and include usage metrics
         params["stream"] = True
@@ -428,6 +446,20 @@ class LLM(BaseLLM):
                     if chunk_content is None and isinstance(delta, dict):
                         # Some models might send empty content chunks
                         chunk_content = ""
+
+                    # Enable tool calls using streaming
+                    if "tool_calls" in delta:
+                        tool_calls = delta["tool_calls"]
+
+                        if tool_calls:
+                            result = self._handle_streaming_tool_calls(
+                                tool_calls=tool_calls,
+                                accumulated_tool_args=accumulated_tool_args,
+                                available_functions=available_functions,
+                            )
+                            if result is not None:
+                                chunk_content = result
+
                 except Exception as e:
                     logging.debug(f"Error extracting content from chunk: {e}")
                     logging.debug(f"Chunk format: {type(chunk)}, content: {chunk}")
@@ -442,7 +474,6 @@ class LLM(BaseLLM):
                     self,
                     event=LLMStreamChunkEvent(chunk=chunk_content),
                 )
-
             # --- 4) Fallback to non-streaming if no content received
             if not full_response.strip() and chunk_count == 0:
                 logging.warning(
@@ -501,7 +532,7 @@ class LLM(BaseLLM):
                 )

             # --- 6) If still empty, raise an error instead of using a default response
-            if not full_response.strip():
+            if not full_response.strip() and len(accumulated_tool_args) == 0:
                 raise Exception(
                     "No content received from streaming response. Received empty chunks or failed to extract content."
                 )
@@ -533,8 +564,8 @@ class LLM(BaseLLM):
                 tool_calls = getattr(message, "tool_calls")
             except Exception as e:
                 logging.debug(f"Error checking for tool calls: {e}")
-
             # --- 8) If no tool calls or no available functions, return the text response directly
+
             if not tool_calls or not available_functions:
                 # Log token usage if available in streaming mode
                 self._handle_streaming_callbacks(callbacks, usage_info, last_chunk)
@@ -568,6 +599,47 @@ class LLM(BaseLLM):
             )
             raise Exception(f"Failed to get streaming response: {str(e)}")

+    def _handle_streaming_tool_calls(
+        self,
+        tool_calls: List[ChatCompletionDeltaToolCall],
+        accumulated_tool_args: DefaultDict[int, AccumulatedToolArgs],
+        available_functions: Optional[Dict[str, Any]] = None,
+    ) -> None | str:
+        for tool_call in tool_calls:
+            current_tool_accumulator = accumulated_tool_args[tool_call.index]
+
+            if tool_call.function.name:
+                current_tool_accumulator.function.name = tool_call.function.name
+
+            if tool_call.function.arguments:
+                current_tool_accumulator.function.arguments += (
+                    tool_call.function.arguments
+                )
+
+            crewai_event_bus.emit(
+                self,
+                event=LLMStreamChunkEvent(
+                    tool_call=tool_call.to_dict(),
+                    chunk=tool_call.function.arguments,
+                ),
+            )
+
+            if (
+                current_tool_accumulator.function.name
+                and current_tool_accumulator.function.arguments
+                and available_functions
+            ):
+                try:
+                    json.loads(current_tool_accumulator.function.arguments)
+
+                    return self._handle_tool_call(
+                        [current_tool_accumulator],
+                        available_functions,
+                    )
+                except json.JSONDecodeError:
+                    continue
+        return None
+
     def _handle_streaming_callbacks(
         self,
         callbacks: Optional[List[Any]],
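The new _handle_streaming_tool_calls helper above only dispatches a tool once the streamed argument fragments form valid JSON. A minimal standalone sketch of that accumulate-then-parse idea (the fragment values mirror the recorded cassette below; the variable names are illustrative):

import json
from collections import defaultdict

# Argument fragments arrive per tool-call index; execution waits until the
# accumulated string parses as complete JSON.
fragments = ['{"', "location", '":"', "New", " York", ",", " NY", '"}']
accumulated = defaultdict(str)

for fragment in fragments:
    accumulated[0] += fragment
    try:
        args = json.loads(accumulated[0])
    except json.JSONDecodeError:
        continue  # still incomplete, keep streaming
    print("ready to call get_weather with", args)  # {'location': 'New York, NY'}
    break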
@@ -1,6 +1,8 @@
 from enum import Enum
 from typing import Any, Dict, List, Optional, Union

+from pydantic import BaseModel
+
 from crewai.utilities.events.base_events import BaseEvent


@@ -41,8 +43,21 @@ class LLMCallFailedEvent(BaseEvent):
     type: str = "llm_call_failed"


+class FunctionCall(BaseModel):
+    arguments: str
+    name: Optional[str] = None
+
+
+class ToolCall(BaseModel):
+    id: Optional[str] = None
+    function: FunctionCall
+    type: Optional[str] = None
+    index: int
+
+
 class LLMStreamChunkEvent(BaseEvent):
     """Event emitted when a streaming chunk is received"""

     type: str = "llm_stream_chunk"
     chunk: str
+    tool_call: Optional[ToolCall] = None
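With the event model extended above, a streamed tool-call chunk can carry structured call metadata alongside the raw argument fragment. A small construction sketch using the new fields (the id value is hypothetical, for illustration only):

from crewai.utilities.events.llm_events import (
    FunctionCall,
    LLMStreamChunkEvent,
    ToolCall,
)

# chunk holds the raw argument text for this delta; tool_call mirrors the
# provider's tool-call metadata accumulated so far.
event = LLMStreamChunkEvent(
    chunk='{"location":"New York, NY"}',
    tool_call=ToolCall(
        id="call_123",  # hypothetical id, not from the diff
        type="function",
        index=0,
        function=FunctionCall(
            name="get_weather",
            arguments='{"location":"New York, NY"}',
        ),
    ),
)
print(event.type, event.tool_call.function.name)  # llm_stream_chunk get_weather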
133 tests/cassettes/test_handle_streaming_tool_calls.yaml (new file)
@@ -0,0 +1,133 @@
+interactions:
+- request:
+    body: '{"messages": [{"role": "user", "content": "What is the weather in New York?"}],
+      "model": "gpt-4o", "stop": [], "stream": true, "stream_options": {"include_usage":
+      true}, "tools": [{"type": "function", "function": {"name": "get_weather", "description":
+      "Get the current weather in a given location", "parameters": {"type": "object",
+      "properties": {"location": {"type": "string", "description": "The city and state,
+      e.g. San Francisco, CA"}}, "required": ["location"]}}}]}'
+    headers:
+      accept:
+      - application/json
+      accept-encoding:
+      - gzip, deflate
+      connection:
+      - keep-alive
+      content-length:
+      - '470'
+      content-type:
+      - application/json
+      host:
+      - api.openai.com
+      user-agent:
+      - OpenAI/Python 1.74.0
+      x-stainless-arch:
+      - arm64
+      x-stainless-async:
+      - 'false'
+      x-stainless-lang:
+      - python
+      x-stainless-os:
+      - MacOS
+      x-stainless-package-version:
+      - 1.74.0
+      x-stainless-raw-response:
+      - 'true'
+      x-stainless-read-timeout:
+      - '600.0'
+      x-stainless-retry-count:
+      - '0'
+      x-stainless-runtime:
+      - CPython
+      x-stainless-runtime-version:
+      - 3.11.12
+    method: POST
+    uri: https://api.openai.com/v1/chat/completions
+  response:
+    body:
+      string: 'data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_bCEixqN8Y40SUyius8ZfVErH","type":"function","function":{"name":"get_weather","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"location"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"New"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
+        York"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":","}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
+        NY"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[],"usage":{"prompt_tokens":68,"completion_tokens":18,"total_tokens":86,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
+
+
+        data: [DONE]
+
+
+        '
+    headers:
+      CF-RAY:
+      - 93147723ecc1f237-GRU
+      Connection:
+      - keep-alive
+      Content-Type:
+      - text/event-stream; charset=utf-8
+      Date:
+      - Wed, 16 Apr 2025 14:45:16 GMT
+      Server:
+      - cloudflare
+      Transfer-Encoding:
+      - chunked
+      X-Content-Type-Options:
+      - nosniff
+      access-control-expose-headers:
+      - X-Request-ID
+      alt-svc:
+      - h3=":443"; ma=86400
+      cf-cache-status:
+      - DYNAMIC
+      openai-organization:
+      - crewai-iuxna1
+      openai-processing-ms:
+      - '620'
+      openai-version:
+      - '2020-10-01'
+      strict-transport-security:
+      - max-age=31536000; includeSubDomains; preload
+      x-ratelimit-limit-requests:
+      - '10000'
+      x-ratelimit-limit-tokens:
+      - '30000000'
+      x-ratelimit-remaining-requests:
+      - '9999'
+      x-ratelimit-remaining-tokens:
+      - '29999989'
+      x-ratelimit-reset-requests:
+      - 6ms
+      x-ratelimit-reset-tokens:
+      - 0s
+      x-request-id:
+      - req_0a08d5f042ef769aeb2c941e398f65f4
+    status:
+      code: 200
+      message: OK
+version: 1
@@ -0,0 +1,133 @@
+interactions:
+- request:
+    body: '{"messages": [{"role": "user", "content": "What is the weather in New York?"}],
+      "model": "gpt-4o", "stop": [], "stream": true, "stream_options": {"include_usage":
+      true}, "tools": [{"type": "function", "function": {"name": "get_weather", "description":
+      "Get the current weather in a given location", "parameters": {"type": "object",
+      "properties": {"location": {"type": "string", "description": "The city and state,
+      e.g. San Francisco, CA"}}, "required": ["location"]}}}]}'
+    headers:
+      accept:
+      - application/json
+      accept-encoding:
+      - gzip, deflate
+      connection:
+      - keep-alive
+      content-length:
+      - '470'
+      content-type:
+      - application/json
+      host:
+      - api.openai.com
+      user-agent:
+      - OpenAI/Python 1.74.0
+      x-stainless-arch:
+      - arm64
+      x-stainless-async:
+      - 'false'
+      x-stainless-lang:
+      - python
+      x-stainless-os:
+      - MacOS
+      x-stainless-package-version:
+      - 1.74.0
+      x-stainless-raw-response:
+      - 'true'
+      x-stainless-read-timeout:
+      - '600.0'
+      x-stainless-retry-count:
+      - '0'
+      x-stainless-runtime:
+      - CPython
+      x-stainless-runtime-version:
+      - 3.11.12
+    method: POST
+    uri: https://api.openai.com/v1/chat/completions
+  response:
+    body:
+      string: 'data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_ccog5nyDCLYpoWzksuCkGEqY","type":"function","function":{"name":"get_weather","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"location"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"New"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
+        York"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":","}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"
+        NY"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[],"usage":{"prompt_tokens":68,"completion_tokens":18,"total_tokens":86,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
+
+
+        data: [DONE]
+
+
+        '
+    headers:
+      CF-RAY:
+      - 931461bbddc27df9-GRU
+      Connection:
+      - keep-alive
+      Content-Type:
+      - text/event-stream; charset=utf-8
+      Date:
+      - Wed, 16 Apr 2025 14:30:39 GMT
+      Server:
+      - cloudflare
+      Transfer-Encoding:
+      - chunked
+      X-Content-Type-Options:
+      - nosniff
+      access-control-expose-headers:
+      - X-Request-ID
+      alt-svc:
+      - h3=":443"; ma=86400
+      cf-cache-status:
+      - DYNAMIC
+      openai-organization:
+      - crewai-iuxna1
+      openai-processing-ms:
+      - '445'
+      openai-version:
+      - '2020-10-01'
+      strict-transport-security:
+      - max-age=31536000; includeSubDomains; preload
+      x-ratelimit-limit-requests:
+      - '10000'
+      x-ratelimit-limit-tokens:
+      - '30000000'
+      x-ratelimit-remaining-requests:
+      - '9999'
+      x-ratelimit-remaining-tokens:
+      - '29999989'
+      x-ratelimit-reset-requests:
+      - 6ms
+      x-ratelimit-reset-tokens:
+      - 0s
+      x-request-id:
+      - req_1bb5862de2891623d44c012aba597c5e
+    status:
+      code: 200
+      message: OK
+version: 1
279 tests/cassettes/test_handle_streaming_tool_calls_no_tools.yaml (new file)
@@ -0,0 +1,279 @@
+interactions:
+- request:
+    body: '{"messages": [{"role": "user", "content": "What is the weather in New York?"}],
+      "model": "gpt-4o", "stop": [], "stream": true, "stream_options": {"include_usage":
+      true}}'
+    headers:
+      accept:
+      - application/json
+      accept-encoding:
+      - gzip, deflate
+      connection:
+      - keep-alive
+      content-length:
+      - '169'
+      content-type:
+      - application/json
+      host:
+      - api.openai.com
+      user-agent:
+      - OpenAI/Python 1.74.0
+      x-stainless-arch:
+      - arm64
+      x-stainless-async:
+      - 'false'
+      x-stainless-lang:
+      - python
+      x-stainless-os:
+      - MacOS
+      x-stainless-package-version:
+      - 1.74.0
+      x-stainless-raw-response:
+      - 'true'
+      x-stainless-read-timeout:
+      - '600.0'
+      x-stainless-retry-count:
+      - '0'
+      x-stainless-runtime:
+      - CPython
+      x-stainless-runtime-version:
+      - 3.11.12
+    method: POST
+    uri: https://api.openai.com/v1/chat/completions
+  response:
+    body:
+      string: 'data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"I''m"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        unable"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        to"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        provide"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        real"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"-time"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        information"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        or"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        current"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        weather"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        updates"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        For"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        the"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        latest"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        weather"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        information"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        in"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        New"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        York"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        I"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        recommend"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        checking"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        a"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        reliable"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        weather"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        website"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        or"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        app"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        such"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        as"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        the"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        National"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        Weather"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        Service"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        Weather"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":".com"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        or"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        a"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        similar"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"
+        service"},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null}
+
+
+        data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[],"usage":{"prompt_tokens":15,"completion_tokens":47,"total_tokens":62,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
+
+
+        data: [DONE]
+
+
+        '
+    headers:
+      CF-RAY:
+      - 931461c25bb47df9-GRU
+      Connection:
+      - keep-alive
+      Content-Type:
+      - text/event-stream; charset=utf-8
+      Date:
+      - Wed, 16 Apr 2025 14:30:40 GMT
+      Server:
+      - cloudflare
+      Transfer-Encoding:
+      - chunked
+      X-Content-Type-Options:
+      - nosniff
+      access-control-expose-headers:
+      - X-Request-ID
+      alt-svc:
+      - h3=":443"; ma=86400
+      cf-cache-status:
+      - DYNAMIC
+      openai-organization:
+      - crewai-iuxna1
+      openai-processing-ms:
+      - '298'
+      openai-version:
+      - '2020-10-01'
+      strict-transport-security:
+      - max-age=31536000; includeSubDomains; preload
+      x-ratelimit-limit-requests:
+      - '10000'
+      x-ratelimit-limit-tokens:
+      - '30000000'
+      x-ratelimit-remaining-requests:
+      - '9999'
+      x-ratelimit-remaining-tokens:
+      - '29999989'
+      x-ratelimit-reset-requests:
+      - 6ms
+      x-ratelimit-reset-tokens:
+      - 0s
+      x-request-id:
+      - req_89971fd68e5a59c9fa50e04106228b0a
+    status:
+      code: 200
+      message: OK
+version: 1
@@ -7,8 +7,10 @@ from pydantic import BaseModel

 from crewai.agents.agent_builder.utilities.base_token_process import TokenProcess
 from crewai.llm import CONTEXT_WINDOW_USAGE_RATIO, LLM
-from crewai.utilities.events import crewai_event_bus
-from crewai.utilities.events.tool_usage_events import ToolExecutionErrorEvent
+from crewai.utilities.events import (
+    LLMCallCompletedEvent,
+    LLMStreamChunkEvent,
+)
 from crewai.utilities.token_counter_callback import TokenCalcHandler


@@ -304,6 +306,27 @@ def test_context_window_validation():
     assert "must be between 1024 and 2097152" in str(excinfo.value)


+@pytest.fixture
+def get_weather_tool_schema():
+    return {
+        "type": "function",
+        "function": {
+            "name": "get_weather",
+            "description": "Get the current weather in a given location",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "location": {
+                        "type": "string",
+                        "description": "The city and state, e.g. San Francisco, CA",
+                    }
+                },
+                "required": ["location"],
+            },
+        },
+    }
+
+
 @pytest.mark.vcr(filter_headers=["authorization"])
 @pytest.fixture
 def anthropic_llm():
@@ -395,3 +418,117 @@ def test_deepseek_r1_with_open_router():
     result = llm.call("What is the capital of France?")
     assert isinstance(result, str)
     assert "Paris" in result
+
+
+def assert_event_count(
+    mock_emit,
+    expected_completed_tool_call: int = 0,
+    expected_stream_chunk: int = 0,
+    expected_completed_llm_call: int = 0,
+    expected_final_chunk_result: str = "",
+):
+    event_count = {
+        "completed_tool_call": 0,
+        "stream_chunk": 0,
+        "completed_llm_call": 0,
+    }
+    final_chunk_result = ""
+    for _call in mock_emit.call_args_list:
+        event = _call[1]["event"]
+
+        if (
+            isinstance(event, LLMCallCompletedEvent)
+            and event.call_type.value == "tool_call"
+        ):
+            event_count["completed_tool_call"] += 1
+        elif isinstance(event, LLMStreamChunkEvent):
+            event_count["stream_chunk"] += 1
+            final_chunk_result += event.chunk
+        elif (
+            isinstance(event, LLMCallCompletedEvent)
+            and event.call_type.value == "llm_call"
+        ):
+            event_count["completed_llm_call"] += 1
+        else:
+            continue
+
+    assert event_count["completed_tool_call"] == expected_completed_tool_call
+    assert event_count["stream_chunk"] == expected_stream_chunk
+    assert event_count["completed_llm_call"] == expected_completed_llm_call
+    assert final_chunk_result == expected_final_chunk_result
+
+
+@pytest.fixture
+def mock_emit() -> MagicMock:
+    from crewai.utilities.events.crewai_event_bus import CrewAIEventsBus
+
+    with patch.object(CrewAIEventsBus, "emit") as mock_emit:
+        yield mock_emit
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_handle_streaming_tool_calls(get_weather_tool_schema, mock_emit):
+    llm = LLM(model="openai/gpt-4o", stream=True)
+    response = llm.call(
+        messages=[
+            {"role": "user", "content": "What is the weather in New York?"},
+        ],
+        tools=[get_weather_tool_schema],
+        available_functions={
+            "get_weather": lambda location: f"The weather in {location} is sunny"
+        },
+    )
+    assert response == "The weather in New York, NY is sunny"
+
+    expected_final_chunk_result = (
+        '{"location":"New York, NY"}The weather in New York, NY is sunny'
+    )
+    assert_event_count(
+        mock_emit=mock_emit,
+        expected_completed_tool_call=1,
+        expected_stream_chunk=10,
+        expected_completed_llm_call=1,
+        expected_final_chunk_result=expected_final_chunk_result,
+    )
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_handle_streaming_tool_calls_no_available_functions(
+    get_weather_tool_schema, mock_emit
+):
+    llm = LLM(model="openai/gpt-4o", stream=True)
+    response = llm.call(
+        messages=[
+            {"role": "user", "content": "What is the weather in New York?"},
+        ],
+        tools=[get_weather_tool_schema],
+    )
+    assert response == ""
+
+    assert_event_count(
+        mock_emit=mock_emit,
+        expected_stream_chunk=9,
+        expected_completed_llm_call=1,
+        expected_final_chunk_result='{"location":"New York, NY"}',
+    )
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_handle_streaming_tool_calls_no_tools(mock_emit):
+    llm = LLM(model="openai/gpt-4o", stream=True)
+    response = llm.call(
+        messages=[
+            {"role": "user", "content": "What is the weather in New York?"},
+        ],
+    )
+    assert (
+        response
+        == "I'm unable to provide real-time information or current weather updates. For the latest weather information in New York, I recommend checking a reliable weather website or app, such as the National Weather Service, Weather.com, or a similar service."
+    )
+
+    assert_event_count(
+        mock_emit=mock_emit,
+        expected_stream_chunk=46,
+        expected_completed_llm_call=1,
+        expected_final_chunk_result=response,
+    )