diff --git a/pyproject.toml b/pyproject.toml
index d85c43c93..7528a2ecc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -81,10 +81,10 @@ dev-dependencies = [
     "pillow>=10.2.0",
     "cairosvg>=2.7.1",
     "pytest>=8.0.0",
-    "pytest-vcr>=1.0.2",
     "python-dotenv>=1.0.0",
     "pytest-asyncio>=0.23.7",
     "pytest-subprocess>=1.5.2",
+    "pytest-recording>=0.13.2",
 ]

 [project.scripts]
diff --git a/src/crewai/llm.py b/src/crewai/llm.py
index 25b798a6d..332582744 100644
--- a/src/crewai/llm.py
+++ b/src/crewai/llm.py
@@ -4,9 +4,12 @@ import os
 import sys
 import threading
 import warnings
+from collections import defaultdict
 from contextlib import contextmanager
+from types import SimpleNamespace
 from typing import (
     Any,
+    DefaultDict,
     Dict,
     List,
     Literal,
@@ -18,7 +21,8 @@ from typing import (
 )

 from dotenv import load_dotenv
-from pydantic import BaseModel
+from litellm.types.utils import ChatCompletionDeltaToolCall
+from pydantic import BaseModel, Field

 from crewai.utilities.events.llm_events import (
     LLMCallCompletedEvent,
@@ -219,6 +223,15 @@ class StreamingChoices(TypedDict):
     finish_reason: Optional[str]


+class FunctionArgs(BaseModel):
+    name: str = ""
+    arguments: str = ""
+
+
+class AccumulatedToolArgs(BaseModel):
+    function: FunctionArgs = Field(default_factory=FunctionArgs)
+
+
 class LLM(BaseLLM):
     def __init__(
         self,
@@ -371,6 +384,11 @@ class LLM(BaseLLM):
         last_chunk = None
         chunk_count = 0
         usage_info = None
+        tool_calls = None
+
+        accumulated_tool_args: DefaultDict[int, AccumulatedToolArgs] = defaultdict(
+            AccumulatedToolArgs
+        )

         # --- 2) Make sure stream is set to True and include usage metrics
         params["stream"] = True
@@ -428,6 +446,20 @@ class LLM(BaseLLM):
                         if chunk_content is None and isinstance(delta, dict):
                             # Some models might send empty content chunks
                             chunk_content = ""
+
+                            # Enable tool calls using streaming
+                            if "tool_calls" in delta:
+                                tool_calls = delta["tool_calls"]
+
+                                if tool_calls:
+                                    result = self._handle_streaming_tool_calls(
+                                        tool_calls=tool_calls,
+                                        accumulated_tool_args=accumulated_tool_args,
+                                        available_functions=available_functions,
+                                    )
+                                    if result is not None:
+                                        chunk_content = result
+
                 except Exception as e:
                     logging.debug(f"Error extracting content from chunk: {e}")
                     logging.debug(f"Chunk format: {type(chunk)}, content: {chunk}")
@@ -442,7 +474,6 @@ class LLM(BaseLLM):
                         self,
                         event=LLMStreamChunkEvent(chunk=chunk_content),
                     )
-
             # --- 4) Fallback to non-streaming if no content received
             if not full_response.strip() and chunk_count == 0:
                 logging.warning(
@@ -501,7 +532,7 @@ class LLM(BaseLLM):
             )

             # --- 6) If still empty, raise an error instead of using a default response
-            if not full_response.strip():
+            if not full_response.strip() and len(accumulated_tool_args) == 0:
                 raise Exception(
                     "No content received from streaming response. Received empty chunks or failed to extract content."
                )
@@ -533,8 +564,8 @@ class LLM(BaseLLM):
                     tool_calls = getattr(message, "tool_calls")
             except Exception as e:
                 logging.debug(f"Error checking for tool calls: {e}")
-
             # --- 8) If no tool calls or no available functions, return the text response directly
+
             if not tool_calls or not available_functions:
                 # Log token usage if available in streaming mode
                 self._handle_streaming_callbacks(callbacks, usage_info, last_chunk)
@@ -568,6 +599,47 @@ class LLM(BaseLLM):
             )
             raise Exception(f"Failed to get streaming response: {str(e)}")

+    def _handle_streaming_tool_calls(
+        self,
+        tool_calls: List[ChatCompletionDeltaToolCall],
+        accumulated_tool_args: DefaultDict[int, AccumulatedToolArgs],
+        available_functions: Optional[Dict[str, Any]] = None,
+    ) -> None | str:
+        for tool_call in tool_calls:
+            current_tool_accumulator = accumulated_tool_args[tool_call.index]
+
+            if tool_call.function.name:
+                current_tool_accumulator.function.name = tool_call.function.name
+
+            if tool_call.function.arguments:
+                current_tool_accumulator.function.arguments += (
+                    tool_call.function.arguments
+                )
+
+            crewai_event_bus.emit(
+                self,
+                event=LLMStreamChunkEvent(
+                    tool_call=tool_call.to_dict(),
+                    chunk=tool_call.function.arguments,
+                ),
+            )
+
+            if (
+                current_tool_accumulator.function.name
+                and current_tool_accumulator.function.arguments
+                and available_functions
+            ):
+                try:
+                    json.loads(current_tool_accumulator.function.arguments)
+
+                    return self._handle_tool_call(
+                        [current_tool_accumulator],
+                        available_functions,
+                    )
+                except json.JSONDecodeError:
+                    continue
+        return None
+
     def _handle_streaming_callbacks(
         self,
         callbacks: Optional[List[Any]],
diff --git a/src/crewai/utilities/events/llm_events.py b/src/crewai/utilities/events/llm_events.py
index 07a17a48b..ca8d0367a 100644
--- a/src/crewai/utilities/events/llm_events.py
+++ b/src/crewai/utilities/events/llm_events.py
@@ -1,6 +1,8 @@
 from enum import Enum
 from typing import Any, Dict, List, Optional, Union

+from pydantic import BaseModel
+
 from crewai.utilities.events.base_events import BaseEvent


@@ -41,8 +43,21 @@ class LLMCallFailedEvent(BaseEvent):
     type: str = "llm_call_failed"


+class FunctionCall(BaseModel):
+    arguments: str
+    name: Optional[str] = None
+
+
+class ToolCall(BaseModel):
+    id: Optional[str] = None
+    function: FunctionCall
+    type: Optional[str] = None
+    index: int
+
+
 class LLMStreamChunkEvent(BaseEvent):
     """Event emitted when a streaming chunk is received"""

     type: str = "llm_stream_chunk"
     chunk: str
+    tool_call: Optional[ToolCall] = None
diff --git a/tests/memory/external/cassettes/test_crew_external_memory_save.yaml b/tests/cassettes/test_crew_external_memory_save.yaml
similarity index 100%
rename from tests/memory/external/cassettes/test_crew_external_memory_save.yaml
rename to tests/cassettes/test_crew_external_memory_save.yaml
diff --git a/tests/memory/external/cassettes/test_crew_external_memory_save_using_crew_without_memory_flag[save].yaml b/tests/cassettes/test_crew_external_memory_save_using_crew_without_memory_flag[save].yaml
similarity index 100%
rename from tests/memory/external/cassettes/test_crew_external_memory_save_using_crew_without_memory_flag[save].yaml
rename to tests/cassettes/test_crew_external_memory_save_using_crew_without_memory_flag[save].yaml
diff --git a/tests/memory/external/cassettes/test_crew_external_memory_save_using_crew_without_memory_flag[search].yaml b/tests/cassettes/test_crew_external_memory_save_using_crew_without_memory_flag[search].yaml
similarity index 100%
rename from
tests/memory/external/cassettes/test_crew_external_memory_save_using_crew_without_memory_flag[search].yaml rename to tests/cassettes/test_crew_external_memory_save_using_crew_without_memory_flag[search].yaml diff --git a/tests/memory/external/cassettes/test_crew_external_memory_save_with_memory_flag[save].yaml b/tests/cassettes/test_crew_external_memory_save_with_memory_flag[save].yaml similarity index 100% rename from tests/memory/external/cassettes/test_crew_external_memory_save_with_memory_flag[save].yaml rename to tests/cassettes/test_crew_external_memory_save_with_memory_flag[save].yaml diff --git a/tests/memory/external/cassettes/test_crew_external_memory_save_with_memory_flag[search].yaml b/tests/cassettes/test_crew_external_memory_save_with_memory_flag[search].yaml similarity index 100% rename from tests/memory/external/cassettes/test_crew_external_memory_save_with_memory_flag[search].yaml rename to tests/cassettes/test_crew_external_memory_save_with_memory_flag[search].yaml diff --git a/tests/memory/external/cassettes/test_crew_external_memory_search.yaml b/tests/cassettes/test_crew_external_memory_search.yaml similarity index 100% rename from tests/memory/external/cassettes/test_crew_external_memory_search.yaml rename to tests/cassettes/test_crew_external_memory_search.yaml diff --git a/tests/cassettes/test_handle_streaming_tool_calls.yaml b/tests/cassettes/test_handle_streaming_tool_calls.yaml new file mode 100644 index 000000000..20c69b053 --- /dev/null +++ b/tests/cassettes/test_handle_streaming_tool_calls.yaml @@ -0,0 +1,133 @@ +interactions: +- request: + body: '{"messages": [{"role": "user", "content": "What is the weather in New York?"}], + "model": "gpt-4o", "stop": [], "stream": true, "stream_options": {"include_usage": + true}, "tools": [{"type": "function", "function": {"name": "get_weather", "description": + "Get the current weather in a given location", "parameters": {"type": "object", + "properties": {"location": {"type": "string", "description": "The city and state, + e.g. 
San Francisco, CA"}}, "required": ["location"]}}}]}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '470' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.74.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.74.0 + x-stainless-raw-response: + - 'true' + x-stainless-read-timeout: + - '600.0' + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.11.12 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: 'data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_bCEixqN8Y40SUyius8ZfVErH","type":"function","function":{"name":"get_weather","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"location"}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"New"}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" + York"}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":","}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: 
{"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" + NY"}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null} + + + data: {"id":"chatcmpl-BMyImYKF6jsJsQzWvhNfNb4gAIRVc","object":"chat.completion.chunk","created":1744814716,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[],"usage":{"prompt_tokens":68,"completion_tokens":18,"total_tokens":86,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}} + + + data: [DONE] + + + ' + headers: + CF-RAY: + - 93147723ecc1f237-GRU + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Wed, 16 Apr 2025 14:45:16 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + openai-organization: + - crewai-iuxna1 + openai-processing-ms: + - '620' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '30000000' + x-ratelimit-remaining-requests: + - '9999' + x-ratelimit-remaining-tokens: + - '29999989' + x-ratelimit-reset-requests: + - 6ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_0a08d5f042ef769aeb2c941e398f65f4 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/cassettes/test_handle_streaming_tool_calls_no_available_functions.yaml b/tests/cassettes/test_handle_streaming_tool_calls_no_available_functions.yaml new file mode 100644 index 000000000..ef780dee8 --- /dev/null +++ b/tests/cassettes/test_handle_streaming_tool_calls_no_available_functions.yaml @@ -0,0 +1,133 @@ +interactions: +- request: + body: '{"messages": [{"role": "user", "content": "What is the weather in New York?"}], + "model": "gpt-4o", "stop": [], "stream": true, "stream_options": {"include_usage": + true}, "tools": [{"type": "function", "function": {"name": "get_weather", "description": + "Get the current weather in a given location", "parameters": {"type": "object", + "properties": {"location": {"type": "string", "description": "The city and state, + e.g. 
San Francisco, CA"}}, "required": ["location"]}}}]}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '470' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.74.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.74.0 + x-stainless-raw-response: + - 'true' + x-stainless-read-timeout: + - '600.0' + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.11.12 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: 'data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_ccog5nyDCLYpoWzksuCkGEqY","type":"function","function":{"name":"get_weather","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"location"}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"New"}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" + York"}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":","}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: 
{"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" + NY"}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null} + + + data: {"id":"chatcmpl-BMy4cOLWqB5n5X1Zog3uV9Y7Lrkwp","object":"chat.completion.chunk","created":1744813838,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[],"usage":{"prompt_tokens":68,"completion_tokens":18,"total_tokens":86,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}} + + + data: [DONE] + + + ' + headers: + CF-RAY: + - 931461bbddc27df9-GRU + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Wed, 16 Apr 2025 14:30:39 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + openai-organization: + - crewai-iuxna1 + openai-processing-ms: + - '445' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '30000000' + x-ratelimit-remaining-requests: + - '9999' + x-ratelimit-remaining-tokens: + - '29999989' + x-ratelimit-reset-requests: + - 6ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_1bb5862de2891623d44c012aba597c5e + status: + code: 200 + message: OK +version: 1 diff --git a/tests/cassettes/test_handle_streaming_tool_calls_no_tools.yaml b/tests/cassettes/test_handle_streaming_tool_calls_no_tools.yaml new file mode 100644 index 000000000..e9a76d139 --- /dev/null +++ b/tests/cassettes/test_handle_streaming_tool_calls_no_tools.yaml @@ -0,0 +1,279 @@ +interactions: +- request: + body: '{"messages": [{"role": "user", "content": "What is the weather in New York?"}], + "model": "gpt-4o", "stop": [], "stream": true, "stream_options": {"include_usage": + true}}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '169' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.74.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.74.0 + x-stainless-raw-response: + - 'true' + x-stainless-read-timeout: + - '600.0' + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.11.12 + method: POST + uri: 
https://api.openai.com/v1/chat/completions + response: + body: + string: 'data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"I''m"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + unable"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + to"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + provide"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + real"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"-time"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + information"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + or"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + current"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + weather"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: 
{"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + updates"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + For"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + the"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + latest"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + weather"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + information"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + in"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + New"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + York"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: 
{"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + I"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + recommend"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + checking"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + a"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + reliable"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + weather"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + website"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + or"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + app"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + such"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: 
{"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + as"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + the"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + National"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + Weather"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + Service"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + Weather"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":".com"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + or"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + a"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: 
{"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + similar"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":" + service"},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null} + + + data: {"id":"chatcmpl-BMy4dVQFM7KUrmflCVk4i454PXCga","object":"chat.completion.chunk","created":1744813839,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_22890b9c0a","choices":[],"usage":{"prompt_tokens":15,"completion_tokens":47,"total_tokens":62,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}} + + + data: [DONE] + + + ' + headers: + CF-RAY: + - 931461c25bb47df9-GRU + Connection: + - keep-alive + Content-Type: + - text/event-stream; charset=utf-8 + Date: + - Wed, 16 Apr 2025 14:30:40 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + cf-cache-status: + - DYNAMIC + openai-organization: + - crewai-iuxna1 + openai-processing-ms: + - '298' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '30000000' + x-ratelimit-remaining-requests: + - '9999' + x-ratelimit-remaining-tokens: + - '29999989' + x-ratelimit-reset-requests: + - 6ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_89971fd68e5a59c9fa50e04106228b0a + status: + code: 200 + message: OK +version: 1 diff --git a/tests/conftest.py b/tests/conftest.py index 518c69a81..c0db6316c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,6 +8,7 @@ from dotenv import load_dotenv load_result = load_dotenv(override=True) + @pytest.fixture(autouse=True) def setup_test_environment(): """Set up test environment with a temporary directory for SQLite storage.""" @@ -15,11 +16,13 @@ def setup_test_environment(): # Create the directory with proper permissions storage_dir = Path(temp_dir) / "crewai_test_storage" storage_dir.mkdir(parents=True, exist_ok=True) - + # Validate that the directory was created successfully if not storage_dir.exists() or not storage_dir.is_dir(): - raise RuntimeError(f"Failed to create test storage directory: {storage_dir}") - + raise RuntimeError( + f"Failed to create test storage directory: {storage_dir}" + ) + # Verify directory permissions try: # Try to create a test file to verify 
write permissions @@ -27,11 +30,20 @@ def setup_test_environment(): test_file.touch() test_file.unlink() except (OSError, IOError) as e: - raise RuntimeError(f"Test storage directory {storage_dir} is not writable: {e}") - + raise RuntimeError( + f"Test storage directory {storage_dir} is not writable: {e}" + ) + # Set environment variable to point to the test storage directory os.environ["CREWAI_STORAGE_DIR"] = str(storage_dir) - + yield - + # Cleanup is handled automatically when tempfile context exits + + +@pytest.fixture(scope="module") +def vcr_config(request) -> dict: + return { + "cassette_library_dir": "tests/cassettes", + } diff --git a/tests/crew_test.py b/tests/crew_test.py index fd0d9f3d4..cb0151f2d 100644 --- a/tests/crew_test.py +++ b/tests/crew_test.py @@ -42,11 +42,6 @@ from crewai.utilities.events.event_listener import EventListener from crewai.utilities.rpm_controller import RPMController from crewai.utilities.task_output_storage_handler import TaskOutputStorageHandler -# Skip streaming tests when running in CI/CD environments -skip_streaming_in_ci = pytest.mark.skipif( - os.getenv("CI") is not None, reason="Skipping streaming tests in CI/CD environments" -) - ceo = Agent( role="CEO", goal="Make sure the writers in your company produce amazing content.", @@ -963,7 +958,6 @@ def test_api_calls_throttling(capsys): moveon.assert_called() -@skip_streaming_in_ci @pytest.mark.vcr(filter_headers=["authorization"]) def test_crew_kickoff_usage_metrics(): inputs = [ @@ -999,7 +993,6 @@ def test_crew_kickoff_usage_metrics(): assert result.token_usage.cached_prompt_tokens == 0 -@skip_streaming_in_ci @pytest.mark.vcr(filter_headers=["authorization"]) def test_crew_kickoff_streaming_usage_metrics(): inputs = [ @@ -4256,53 +4249,93 @@ def test_crew_kickoff_for_each_works_with_manager_agent_copy(): assert crew_copy.manager_agent.role == crew.manager_agent.role assert crew_copy.manager_agent.goal == crew.manager_agent.goal + def test_crew_copy_with_memory(): """Test that copying a crew with memory enabled does not raise validation errors and copies memory correctly.""" agent = Agent(role="Test Agent", goal="Test Goal", backstory="Test Backstory") task = Task(description="Test Task", expected_output="Test Output", agent=agent) crew = Crew(agents=[agent], tasks=[task], memory=True) - original_short_term_id = id(crew._short_term_memory) if crew._short_term_memory else None - original_long_term_id = id(crew._long_term_memory) if crew._long_term_memory else None + original_short_term_id = ( + id(crew._short_term_memory) if crew._short_term_memory else None + ) + original_long_term_id = ( + id(crew._long_term_memory) if crew._long_term_memory else None + ) original_entity_id = id(crew._entity_memory) if crew._entity_memory else None original_external_id = id(crew._external_memory) if crew._external_memory else None original_user_id = id(crew._user_memory) if crew._user_memory else None - try: crew_copy = crew.copy() - assert hasattr(crew_copy, "_short_term_memory"), "Copied crew should have _short_term_memory" - assert crew_copy._short_term_memory is not None, "Copied _short_term_memory should not be None" - assert id(crew_copy._short_term_memory) != original_short_term_id, "Copied _short_term_memory should be a new object" + assert hasattr( + crew_copy, "_short_term_memory" + ), "Copied crew should have _short_term_memory" + assert ( + crew_copy._short_term_memory is not None + ), "Copied _short_term_memory should not be None" + assert ( + id(crew_copy._short_term_memory) != 
original_short_term_id + ), "Copied _short_term_memory should be a new object" - assert hasattr(crew_copy, "_long_term_memory"), "Copied crew should have _long_term_memory" - assert crew_copy._long_term_memory is not None, "Copied _long_term_memory should not be None" - assert id(crew_copy._long_term_memory) != original_long_term_id, "Copied _long_term_memory should be a new object" + assert hasattr( + crew_copy, "_long_term_memory" + ), "Copied crew should have _long_term_memory" + assert ( + crew_copy._long_term_memory is not None + ), "Copied _long_term_memory should not be None" + assert ( + id(crew_copy._long_term_memory) != original_long_term_id + ), "Copied _long_term_memory should be a new object" - assert hasattr(crew_copy, "_entity_memory"), "Copied crew should have _entity_memory" - assert crew_copy._entity_memory is not None, "Copied _entity_memory should not be None" - assert id(crew_copy._entity_memory) != original_entity_id, "Copied _entity_memory should be a new object" + assert hasattr( + crew_copy, "_entity_memory" + ), "Copied crew should have _entity_memory" + assert ( + crew_copy._entity_memory is not None + ), "Copied _entity_memory should not be None" + assert ( + id(crew_copy._entity_memory) != original_entity_id + ), "Copied _entity_memory should be a new object" if original_external_id: - assert hasattr(crew_copy, "_external_memory"), "Copied crew should have _external_memory" - assert crew_copy._external_memory is not None, "Copied _external_memory should not be None" - assert id(crew_copy._external_memory) != original_external_id, "Copied _external_memory should be a new object" + assert hasattr( + crew_copy, "_external_memory" + ), "Copied crew should have _external_memory" + assert ( + crew_copy._external_memory is not None + ), "Copied _external_memory should not be None" + assert ( + id(crew_copy._external_memory) != original_external_id + ), "Copied _external_memory should be a new object" else: - assert not hasattr(crew_copy, "_external_memory") or crew_copy._external_memory is None, "Copied _external_memory should be None if not originally present" + assert ( + not hasattr(crew_copy, "_external_memory") + or crew_copy._external_memory is None + ), "Copied _external_memory should be None if not originally present" if original_user_id: - assert hasattr(crew_copy, "_user_memory"), "Copied crew should have _user_memory" - assert crew_copy._user_memory is not None, "Copied _user_memory should not be None" - assert id(crew_copy._user_memory) != original_user_id, "Copied _user_memory should be a new object" + assert hasattr( + crew_copy, "_user_memory" + ), "Copied crew should have _user_memory" + assert ( + crew_copy._user_memory is not None + ), "Copied _user_memory should not be None" + assert ( + id(crew_copy._user_memory) != original_user_id + ), "Copied _user_memory should be a new object" else: - assert not hasattr(crew_copy, "_user_memory") or crew_copy._user_memory is None, "Copied _user_memory should be None if not originally present" - + assert ( + not hasattr(crew_copy, "_user_memory") or crew_copy._user_memory is None + ), "Copied _user_memory should be None if not originally present" except pydantic_core.ValidationError as e: - if "Input should be an instance of" in str(e) and ("Memory" in str(e)): - pytest.fail(f"Copying with memory raised Pydantic ValidationError, likely due to incorrect memory copy: {e}") - else: - raise e # Re-raise other validation errors + if "Input should be an instance of" in str(e) and ("Memory" in str(e)): + pytest.fail( 
+ f"Copying with memory raised Pydantic ValidationError, likely due to incorrect memory copy: {e}" + ) + else: + raise e # Re-raise other validation errors except Exception as e: pytest.fail(f"Copying crew raised an unexpected exception: {e}") diff --git a/tests/llm_test.py b/tests/llm_test.py index 65cc75bab..2bd03306b 100644 --- a/tests/llm_test.py +++ b/tests/llm_test.py @@ -2,13 +2,16 @@ import os from time import sleep from unittest.mock import MagicMock, patch +import litellm import pytest from pydantic import BaseModel from crewai.agents.agent_builder.utilities.base_token_process import TokenProcess from crewai.llm import CONTEXT_WINDOW_USAGE_RATIO, LLM -from crewai.utilities.events import crewai_event_bus -from crewai.utilities.events.tool_usage_events import ToolExecutionErrorEvent +from crewai.utilities.events import ( + LLMCallCompletedEvent, + LLMStreamChunkEvent, +) from crewai.utilities.token_counter_callback import TokenCalcHandler @@ -304,6 +307,27 @@ def test_context_window_validation(): assert "must be between 1024 and 2097152" in str(excinfo.value) +@pytest.fixture +def get_weather_tool_schema(): + return { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get the current weather in a given location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. San Francisco, CA", + } + }, + "required": ["location"], + }, + }, + } + + @pytest.mark.vcr(filter_headers=["authorization"]) @pytest.fixture def anthropic_llm(): @@ -395,3 +419,117 @@ def test_deepseek_r1_with_open_router(): result = llm.call("What is the capital of France?") assert isinstance(result, str) assert "Paris" in result + + +def assert_event_count( + mock_emit, + expected_completed_tool_call: int = 0, + expected_stream_chunk: int = 0, + expected_completed_llm_call: int = 0, + expected_final_chunk_result: str = "", +): + event_count = { + "completed_tool_call": 0, + "stream_chunk": 0, + "completed_llm_call": 0, + } + final_chunk_result = "" + for _call in mock_emit.call_args_list: + event = _call[1]["event"] + + if ( + isinstance(event, LLMCallCompletedEvent) + and event.call_type.value == "tool_call" + ): + event_count["completed_tool_call"] += 1 + elif isinstance(event, LLMStreamChunkEvent): + event_count["stream_chunk"] += 1 + final_chunk_result += event.chunk + elif ( + isinstance(event, LLMCallCompletedEvent) + and event.call_type.value == "llm_call" + ): + event_count["completed_llm_call"] += 1 + else: + continue + + assert event_count["completed_tool_call"] == expected_completed_tool_call + assert event_count["stream_chunk"] == expected_stream_chunk + assert event_count["completed_llm_call"] == expected_completed_llm_call + assert final_chunk_result == expected_final_chunk_result + + +@pytest.fixture +def mock_emit() -> MagicMock: + from crewai.utilities.events.crewai_event_bus import CrewAIEventsBus + + with patch.object(CrewAIEventsBus, "emit") as mock_emit: + yield mock_emit + + +@pytest.mark.vcr(filter_headers=["authorization"]) +def test_handle_streaming_tool_calls(get_weather_tool_schema, mock_emit): + llm = LLM(model="openai/gpt-4o", stream=True) + response = llm.call( + messages=[ + {"role": "user", "content": "What is the weather in New York?"}, + ], + tools=[get_weather_tool_schema], + available_functions={ + "get_weather": lambda location: f"The weather in {location} is sunny" + }, + ) + assert response == "The weather in New York, NY is sunny" + + expected_final_chunk_result = ( + 
'{"location":"New York, NY"}The weather in New York, NY is sunny' + ) + assert_event_count( + mock_emit=mock_emit, + expected_completed_tool_call=1, + expected_stream_chunk=10, + expected_completed_llm_call=1, + expected_final_chunk_result=expected_final_chunk_result, + ) + + +@pytest.mark.vcr(filter_headers=["authorization"]) +def test_handle_streaming_tool_calls_no_available_functions( + get_weather_tool_schema, mock_emit +): + llm = LLM(model="openai/gpt-4o", stream=True) + response = llm.call( + messages=[ + {"role": "user", "content": "What is the weather in New York?"}, + ], + tools=[get_weather_tool_schema], + ) + assert response == "" + + assert_event_count( + mock_emit=mock_emit, + expected_stream_chunk=9, + expected_completed_llm_call=1, + expected_final_chunk_result='{"location":"New York, NY"}', + ) + + +@pytest.mark.vcr(filter_headers=["authorization"]) +def test_handle_streaming_tool_calls_no_tools(mock_emit): + llm = LLM(model="openai/gpt-4o", stream=True) + response = llm.call( + messages=[ + {"role": "user", "content": "What is the weather in New York?"}, + ], + ) + assert ( + response + == "I'm unable to provide real-time information or current weather updates. For the latest weather information in New York, I recommend checking a reliable weather website or app, such as the National Weather Service, Weather.com, or a similar service." + ) + + assert_event_count( + mock_emit=mock_emit, + expected_stream_chunk=46, + expected_completed_llm_call=1, + expected_final_chunk_result=response, + ) diff --git a/tests/memory/cassettes/test_save_and_search_with_provider.yaml b/tests/memory/cassettes/test_save_and_search_with_provider.yaml deleted file mode 100644 index c30f3f065..000000000 --- a/tests/memory/cassettes/test_save_and_search_with_provider.yaml +++ /dev/null @@ -1,270 +0,0 @@ -interactions: -- request: - body: '' - headers: - accept: - - '*/*' - accept-encoding: - - gzip, deflate - connection: - - keep-alive - host: - - api.mem0.ai - user-agent: - - python-httpx/0.27.0 - method: GET - uri: https://api.mem0.ai/v1/memories/?user_id=test - response: - body: - string: '[]' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 8b477138bad847b9-BOM - Connection: - - keep-alive - Content-Length: - - '2' - Content-Type: - - application/json - Date: - - Sat, 17 Aug 2024 06:00:11 GMT - NEL: - - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}' - Report-To: - - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=uuyH2foMJVDpV%2FH52g1q%2FnvXKe3dBKVzvsK0mqmSNezkiszNR9OgrEJfVqmkX%2FlPFRP2sH4zrOuzGo6k%2FjzsjYJczqSWJUZHN2pPujiwnr1E9W%2BdLGKmG6%2FqPrGYAy2SBRWkkJVWsTO3OQ%3D%3D"}],"group":"cf-nel","max_age":604800}' - Server: - - cloudflare - allow: - - GET, POST, DELETE, OPTIONS - alt-svc: - - h3=":443"; ma=86400 - cross-origin-opener-policy: - - same-origin - referrer-policy: - - same-origin - vary: - - Accept, origin, Cookie - x-content-type-options: - - nosniff - x-frame-options: - - DENY - status: - code: 200 - message: OK -- request: - body: '{"batch": [{"properties": {"python_version": "3.12.4 (v3.12.4:8e8a4baf65, - Jun 6 2024, 17:33:18) [Clang 13.0.0 (clang-1300.0.29.30)]", "os": "darwin", - "os_version": "Darwin Kernel Version 23.4.0: Wed Feb 21 21:44:54 PST 2024; root:xnu-10063.101.15~2/RELEASE_ARM64_T6030", - "os_release": "23.4.0", "processor": "arm", "machine": "arm64", "function": - "mem0.client.main.MemoryClient", "$lib": "posthog-python", "$lib_version": "3.5.0", - "$geoip_disable": true}, "timestamp": "2024-08-17T06:00:11.526640+00:00", 
"context": - {}, "distinct_id": "fd411bd3-99a2-42d6-acd7-9fca8ad09580", "event": "client.init"}], - "historical_migration": false, "sentAt": "2024-08-17T06:00:11.701621+00:00", - "api_key": "phc_hgJkUVJFYtmaJqrvf6CYN67TIQ8yhXAkWzUn9AMU4yX"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '740' - Content-Type: - - application/json - User-Agent: - - posthog-python/3.5.0 - method: POST - uri: https://us.i.posthog.com/batch/ - response: - body: - string: '{"status":"Ok"}' - headers: - Connection: - - keep-alive - Content-Length: - - '15' - Content-Type: - - application/json - Date: - - Sat, 17 Aug 2024 06:00:12 GMT - access-control-allow-credentials: - - 'true' - server: - - envoy - vary: - - origin, access-control-request-method, access-control-request-headers - x-envoy-upstream-service-time: - - '69' - status: - code: 200 - message: OK -- request: - body: '{"messages": [{"role": "user", "content": "Remember the following insights - from Agent run: test value with provider"}], "metadata": {"task": "test_task_provider", - "agent": "test_agent_provider"}, "app_id": "Researcher"}' - headers: - accept: - - '*/*' - accept-encoding: - - gzip, deflate - connection: - - keep-alive - content-length: - - '219' - content-type: - - application/json - host: - - api.mem0.ai - user-agent: - - python-httpx/0.27.0 - method: POST - uri: https://api.mem0.ai/v1/memories/ - response: - body: - string: '{"message":"ok"}' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 8b477140282547b9-BOM - Connection: - - keep-alive - Content-Length: - - '16' - Content-Type: - - application/json - Date: - - Sat, 17 Aug 2024 06:00:13 GMT - NEL: - - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}' - Report-To: - - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=FRjJKSk3YxVj03wA7S05H8ts35KnWfqS3wb6Rfy4kVZ4BgXfw7nJbm92wI6vEv5fWcAcHVnOlkJDggs11B01BMuB2k3a9RqlBi0dJNiMuk%2Bgm5xE%2BODMPWJctYNRwQMjNVbteUpS%2Fad8YA%3D%3D"}],"group":"cf-nel","max_age":604800}' - Server: - - cloudflare - allow: - - GET, POST, DELETE, OPTIONS - alt-svc: - - h3=":443"; ma=86400 - cross-origin-opener-policy: - - same-origin - referrer-policy: - - same-origin - vary: - - Accept, origin, Cookie - x-content-type-options: - - nosniff - x-frame-options: - - DENY - status: - code: 200 - message: OK -- request: - body: '{"query": "test value with provider", "limit": 3, "app_id": "Researcher"}' - headers: - accept: - - '*/*' - accept-encoding: - - gzip, deflate - connection: - - keep-alive - content-length: - - '73' - content-type: - - application/json - host: - - api.mem0.ai - user-agent: - - python-httpx/0.27.0 - method: POST - uri: https://api.mem0.ai/v1/memories/search/ - response: - body: - string: '[]' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 8b47714d083b47b9-BOM - Connection: - - keep-alive - Content-Length: - - '2' - Content-Type: - - application/json - Date: - - Sat, 17 Aug 2024 06:00:14 GMT - NEL: - - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}' - Report-To: - - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=2DRWL1cdKdMvnE8vx1fPUGeTITOgSGl3N5g84PS6w30GRqpfz79BtSx6REhpnOiFV8kM6KGqln0iCZ5yoHc2jBVVJXhPJhQ5t0uerD9JFnkphjISrJOU1MJjZWneT9PlNABddxvVNCmluA%3D%3D"}],"group":"cf-nel","max_age":604800}' - Server: - - cloudflare - allow: - - POST, OPTIONS - alt-svc: - - h3=":443"; ma=86400 - cross-origin-opener-policy: - - same-origin - referrer-policy: - - same-origin - vary: - - Accept, origin, Cookie - 
x-content-type-options: - - nosniff - x-frame-options: - - DENY - status: - code: 200 - message: OK -- request: - body: '{"batch": [{"properties": {"python_version": "3.12.4 (v3.12.4:8e8a4baf65, - Jun 6 2024, 17:33:18) [Clang 13.0.0 (clang-1300.0.29.30)]", "os": "darwin", - "os_version": "Darwin Kernel Version 23.4.0: Wed Feb 21 21:44:54 PST 2024; root:xnu-10063.101.15~2/RELEASE_ARM64_T6030", - "os_release": "23.4.0", "processor": "arm", "machine": "arm64", "function": - "mem0.client.main.MemoryClient", "$lib": "posthog-python", "$lib_version": "3.5.0", - "$geoip_disable": true}, "timestamp": "2024-08-17T06:00:13.593952+00:00", "context": - {}, "distinct_id": "fd411bd3-99a2-42d6-acd7-9fca8ad09580", "event": "client.add"}], - "historical_migration": false, "sentAt": "2024-08-17T06:00:13.858277+00:00", - "api_key": "phc_hgJkUVJFYtmaJqrvf6CYN67TIQ8yhXAkWzUn9AMU4yX"}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '739' - Content-Type: - - application/json - User-Agent: - - posthog-python/3.5.0 - method: POST - uri: https://us.i.posthog.com/batch/ - response: - body: - string: '{"status":"Ok"}' - headers: - Connection: - - keep-alive - Content-Length: - - '15' - Content-Type: - - application/json - Date: - - Sat, 17 Aug 2024 06:00:13 GMT - access-control-allow-credentials: - - 'true' - server: - - envoy - vary: - - origin, access-control-request-method, access-control-request-headers - x-envoy-upstream-service-time: - - '33' - status: - code: 200 - message: OK -version: 1 diff --git a/tests/memory/external/external_memory_test.py b/tests/memory/external_memory_test.py similarity index 100% rename from tests/memory/external/external_memory_test.py rename to tests/memory/external_memory_test.py diff --git a/tests/tools/agent_tools/agent_tools_test.py b/tests/tools/agent_tools/agent_tools_test.py index 9aea7b4bc..6cb5d26e7 100644 --- a/tests/tools/agent_tools/agent_tools_test.py +++ b/tests/tools/agent_tools/agent_tools_test.py @@ -16,6 +16,13 @@ delegate_tool = tools[0] ask_tool = tools[1] +@pytest.fixture(scope="module") +def vcr_config(request) -> dict: + return { + "cassette_library_dir": "tests/tools/agent_tools/cassettes", + } + + @pytest.mark.vcr(filter_headers=["authorization"]) def test_delegate_work(): result = delegate_tool.run( diff --git a/tests/utilities/test_converter.py b/tests/utilities/test_converter.py index 3f4a4d07b..70d1ce718 100644 --- a/tests/utilities/test_converter.py +++ b/tests/utilities/test_converter.py @@ -21,6 +21,13 @@ from crewai.utilities.converter import ( from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser +@pytest.fixture(scope="module") +def vcr_config(request) -> dict: + return { + "cassette_library_dir": "tests/utilities/cassettes", + } + + # Sample Pydantic models for testing class EmailResponse(BaseModel): previous_message_content: str diff --git a/tests/utilities/test_events.py b/tests/utilities/test_events.py index 2a6e6c41c..150a383cb 100644 --- a/tests/utilities/test_events.py +++ b/tests/utilities/test_events.py @@ -50,10 +50,13 @@ from crewai.utilities.events.tool_usage_events import ( ToolUsageErrorEvent, ) -# Skip streaming tests when running in CI/CD environments -skip_streaming_in_ci = pytest.mark.skipif( - os.getenv("CI") is not None, reason="Skipping streaming tests in CI/CD environments" -) + +@pytest.fixture(scope="module") +def vcr_config(request) -> dict: + return { + "cassette_library_dir": "tests/utilities/cassettes", + } + base_agent = Agent( 
role="base_agent", @@ -625,7 +628,6 @@ def test_llm_emits_call_failed_event(): assert received_events[0].error == error_message -@skip_streaming_in_ci @pytest.mark.vcr(filter_headers=["authorization"]) def test_llm_emits_stream_chunk_events(): """Test that LLM emits stream chunk events when streaming is enabled.""" @@ -650,7 +652,6 @@ def test_llm_emits_stream_chunk_events(): assert "".join(received_chunks) == response -@skip_streaming_in_ci @pytest.mark.vcr(filter_headers=["authorization"]) def test_llm_no_stream_chunks_when_streaming_disabled(): """Test that LLM doesn't emit stream chunk events when streaming is disabled.""" diff --git a/uv.lock b/uv.lock index 21c8445f0..d9c9d5c66 100644 --- a/uv.lock +++ b/uv.lock @@ -1,42 +1,32 @@ version = 1 +revision = 1 requires-python = ">=3.10, <3.13" resolution-markers = [ - "python_full_version < '3.11' and platform_system == 'Darwin' and sys_platform == 'darwin'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'darwin'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version < '3.11' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')", - "python_full_version < '3.11' and platform_machine == 'aarch64' and platform_system == 'Darwin' and sys_platform == 'linux'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'", - "python_full_version < '3.11' and platform_machine == 'aarch64' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version < '3.11' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version < '3.11' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'", - "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version < '3.11' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and platform_system == 'Darwin' and sys_platform == 'darwin'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'darwin'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version == '3.11.*' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_system == 'Darwin' and sys_platform == 'linux'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version == '3.11.*' and platform_system == 'Darwin' and sys_platform != 
'darwin' and sys_platform != 'linux')", - "python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'", - "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version == '3.11.*' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_system == 'Darwin' and sys_platform == 'darwin'", - "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'darwin'", - "(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')", - "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_system == 'Darwin' and sys_platform == 'linux'", - "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'", - "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'", - "(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_system == 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'", - "(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version >= '3.12.4' and platform_system == 'Darwin' and sys_platform == 'darwin'", - "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'darwin'", - "(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform == 'darwin') or (python_full_version >= '3.12.4' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'darwin')", - "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_system == 'Darwin' and sys_platform == 'linux'", - "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform == 'linux'", - "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform == 'linux'", - "(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_system == 'Darwin' and sys_platform != 
'darwin') or (python_full_version >= '3.12.4' and platform_system == 'Darwin' and sys_platform != 'darwin' and sys_platform != 'linux')", - "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_system == 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux'", - "(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_system != 'Darwin' and sys_platform != 'darwin') or (python_full_version >= '3.12.4' and platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.11' and platform_python_implementation == 'PyPy' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_python_implementation != 'PyPy' and sys_platform == 'darwin'", + "python_version < '0'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_python_implementation == 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_python_implementation != 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and 
sys_platform == 'linux') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'darwin'", + "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'darwin'", + "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux') or (python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", ] [[package]] @@ -344,7 +334,7 @@ name = "build" version = "1.2.2.post1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "os_name == 'nt'" }, + { name = "colorama", marker = "(os_name == 'nt' and platform_machine != 'aarch64' and sys_platform == 'linux') or (os_name == 'nt' and sys_platform != 'darwin' and sys_platform != 'linux')" }, { name = "importlib-metadata", marker = "python_full_version < '3.10.2'" }, { name = "packaging" }, { name = "pyproject-hooks" }, @@ -579,7 +569,7 @@ name = "click" version = "8.1.8" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } wheels = [ @@ -703,8 +693,8 @@ dev = [ { name = "pre-commit" }, { name = "pytest" }, { name = "pytest-asyncio" }, + { name = "pytest-recording" }, { name = "pytest-subprocess" }, - { name = "pytest-vcr" }, { name = "python-dotenv" }, { name = "ruff" }, ] @@ -745,6 +735,7 @@ requires-dist = [ { name = "tomli-w", specifier = ">=1.1.0" }, { name = "uv", specifier = ">=0.4.25" }, ] +provides-extras = ["tools", "embeddings", "agentops", "fastembed", "pdfplumber", "pandas", "openpyxl", "mem0", "docling", "aisuite"] [package.metadata.requires-dev] dev = [ @@ -759,8 +750,8 @@ dev = [ { name = "pre-commit", specifier = ">=3.6.0" }, { name = "pytest", specifier = ">=8.0.0" }, { name = "pytest-asyncio", specifier = ">=0.23.7" }, + { name = "pytest-recording", specifier = ">=0.13.2" }, { name = "pytest-subprocess", specifier = ">=1.5.2" }, - { name = "pytest-vcr", specifier = ">=1.0.2" }, { name = 
"python-dotenv", specifier = ">=1.0.0" }, { name = "ruff", specifier = ">=0.8.2" }, ] @@ -2518,7 +2509,7 @@ version = "1.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, - { name = "colorama", marker = "platform_system == 'Windows'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, { name = "ghp-import" }, { name = "jinja2" }, { name = "markdown" }, @@ -2699,7 +2690,7 @@ version = "2.10.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pygments" }, - { name = "pywin32", marker = "platform_system == 'Windows'" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "tqdm" }, ] sdist = { url = "https://files.pythonhosted.org/packages/3a/93/80ac75c20ce54c785648b4ed363c88f148bf22637e10c9863db4fbe73e74/mpire-2.10.2.tar.gz", hash = "sha256:f66a321e93fadff34585a4bfa05e95bd946cf714b442f51c529038eb45773d97", size = 271270 } @@ -2946,7 +2937,7 @@ name = "nvidia-cudnn-cu12" version = "9.1.0.70" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, + { name = "nvidia-cublas-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f", size = 664752741 }, @@ -2973,9 +2964,9 @@ name = "nvidia-cusolver-cu12" version = "11.4.5.107" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, - { name = "nvidia-cusparse-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, - { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, + { name = "nvidia-cublas-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "nvidia-cusparse-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd", size = 124161928 }, @@ -2986,7 +2977,7 @@ name = "nvidia-cusparse-cu12" version = "12.1.0.106" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system 
!= 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, + { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c", size = 195958278 }, @@ -2997,7 +2988,6 @@ name = "nvidia-nccl-cu12" version = "2.20.5" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/bb/d09dda47c881f9ff504afd6f9ca4f502ded6d8fc2f572cacc5e39da91c28/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01", size = 176238458 }, { url = "https://files.pythonhosted.org/packages/4b/2a/0a131f572aa09f741c30ccd45a8e56316e8be8dfc7bc19bf0ab7cfef7b19/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56", size = 176249402 }, ] @@ -3007,7 +2997,6 @@ version = "12.6.85" source = { registry = "https://pypi.org/simple" } wheels = [ { url = "https://files.pythonhosted.org/packages/9d/d7/c5383e47c7e9bf1c99d5bd2a8c935af2b6d705ad831a7ec5c97db4d82f4f/nvidia_nvjitlink_cu12-12.6.85-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:eedc36df9e88b682efe4309aa16b5b4e78c2407eac59e8c10a6a47535164369a", size = 19744971 }, - { url = "https://files.pythonhosted.org/packages/31/db/dc71113d441f208cdfe7ae10d4983884e13f464a6252450693365e166dcf/nvidia_nvjitlink_cu12-12.6.85-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cf4eaa7d4b6b543ffd69d6abfb11efdeb2db48270d94dfd3a452c24150829e41", size = 19270338 }, ] [[package]] @@ -3525,7 +3514,7 @@ name = "portalocker" version = "2.10.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pywin32", marker = "platform_system == 'Windows'" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ed/d3/c6c64067759e87af98cc668c1cc75171347d0f1577fab7ca3749134e3cd4/portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f", size = 40891 } wheels = [ @@ -4075,6 +4064,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/96/31/6607dab48616902f76885dfcf62c08d929796fc3b2d2318faf9fd54dbed9/pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b", size = 18024 }, ] +[[package]] +name = "pytest-recording" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "vcrpy", version = "5.1.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_python_implementation == 'PyPy'" }, + { name = "vcrpy", version = "7.0.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/2a/ea6b8036ae01979eae02d8ad5a7da14dec90d9176b613e49fb8d134c78fc/pytest_recording-0.13.2.tar.gz", hash = "sha256:000c3babbb466681457fd65b723427c1779a0c6c17d9e381c3142a701e124877", size = 25270 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/72/52/8e67a969e9fad3fa5ec4eab9f2a7348ff04692065c7deda21d76e9112703/pytest_recording-0.13.2-py3-none-any.whl", hash = "sha256:3820fe5743d1ac46e807989e11d073cb776a60bdc544cf43ebca454051b22d13", size = 12783 }, +] + [[package]] name = "pytest-subprocess" version = "1.5.2" @@ -4087,19 +4090,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/10/77/a80e8f9126b95ffd5ad4d04bd14005c68dcbf0d88f53b2b14893f6cc7232/pytest_subprocess-1.5.2-py3-none-any.whl", hash = "sha256:23ac7732aa8bd45f1757265b1316eb72a7f55b41fb21e2ca22e149ba3629fa46", size = 20886 }, ] -[[package]] -name = "pytest-vcr" -version = "1.0.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pytest" }, - { name = "vcrpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1a/60/104c619483c1a42775d3f8b27293f1ecfc0728014874d065e68cb9702d49/pytest-vcr-1.0.2.tar.gz", hash = "sha256:23ee51b75abbcc43d926272773aae4f39f93aceb75ed56852d0bf618f92e1896", size = 3810 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/d3/ff520d11e6ee400602711d1ece8168dcfc5b6d8146fb7db4244a6ad6a9c3/pytest_vcr-1.0.2-py2.py3-none-any.whl", hash = "sha256:2f316e0539399bea0296e8b8401145c62b6f85e9066af7e57b6151481b0d6d9c", size = 4137 }, -] - [[package]] name = "python-bidi" version = "0.6.3" @@ -5032,19 +5022,19 @@ dependencies = [ { name = "fsspec" }, { name = "jinja2" }, { name = "networkx" }, - { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, - { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparse-cu12", marker = "platform_machine == 
'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "sympy" }, - { name = "triton", marker = "platform_machine == 'x86_64' and platform_system == 'Linux'" }, + { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "typing-extensions" }, ] wheels = [ @@ -5091,7 +5081,7 @@ name = "tqdm" version = "4.66.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/58/83/6ba9844a41128c62e810fddddd72473201f3eacde02046066142a2d96cc5/tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad", size = 169504 } wheels = [ @@ -5133,7 +5123,7 @@ name = "triton" version = "3.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "filelock", marker = "(platform_machine != 'aarch64' and platform_system != 'Darwin') or (platform_system != 'Darwin' and platform_system != 'Linux' and sys_platform != 'linux')" }, + { name = "filelock", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/45/27/14cc3101409b9b4b9241d2ba7deaa93535a217a211c86c4cc7151fb12181/triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a", size = 209376304 }, @@ -5288,16 +5278,59 @@ wheels = [ name = "vcrpy" version = "5.1.0" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11' and platform_python_implementation == 'PyPy' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_python_implementation == 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux') or (python_full_version >= '3.12' and python_full_version < 
'3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'darwin'", + "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation == 'PyPy' and sys_platform == 'linux') or (python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", +] dependencies = [ - { name = "pyyaml" }, - { name = "wrapt" }, - { name = "yarl" }, + { name = "pyyaml", marker = "platform_python_implementation == 'PyPy'" }, + { name = "wrapt", marker = "platform_python_implementation == 'PyPy'" }, + { name = "yarl", marker = "platform_python_implementation == 'PyPy'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a5/ea/a166a3cce4ac5958ba9bbd9768acdb1ba38ae17ff7986da09fa5b9dbc633/vcrpy-5.1.0.tar.gz", hash = "sha256:bbf1532f2618a04f11bce2a99af3a9647a32c880957293ff91e0a5f187b6b3d2", size = 84576 } wheels = [ { url = "https://files.pythonhosted.org/packages/2a/5b/3f70bcb279ad30026cc4f1df0a0491a0205a24dddd88301f396c485de9e7/vcrpy-5.1.0-py2.py3-none-any.whl", hash = "sha256:605e7b7a63dcd940db1df3ab2697ca7faf0e835c0852882142bafb19649d599e", size = 41969 }, ] +[[package]] +name = "vcrpy" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.11' and platform_python_implementation != 'PyPy' and sys_platform == 'darwin'", + "python_full_version < '3.11' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "(python_full_version < '3.11' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (python_full_version < '3.11' and platform_python_implementation != 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "(python_full_version == '3.11.*' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "(python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'darwin'", + "python_full_version >= '3.12.4' and platform_machine == 'aarch64' and platform_python_implementation != 
'PyPy' and sys_platform == 'linux'", + "(python_full_version >= '3.12.4' and platform_machine != 'aarch64' and platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'darwin' and sys_platform != 'linux')", +] +dependencies = [ + { name = "pyyaml", marker = "platform_python_implementation != 'PyPy'" }, + { name = "urllib3", marker = "platform_python_implementation != 'PyPy'" }, + { name = "wrapt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "yarl", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/d3/856e06184d4572aada1dd559ddec3bedc46df1f2edc5ab2c91121a2cccdb/vcrpy-7.0.0.tar.gz", hash = "sha256:176391ad0425edde1680c5b20738ea3dc7fb942520a48d2993448050986b3a50", size = 85502 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/5d/1f15b252890c968d42b348d1e9b0aa12d5bf3e776704178ec37cceccdb63/vcrpy-7.0.0-py2.py3-none-any.whl", hash = "sha256:55791e26c18daa363435054d8b35bd41a4ac441b6676167635d1b37a71dbe124", size = 42321 }, +] + [[package]] name = "virtualenv" version = "20.27.0"