From 40a2d387a1ae6f34ecc6c57e8090d09d3405cd90 Mon Sep 17 00:00:00 2001 From: Greyson LaLonde Date: Thu, 6 Nov 2025 21:10:25 -0500 Subject: [PATCH] fix: keep stopwords updated --- lib/crewai/src/crewai/llms/hooks/transport.py | 11 +- .../llms/providers/anthropic/completion.py | 24 +++ .../llms/providers/bedrock/completion.py | 24 +++ .../llms/providers/gemini/completion.py | 24 +++ ..._anthropic_stop_sequences_sent_to_api.yaml | 202 ++++++++++++++++++ .../tests/llms/anthropic/test_anthropic.py | 34 +++ lib/crewai/tests/llms/bedrock/test_bedrock.py | 53 +++++ lib/crewai/tests/llms/google/test_google.py | 52 +++++ 8 files changed, 418 insertions(+), 6 deletions(-) create mode 100644 lib/crewai/tests/cassettes/test_anthropic_stop_sequences_sent_to_api.yaml diff --git a/lib/crewai/src/crewai/llms/hooks/transport.py b/lib/crewai/src/crewai/llms/hooks/transport.py index ee3f9224c..27a0972ab 100644 --- a/lib/crewai/src/crewai/llms/hooks/transport.py +++ b/lib/crewai/src/crewai/llms/hooks/transport.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: from crewai.llms.hooks.base import BaseInterceptor -class HTTPTransportKwargs(TypedDict): +class HTTPTransportKwargs(TypedDict, total=False): """Typed dictionary for httpx.HTTPTransport initialization parameters. These parameters configure the underlying HTTP transport behavior including @@ -33,14 +33,14 @@ class HTTPTransportKwargs(TypedDict): """ verify: bool | str | SSLContext - cert: NotRequired[CertTypes | None] + cert: NotRequired[CertTypes] trust_env: bool http1: bool http2: bool limits: Limits - proxy: NotRequired[ProxyTypes | None] - uds: NotRequired[str | None] - local_address: NotRequired[str | None] + proxy: NotRequired[ProxyTypes] + uds: NotRequired[str] + local_address: NotRequired[str] retries: int socket_options: NotRequired[ Iterable[ @@ -48,7 +48,6 @@ class HTTPTransportKwargs(TypedDict): | tuple[int, int, bytes | bytearray] | tuple[int, int, None, int] ] - | None ] diff --git a/lib/crewai/src/crewai/llms/providers/anthropic/completion.py b/lib/crewai/src/crewai/llms/providers/anthropic/completion.py index 50298eb77..ea161fc63 100644 --- a/lib/crewai/src/crewai/llms/providers/anthropic/completion.py +++ b/lib/crewai/src/crewai/llms/providers/anthropic/completion.py @@ -94,6 +94,30 @@ class AnthropicCompletion(BaseLLM): self.is_claude_3 = "claude-3" in model.lower() self.supports_tools = self.is_claude_3 # Claude 3+ supports tool use + @property + def stop(self) -> list[str]: + """Get stop sequences sent to the API.""" + return self.stop_sequences + + @stop.setter + def stop(self, value: list[str] | str | None) -> None: + """Set stop sequences. + + Synchronizes stop_sequences to ensure values set by CrewAgentExecutor + are properly sent to the Anthropic API. 
+ + Args: + value: Stop sequences as a list, single string, or None + """ + if value is None: + self.stop_sequences = [] + elif isinstance(value, str): + self.stop_sequences = [value] + elif isinstance(value, list): + self.stop_sequences = value + else: + self.stop_sequences = [] + def _get_client_params(self) -> dict[str, Any]: """Get client parameters.""" diff --git a/lib/crewai/src/crewai/llms/providers/bedrock/completion.py b/lib/crewai/src/crewai/llms/providers/bedrock/completion.py index ff0808937..20eabf763 100644 --- a/lib/crewai/src/crewai/llms/providers/bedrock/completion.py +++ b/lib/crewai/src/crewai/llms/providers/bedrock/completion.py @@ -243,6 +243,30 @@ class BedrockCompletion(BaseLLM): # Handle inference profiles for newer models self.model_id = model + @property + def stop(self) -> list[str]: + """Get stop sequences sent to the API.""" + return list(self.stop_sequences) + + @stop.setter + def stop(self, value: Sequence[str] | str | None) -> None: + """Set stop sequences. + + Synchronizes stop_sequences to ensure values set by CrewAgentExecutor + are properly sent to the Bedrock API. + + Args: + value: Stop sequences as a Sequence, single string, or None + """ + if value is None: + self.stop_sequences = [] + elif isinstance(value, str): + self.stop_sequences = [value] + elif isinstance(value, Sequence): + self.stop_sequences = list(value) + else: + self.stop_sequences = [] + def call( self, messages: str | list[LLMMessage], diff --git a/lib/crewai/src/crewai/llms/providers/gemini/completion.py b/lib/crewai/src/crewai/llms/providers/gemini/completion.py index 45b603c19..8668a8f58 100644 --- a/lib/crewai/src/crewai/llms/providers/gemini/completion.py +++ b/lib/crewai/src/crewai/llms/providers/gemini/completion.py @@ -104,6 +104,30 @@ class GeminiCompletion(BaseLLM): self.is_gemini_1_5 = "gemini-1.5" in model.lower() self.supports_tools = self.is_gemini_1_5 or self.is_gemini_2 + @property + def stop(self) -> list[str]: + """Get stop sequences sent to the API.""" + return self.stop_sequences + + @stop.setter + def stop(self, value: list[str] | str | None) -> None: + """Set stop sequences. + + Synchronizes stop_sequences to ensure values set by CrewAgentExecutor + are properly sent to the Gemini API. + + Args: + value: Stop sequences as a list, single string, or None + """ + if value is None: + self.stop_sequences = [] + elif isinstance(value, str): + self.stop_sequences = [value] + elif isinstance(value, list): + self.stop_sequences = value + else: + self.stop_sequences = [] + def _initialize_client(self, use_vertexai: bool = False) -> genai.Client: # type: ignore[no-any-unimported] """Initialize the Google Gen AI client with proper parameter handling. 
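The three provider diffs above add the same property-based synchronization pattern. For reference, a minimal self-contained sketch of that pattern, runnable on its own (the class name `_StopSequenceSync` and the variable `llm_like` are illustrative only and not part of this patch):

from collections.abc import Sequence


class _StopSequenceSync:
    """Illustrative stand-in for a provider completion class."""

    def __init__(self) -> None:
        # The canonical value lives in `stop_sequences`, which is what the
        # provider serializes into the outgoing API request.
        self.stop_sequences: list[str] = []

    @property
    def stop(self) -> list[str]:
        # `stop` is the attribute CrewAgentExecutor assigns to; reading it
        # simply mirrors `stop_sequences`.
        return list(self.stop_sequences)

    @stop.setter
    def stop(self, value: Sequence[str] | str | None) -> None:
        # Normalize None / str / sequence into a list of strings so the two
        # attributes can never drift apart.
        if value is None:
            self.stop_sequences = []
        elif isinstance(value, str):
            self.stop_sequences = [value]
        elif isinstance(value, Sequence):
            self.stop_sequences = list(value)
        else:
            self.stop_sequences = []


llm_like = _StopSequenceSync()
llm_like.stop = "\nFinal Answer:"
assert llm_like.stop_sequences == ["\nFinal Answer:"]
llm_like.stop = None
assert llm_like.stop_sequences == []
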
diff --git a/lib/crewai/tests/cassettes/test_anthropic_stop_sequences_sent_to_api.yaml b/lib/crewai/tests/cassettes/test_anthropic_stop_sequences_sent_to_api.yaml new file mode 100644 index 000000000..8759062f9 --- /dev/null +++ b/lib/crewai/tests/cassettes/test_anthropic_stop_sequences_sent_to_api.yaml @@ -0,0 +1,202 @@ +interactions: +- request: + body: '{"trace_id": "1703c4e0-d3be-411c-85e7-48018c2df384", "execution_type": + "crew", "user_identifier": null, "execution_context": {"crew_fingerprint": null, + "crew_name": "Unknown Crew", "flow_name": null, "crewai_version": "1.3.0", "privacy_level": + "standard"}, "execution_metadata": {"expected_duration_estimate": 300, "agent_count": + 0, "task_count": 0, "flow_method_count": 0, "execution_started_at": "2025-11-07T01:58:22.260309+00:00"}}' + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate, zstd + Connection: + - keep-alive + Content-Length: + - '434' + Content-Type: + - application/json + User-Agent: + - CrewAI-CLI/1.3.0 + X-Crewai-Version: + - 1.3.0 + method: POST + uri: https://app.crewai.com/crewai_plus/api/v1/tracing/batches + response: + body: + string: '{"error":"bad_credentials","message":"Bad credentials"}' + headers: + Connection: + - keep-alive + Content-Length: + - '55' + Content-Type: + - application/json; charset=utf-8 + Date: + - Fri, 07 Nov 2025 01:58:22 GMT + cache-control: + - no-store + content-security-policy: + - 'default-src ''self'' *.app.crewai.com app.crewai.com; script-src ''self'' + ''unsafe-inline'' *.app.crewai.com app.crewai.com https://cdn.jsdelivr.net/npm/apexcharts + https://www.gstatic.com https://run.pstmn.io https://apis.google.com https://apis.google.com/js/api.js + https://accounts.google.com https://accounts.google.com/gsi/client https://cdnjs.cloudflare.com/ajax/libs/normalize/8.0.1/normalize.min.css.map + https://*.google.com https://docs.google.com https://slides.google.com https://js.hs-scripts.com + https://js.sentry-cdn.com https://browser.sentry-cdn.com https://www.googletagmanager.com + https://js-na1.hs-scripts.com https://js.hubspot.com http://js-na1.hs-scripts.com + https://bat.bing.com https://cdn.amplitude.com https://cdn.segment.com https://d1d3n03t5zntha.cloudfront.net/ + https://descriptusercontent.com https://edge.fullstory.com https://googleads.g.doubleclick.net + https://js.hs-analytics.net https://js.hs-banner.com https://js.hsadspixel.net + https://js.hscollectedforms.net https://js.usemessages.com https://snap.licdn.com + https://static.cloudflareinsights.com https://static.reo.dev https://www.google-analytics.com + https://share.descript.com/; style-src ''self'' ''unsafe-inline'' *.app.crewai.com + app.crewai.com https://cdn.jsdelivr.net/npm/apexcharts; img-src ''self'' data: + *.app.crewai.com app.crewai.com https://zeus.tools.crewai.com https://dashboard.tools.crewai.com + https://cdn.jsdelivr.net https://forms.hsforms.com https://track.hubspot.com + https://px.ads.linkedin.com https://px4.ads.linkedin.com https://www.google.com + https://www.google.com.br; font-src ''self'' data: *.app.crewai.com app.crewai.com; + connect-src ''self'' *.app.crewai.com app.crewai.com https://zeus.tools.crewai.com + https://connect.useparagon.com/ https://zeus.useparagon.com/* https://*.useparagon.com/* + https://run.pstmn.io https://connect.tools.crewai.com/ https://*.sentry.io + https://www.google-analytics.com https://edge.fullstory.com https://rs.fullstory.com + https://api.hubspot.com https://forms.hscollectedforms.net https://api.hubapi.com + https://px.ads.linkedin.com 
https://px4.ads.linkedin.com https://google.com/pagead/form-data/16713662509 + https://google.com/ccm/form-data/16713662509 https://www.google.com/ccm/collect + https://worker-actionkit.tools.crewai.com https://api.reo.dev; frame-src ''self'' + *.app.crewai.com app.crewai.com https://connect.useparagon.com/ https://zeus.tools.crewai.com + https://zeus.useparagon.com/* https://connect.tools.crewai.com/ https://docs.google.com + https://drive.google.com https://slides.google.com https://accounts.google.com + https://*.google.com https://app.hubspot.com/ https://td.doubleclick.net https://www.googletagmanager.com/ + https://www.youtube.com https://share.descript.com' + expires: + - '0' + permissions-policy: + - camera=(), microphone=(self), geolocation=() + pragma: + - no-cache + referrer-policy: + - strict-origin-when-cross-origin + strict-transport-security: + - max-age=63072000; includeSubDomains + vary: + - Accept + x-content-type-options: + - nosniff + x-frame-options: + - SAMEORIGIN + x-permitted-cross-domain-policies: + - none + x-request-id: + - 4124c4ce-02cf-4d08-9b0b-8983c2e9da6e + x-runtime: + - '0.073764' + x-xss-protection: + - 1; mode=block + status: + code: 401 + message: Unauthorized +- request: + body: '{"max_tokens":4096,"messages":[{"role":"user","content":"Say hello in one + word"}],"model":"claude-3-5-haiku-20241022","stop_sequences":["\nObservation:","\nThought:"],"stream":false}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate, zstd + anthropic-version: + - '2023-06-01' + connection: + - keep-alive + content-length: + - '182' + content-type: + - application/json + host: + - api.anthropic.com + user-agent: + - Anthropic/Python 0.71.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 0.71.0 + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.12.9 + x-stainless-timeout: + - NOT_GIVEN + method: POST + uri: https://api.anthropic.com/v1/messages + response: + body: + string: !!binary | + H4sIAAAAAAAAAwAAAP//dJBdS8QwEEX/y31Ope26u5J3dwWfBH0SCTEZtmHTpCYTXSn979LF4hc+ + DdxzZgbuiD5a8pAwXhdL1apaV512x1K1dXvZ1G0LAWch0eeDqpvN1f3q7bTPz91tc/ewLcfr/Xaz + gwC/DzRblLM+EARS9HOgc3aZdWAImBiYAkM+jovPdJrJeUjckPfxAtOTQOY4qEQ6xwAJClZxSQGf + INNLoWAIMhTvBcr5qRzhwlBYcTxSyJBNK2C06UiZRJpdDOqnUC88kbb/sWV3vk9DRz0l7dW6/+t/ + 0ab7TSeBWPh7tBbIlF6dIcWOEiTmoqxOFtP0AQAA//8DAM5WvkqaAQAA + headers: + CF-RAY: + - 99a939a5a931556e-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Fri, 07 Nov 2025 01:58:22 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + X-Robots-Tag: + - none + anthropic-organization-id: + - SCRUBBED-ORG-ID + anthropic-ratelimit-input-tokens-limit: + - '400000' + anthropic-ratelimit-input-tokens-remaining: + - '400000' + anthropic-ratelimit-input-tokens-reset: + - '2025-11-07T01:58:22Z' + anthropic-ratelimit-output-tokens-limit: + - '80000' + anthropic-ratelimit-output-tokens-remaining: + - '80000' + anthropic-ratelimit-output-tokens-reset: + - '2025-11-07T01:58:22Z' + anthropic-ratelimit-requests-limit: + - '4000' + anthropic-ratelimit-requests-remaining: + - '3999' + anthropic-ratelimit-requests-reset: + - '2025-11-07T01:58:22Z' + anthropic-ratelimit-tokens-limit: + - '480000' + anthropic-ratelimit-tokens-remaining: + - '480000' + anthropic-ratelimit-tokens-reset: + - '2025-11-07T01:58:22Z' + cf-cache-status: + - DYNAMIC + 
request-id: + - req_011CUshbL7CEVoner91hUvxL + retry-after: + - '41' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-envoy-upstream-service-time: + - '390' + status: + code: 200 + message: OK +version: 1 diff --git a/lib/crewai/tests/llms/anthropic/test_anthropic.py b/lib/crewai/tests/llms/anthropic/test_anthropic.py index 37ba366b9..6ba294d8b 100644 --- a/lib/crewai/tests/llms/anthropic/test_anthropic.py +++ b/lib/crewai/tests/llms/anthropic/test_anthropic.py @@ -664,3 +664,37 @@ def test_anthropic_token_usage_tracking(): assert usage["input_tokens"] == 50 assert usage["output_tokens"] == 25 assert usage["total_tokens"] == 75 + + +def test_anthropic_stop_sequences_sync(): + """Test that stop and stop_sequences attributes stay synchronized.""" + llm = LLM(model="anthropic/claude-3-5-sonnet-20241022") + + # Test setting stop as a list + llm.stop = ["\nObservation:", "\nThought:"] + assert llm.stop_sequences == ["\nObservation:", "\nThought:"] + assert llm.stop == ["\nObservation:", "\nThought:"] + + # Test setting stop as a string + llm.stop = "\nFinal Answer:" + assert llm.stop_sequences == ["\nFinal Answer:"] + assert llm.stop == ["\nFinal Answer:"] + + # Test setting stop as None + llm.stop = None + assert llm.stop_sequences == [] + assert llm.stop == [] + + +@pytest.mark.vcr(filter_headers=["authorization", "x-api-key"]) +def test_anthropic_stop_sequences_sent_to_api(): + """Test that stop_sequences are properly sent to the Anthropic API.""" + llm = LLM(model="anthropic/claude-3-5-haiku-20241022") + + llm.stop = ["\nObservation:", "\nThought:"] + + result = llm.call("Say hello in one word") + + assert result is not None + assert isinstance(result, str) + assert len(result) > 0 diff --git a/lib/crewai/tests/llms/bedrock/test_bedrock.py b/lib/crewai/tests/llms/bedrock/test_bedrock.py index 9fd172cc6..aecbdde0e 100644 --- a/lib/crewai/tests/llms/bedrock/test_bedrock.py +++ b/lib/crewai/tests/llms/bedrock/test_bedrock.py @@ -736,3 +736,56 @@ def test_bedrock_client_error_handling(): with pytest.raises(RuntimeError) as exc_info: llm.call("Hello") assert "throttled" in str(exc_info.value).lower() + + +def test_bedrock_stop_sequences_sync(): + """Test that stop and stop_sequences attributes stay synchronized.""" + llm = LLM(model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0") + + # Test setting stop as a list + llm.stop = ["\nObservation:", "\nThought:"] + assert list(llm.stop_sequences) == ["\nObservation:", "\nThought:"] + assert llm.stop == ["\nObservation:", "\nThought:"] + + # Test setting stop as a string + llm.stop = "\nFinal Answer:" + assert list(llm.stop_sequences) == ["\nFinal Answer:"] + assert llm.stop == ["\nFinal Answer:"] + + # Test setting stop as None + llm.stop = None + assert list(llm.stop_sequences) == [] + assert llm.stop == [] + + +def test_bedrock_stop_sequences_sent_to_api(): + """Test that stop_sequences are properly sent to the Bedrock API.""" + llm = LLM(model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0") + + # Set stop sequences via the stop attribute (simulating CrewAgentExecutor) + llm.stop = ["\nObservation:", "\nThought:"] + + # Patch the API call to capture parameters without making real call + with patch.object(llm.client, 'converse') as mock_converse: + mock_response = { + 'output': { + 'message': { + 'role': 'assistant', + 'content': [{'text': 'Hello'}] + } + }, + 'usage': { + 'inputTokens': 10, + 'outputTokens': 5, + 'totalTokens': 15 + } + } + mock_converse.return_value = mock_response + + llm.call("Say hello 
in one word") + + # Verify stop_sequences were passed to the API in the inference config + call_kwargs = mock_converse.call_args[1] + assert "inferenceConfig" in call_kwargs + assert "stopSequences" in call_kwargs["inferenceConfig"] + assert call_kwargs["inferenceConfig"]["stopSequences"] == ["\nObservation:", "\nThought:"] diff --git a/lib/crewai/tests/llms/google/test_google.py b/lib/crewai/tests/llms/google/test_google.py index fc3ff9099..f7b721d1d 100644 --- a/lib/crewai/tests/llms/google/test_google.py +++ b/lib/crewai/tests/llms/google/test_google.py @@ -648,3 +648,55 @@ def test_gemini_token_usage_tracking(): assert usage["candidates_token_count"] == 25 assert usage["total_token_count"] == 75 assert usage["total_tokens"] == 75 + + +def test_gemini_stop_sequences_sync(): + """Test that stop and stop_sequences attributes stay synchronized.""" + llm = LLM(model="google/gemini-2.0-flash-001") + + # Test setting stop as a list + llm.stop = ["\nObservation:", "\nThought:"] + assert llm.stop_sequences == ["\nObservation:", "\nThought:"] + assert llm.stop == ["\nObservation:", "\nThought:"] + + # Test setting stop as a string + llm.stop = "\nFinal Answer:" + assert llm.stop_sequences == ["\nFinal Answer:"] + assert llm.stop == ["\nFinal Answer:"] + + # Test setting stop as None + llm.stop = None + assert llm.stop_sequences == [] + assert llm.stop == [] + + +def test_gemini_stop_sequences_sent_to_api(): + """Test that stop_sequences are properly sent to the Gemini API.""" + llm = LLM(model="google/gemini-2.0-flash-001") + + # Set stop sequences via the stop attribute (simulating CrewAgentExecutor) + llm.stop = ["\nObservation:", "\nThought:"] + + # Patch the API call to capture parameters without making real call + with patch.object(llm.client.models, 'generate_content') as mock_generate: + mock_response = MagicMock() + mock_response.text = "Hello" + mock_response.candidates = [] + mock_response.usage_metadata = MagicMock( + prompt_token_count=10, + candidates_token_count=5, + total_token_count=15 + ) + mock_generate.return_value = mock_response + + llm.call("Say hello in one word") + + # Verify stop_sequences were passed to the API in the config + call_kwargs = mock_generate.call_args[1] + assert "config" in call_kwargs + # The config object should have stop_sequences set + config = call_kwargs["config"] + # Check if the config has stop_sequences attribute + assert hasattr(config, 'stop_sequences') or 'stop_sequences' in config.__dict__ + if hasattr(config, 'stop_sequences'): + assert config.stop_sequences == ["\nObservation:", "\nThought:"]