From e4b7ccf717cfbf2c1995c5a0731973689455a82a Mon Sep 17 00:00:00 2001
From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Mon, 8 Dec 2025 15:33:26 +0000
Subject: [PATCH] fix: add drop_params support to BedrockCompletion for unsupported parameters
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This fixes GitHub issue #4046 where certain Bedrock models don't support
the stopSequences field.

The fix adds support for drop_params and additional_drop_params parameters
in BedrockCompletion._get_inference_config() to allow users to drop
unsupported parameters from the inference config.

Example usage:

    llm = LLM(
        model="bedrock/openai.gpt-oss-safeguard-120b",
        drop_params=True,
        additional_drop_params=["stopSequences"]
    )

This follows the same pattern as the Azure provider implementation.

Co-Authored-By: João
---
 .../llms/providers/bedrock/completion.py      |   9 ++
 lib/crewai/tests/llms/bedrock/test_bedrock.py | 151 ++++++++++++++++++
 2 files changed, 160 insertions(+)

diff --git a/lib/crewai/src/crewai/llms/providers/bedrock/completion.py b/lib/crewai/src/crewai/llms/providers/bedrock/completion.py
index 2057bd871..ed82ae6cd 100644
--- a/lib/crewai/src/crewai/llms/providers/bedrock/completion.py
+++ b/lib/crewai/src/crewai/llms/providers/bedrock/completion.py
@@ -1301,6 +1301,15 @@ class BedrockCompletion(BaseLLM):
             # top_k is supported by Claude models
             config["topK"] = int(self.top_k)
 
+        # Handle drop_params to remove unsupported parameters for certain models
+        additional_params = getattr(self, "additional_params", {}) or {}
+        additional_drop_params = additional_params.get("additional_drop_params")
+        drop_params = additional_params.get("drop_params")
+
+        if drop_params and isinstance(additional_drop_params, list):
+            for key in additional_drop_params:
+                config.pop(key, None)  # type: ignore[misc]
+
         return config
 
     def _handle_client_error(self, e: ClientError) -> str:
diff --git a/lib/crewai/tests/llms/bedrock/test_bedrock.py b/lib/crewai/tests/llms/bedrock/test_bedrock.py
index aecbdde0e..fef54cac5 100644
--- a/lib/crewai/tests/llms/bedrock/test_bedrock.py
+++ b/lib/crewai/tests/llms/bedrock/test_bedrock.py
@@ -789,3 +789,154 @@ def test_bedrock_stop_sequences_sent_to_api():
     assert "inferenceConfig" in call_kwargs
     assert "stopSequences" in call_kwargs["inferenceConfig"]
     assert call_kwargs["inferenceConfig"]["stopSequences"] == ["\nObservation:", "\nThought:"]
+
+
+def test_bedrock_drop_params_removes_stop_sequences():
+    """Test that drop_params with additional_drop_params removes stopSequences from inference config.
+
+    This tests the fix for GitHub issue #4046 where certain Bedrock models
+    don't support the stopSequences field.
+    """
+    llm = LLM(
+        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
+        stop_sequences=["\nObservation:", "\nThought:"],
+        drop_params=True,
+        additional_drop_params=["stopSequences"],
+    )
+
+    from crewai.llms.providers.bedrock.completion import BedrockCompletion
+    assert isinstance(llm, BedrockCompletion)
+
+    # Get the inference config
+    config = llm._get_inference_config()
+
+    # Verify stopSequences is NOT in the config when drop_params is enabled
+    assert "stopSequences" not in config
+
+
+def test_bedrock_drop_params_preserves_other_params():
+    """Test that drop_params only removes specified parameters, not others."""
+    llm = LLM(
+        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
+        temperature=0.7,
+        max_tokens=1000,
+        top_p=0.9,
+        stop_sequences=["\nObservation:"],
+        drop_params=True,
+        additional_drop_params=["stopSequences"],
+    )
+
+    from crewai.llms.providers.bedrock.completion import BedrockCompletion
+    assert isinstance(llm, BedrockCompletion)
+
+    # Get the inference config
+    config = llm._get_inference_config()
+
+    # Verify stopSequences is removed
+    assert "stopSequences" not in config
+
+    # Verify other parameters are preserved
+    assert "temperature" in config
+    assert config["temperature"] == 0.7
+    assert "maxTokens" in config
+    assert config["maxTokens"] == 1000
+    assert "topP" in config
+    assert config["topP"] == 0.9
+
+
+def test_bedrock_without_drop_params_keeps_stop_sequences():
+    """Test that without drop_params, stopSequences is included in inference config."""
+    llm = LLM(
+        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
+        stop_sequences=["\nObservation:", "\nThought:"],
+    )
+
+    from crewai.llms.providers.bedrock.completion import BedrockCompletion
+    assert isinstance(llm, BedrockCompletion)
+
+    # Get the inference config
+    config = llm._get_inference_config()
+
+    # Verify stopSequences IS in the config when drop_params is not set
+    assert "stopSequences" in config
+    assert config["stopSequences"] == ["\nObservation:", "\nThought:"]
+
+
+def test_bedrock_drop_params_false_keeps_stop_sequences():
+    """Test that with drop_params=False, stopSequences is included even with additional_drop_params."""
+    llm = LLM(
+        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
+        stop_sequences=["\nObservation:"],
+        drop_params=False,
+        additional_drop_params=["stopSequences"],
+    )
+
+    from crewai.llms.providers.bedrock.completion import BedrockCompletion
+    assert isinstance(llm, BedrockCompletion)
+
+    # Get the inference config
+    config = llm._get_inference_config()
+
+    # Verify stopSequences IS in the config when drop_params is False
+    assert "stopSequences" in config
+    assert config["stopSequences"] == ["\nObservation:"]
+
+
+def test_bedrock_drop_params_multiple_params():
+    """Test that drop_params can remove multiple parameters."""
+    llm = LLM(
+        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
+        temperature=0.7,
+        top_p=0.9,
+        stop_sequences=["\nObservation:"],
+        drop_params=True,
+        additional_drop_params=["stopSequences", "topP"],
+    )
+
+    from crewai.llms.providers.bedrock.completion import BedrockCompletion
+    assert isinstance(llm, BedrockCompletion)
+
+    # Get the inference config
+    config = llm._get_inference_config()
+
+    # Verify both stopSequences and topP are removed
+    assert "stopSequences" not in config
+    assert "topP" not in config
+
+    # Verify temperature is preserved
+    assert "temperature" in config
+    assert config["temperature"] == 0.7
+
+
+def test_bedrock_drop_params_api_call():
+    """Test that drop_params affects the actual API call parameters."""
+    llm = LLM(
+        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
+        stop_sequences=["\nObservation:"],
+        drop_params=True,
+        additional_drop_params=["stopSequences"],
+    )
+
+    # Patch the API call to capture parameters without making real call
+    with patch.object(llm.client, 'converse') as mock_converse:
+        mock_response = {
+            'output': {
+                'message': {
+                    'role': 'assistant',
+                    'content': [{'text': 'Hello'}]
+                }
+            },
+            'usage': {
+                'inputTokens': 10,
+                'outputTokens': 5,
+                'totalTokens': 15
+            }
+        }
+        mock_converse.return_value = mock_response
+
+        llm.call("Say hello")
+
+        # Verify stopSequences is NOT in the inference config sent to the API
+        call_kwargs = mock_converse.call_args[1]
+        assert "inferenceConfig" in call_kwargs
+        assert "stopSequences" not in call_kwargs["inferenceConfig"]
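
A minimal usage sketch of the behavior this patch adds, adapted from the
commit message's example and the tests above: the model ID is the one named
in the commit message, the stop_sequences value mirrors the tests, and the
direct llm.call() is illustrative only (it assumes valid AWS Bedrock
credentials are configured in the environment).

    from crewai import LLM

    # Model named in the commit message as one that rejects stopSequences.
    # With drop_params=True, additional_drop_params lists the inference
    # config keys to strip before the Bedrock Converse request is built.
    llm = LLM(
        model="bedrock/openai.gpt-oss-safeguard-120b",
        stop_sequences=["\nObservation:"],  # illustrative, as in the tests
        drop_params=True,
        additional_drop_params=["stopSequences"],
    )

    # stopSequences is dropped from the inference config, so the unsupported
    # field is never sent to the model.
    print(llm.call("Say hello"))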