fix: add drop_params support to BedrockCompletion for unsupported parameters

This fixes GitHub issue #4046 where certain Bedrock models don't support
the stopSequences field. The fix adds support for drop_params and
additional_drop_params parameters in BedrockCompletion._get_inference_config()
to allow users to drop unsupported parameters from the inference config.

Example usage:
llm = LLM(
    model="bedrock/openai.gpt-oss-safeguard-120b",
    drop_params=True,
    additional_drop_params=["stopSequences"]
)
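
For example, even when stop sequences are set on the LLM, the dropped key never reaches the request. A minimal sketch of the effect, mirroring the new tests below (_get_inference_config() is the internal helper being changed; which other keys remain in the returned config depends on the parameters set on the LLM):

llm = LLM(
    model="bedrock/openai.gpt-oss-safeguard-120b",
    stop_sequences=["\nObservation:"],
    drop_params=True,
    additional_drop_params=["stopSequences"],
)
config = llm._get_inference_config()
assert "stopSequences" not in config  # stripped before the Converse request is built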

This follows the same pattern as the Azure provider implementation.

Co-Authored-By: João <joao@crewai.com>

Author: Devin AI
Date: 2025-12-08 15:33:26 +00:00
Parent: f2f994612c
Commit: e4b7ccf717
2 changed files with 160 additions and 0 deletions


@@ -1301,6 +1301,15 @@ class BedrockCompletion(BaseLLM):
            # top_k is supported by Claude models
            config["topK"] = int(self.top_k)

        # Handle drop_params to remove unsupported parameters for certain models
        additional_params = getattr(self, "additional_params", {}) or {}
        additional_drop_params = additional_params.get("additional_drop_params")
        drop_params = additional_params.get("drop_params")
        if drop_params and isinstance(additional_drop_params, list):
            for key in additional_drop_params:
                config.pop(key, None)  # type: ignore[misc]

        return config

    def _handle_client_error(self, e: ClientError) -> str:
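
In isolation, the added logic just filters the built inference config by the caller-supplied key list. A minimal standalone sketch of that step (illustrative only; _drop_unsupported is a hypothetical helper name, whereas the actual change keeps the logic inline in _get_inference_config as shown above):

def _drop_unsupported(config: dict, additional_params: dict) -> dict:
    # Remove caller-listed keys only when drop_params is explicitly enabled
    drop_params = additional_params.get("drop_params")
    additional_drop_params = additional_params.get("additional_drop_params")
    if drop_params and isinstance(additional_drop_params, list):
        for key in additional_drop_params:
            config.pop(key, None)
    return config

# Example: stopSequences is removed, temperature is preserved
_drop_unsupported(
    {"temperature": 0.7, "stopSequences": ["\nObservation:"]},
    {"drop_params": True, "additional_drop_params": ["stopSequences"]},
)
# -> {"temperature": 0.7}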


@@ -789,3 +789,154 @@ def test_bedrock_stop_sequences_sent_to_api():
assert "inferenceConfig" in call_kwargs
assert "stopSequences" in call_kwargs["inferenceConfig"]
assert call_kwargs["inferenceConfig"]["stopSequences"] == ["\nObservation:", "\nThought:"]


def test_bedrock_drop_params_removes_stop_sequences():
    """Test that drop_params with additional_drop_params removes stopSequences from inference config.

    This tests the fix for GitHub issue #4046 where certain Bedrock models
    don't support the stopSequences field.
    """
    llm = LLM(
        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
        stop_sequences=["\nObservation:", "\nThought:"],
        drop_params=True,
        additional_drop_params=["stopSequences"],
    )

    from crewai.llms.providers.bedrock.completion import BedrockCompletion

    assert isinstance(llm, BedrockCompletion)

    # Get the inference config
    config = llm._get_inference_config()

    # Verify stopSequences is NOT in the config when drop_params is enabled
    assert "stopSequences" not in config


def test_bedrock_drop_params_preserves_other_params():
    """Test that drop_params only removes specified parameters, not others."""
    llm = LLM(
        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
        temperature=0.7,
        max_tokens=1000,
        top_p=0.9,
        stop_sequences=["\nObservation:"],
        drop_params=True,
        additional_drop_params=["stopSequences"],
    )

    from crewai.llms.providers.bedrock.completion import BedrockCompletion

    assert isinstance(llm, BedrockCompletion)

    # Get the inference config
    config = llm._get_inference_config()

    # Verify stopSequences is removed
    assert "stopSequences" not in config

    # Verify other parameters are preserved
    assert "temperature" in config
    assert config["temperature"] == 0.7
    assert "maxTokens" in config
    assert config["maxTokens"] == 1000
    assert "topP" in config
    assert config["topP"] == 0.9


def test_bedrock_without_drop_params_keeps_stop_sequences():
    """Test that without drop_params, stopSequences is included in inference config."""
    llm = LLM(
        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
        stop_sequences=["\nObservation:", "\nThought:"],
    )

    from crewai.llms.providers.bedrock.completion import BedrockCompletion

    assert isinstance(llm, BedrockCompletion)

    # Get the inference config
    config = llm._get_inference_config()

    # Verify stopSequences IS in the config when drop_params is not set
    assert "stopSequences" in config
    assert config["stopSequences"] == ["\nObservation:", "\nThought:"]


def test_bedrock_drop_params_false_keeps_stop_sequences():
    """Test that with drop_params=False, stopSequences is included even with additional_drop_params."""
    llm = LLM(
        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
        stop_sequences=["\nObservation:"],
        drop_params=False,
        additional_drop_params=["stopSequences"],
    )

    from crewai.llms.providers.bedrock.completion import BedrockCompletion

    assert isinstance(llm, BedrockCompletion)

    # Get the inference config
    config = llm._get_inference_config()

    # Verify stopSequences IS in the config when drop_params is False
    assert "stopSequences" in config
    assert config["stopSequences"] == ["\nObservation:"]


def test_bedrock_drop_params_multiple_params():
    """Test that drop_params can remove multiple parameters."""
    llm = LLM(
        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
        temperature=0.7,
        top_p=0.9,
        stop_sequences=["\nObservation:"],
        drop_params=True,
        additional_drop_params=["stopSequences", "topP"],
    )

    from crewai.llms.providers.bedrock.completion import BedrockCompletion

    assert isinstance(llm, BedrockCompletion)

    # Get the inference config
    config = llm._get_inference_config()

    # Verify both stopSequences and topP are removed
    assert "stopSequences" not in config
    assert "topP" not in config

    # Verify temperature is preserved
    assert "temperature" in config
    assert config["temperature"] == 0.7


def test_bedrock_drop_params_api_call():
    """Test that drop_params affects the actual API call parameters."""
    llm = LLM(
        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
        stop_sequences=["\nObservation:"],
        drop_params=True,
        additional_drop_params=["stopSequences"],
    )

    # Patch the API call to capture parameters without making a real call
    with patch.object(llm.client, "converse") as mock_converse:
        mock_response = {
            "output": {
                "message": {
                    "role": "assistant",
                    "content": [{"text": "Hello"}],
                }
            },
            "usage": {
                "inputTokens": 10,
                "outputTokens": 5,
                "totalTokens": 15,
            },
        }
        mock_converse.return_value = mock_response

        llm.call("Say hello")

        # Verify stopSequences is NOT in the inference config sent to the API
        call_kwargs = mock_converse.call_args[1]
        assert "inferenceConfig" in call_kwargs
        assert "stopSequences" not in call_kwargs["inferenceConfig"]