mirror of
https://github.com/crewAIInc/crewAI.git
synced 2026-04-05 18:48:15 +00:00
feat(bedrock): add request_metadata support for Converse API
Adds support for the requestMetadata field in AWS Bedrock Converse API calls. This field is crucial for Bedrock Model Invocation Logs and allows users to include custom metadata in their API requests. Changes: - Add request_metadata parameter to BedrockCompletion constructor - Add requestMetadata to BedrockConverseRequestBody TypedDict - Add requestMetadata to BedrockConverseStreamRequestBody TypedDict - Include request_metadata in both sync call() and async acall() methods - Add comprehensive tests for request_metadata functionality Closes #4362 Co-Authored-By: João <joao@crewai.com>
This commit is contained in:
@@ -189,6 +189,7 @@ class BedrockConverseRequestBody(TypedDict, total=False):
|
||||
guardrailConfig: GuardrailConfigurationTypeDef
|
||||
additionalModelRequestFields: dict[str, Any]
|
||||
additionalModelResponseFieldPaths: list[str]
|
||||
requestMetadata: dict[str, str]
|
||||
|
||||
|
||||
class BedrockConverseStreamRequestBody(TypedDict, total=False):
|
||||
@@ -203,6 +204,7 @@ class BedrockConverseStreamRequestBody(TypedDict, total=False):
|
||||
guardrailConfig: GuardrailStreamConfigurationTypeDef
|
||||
additionalModelRequestFields: dict[str, Any]
|
||||
additionalModelResponseFieldPaths: list[str]
|
||||
requestMetadata: dict[str, str]
|
||||
|
||||
|
||||
class BedrockCompletion(BaseLLM):
|
||||
@@ -244,6 +246,7 @@ class BedrockCompletion(BaseLLM):
|
||||
guardrail_config: dict[str, Any] | None = None,
|
||||
additional_model_request_fields: dict[str, Any] | None = None,
|
||||
additional_model_response_field_paths: list[str] | None = None,
|
||||
request_metadata: dict[str, str] | None = None,
|
||||
interceptor: BaseInterceptor[Any, Any] | None = None,
|
||||
response_format: type[BaseModel] | None = None,
|
||||
**kwargs: Any,
|
||||
@@ -265,6 +268,8 @@ class BedrockCompletion(BaseLLM):
|
||||
guardrail_config: Guardrail configuration for content filtering
|
||||
additional_model_request_fields: Model-specific request parameters
|
||||
additional_model_response_field_paths: Custom response field paths
|
||||
request_metadata: Metadata to include in the request for Bedrock Model
|
||||
Invocation Logs. Keys and values must be strings.
|
||||
interceptor: HTTP interceptor (not yet supported for Bedrock).
|
||||
response_format: Pydantic model for structured output. Used as default when
|
||||
response_model is not passed to call()/acall() methods.
|
||||
@@ -332,6 +337,7 @@ class BedrockCompletion(BaseLLM):
|
||||
self.additional_model_response_field_paths = (
|
||||
additional_model_response_field_paths
|
||||
)
|
||||
self.request_metadata = request_metadata
|
||||
|
||||
# Model-specific settings
|
||||
self.is_claude_model = "claude" in model.lower()
|
||||
@@ -451,6 +457,9 @@ class BedrockCompletion(BaseLLM):
|
||||
self.additional_model_response_field_paths
|
||||
)
|
||||
|
||||
if self.request_metadata:
|
||||
body["requestMetadata"] = self.request_metadata
|
||||
|
||||
if self.stream:
|
||||
return self._handle_streaming_converse(
|
||||
formatted_messages,
|
||||
@@ -581,6 +590,9 @@ class BedrockCompletion(BaseLLM):
|
||||
self.additional_model_response_field_paths
|
||||
)
|
||||
|
||||
if self.request_metadata:
|
||||
body["requestMetadata"] = self.request_metadata
|
||||
|
||||
if self.stream:
|
||||
return await self._ahandle_streaming_converse(
|
||||
formatted_messages,
|
||||
|
||||
@@ -867,6 +867,125 @@ def test_bedrock_stop_sequences_sent_to_api():
|
||||
assert call_kwargs["inferenceConfig"]["stopSequences"] == ["\nObservation:", "\nThought:"]
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Request Metadata Tests
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def test_bedrock_request_metadata_initialization():
    """Verify BedrockCompletion stores the request_metadata passed at construction."""
    from crewai.llms.providers.bedrock.completion import BedrockCompletion

    expected = {"user_id": "test-user-123", "session_id": "session-456"}
    llm = LLM(
        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
        request_metadata=expected,
    )

    # The bedrock/ prefix should route to the Bedrock provider class.
    assert isinstance(llm, BedrockCompletion)
    assert llm.request_metadata == expected
    # Spot-check individual entries survived untouched.
    for key, value in expected.items():
        assert llm.request_metadata[key] == value
|
||||
|
||||
|
||||
def test_bedrock_request_metadata_none_by_default():
    """Verify that request_metadata defaults to None when not supplied."""
    from crewai.llms.providers.bedrock.completion import BedrockCompletion

    default_llm = LLM(model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0")

    assert isinstance(default_llm, BedrockCompletion)
    # No metadata was configured, so the attribute must be exactly None.
    assert default_llm.request_metadata is None
|
||||
|
||||
|
||||
def test_bedrock_request_metadata_sent_to_api():
    """Verify the configured request_metadata reaches the Converse API call."""
    expected = {"user_id": "test-user-123", "session_id": "session-456"}
    llm = LLM(
        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
        request_metadata=expected,
    )

    with patch.object(llm.client, 'converse') as mock_converse:
        # Minimal well-formed Converse response: one assistant message + usage.
        message = {'role': 'assistant', 'content': [{'text': 'Hello'}]}
        usage = {'inputTokens': 10, 'outputTokens': 5, 'totalTokens': 15}
        mock_converse.return_value = {
            'output': {'message': message},
            'usage': usage,
        }

        llm.call("Say hello")

        call_kwargs = mock_converse.call_args[1]
        assert "requestMetadata" in call_kwargs
        sent = call_kwargs["requestMetadata"]
        assert sent == expected
        assert sent["user_id"] == "test-user-123"
        assert sent["session_id"] == "session-456"
|
||||
|
||||
|
||||
def test_bedrock_request_metadata_not_sent_when_none():
    """Verify requestMetadata is omitted from the API call when unset."""
    llm = LLM(model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0")

    with patch.object(llm.client, 'converse') as mock_converse:
        # Minimal well-formed Converse response so call() completes normally.
        mock_converse.return_value = {
            'output': {
                'message': {'role': 'assistant', 'content': [{'text': 'Hello'}]},
            },
            'usage': {'inputTokens': 10, 'outputTokens': 5, 'totalTokens': 15},
        }

        llm.call("Say hello")

        # With no metadata configured, the key must be absent — not None/empty.
        assert "requestMetadata" not in mock_converse.call_args[1]
|
||||
|
||||
|
||||
def test_bedrock_request_metadata_with_streaming():
    """Verify request_metadata is forwarded to converse_stream when streaming."""
    expected = {"user_id": "stream-user", "request_type": "streaming"}
    llm = LLM(
        model="bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0",
        request_metadata=expected,
        stream=True,
    )

    # Minimal, well-ordered Converse stream event sequence.
    events = [
        {'messageStart': {'role': 'assistant'}},
        {'contentBlockStart': {'start': {'text': ''}, 'contentBlockIndex': 0}},
        {'contentBlockDelta': {'delta': {'text': 'Hello'}, 'contentBlockIndex': 0}},
        {'contentBlockStop': {'contentBlockIndex': 0}},
        {'messageStop': {'stopReason': 'end_turn'}},
        {'metadata': {'usage': {'inputTokens': 10, 'outputTokens': 5}}},
    ]

    with patch.object(llm.client, 'converse_stream') as mock_converse_stream:
        mock_converse_stream.return_value = {'stream': iter(events)}

        llm.call("Say hello")

        call_kwargs = mock_converse_stream.call_args[1]
        assert "requestMetadata" in call_kwargs
        assert call_kwargs["requestMetadata"] == expected
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Agent Kickoff Structured Output Tests
|
||||
# =============================================================================
|
||||
|
||||
Reference in New Issue
Block a user