crewAI/lib/crewai/tests/llms/bedrock/test_bedrock_async.py
Greyson LaLonde 20704742e2
feat: async llm support
feat: introduce async contract to BaseLLM

feat: add async call support for:
- Azure provider
- Anthropic provider
- OpenAI provider
- Gemini provider
- Bedrock provider
- LiteLLM provider

chore: expand scrubbed header fields (conftest, anthropic, bedrock)
chore: update docs to cover async functionality
chore: update and harden tests to support acall; re-add uri for cassette compatibility
chore: generate missing cassette
fix: ensure acall is non-abstract and set supports_tools = true for supported Anthropic models
chore: improve Bedrock async docstring and general test robustness
2025-12-01 18:56:56 -05:00
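
The commit introduces an async contract (acall) on BaseLLM and wires it through each provider. A minimal usage sketch of that contract, assuming only the LLM class and acall coroutine exercised by the tests below (the model ID is the one these tests use, and Bedrock credentials must be configured at runtime):

import asyncio

from crewai.llm import LLM


async def main() -> None:
    # Construct the LLM exactly as the tests below do; the prompt is
    # awaited via acall instead of the synchronous call interface.
    llm = LLM(model="bedrock/us.anthropic.claude-3-5-sonnet-20241022-v2:0")
    answer = await llm.acall("Say hello")
    print(answer)


asyncio.run(main())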


"""Tests for Bedrock async completion functionality.
Note: These tests are skipped in CI because VCR.py does not support
aiobotocore's HTTP session. The cassettes were recorded locally but
cannot be played back properly in CI.
"""
import pytest
import tiktoken
from crewai.llm import LLM
SKIP_REASON = "VCR does not support aiobotocore async HTTP client"
@pytest.mark.vcr()
@pytest.mark.asyncio
@pytest.mark.skip(reason=SKIP_REASON)
async def test_bedrock_async_basic_call():
"""Test basic async call with Bedrock."""
llm = LLM(model="bedrock/us.anthropic.claude-3-5-sonnet-20241022-v2:0")
result = await llm.acall("Say hello")
assert result is not None
assert isinstance(result, str)
assert len(result) > 0
@pytest.mark.vcr()
@pytest.mark.asyncio
@pytest.mark.skip(reason=SKIP_REASON)
async def test_bedrock_async_with_temperature():
"""Test async call with temperature parameter."""
llm = LLM(model="bedrock/us.anthropic.claude-3-5-sonnet-20241022-v2:0", temperature=0.1)
result = await llm.acall("Say the word 'test' once")
assert result is not None
assert isinstance(result, str)
@pytest.mark.vcr()
@pytest.mark.asyncio
@pytest.mark.skip(reason=SKIP_REASON)
async def test_bedrock_async_with_max_tokens():
"""Test async call with max_tokens parameter."""
llm = LLM(model="bedrock/us.anthropic.claude-3-5-sonnet-20241022-v2:0", max_tokens=10)
result = await llm.acall("Write a very long story about a dragon.")
assert result is not None
assert isinstance(result, str)
encoder = tiktoken.get_encoding("cl100k_base")
token_count = len(encoder.encode(result))
assert token_count <= 10
@pytest.mark.vcr()
@pytest.mark.asyncio
@pytest.mark.skip(reason=SKIP_REASON)
async def test_bedrock_async_with_system_message():
"""Test async call with system message."""
llm = LLM(model="bedrock/us.anthropic.claude-3-5-sonnet-20241022-v2:0")
messages = [
{"role": "system", "content": "You are a helpful assistant."},
{"role": "user", "content": "What is 2+2?"}
]
result = await llm.acall(messages)
assert result is not None
assert isinstance(result, str)
@pytest.mark.vcr()
@pytest.mark.asyncio
@pytest.mark.skip(reason=SKIP_REASON)
async def test_bedrock_async_conversation():
"""Test async call with conversation history."""
llm = LLM(model="bedrock/us.anthropic.claude-3-5-sonnet-20241022-v2:0")
messages = [
{"role": "user", "content": "My name is Alice."},
{"role": "assistant", "content": "Hello Alice! Nice to meet you."},
{"role": "user", "content": "What is my name?"}
]
result = await llm.acall(messages)
assert result is not None
assert isinstance(result, str)
@pytest.mark.vcr()
@pytest.mark.asyncio
@pytest.mark.skip(reason=SKIP_REASON)
async def test_bedrock_async_multiple_calls():
"""Test making multiple async calls in sequence."""
llm = LLM(model="bedrock/us.anthropic.claude-3-5-sonnet-20241022-v2:0")
result1 = await llm.acall("What is 1+1?")
result2 = await llm.acall("What is 2+2?")
assert result1 is not None
assert result2 is not None
assert isinstance(result1, str)
assert isinstance(result2, str)
@pytest.mark.vcr()
@pytest.mark.asyncio
@pytest.mark.skip(reason=SKIP_REASON)
async def test_bedrock_async_with_parameters():
"""Test async call with multiple parameters."""
llm = LLM(
model="bedrock/us.anthropic.claude-3-5-sonnet-20241022-v2:0",
temperature=0.7,
max_tokens=100,
top_p=0.9
)
result = await llm.acall("Tell me a short fact")
assert result is not None
assert isinstance(result, str)
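
test_bedrock_async_multiple_calls awaits its two prompts one after the other; the same acall coroutine also allows the calls to overlap. A minimal sketch of the two calls driven concurrently with asyncio.gather (standard library), assuming only the API used in the file above:

import asyncio

from crewai.llm import LLM


async def ask_both() -> list[str]:
    llm = LLM(model="bedrock/us.anthropic.claude-3-5-sonnet-20241022-v2:0")
    # Schedule both prompts at once; gather preserves the result order.
    return await asyncio.gather(
        llm.acall("What is 1+1?"),
        llm.acall("What is 2+2?"),
    )


results = asyncio.run(ask_both())

Whether a single LLM instance is safe to share across overlapping calls depends on the provider client; the tests above only exercise sequential awaits.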