Non-streaming working again

Brandon Hancock
2025-03-05 10:24:30 -05:00
parent 3df5278ee9
commit 6ba66ae1fb
4 changed files with 3893 additions and 96 deletions

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -948,7 +948,7 @@ def test_api_calls_throttling(capsys):
        moveon.assert_called()


# @pytest.mark.vcr(filter_headers=["authorization"])
@pytest.mark.vcr(filter_headers=["authorization"])
def test_crew_kickoff_usage_metrics():
    inputs = [
        {"topic": "dog"},
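Note: the one-line change in the hunk above re-enables the @pytest.mark.vcr(filter_headers=["authorization"]) marker on test_crew_kickoff_usage_metrics. That marker (provided by pytest-vcr or pytest-recording) replays a recorded HTTP cassette instead of calling the live API, and filter_headers keeps the Authorization header, and therefore the API key, out of the cassette. If that filter should apply suite-wide, a minimal conftest.py sketch is below; it assumes one of those two plugins is installed (both expose a vcr_config fixture; the module scope chosen here is only an illustration, not taken from this repository).

    # Hypothetical conftest.py default for VCR-backed tests (sketch, not part of this commit).
    # Every cassette recorded via @pytest.mark.vcr would then strip the Authorization header.
    import pytest


    @pytest.fixture(scope="module")
    def vcr_config():
        return {"filter_headers": ["authorization"]}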
@@ -983,6 +983,41 @@ def test_crew_kickoff_usage_metrics():
        assert result.token_usage.cached_prompt_tokens == 0


@pytest.mark.vcr(filter_headers=["authorization"])
def test_crew_kickoff_streaming_usage_metrics():
    inputs = [
        {"topic": "dog"},
        {"topic": "cat"},
        {"topic": "apple"},
    ]

    agent = Agent(
        role="{topic} Researcher",
        goal="Express hot takes on {topic}.",
        backstory="You have a lot of experience with {topic}.",
        llm=LLM(model="gpt-4o", stream=True),
    )
    task = Task(
        description="Give me an analysis around {topic}.",
        expected_output="1 bullet point about {topic} that's under 15 words.",
        agent=agent,
    )

    # Use real LLM calls instead of mocking
    crew = Crew(agents=[agent], tasks=[task])
    results = crew.kickoff_for_each(inputs=inputs)

    assert len(results) == len(inputs)
    for result in results:
        # Assert that all required keys are in usage_metrics and their values are greater than 0
        assert result.token_usage.total_tokens > 0
        assert result.token_usage.prompt_tokens > 0
        assert result.token_usage.completion_tokens > 0
        assert result.token_usage.successful_requests > 0
        assert result.token_usage.cached_prompt_tokens == 0


def test_agents_rpm_is_never_set_if_crew_max_RPM_is_not_set():
    agent = Agent(
        role="test role",
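The new streaming test added in this commit drives token accounting through kickoff_for_each across three inputs. For reference, a single-input sketch of the same flow is below; it is not part of the commit and assumes only the public crewai API already used in the diff (Agent, Task, Crew and LLM importable from crewai), plus either a live gpt-4o call or a recorded cassette.

    # Minimal sketch of the streaming usage-metrics path (assumes crewai's public API as used above).
    from crewai import LLM, Agent, Crew, Task

    agent = Agent(
        role="{topic} Researcher",
        goal="Express hot takes on {topic}.",
        backstory="You have a lot of experience with {topic}.",
        llm=LLM(model="gpt-4o", stream=True),  # stream=True exercises the streaming code path
    )
    task = Task(
        description="Give me an analysis around {topic}.",
        expected_output="1 bullet point about {topic} that's under 15 words.",
        agent=agent,
    )
    crew = Crew(agents=[agent], tasks=[task])

    # kickoff_for_each interpolates {topic} into the agent and task templates for each input.
    result = crew.kickoff_for_each(inputs=[{"topic": "dog"}])[0]

    # Usage metrics should still be populated when the response was streamed.
    assert result.token_usage.total_tokens > 0
    assert result.token_usage.prompt_tokens > 0
    assert result.token_usage.completion_tokens > 0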