Mirror of https://github.com/crewAIInc/crewAI.git (synced 2026-01-25 08:08:14 +00:00)

Compare commits: devin/1768...devin/1743 (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | d337862be1 | |
| | 2234672cf3 | |
| | ce38a3d70e | |
| | 7907c8a147 | |
src/crewai/crew.py

```diff
@@ -1,5 +1,6 @@
 import asyncio
 import json
+import logging
 import re
 import uuid
 import warnings
@@ -56,6 +57,7 @@ from crewai.utilities.events.crew_events import (
 )
 from crewai.utilities.events.crewai_event_bus import crewai_event_bus
 from crewai.utilities.events.event_listener import EventListener
+from crewai.utilities.exceptions.llm_error import LLMError
 from crewai.utilities.formatter import (
     aggregate_raw_outputs_from_task_outputs,
     aggregate_raw_outputs_from_tasks,
@@ -683,8 +685,23 @@ class Crew(BaseModel):
         return results
 
     async def kickoff_async(self, inputs: Optional[Dict[str, Any]] = {}) -> CrewOutput:
-        """Asynchronous kickoff method to start the crew execution."""
-        return await asyncio.to_thread(self.kickoff, inputs)
+        """Asynchronous kickoff method to start the crew execution.
+
+        Args:
+            inputs (Optional[Dict[str, Any]]): Input parameters for the crew execution
+
+        Returns:
+            CrewOutput: The result of the crew execution
+
+        Raises:
+            LLMError: When LLM-specific errors occur
+            Exception: For other unexpected errors
+        """
+        try:
+            return await asyncio.to_thread(self.kickoff, inputs)
+        except Exception as e:
+            logging.error(f"Error during async crew execution: {str(e)}")
+            raise LLMError(f"Crew execution failed: {str(e)}", original_error=e)
 
     async def kickoff_for_each_async(self, inputs: List[Dict]) -> List[CrewOutput]:
         crew_copies = [self.copy() for _ in inputs]
```
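The change to `kickoff_async` alters the caller-side contract: any exception escaping `kickoff` now surfaces as `LLMError`, with the root cause preserved on `original_error`. A minimal caller-side sketch of the new behavior (the agent/task wiring mirrors the tests below; any configured `Crew` behaves the same way):

```python
import asyncio

from crewai import Agent, Crew, Task
from crewai.utilities.exceptions.llm_error import LLMError


async def main() -> None:
    agent = Agent(
        role="Researcher",
        goal="Express hot takes on a topic.",
        backstory="You have a lot of experience with research.",
    )
    task = Task(
        description="Give me an analysis around {topic}.",
        expected_output="1 bullet point about {topic} that's under 15 words.",
        agent=agent,
    )
    crew = Crew(agents=[agent], tasks=[task])

    try:
        result = await crew.kickoff_async(inputs={"topic": "dogs"})
        print(result.raw)
    except LLMError as e:
        # After this patch, any failure inside kickoff is wrapped in LLMError;
        # the original exception stays available on .original_error.
        print(f"Crew execution failed: {e} (root cause: {e.original_error!r})")


asyncio.run(main())
```

Note that the wrapper catches bare `Exception`, so even non-LLM failures (for example, an error during input interpolation) are re-raised as `LLMError`; callers that need the original type should inspect `original_error`.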
src/crewai/utilities/exceptions/llm_error.py (new file, 16 lines)
```diff
@@ -0,0 +1,16 @@
+"""Exception class for LLM-related errors."""
+from typing import Optional
+
+
+class LLMError(Exception):
+    """Base exception class for LLM operation errors."""
+
+    def __init__(self, message: str, original_error: Optional[Exception] = None):
+        """Initialize the LLM error.
+
+        Args:
+            message: The error message to display
+            original_error: The original exception that caused this error, if any
+        """
+        super().__init__(message)
+        self.original_error = original_error
```
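Storing the cause on an explicit `original_error` attribute is a design choice: Python's `raise ... from e` records the cause on `__cause__`, but a named attribute makes it queryable without touching traceback machinery. A small self-contained sketch of the wrapping pattern, where `call_model` is a hypothetical stand-in for a failing LLM call:

```python
from crewai.utilities.exceptions.llm_error import LLMError


def call_model() -> str:
    # Hypothetical stand-in for a real LLM call that fails.
    raise TimeoutError("provider timed out")


try:
    try:
        call_model()
    except Exception as e:
        # The same wrapping pattern kickoff_async now applies.
        raise LLMError(f"Crew execution failed: {e}", original_error=e)
except LLMError as err:
    assert isinstance(err.original_error, TimeoutError)
    print(err, "| root cause:", err.original_error)
```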
tests/crew_test.py

```diff
@@ -1501,6 +1501,80 @@ async def test_async_kickoff_for_each_async_empty_input():
 
     # Assertion
     assert results == [], "Result should be an empty list when input is empty"
+@pytest.mark.asyncio
+async def test_kickoff_async_error_handling():
+    """Tests error handling in kickoff_async when kickoff raises an error."""
+    from unittest.mock import patch
+
+    from crewai.utilities.exceptions.llm_error import LLMError
+
+    inputs = {"topic": "dog"}
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    # Create the crew
+    crew = Crew(
+        agents=[agent],
+        tasks=[task],
+    )
+
+    with patch.object(Crew, "kickoff", side_effect=Exception("Simulated LLM error")) as mock_kickoff:
+        with pytest.raises(LLMError) as excinfo:
+            await crew.kickoff_async(inputs)
+
+        assert "Crew execution failed: Simulated LLM error" in str(excinfo.value)
+        assert excinfo.value.original_error is not None
+        assert "Simulated LLM error" in str(excinfo.value.original_error)
+        mock_kickoff.assert_called_once_with(inputs)
+@pytest.mark.asyncio
+async def test_kickoff_async_context_length_error_handling():
+    """Tests error handling in kickoff_async when kickoff raises a context length error."""
+    from unittest.mock import patch
+
+    from crewai.utilities.exceptions.context_window_exceeding_exception import (
+        LLMContextLengthExceededException,
+    )
+    from crewai.utilities.exceptions.llm_error import LLMError
+
+    inputs = {"topic": "dog"}
+
+    agent = Agent(
+        role="{topic} Researcher",
+        goal="Express hot takes on {topic}.",
+        backstory="You have a lot of experience with {topic}.",
+    )
+
+    task = Task(
+        description="Give me an analysis around {topic}.",
+        expected_output="1 bullet point about {topic} that's under 15 words.",
+        agent=agent,
+    )
+
+    # Create the crew
+    crew = Crew(
+        agents=[agent],
+        tasks=[task],
+    )
+
+    with patch.object(Crew, "kickoff", side_effect=LLMContextLengthExceededException("maximum context length exceeded")) as mock_kickoff:
+        with pytest.raises(LLMError) as excinfo:
+            await crew.kickoff_async(inputs)
+
+        assert "Crew execution failed" in str(excinfo.value)
+        assert "maximum context length exceeded" in str(excinfo.value.original_error)
+        mock_kickoff.assert_called_once_with(inputs)
+
+
 
 
 def test_set_agents_step_callback():
```
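Both tests patch `Crew.kickoff` with a `side_effect` and assert that the wrapper re-raises as `LLMError` while preserving the original message. They also rely on `asyncio.to_thread` re-raising worker-thread exceptions in the awaiting coroutine; a minimal standalone check of that mechanism (assumes `pytest-asyncio`, which the `@pytest.mark.asyncio` markers above already require):

```python
import asyncio

import pytest


def _blow_up() -> None:
    raise RuntimeError("boom")


@pytest.mark.asyncio
async def test_to_thread_propagates_worker_exceptions() -> None:
    # asyncio.to_thread re-raises exceptions from the worker thread in the
    # awaiting coroutine, which is what lets kickoff_async's except clause
    # see errors raised synchronously inside kickoff.
    with pytest.raises(RuntimeError, match="boom"):
        await asyncio.to_thread(_blow_up)
```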