Compare commits

..

4 Commits

Author SHA1 Message Date
Lorenze Jay
d337862be1 Merge branch 'main' into devin/1743067554-fix-issue-2487 2025-03-27 12:50:26 -07:00
Devin AI
2234672cf3 Fix lint issues: import sorting
Co-Authored-By: Joe Moura <joao@crewai.com>
2025-03-27 09:39:11 +00:00
Devin AI
ce38a3d70e Improve error handling in kickoff_async with LLMError exception class
Co-Authored-By: Joe Moura <joao@crewai.com>
2025-03-27 09:37:26 +00:00
Devin AI
7907c8a147 Fix issue #2487: Ensure LLM errors are properly raised in async context
Co-Authored-By: Joe Moura <joao@crewai.com>
2025-03-27 09:28:27 +00:00
5 changed files with 113 additions and 6 deletions

View File

@@ -45,7 +45,7 @@ Documentation = "https://docs.crewai.com"
Repository = "https://github.com/crewAIInc/crewAI"
[project.optional-dependencies]
tools = ["crewai-tools~=0.38.0"]
tools = ["crewai-tools>=0.37.0"]
embeddings = [
"tiktoken~=0.7.0"
]

View File

@@ -1,5 +1,6 @@
import asyncio
import json
import logging
import re
import uuid
import warnings
@@ -56,6 +57,7 @@ from crewai.utilities.events.crew_events import (
)
from crewai.utilities.events.crewai_event_bus import crewai_event_bus
from crewai.utilities.events.event_listener import EventListener
from crewai.utilities.exceptions.llm_error import LLMError
from crewai.utilities.formatter import (
aggregate_raw_outputs_from_task_outputs,
aggregate_raw_outputs_from_tasks,
@@ -683,8 +685,23 @@ class Crew(BaseModel):
return results
async def kickoff_async(self, inputs: Optional[Dict[str, Any]] = None) -> CrewOutput:
    """Asynchronous kickoff method to start the crew execution.

    Runs the synchronous ``kickoff`` in a worker thread via
    ``asyncio.to_thread`` so the event loop is not blocked.

    Args:
        inputs (Optional[Dict[str, Any]]): Input parameters for the crew
            execution. Defaults to an empty dict; a ``None`` sentinel is
            used instead of ``{}`` to avoid the shared mutable-default
            pitfall.

    Returns:
        CrewOutput: The result of the crew execution.

    Raises:
        LLMError: When crew execution fails. The triggering exception is
            preserved both as ``original_error`` and as ``__cause__`` via
            exception chaining.
    """
    # A literal `{}` default is shared across calls and can leak mutations
    # between invocations; normalize the None sentinel here instead.
    if inputs is None:
        inputs = {}
    try:
        return await asyncio.to_thread(self.kickoff, inputs)
    except Exception as e:
        # Lazy %-formatting: the message is only built if the record is emitted.
        logging.error("Error during async crew execution: %s", e)
        # `from e` chains the cause so tracebacks show the original failure,
        # in addition to exposing it on `original_error`.
        raise LLMError(f"Crew execution failed: {str(e)}", original_error=e) from e
async def kickoff_for_each_async(self, inputs: List[Dict]) -> List[CrewOutput]:
crew_copies = [self.copy() for _ in inputs]

View File

@@ -0,0 +1,16 @@
"""Exception class for LLM-related errors."""
from typing import Optional
class LLMError(Exception):
    """Base exception class for LLM operation errors."""

    def __init__(self, message: str, original_error: Optional[Exception] = None):
        """Create an LLM error.

        Args:
            message: Human-readable description of the failure.
            original_error: The underlying exception that triggered this
                error, if there was one.
        """
        # Keep a handle on the triggering exception so callers can inspect
        # the root cause without parsing the message text.
        self.original_error = original_error
        super().__init__(message)

View File

@@ -1501,6 +1501,80 @@ async def test_async_kickoff_for_each_async_empty_input():
# Assertion
assert results == [], "Result should be an empty list when input is empty"
@pytest.mark.asyncio
async def test_kickoff_async_error_handling():
    """kickoff_async must surface a generic kickoff failure as LLMError."""
    from unittest.mock import patch

    from crewai.utilities.exceptions.llm_error import LLMError

    kickoff_inputs = {"topic": "dog"}
    researcher = Agent(
        role="{topic} Researcher",
        goal="Express hot takes on {topic}.",
        backstory="You have a lot of experience with {topic}.",
    )
    analysis_task = Task(
        description="Give me an analysis around {topic}.",
        expected_output="1 bullet point about {topic} that's under 15 words.",
        agent=researcher,
    )
    crew = Crew(agents=[researcher], tasks=[analysis_task])

    # Force the synchronous kickoff to raise, then check the async wrapper
    # converts it to LLMError while keeping the original exception around.
    with patch.object(
        Crew, "kickoff", side_effect=Exception("Simulated LLM error")
    ) as kickoff_mock:
        with pytest.raises(LLMError) as excinfo:
            await crew.kickoff_async(kickoff_inputs)
        assert "Crew execution failed: Simulated LLM error" in str(excinfo.value)
        assert excinfo.value.original_error is not None
        assert "Simulated LLM error" in str(excinfo.value.original_error)
        kickoff_mock.assert_called_once_with(kickoff_inputs)
@pytest.mark.asyncio
async def test_kickoff_async_context_length_error_handling():
    """kickoff_async must wrap a context-length failure in LLMError."""
    from unittest.mock import patch

    from crewai.utilities.exceptions.context_window_exceeding_exception import (
        LLMContextLengthExceededException,
    )
    from crewai.utilities.exceptions.llm_error import LLMError

    kickoff_inputs = {"topic": "dog"}
    researcher = Agent(
        role="{topic} Researcher",
        goal="Express hot takes on {topic}.",
        backstory="You have a lot of experience with {topic}.",
    )
    analysis_task = Task(
        description="Give me an analysis around {topic}.",
        expected_output="1 bullet point about {topic} that's under 15 words.",
        agent=researcher,
    )
    crew = Crew(agents=[researcher], tasks=[analysis_task])

    # Simulate the context window being exceeded inside the synchronous
    # kickoff and verify the async path reports it via LLMError.
    context_error = LLMContextLengthExceededException("maximum context length exceeded")
    with patch.object(Crew, "kickoff", side_effect=context_error) as kickoff_mock:
        with pytest.raises(LLMError) as excinfo:
            await crew.kickoff_async(kickoff_inputs)
        assert "Crew execution failed" in str(excinfo.value)
        assert "maximum context length exceeded" in str(excinfo.value.original_error)
        kickoff_mock.assert_called_once_with(kickoff_inputs)
def test_set_agents_step_callback():

6
uv.lock generated
View File

@@ -1,5 +1,4 @@
version = 1
revision = 1
requires-python = ">=3.10, <3.13"
resolution-markers = [
"python_full_version < '3.11' and sys_platform == 'darwin'",
@@ -695,7 +694,7 @@ requires-dist = [
{ name = "blinker", specifier = ">=1.9.0" },
{ name = "chromadb", specifier = ">=0.5.23" },
{ name = "click", specifier = ">=8.1.7" },
{ name = "crewai-tools", marker = "extra == 'tools'", specifier = "~=0.38.0" },
{ name = "crewai-tools", marker = "extra == 'tools'", specifier = ">=0.37.0" },
{ name = "docling", marker = "extra == 'docling'", specifier = ">=2.12.0" },
{ name = "fastembed", marker = "extra == 'fastembed'", specifier = ">=0.4.1" },
{ name = "instructor", specifier = ">=1.3.3" },
@@ -722,7 +721,6 @@ requires-dist = [
{ name = "tomli-w", specifier = ">=1.1.0" },
{ name = "uv", specifier = ">=0.4.25" },
]
provides-extras = ["tools", "embeddings", "agentops", "fastembed", "pdfplumber", "pandas", "openpyxl", "mem0", "docling", "aisuite"]
[package.metadata.requires-dev]
dev = [
@@ -2975,6 +2973,7 @@ name = "nvidia-nccl-cu12"
version = "2.20.5"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/bb/d09dda47c881f9ff504afd6f9ca4f502ded6d8fc2f572cacc5e39da91c28/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01", size = 176238458 },
{ url = "https://files.pythonhosted.org/packages/4b/2a/0a131f572aa09f741c30ccd45a8e56316e8be8dfc7bc19bf0ab7cfef7b19/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56", size = 176249402 },
]
@@ -2984,6 +2983,7 @@ version = "12.6.85"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9d/d7/c5383e47c7e9bf1c99d5bd2a8c935af2b6d705ad831a7ec5c97db4d82f4f/nvidia_nvjitlink_cu12-12.6.85-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:eedc36df9e88b682efe4309aa16b5b4e78c2407eac59e8c10a6a47535164369a", size = 19744971 },
{ url = "https://files.pythonhosted.org/packages/31/db/dc71113d441f208cdfe7ae10d4983884e13f464a6252450693365e166dcf/nvidia_nvjitlink_cu12-12.6.85-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cf4eaa7d4b6b543ffd69d6abfb11efdeb2db48270d94dfd3a452c24150829e41", size = 19270338 },
]
[[package]]