From bb72804e746480430b0bf01d9c0240b52ae85bc1 Mon Sep 17 00:00:00 2001
From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Wed, 4 Jun 2025 07:15:10 +0000
Subject: [PATCH] Fix CI failures: remove unused imports and add streaming
 parameters to agent adapters and test mocks
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-Authored-By: João
---
 .../agents/agent_adapters/langgraph/langgraph_adapter.py  | 5 +++--
 .../agents/agent_adapters/openai_agents/openai_adapter.py | 2 ++
 src/crewai/agents/agent_builder/base_agent.py             | 2 +-
 tests/test_streaming_comprehensive.py                     | 2 +-
 tests/test_task_guardrails.py                             | 2 +-
 5 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/src/crewai/agents/agent_adapters/langgraph/langgraph_adapter.py b/src/crewai/agents/agent_adapters/langgraph/langgraph_adapter.py
index ea2e373d2..d87fb96fd 100644
--- a/src/crewai/agents/agent_adapters/langgraph/langgraph_adapter.py
+++ b/src/crewai/agents/agent_adapters/langgraph/langgraph_adapter.py
@@ -1,4 +1,4 @@
-from typing import Any, AsyncIterable, Dict, List, Optional
+from typing import Any, Dict, List, Optional
 
 from pydantic import Field, PrivateAttr
 
@@ -22,7 +22,6 @@ from crewai.utilities.events.agent_events import (
 )
 
 try:
-    from langchain_core.messages import ToolMessage
     from langgraph.checkpoint.memory import MemorySaver
     from langgraph.prebuilt import create_react_agent
 
@@ -126,6 +125,8 @@ class LangGraphAgentAdapter(BaseAgentAdapter):
         task: Any,
         context: Optional[str] = None,
         tools: Optional[List[BaseTool]] = None,
+        stream: bool = False,
+        stream_callback: Optional[Any] = None,
     ) -> str:
         """Execute a task using the LangGraph workflow."""
         self.create_agent_executor(tools)
diff --git a/src/crewai/agents/agent_adapters/openai_agents/openai_adapter.py b/src/crewai/agents/agent_adapters/openai_agents/openai_adapter.py
index ac368c1a3..be6adf3b4 100644
--- a/src/crewai/agents/agent_adapters/openai_agents/openai_adapter.py
+++ b/src/crewai/agents/agent_adapters/openai_agents/openai_adapter.py
@@ -86,6 +86,8 @@ class OpenAIAgentAdapter(BaseAgentAdapter):
         task: Any,
         context: Optional[str] = None,
         tools: Optional[List[BaseTool]] = None,
+        stream: bool = False,
+        stream_callback: Optional[Any] = None,
     ) -> str:
         """Execute a task using the OpenAI Assistant"""
         self._converter_adapter.configure_structured_output(task)
diff --git a/src/crewai/agents/agent_builder/base_agent.py b/src/crewai/agents/agent_builder/base_agent.py
index f4dd0893c..a9357496f 100644
--- a/src/crewai/agents/agent_builder/base_agent.py
+++ b/src/crewai/agents/agent_builder/base_agent.py
@@ -25,7 +25,7 @@ from crewai.security.security_config import SecurityConfig
 from crewai.tools.base_tool import BaseTool, Tool
 from crewai.utilities import I18N, Logger, RPMController
 from crewai.utilities.config import process_config
-from crewai.utilities.converter import Converter
+
 from crewai.utilities.string_utils import interpolate_only
 
 T = TypeVar("T", bound="BaseAgent")
diff --git a/tests/test_streaming_comprehensive.py b/tests/test_streaming_comprehensive.py
index 711103ba0..971ddeb75 100644
--- a/tests/test_streaming_comprehensive.py
+++ b/tests/test_streaming_comprehensive.py
@@ -211,7 +211,6 @@ def test_llm_stream_chunk_to_crew_stream_chunk():
 
     llm_event = LLMStreamChunkEvent(chunk="test chunk")
 
-    from crewai.utilities.events.crewai_event_bus import crewai_event_bus
     crewai_event_bus.emit(mock_source, llm_event)
 
     assert len(received_crew_chunks) == 1
@@ -230,6 +229,7 @@ def test_multiple_agents_streaming():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Agent response"
+        mock_llm.supports_stop_words = True
         mock_llm_class.return_value = mock_llm
 
         agent1 = Agent(
diff --git a/tests/test_task_guardrails.py b/tests/test_task_guardrails.py
index 901b962b9..5d462be69 100644
--- a/tests/test_task_guardrails.py
+++ b/tests/test_task_guardrails.py
@@ -119,7 +119,7 @@ def test_guardrail_error_in_context():
     # Mock execute_task to succeed on second attempt
     first_call = True
 
-    def execute_task(task, context, tools):
+    def execute_task(task, context, tools, stream=False, stream_callback=None):
         nonlocal first_call
         if first_call:
             first_call = False
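
Note on the new keyword arguments: stream and stream_callback default to False and None, so existing call sites (including the test mocks patched above) keep working unchanged. Below is a minimal sketch of the calling convention the widened execute_task signature allows; the SimpleAdapter class and its chunking behaviour are hypothetical stand-ins for illustration, not code from this patch or from crewai.

from typing import Any, List, Optional

class SimpleAdapter:
    """Hypothetical adapter exposing the same widened execute_task signature."""

    def execute_task(
        self,
        task: Any,
        context: Optional[str] = None,
        tools: Optional[List[Any]] = None,
        stream: bool = False,
        stream_callback: Optional[Any] = None,
    ) -> str:
        result = f"done: {task}"
        if stream and stream_callback is not None:
            # When streaming is requested, hand the result to the callback in pieces.
            for chunk in result.split():
                stream_callback(chunk)
        return result

chunks: List[str] = []
adapter = SimpleAdapter()
output = adapter.execute_task("demo task", stream=True, stream_callback=chunks.append)
assert output == "done: demo task"
assert chunks == ["done:", "demo", "task"]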