diff --git a/src/crewai/agents/agent_builder/base_agent.py b/src/crewai/agents/agent_builder/base_agent.py
index ba2596f63..f4dd0893c 100644
--- a/src/crewai/agents/agent_builder/base_agent.py
+++ b/src/crewai/agents/agent_builder/base_agent.py
@@ -254,6 +254,8 @@ class BaseAgent(ABC, BaseModel):
         task: Any,
         context: Optional[str] = None,
         tools: Optional[List[BaseTool]] = None,
+        stream: bool = False,
+        stream_callback: Optional[Callable[[str, str, str, str], None]] = None,
     ) -> str:
         pass

diff --git a/tests/test_streaming.py b/tests/test_streaming.py
index 7f6ca8e93..d0ee7543e 100644
--- a/tests/test_streaming.py
+++ b/tests/test_streaming.py
@@ -1,9 +1,7 @@
 import pytest
 from unittest.mock import Mock, patch
 from crewai import Agent, Task, Crew
-from crewai.llm import LLM
 from crewai.utilities.events.crew_events import CrewStreamChunkEvent, TaskStreamChunkEvent, AgentStreamChunkEvent
-from crewai.utilities.events.crewai_event_bus import crewai_event_bus


 @pytest.fixture
@@ -55,6 +53,7 @@ def test_crew_streaming_enabled():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Test response"
+        mock_llm.supports_stop_words = True
         mock_llm_class.return_value = mock_llm

         agent = Agent(
@@ -93,6 +92,7 @@ def test_crew_streaming_disabled_by_default():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Test response"
+        mock_llm.supports_stop_words = True
         mock_llm_class.return_value = mock_llm

         agent = Agent(
@@ -213,7 +213,7 @@ def test_streaming_integration_with_llm():
         mock_executor._stream_callback = None
         mock_executor._task_description = None

-        result = crew.kickoff(stream=True, stream_callback=stream_callback)
+        crew.kickoff(stream=True, stream_callback=stream_callback)

         assert hasattr(agent.agent_executor, '_stream_callback')
         assert hasattr(agent.agent_executor, '_task_description')
@@ -224,6 +224,7 @@ def test_streaming_parameters_propagation():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Test response"
+        mock_llm.supports_stop_words = True
         mock_llm_class.return_value = mock_llm

         agent = Agent(
diff --git a/tests/test_streaming_comprehensive.py b/tests/test_streaming_comprehensive.py
index 77ed70dcb..711103ba0 100644
--- a/tests/test_streaming_comprehensive.py
+++ b/tests/test_streaming_comprehensive.py
@@ -1,5 +1,4 @@
-import pytest
-from unittest.mock import Mock, patch, MagicMock
+from unittest.mock import Mock, patch
 from crewai import Agent, Task, Crew
 from crewai.utilities.events.crew_events import CrewStreamChunkEvent
 from crewai.utilities.events.llm_events import LLMStreamChunkEvent
@@ -21,6 +20,7 @@ def test_streaming_callback_called():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Test response"
+        mock_llm.supports_stop_words = True
         mock_llm_class.return_value = mock_llm

         agent = Agent(
@@ -78,6 +78,7 @@ def test_streaming_disabled_by_default():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Test response"
+        mock_llm.supports_stop_words = True
         mock_llm_class.return_value = mock_llm

         agent = Agent(
@@ -113,6 +114,7 @@ def test_streaming_parameters_propagation():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Test response"
+        mock_llm.supports_stop_words = True
         mock_llm_class.return_value = mock_llm

         agent = Agent(
@@ -155,6 +157,7 @@ def test_async_task_streaming():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Test response"
+        mock_llm.supports_stop_words = True
         mock_llm_class.return_value = mock_llm

         agent = Agent(
@@ -208,8 +211,8 @@ def test_llm_stream_chunk_to_crew_stream_chunk():
         llm_event = LLMStreamChunkEvent(chunk="test chunk")

-        from crewai.utilities.events.event_listener import event_listener
-        event_listener.on_llm_stream_chunk(mock_source, llm_event)
+        from crewai.utilities.events.crewai_event_bus import crewai_event_bus
+        crewai_event_bus.emit(mock_source, llm_event)

         assert len(received_crew_chunks) == 1
         crew_event = received_crew_chunks[0]
@@ -263,7 +266,7 @@ def test_multiple_agents_streaming():
             verbose=False
         )

-        result = crew.kickoff(stream=True, stream_callback=stream_callback)
+        crew.kickoff(stream=True, stream_callback=stream_callback)

         assert hasattr(crew, '_stream_enabled')
         assert crew._stream_enabled is True
diff --git a/tests/test_streaming_integration.py b/tests/test_streaming_integration.py
index 8ff939e78..f5c65ae47 100644
--- a/tests/test_streaming_integration.py
+++ b/tests/test_streaming_integration.py
@@ -1,4 +1,3 @@
-import pytest
 from unittest.mock import Mock, patch
 from crewai import Agent, Task, Crew
 from crewai.utilities.events.crew_events import CrewStreamChunkEvent
@@ -21,6 +20,7 @@ def test_streaming_callback_integration():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Test response"
+        mock_llm.supports_stop_words = True
         mock_llm_class.return_value = mock_llm

         agent = Agent(
@@ -132,7 +132,7 @@ def test_streaming_with_multiple_agents():
             verbose=False
         )

-        result = crew.kickoff(stream=True, stream_callback=stream_callback)
+        crew.kickoff(stream=True, stream_callback=stream_callback)

         assert hasattr(crew, '_stream_enabled')
         assert crew._stream_enabled is True
@@ -145,6 +145,7 @@ def test_streaming_disabled_by_default():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Test response"
+        mock_llm.supports_stop_words = True
         mock_llm_class.return_value = mock_llm

         agent = Agent(
@@ -180,6 +181,7 @@ def test_streaming_parameters_propagation():
     with patch('crewai.llm.LLM') as mock_llm_class:
         mock_llm = Mock()
         mock_llm.call.return_value = "Test response"
+        mock_llm.supports_stop_words = True
        mock_llm_class.return_value = mock_llm

         agent = Agent(
diff --git a/tests/utilities/test_events.py b/tests/utilities/test_events.py
index f12fd5d11..7c1f6a066 100644
--- a/tests/utilities/test_events.py
+++ b/tests/utilities/test_events.py
@@ -820,7 +820,7 @@ def test_crew_streaming_events():
     )

     # Execute with streaming enabled
-    result = crew.kickoff(stream=True)
+    crew.kickoff(stream=True)

     # Verify that we received crew stream chunks
     assert len(received_crew_chunks) > 0
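
A minimal usage sketch of the streaming kickoff these tests exercise, assuming hypothetical names for the four callback parameters (the diff only fixes their arity as Callable[[str, str, str, str], None]) and placeholder agent/task fields:

    from crewai import Agent, Task, Crew

    # Hypothetical parameter names; the diff only specifies four positional strings.
    def stream_callback(chunk: str, agent_role: str, task_description: str, source: str) -> None:
        # Echo each streamed chunk as it arrives.
        print(chunk, end="", flush=True)

    agent = Agent(role="Researcher", goal="Summarize a topic", backstory="Placeholder backstory")
    task = Task(description="Summarize streaming support", expected_output="A short summary", agent=agent)
    crew = Crew(agents=[agent], tasks=[task])

    # Streaming is disabled by default; enable it per kickoff, as the tests do.
    crew.kickoff(stream=True, stream_callback=stream_callback)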