Compare commits

...

2 Commits

Author SHA1 Message Date
Devin AI
7cadb58318 Fix lint errors: Remove unused variable and fix whitespace
Co-Authored-By: João <joao@crewai.com>
2025-10-16 07:23:08 +00:00
Devin AI
640d241bca Fix #3715: Remove unwanted LLM stream chunk printing to stdout
- Removed print() statement in event_listener.py that was printing all LLM streaming chunks to stdout
- The print() on line 386 was causing all text chunks from LLM responses to be displayed on stdout
- Added test to verify stream chunks are emitted as events but not printed to stdout
- Streaming chunks should only be handled by event handlers, not printed directly

Fixes #3715

Co-Authored-By: João <joao@crewai.com>
2025-10-16 07:18:21 +00:00
4 changed files with 9789 additions and 3445 deletions

View File

@@ -378,12 +378,8 @@ class EventListener(BaseEventListener):
@crewai_event_bus.on(LLMStreamChunkEvent)
def on_llm_stream_chunk(source, event: LLMStreamChunkEvent):
self.text_stream.write(event.chunk)
self.text_stream.seek(self.next_chunk)
# Read from the in-memory stream
content = self.text_stream.read()
print(content, end="", flush=True)
self.text_stream.read()
self.next_chunk = self.text_stream.tell()
# ----------- LLM GUARDRAIL EVENTS -----------

View File

@@ -7,10 +7,8 @@ from pydantic import Field
from crewai.agent import Agent
from crewai.agents.crew_agent_executor import CrewAgentExecutor
from crewai.crew import Crew
from crewai.flow.flow import Flow, listen, start
from crewai.llm import LLM
from crewai.task import Task
from crewai.tools.base_tool import BaseTool
from crewai.events.event_bus import crewai_event_bus
from crewai.events.event_listener import EventListener
from crewai.events.types.agent_events import (
AgentExecutionCompletedEvent,
AgentExecutionErrorEvent,
@@ -24,9 +22,6 @@ from crewai.events.types.crew_events import (
CrewTestResultEvent,
CrewTestStartedEvent,
)
from crewai.events.event_bus import crewai_event_bus
from crewai.events.event_listener import EventListener
from crewai.events.types.tool_usage_events import ToolUsageFinishedEvent
from crewai.events.types.flow_events import (
FlowCreatedEvent,
FlowFinishedEvent,
@@ -47,7 +42,12 @@ from crewai.events.types.task_events import (
)
from crewai.events.types.tool_usage_events import (
ToolUsageErrorEvent,
ToolUsageFinishedEvent,
)
from crewai.flow.flow import Flow, listen, start
from crewai.llm import LLM
from crewai.task import Task
from crewai.tools.base_tool import BaseTool
@pytest.fixture(scope="module")
@@ -991,3 +991,39 @@ def test_llm_emits_event_with_lite_agent():
assert set(all_agent_roles) == {agent.role}
assert set(all_agent_id) == {agent.id}
def test_llm_stream_chunks_do_not_print_to_stdout(capsys):
    """Test that LLM streaming chunks are not printed to stdout.
    This test verifies the fix for issue #3715 where LLM stream chunks
    were being printed directly to stdout via print() statement.
    """
    collected = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(LLMStreamChunkEvent)
        def handle_stream_chunk(source, event):
            collected.append(event.chunk)

        # Simulate a streaming response by emitting chunk events by hand.
        llm = LLM(model="gpt-4o", stream=True)
        test_chunks = ["Hello", " ", "world", "!"]
        for chunk in test_chunks:
            crewai_event_bus.emit(llm, event=LLMStreamChunkEvent(chunk=chunk))

    # Everything written to stdout while the events were flowing.
    captured = capsys.readouterr()

    # The handler must have observed every chunk, in emission order.
    assert len(collected) == len(test_chunks), "Should receive all streaming chunks"
    assert collected == test_chunks, "Chunks should match what was emitted"

    # None of the chunks may appear on stdout — the regression in
    # event_listener.py was a stray print() echoing each chunk.
    for chunk in test_chunks:
        assert chunk not in captured.out, (
            f"Chunk '{chunk}' should not be printed to stdout. "
            "This indicates the bug in event_listener.py is not fixed."
        )

6775
uv.lock generated

File diff suppressed because it is too large Load Diff

6403
uv.lock.broken Normal file

File diff suppressed because it is too large Load Diff