Implement comprehensive streaming support for CrewAI

- Add streaming events: CrewStreamChunkEvent, TaskStreamChunkEvent, AgentStreamChunkEvent
- Extend Crew.kickoff() with stream parameter and callback support
- Propagate streaming through task and agent execution chains
- Integrate with existing LLM streaming infrastructure
- Add comprehensive tests and examples
- Maintain backward compatibility

Fixes #2950

Co-Authored-By: João <joao@crewai.com>
This commit was authored by Devin AI on 2025-06-04 07:00:54 +00:00.
parent 2bd6b72aae
commit b3b2b1e25f
14 changed files with 1225 additions and 6 deletions

View File

@@ -80,6 +80,8 @@ class CrewAgentExecutor(CrewAgentExecutorMixin):
self.messages: List[Dict[str, str]] = []
self.iterations = 0
self.log_error_after = 3
self._stream_callback = None
self._task_description = None
self.tool_name_to_tool_map: Dict[str, Union[CrewStructuredTool, BaseTool]] = {
tool.name: tool for tool in self.tools
}
@@ -157,6 +159,23 @@ class CrewAgentExecutor(CrewAgentExecutorMixin):
printer=self._printer,
)
formatted_answer = process_llm_response(answer, self.use_stop_words)
if hasattr(self, '_stream_callback') and self._stream_callback:
if hasattr(formatted_answer, 'text'):
step_type = "agent_thinking" if hasattr(formatted_answer, 'tool') else "final_answer"
self._stream_callback(
formatted_answer.text,
self.agent.role if self.agent else "unknown",
getattr(self, '_task_description', "unknown"),
step_type
)
elif isinstance(formatted_answer, str):
self._stream_callback(
formatted_answer,
self.agent.role if self.agent else "unknown",
getattr(self, '_task_description', "unknown"),
"final_answer"
)
if isinstance(formatted_answer, AgentAction):
# Extract agent fingerprint if available