mirror of
https://github.com/crewAIInc/crewAI.git
synced 2026-01-08 15:48:29 +00:00
* Initial Stream working * add tests * adjust tests * Update test for multiplication * Update test for multiplication part 2 * max iter on new test * streaming tool call test update * Force pass * another one * give up on agent * WIP * Non-streaming working again * stream working too * fixing type check * fix failing test * fix failing test * fix failing test * Fix testing for CI * Fix failing test * Fix failing test * Skip failing CI/CD tests * too many logs * working * Trying to fix tests * drop openai failing tests * improve logic * Implement LLM stream chunk event handling with in-memory text stream * More event types * Update docs --------- Co-authored-by: Lorenze Jay <lorenzejaytech@gmail.com>
44 lines
1021 B
Python
44 lines
1021 B
Python
from enum import Enum
|
|
from typing import Any, Dict, List, Optional, Union
|
|
|
|
from crewai.utilities.events.base_events import CrewEvent
|
|
|
|
|
|
class LLMCallType(Enum):
    """Discriminates what kind of work an LLM invocation performed."""

    # The model responded by requesting a tool/function invocation.
    TOOL_CALL = "tool_call"
    # The model responded with a plain completion (no tool use).
    LLM_CALL = "llm_call"
|
class LLMCallStartedEvent(CrewEvent):
    """Emitted immediately before an LLM call is dispatched.

    Carries the full request payload so listeners can observe what is
    about to be sent to the model.
    """

    # Event-type discriminator used when routing/handling events.
    type: str = "llm_call_started"
    # Prompt payload: a raw string, or a list of chat-message dicts
    # (presumably {"role": ..., "content": ...} — confirm against callers).
    messages: Union[str, List[Dict[str, str]]]
    # Tool schemas offered to the model for this call, if any.
    tools: Optional[List[dict]] = None
    # Callback objects attached to the call, if any.
    callbacks: Optional[List[Any]] = None
    # Mapping of function name -> callable available for tool execution.
    available_functions: Optional[Dict[str, Any]] = None
|
class LLMCallCompletedEvent(CrewEvent):
    """Emitted once an LLM call has finished successfully."""

    # Event-type discriminator used when routing/handling events.
    type: str = "llm_call_completed"
    # The raw response produced by the LLM (shape depends on the provider).
    response: Any
    # Whether the call resolved to a tool invocation or a plain completion.
    call_type: LLMCallType
|
class LLMCallFailedEvent(CrewEvent):
    """Event emitted when a LLM call fails."""

    # Field order normalized for consistency with the other event classes
    # in this module, which all declare the `type` discriminator first.
    # (Siblings already place defaulted fields before required ones, so
    # this reorder is behavior-neutral for keyword construction.)
    type: str = "llm_call_failed"
    # Human-readable description of what went wrong.
    error: str
|
class LLMStreamChunkEvent(CrewEvent):
    """Emitted for each text chunk received while streaming an LLM response."""

    # Event-type discriminator used when routing/handling events.
    type: str = "llm_stream_chunk"
    # The incremental text fragment delivered by the stream.
    chunk: str