mirror of
https://github.com/crewAIInc/crewAI.git
synced 2026-04-11 05:22:41 +00:00
feat: propagate is_litellm flag through LLMCallCompletedEvent
Add an is_litellm field to LLMCallCompletedEvent so downstream consumers of the event can tell whether the completed call was made through LiteLLM (the flag defaults to False and is set from the LLM instance when the event is emitted).
This commit is contained in:
@@ -58,6 +58,7 @@ class LLMCallCompletedEvent(LLMEventBase):
|
||||
response: Any
|
||||
call_type: LLMCallType
|
||||
usage: dict[str, Any] | None = None
|
||||
is_litellm: bool = False
|
||||
|
||||
|
||||
class LLMCallFailedEvent(LLMEventBase):
|
||||
|
||||
@@ -1957,6 +1957,7 @@ class LLM(BaseLLM):
|
||||
model=self.model,
|
||||
call_id=get_current_call_id(),
|
||||
usage=usage,
|
||||
is_litellm=self.is_litellm,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -479,6 +479,7 @@ class BaseLLM(BaseModel, ABC):
|
||||
model=self.model,
|
||||
call_id=get_current_call_id(),
|
||||
usage=usage,
|
||||
is_litellm=self.is_litellm,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -175,6 +175,27 @@ class TestEmitCallCompletedEventPassesUsage:
|
||||
assert isinstance(event, LLMCallCompletedEvent)
|
||||
assert event.usage is None
|
||||
|
||||
def test_is_litellm_is_passed_to_event(self, mock_emit, llm):
|
||||
llm.is_litellm = True
|
||||
llm._emit_call_completed_event(
|
||||
response="hello",
|
||||
call_type=LLMCallType.LLM_CALL,
|
||||
messages="test prompt",
|
||||
)
|
||||
|
||||
event = mock_emit.call_args[1]["event"]
|
||||
assert event.is_litellm is True
|
||||
|
||||
def test_is_litellm_defaults_to_false(self, mock_emit, llm):
|
||||
llm._emit_call_completed_event(
|
||||
response="hello",
|
||||
call_type=LLMCallType.LLM_CALL,
|
||||
messages="test prompt",
|
||||
)
|
||||
|
||||
event = mock_emit.call_args[1]["event"]
|
||||
assert event.is_litellm is False
|
||||
|
||||
class TestUsageMetricsNewFields:
|
||||
def test_add_usage_metrics_aggregates_reasoning_and_cache_creation(self):
|
||||
from crewai.types.usage_metrics import UsageMetrics
|
||||
|
||||
Reference in New Issue
Block a user