From ea610af201ba0c00bc1c2a27f0580f0e33fb362e Mon Sep 17 00:00:00 2001
From: Lucas Gomide
Date: Fri, 10 Apr 2026 10:32:21 -0300
Subject: [PATCH] feat: propagate is_litellm flag through LLMCallCompletedEvent

Add is_litellm field to LLMCallCompletedEvent so downstream consumers
can tell whether a completed call was routed through LiteLLM.
---
 .../src/crewai/events/types/llm_events.py |  1 +
 lib/crewai/src/crewai/llm.py              |  1 +
 lib/crewai/src/crewai/llms/base_llm.py    |  1 +
 .../tests/events/test_llm_usage_event.py  | 21 +++++++++++++++++++
 4 files changed, 24 insertions(+)

diff --git a/lib/crewai/src/crewai/events/types/llm_events.py b/lib/crewai/src/crewai/events/types/llm_events.py
index b138f908c..453048e4f 100644
--- a/lib/crewai/src/crewai/events/types/llm_events.py
+++ b/lib/crewai/src/crewai/events/types/llm_events.py
@@ -58,6 +58,7 @@ class LLMCallCompletedEvent(LLMEventBase):
     response: Any
     call_type: LLMCallType
     usage: dict[str, Any] | None = None
+    is_litellm: bool = False
 
 
 class LLMCallFailedEvent(LLMEventBase):
diff --git a/lib/crewai/src/crewai/llm.py b/lib/crewai/src/crewai/llm.py
index e6f5cc68b..98ca0cd47 100644
--- a/lib/crewai/src/crewai/llm.py
+++ b/lib/crewai/src/crewai/llm.py
@@ -1957,6 +1957,7 @@ class LLM(BaseLLM):
                 model=self.model,
                 call_id=get_current_call_id(),
                 usage=usage,
+                is_litellm=self.is_litellm,
             ),
         )
 
diff --git a/lib/crewai/src/crewai/llms/base_llm.py b/lib/crewai/src/crewai/llms/base_llm.py
index 4f45572ee..26d486305 100644
--- a/lib/crewai/src/crewai/llms/base_llm.py
+++ b/lib/crewai/src/crewai/llms/base_llm.py
@@ -479,6 +479,7 @@ class BaseLLM(BaseModel, ABC):
                 model=self.model,
                 call_id=get_current_call_id(),
                 usage=usage,
+                is_litellm=self.is_litellm,
             ),
         )
 
diff --git a/lib/crewai/tests/events/test_llm_usage_event.py b/lib/crewai/tests/events/test_llm_usage_event.py
index 9be8c639f..2ef3367b7 100644
--- a/lib/crewai/tests/events/test_llm_usage_event.py
+++ b/lib/crewai/tests/events/test_llm_usage_event.py
@@ -175,6 +175,27 @@ class TestEmitCallCompletedEventPassesUsage:
         assert isinstance(event, LLMCallCompletedEvent)
         assert event.usage is None
 
+    def test_is_litellm_is_passed_to_event(self, mock_emit, llm):
+        llm.is_litellm = True
+        llm._emit_call_completed_event(
+            response="hello",
+            call_type=LLMCallType.LLM_CALL,
+            messages="test prompt",
+        )
+
+        event = mock_emit.call_args[1]["event"]
+        assert event.is_litellm is True
+
+    def test_is_litellm_defaults_to_false(self, mock_emit, llm):
+        llm._emit_call_completed_event(
+            response="hello",
+            call_type=LLMCallType.LLM_CALL,
+            messages="test prompt",
+        )
+
+        event = mock_emit.call_args[1]["event"]
+        assert event.is_litellm is False
+
 class TestUsageMetricsNewFields:
     def test_add_usage_metrics_aggregates_reasoning_and_cache_creation(self):
         from crewai.types.usage_metrics import UsageMetrics