From af6c61bcb8e67f66785e10d9eddd8a685517b5f0 Mon Sep 17 00:00:00 2001
From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Thu, 18 Sep 2025 20:32:17 +0000
Subject: [PATCH] fix: Resolve type checking errors in prompt caching implementation
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Add type ignore comment for intentional content field transformation
- Convert ChatCompletionDeltaToolCall to ToolCall format for event emission
- Fixes mypy errors on lines 416 and 789

Co-Authored-By: João
---
 src/crewai/llm.py | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/src/crewai/llm.py b/src/crewai/llm.py
index 4148a2555..0dbbfeeeb 100644
--- a/src/crewai/llm.py
+++ b/src/crewai/llm.py
@@ -413,7 +413,7 @@ class LLM(BaseLLM):
         ):
             content = message.get("content", "")
             if isinstance(content, str):
-                formatted_message["content"] = [
+                formatted_message["content"] = [  # type: ignore[assignment]
                     {
                         "type": "text",
                         "text": content,
@@ -783,10 +783,21 @@ class LLM(BaseLLM):
                                     tool_call.function.arguments
                                 )
                             assert hasattr(crewai_event_bus, "emit")
+                            # Convert ChatCompletionDeltaToolCall to ToolCall format
+                            from crewai.events.types.llm_events import ToolCall, FunctionCall
+                            converted_tool_call = ToolCall(
+                                id=tool_call.id,
+                                function=FunctionCall(
+                                    name=tool_call.function.name,
+                                    arguments=tool_call.function.arguments or ""
+                                ),
+                                type=tool_call.type,
+                                index=tool_call.index
+                            )
                             crewai_event_bus.emit(
                                 self,
                                 event=LLMStreamChunkEvent(
-                                    tool_call=tool_call.to_dict(),
+                                    tool_call=converted_tool_call,
                                     chunk=tool_call.function.arguments,
                                     from_task=from_task,
                                     from_agent=from_agent,
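
Reviewer note (illustrative, not part of the patch): a minimal sketch of how a downstream listener could consume the normalized tool-call payload this change emits, assuming crewai_event_bus is importable from crewai.events and exposes an on(...) decorator, and that LLMStreamChunkEvent lives alongside ToolCall in crewai.events.types.llm_events; the handler name is hypothetical.

    # Sketch under the assumptions stated above; adjust imports if the
    # package layout differs in your crewai version.
    from crewai.events import crewai_event_bus
    from crewai.events.types.llm_events import LLMStreamChunkEvent

    @crewai_event_bus.on(LLMStreamChunkEvent)
    def log_tool_call_chunk(source, event: LLMStreamChunkEvent) -> None:
        # With this patch, event.tool_call is the crewAI ToolCall model rather
        # than litellm's ChatCompletionDeltaToolCall, so handlers can rely on
        # a single schema for streamed tool-call chunks.
        if event.tool_call is not None:
            print(event.tool_call.function.name, event.tool_call.function.arguments)

The point of the conversion is that event consumers never see provider-specific delta types; they only depend on the ToolCall/FunctionCall models defined in crewai.events.types.llm_events.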