From fc4b0dd92310e67c519e71b5b834c488a0602a97 Mon Sep 17 00:00:00 2001
From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Wed, 15 Oct 2025 02:57:03 +0000
Subject: [PATCH] Fix function_calling_llm support for custom models
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Add a supports_function_calling() method to BaseLLM with a default of True
- Add a supports_function_calling parameter to LLM to allow overriding the
  litellm capability check
- Add tests for both the BaseLLM default and the LLM override
- Fixes #3708: custom models not in litellm's registry can now use function
  calling

Co-Authored-By: João
---
 src/crewai/llm.py           |  5 +++++
 src/crewai/llms/base_llm.py |  9 +++++++++
 tests/test_custom_llm.py    | 22 ++++++++++++++++++++++
 tests/test_llm.py           | 15 +++++++++++++++
 4 files changed, 51 insertions(+)

diff --git a/src/crewai/llm.py b/src/crewai/llm.py
index 733b46c79..0e50e96b3 100644
--- a/src/crewai/llm.py
+++ b/src/crewai/llm.py
@@ -299,6 +299,7 @@ class LLM(BaseLLM):
         callbacks: list[Any] | None = None,
         reasoning_effort: Literal["none", "low", "medium", "high"] | None = None,
         stream: bool = False,
+        supports_function_calling: bool | None = None,
         **kwargs,
     ):
         self.model = model
@@ -325,6 +326,7 @@ class LLM(BaseLLM):
         self.additional_params = kwargs
         self.is_anthropic = self._is_anthropic_model(model)
         self.stream = stream
+        self._supports_function_calling_override = supports_function_calling
 
         litellm.drop_params = True
 
@@ -1197,6 +1199,9 @@ class LLM(BaseLLM):
         )
 
     def supports_function_calling(self) -> bool:
+        if self._supports_function_calling_override is not None:
+            return self._supports_function_calling_override
+
         try:
             provider = self._get_custom_llm_provider()
             return litellm.utils.supports_function_calling(
diff --git a/src/crewai/llms/base_llm.py b/src/crewai/llms/base_llm.py
index 0cd95c347..9964e68b4 100644
--- a/src/crewai/llms/base_llm.py
+++ b/src/crewai/llms/base_llm.py
@@ -9,6 +9,7 @@ from typing import Any, Final
 
 DEFAULT_CONTEXT_WINDOW_SIZE: Final[int] = 4096
 DEFAULT_SUPPORTS_STOP_WORDS: Final[bool] = True
+DEFAULT_SUPPORTS_FUNCTION_CALLING: Final[bool] = True
 
 
 class BaseLLM(ABC):
@@ -82,6 +83,14 @@ class BaseLLM(ABC):
             RuntimeError: If the LLM request fails for other reasons.
         """
 
+    def supports_function_calling(self) -> bool:
+        """Check if the LLM supports function calling.
+
+        Returns:
+            True if the LLM supports function calling, False otherwise.
+        """
+        return DEFAULT_SUPPORTS_FUNCTION_CALLING
+
     def supports_stop_words(self) -> bool:
         """Check if the LLM supports stop words.
 
diff --git a/tests/test_custom_llm.py b/tests/test_custom_llm.py
index 85a4b2e64..80c858bd3 100644
--- a/tests/test_custom_llm.py
+++ b/tests/test_custom_llm.py
@@ -358,3 +358,25 @@ def test_timeout_handling_llm():
     with pytest.raises(TimeoutError, match="LLM request failed after 2 attempts"):
         llm.call("Test message")
     assert len(llm.calls) == 2  # Initial call + failed retry attempt
+
+
+class MinimalCustomLLM(BaseLLM):
+    """Minimal custom LLM implementation that doesn't override supports_function_calling."""
+
+    def __init__(self):
+        super().__init__(model="minimal-model")
+
+    def call(
+        self,
+        messages: Union[str, List[Dict[str, str]]],
+        tools: Optional[List[dict]] = None,
+        callbacks: Optional[List[Any]] = None,
+        available_functions: Optional[Dict[str, Any]] = None,
+    ) -> Union[str, Any]:
+        return "Minimal response"
+
+
+def test_base_llm_supports_function_calling_default():
+    """Test that BaseLLM supports function calling by default."""
+    llm = MinimalCustomLLM()
+    assert llm.supports_function_calling() is True
diff --git a/tests/test_llm.py b/tests/test_llm.py
index 065687565..4b6e745dd 100644
--- a/tests/test_llm.py
+++ b/tests/test_llm.py
@@ -711,3 +711,18 @@ def test_ollama_does_not_modify_when_last_is_user(ollama_llm):
     formatted = ollama_llm._format_messages_for_provider(original_messages)
 
     assert formatted == original_messages
+
+
+def test_supports_function_calling_with_override_true():
+    llm = LLM(model="custom-model/my-model", supports_function_calling=True)
+    assert llm.supports_function_calling() is True
+
+
+def test_supports_function_calling_with_override_false():
+    llm = LLM(model="gpt-4o-mini", supports_function_calling=False)
+    assert llm.supports_function_calling() is False
+
+
+def test_supports_function_calling_without_override():
+    llm = LLM(model="gpt-4o-mini")
+    assert llm.supports_function_calling() is True
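-- 
Usage sketch (illustrative, not part of the patch; the model name and
endpoint below are placeholders): with this change applied, a self-hosted
model that litellm's registry does not recognize can opt into tool calling
explicitly instead of being rejected by the capability check.

    from crewai import LLM

    # Hypothetical OpenAI-compatible endpoint serving a model that litellm
    # does not know about; without the override, supports_function_calling()
    # falls back to litellm's registry lookup, which reports False for
    # unrecognized models (the behavior described in #3708).
    llm = LLM(
        model="openai/my-private-model",
        base_url="http://localhost:8000/v1",
        supports_function_calling=True,  # the override added by this patch
    )
    assert llm.supports_function_calling() is True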