Fix function_calling_llm support for custom models

- Add supports_function_calling() method to the BaseLLM class, defaulting to True
- Add a supports_function_calling parameter to the LLM class that overrides litellm's capability check (see the sketch after this list)
- Add tests covering both the BaseLLM default and the LLM override
- Fixes #3708: custom models missing from litellm's model list can now use function calling
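
A minimal sketch of the change, not the full crewai classes: BaseLLM gains a
supports_function_calling() method that defaults to True, and LLM accepts an
optional override that short-circuits litellm's lookup. The upstream capability
check is litellm's litellm.supports_function_calling(model=...) helper;
everything else here is trimmed down for illustration.

from typing import Optional

import litellm


class BaseLLM:
    """Base class for custom LLM implementations."""

    def __init__(self, model: str):
        self.model = model

    def supports_function_calling(self) -> bool:
        # Default to True so custom models are not blocked by a capability
        # check whose model list they never appear in.
        return True


class LLM(BaseLLM):
    def __init__(self, model: str, supports_function_calling: Optional[bool] = None):
        super().__init__(model)
        # None means "defer to litellm's model capability lookup";
        # True or False forces the answer either way.
        self._supports_function_calling_override = supports_function_calling

    def supports_function_calling(self) -> bool:
        if self._supports_function_calling_override is not None:
            return self._supports_function_calling_override
        try:
            return litellm.supports_function_calling(model=self.model)
        except Exception:
            # Models unknown to litellm fall back to False rather than raising.
            return False

Using None as the sentinel keeps False meaningful: a caller can explicitly
disable function calling for a model litellm would otherwise report as
capable, which the second test in the diff below exercises.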

Co-Authored-By: João <joao@crewai.com>
Author: Devin AI
Date: 2025-10-15 02:57:03 +00:00
Parent: f0fb349ddf
Commit: fc4b0dd923
4 changed files with 51 additions and 0 deletions


@@ -711,3 +711,18 @@ def test_ollama_does_not_modify_when_last_is_user(ollama_llm):
     formatted = ollama_llm._format_messages_for_provider(original_messages)
     assert formatted == original_messages
+
+
+def test_supports_function_calling_with_override_true():
+    llm = LLM(model="custom-model/my-model", supports_function_calling=True)
+    assert llm.supports_function_calling() is True
+
+
+def test_supports_function_calling_with_override_false():
+    llm = LLM(model="gpt-4o-mini", supports_function_calling=False)
+    assert llm.supports_function_calling() is False
+
+
+def test_supports_function_calling_without_override():
+    llm = LLM(model="gpt-4o-mini")
+    assert llm.supports_function_calling() is True
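
A hypothetical usage sketch of what the fix enables, assuming crewai's Agent
accepts a function_calling_llm argument; "custom-model/my-model" stands in for
any model id litellm does not recognize.

from crewai import Agent, LLM

tool_llm = LLM(model="custom-model/my-model", supports_function_calling=True)

agent = Agent(
    role="Researcher",
    goal="Answer questions using tools",
    backstory="Routes tool calls through a custom model.",
    function_calling_llm=tool_llm,  # no longer rejected by the litellm check
)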