mirror of
https://github.com/crewAIInc/crewAI.git
synced 2026-01-25 16:18:13 +00:00
Fix function_calling_llm support for custom models
- Add supports_function_calling() method to BaseLLM class with default True
- Add supports_function_calling parameter to LLM class to allow override of litellm check
- Add tests for both BaseLLM default and LLM override functionality
- Fixes #3708: Custom models not in litellm's list can now use function calling

Co-Authored-By: João <joao@crewai.com>
This commit is contained in:
@@ -358,3 +358,25 @@ def test_timeout_handling_llm():
|
||||
with pytest.raises(TimeoutError, match="LLM request failed after 2 attempts"):
|
||||
llm.call("Test message")
|
||||
assert len(llm.calls) == 2 # Initial call + failed retry attempt
|
||||
|
||||
|
||||
class MinimalCustomLLM(BaseLLM):
    """Bare-bones BaseLLM subclass used to exercise the inherited default.

    Deliberately does NOT override supports_function_calling(), so tests can
    observe the behavior provided by the BaseLLM base class.
    """

    def __init__(self):
        # Only a model name is required; all other BaseLLM defaults apply.
        super().__init__(model="minimal-model")

    def call(
        self,
        messages: Union[str, List[Dict[str, str]]],
        tools: Optional[List[dict]] = None,
        callbacks: Optional[List[Any]] = None,
        available_functions: Optional[Dict[str, Any]] = None,
    ) -> Union[str, Any]:
        # Canned reply — the tests only probe supports_function_calling(),
        # never the content of a real completion.
        canned_reply = "Minimal response"
        return canned_reply
||||
def test_base_llm_supports_function_calling_default():
    """A BaseLLM subclass that does not override the hook reports True."""
    custom_llm = MinimalCustomLLM()
    result = custom_llm.supports_function_calling()
    assert result is True
@@ -711,3 +711,18 @@ def test_ollama_does_not_modify_when_last_is_user(ollama_llm):
|
||||
formatted = ollama_llm._format_messages_for_provider(original_messages)
|
||||
|
||||
assert formatted == original_messages
|
||||
|
||||
|
||||
def test_supports_function_calling_with_override_true():
    """An explicit supports_function_calling=True wins over the litellm lookup,
    even for a model litellm does not know about."""
    custom_llm = LLM(model="custom-model/my-model", supports_function_calling=True)
    assert custom_llm.supports_function_calling() is True
||||
def test_supports_function_calling_with_override_false():
    """An explicit supports_function_calling=False disables function calling
    even for a model litellm would report as capable."""
    disabled_llm = LLM(model="gpt-4o-mini", supports_function_calling=False)
    assert disabled_llm.supports_function_calling() is False
||||
def test_supports_function_calling_without_override():
    """Without an override, capability falls back to the litellm check,
    which reports True for gpt-4o-mini."""
    default_llm = LLM(model="gpt-4o-mini")
    assert default_llm.supports_function_calling() is True
Reference in New Issue
Block a user