Checking supports_function_calling instead of gpt models

This commit is contained in:
João Moura
2024-09-23 16:23:38 -03:00
parent 3b6d1838b4
commit 493f046c03
8 changed files with 34 additions and 61 deletions

View File

@@ -25,6 +25,7 @@ def test_evaluate_training_data(converter_mock):
}
agent_id = "agent_id"
original_agent = MagicMock()
original_agent.llm.supports_function_calling.return_value = False
function_return_value = TrainingTaskEvaluation(
suggestions=[
"The initial output was already good, having a detailed explanation. However, the improved output "

View File

@@ -11,11 +11,12 @@ from crewai.utilities.converter import (
create_converter,
get_conversion_instructions,
handle_partial_json,
is_gpt,
validate_model,
)
from pydantic import BaseModel
from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser
# Sample Pydantic models for testing
class EmailResponse(BaseModel):
@@ -198,14 +199,20 @@ def test_convert_with_instructions_failure(
def test_get_conversion_instructions_gpt():
mock_llm = Mock()
mock_llm.openai_api_base = None
with patch("crewai.utilities.converter.is_gpt", return_value=True):
with patch.object(LLM, "supports_function_calling") as supports_function_calling:
supports_function_calling.return_value = True
instructions = get_conversion_instructions(SimpleModel, mock_llm)
assert instructions == "I'm gonna convert this raw text into valid JSON."
model_schema = PydanticSchemaParser(model=SimpleModel).get_schema()
assert (
instructions
== f"I'm gonna convert this raw text into valid JSON.\n\nThe json should have the following structure, with the following keys:\n{model_schema}"
)
def test_get_conversion_instructions_non_gpt():
mock_llm = Mock()
with patch("crewai.utilities.converter.is_gpt", return_value=False):
with patch.object(LLM, "supports_function_calling") as supports_function_calling:
supports_function_calling.return_value = False
with patch("crewai.utilities.converter.PydanticSchemaParser") as mock_parser:
mock_parser.return_value.get_schema.return_value = "Sample schema"
instructions = get_conversion_instructions(SimpleModel, mock_llm)
@@ -213,14 +220,14 @@ def test_get_conversion_instructions_non_gpt():
# Tests for is_gpt
def test_is_gpt_true():
llm = LLM(model="gpt-4")
assert is_gpt(llm) is True
def test_supports_function_calling_true():
llm = LLM(model="gpt-4o")
assert llm.supports_function_calling() is True
def test_is_gpt_false():
llm = LLM(model="lol-4")
assert is_gpt(llm) is False
def test_supports_function_calling_false():
llm = LLM(model="non-existent-model")
assert llm.supports_function_calling() is False
class CustomConverter(Converter):