Enhance function_calling_llm handling with logging and type validation

Co-Authored-By: Joe Moura <joao@crewai.com>
author Devin AI
date 2025-03-11 08:50:39 +00:00
parent e74c4dd5d6
commit 4ab61eecba
2 changed files with 85 additions and 6 deletions
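For context before the hunks below: function_calling_llm is read per agent from agents.yaml. A hedged sketch of the configuration shape being validated, written as the dict form the tests pass to yaml.dump (the agent name and values here are illustrative, not from the commit):

agents_config = {
    "test_agent": {
        "role": "Test Agent",
        "goal": "Test Goal",
        "backstory": "Test Backstory",
        # Must be a string: either the name of a registered @agent method or,
        # as here, a model name used directly. A non-string value such as 123
        # now raises ValueError during crew initialization.
        "function_calling_llm": "gpt-4o-mini",
    }
}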


@@ -172,11 +172,15 @@ def CrewBase(cls: T) -> T:
]
if function_calling_llm := agent_info.get("function_calling_llm"):
    if not isinstance(function_calling_llm, str):
        raise ValueError(f"function_calling_llm must be a string, got {type(function_calling_llm)}")
    try:
        self.agents_config[agent_name]["function_calling_llm"] = agents[
            function_calling_llm
        ]()
    except KeyError:
        logging.debug(f"No agent found for function_calling_llm '{function_calling_llm}', using it as direct model name")
        self.agents_config[agent_name]["function_calling_llm"] = function_calling_llm
if step_callback := agent_info.get("step_callback"):
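The hunk above first requires the value to be a string, then tries to resolve it as the name of a registered agent and falls back to using it verbatim as a model name. A minimal standalone sketch of that resolution behavior, assuming an agents mapping of method names to factories; the helper name resolve_function_calling_llm is illustrative and not part of the actual CrewBase internals:

import logging
from typing import Any, Callable, Dict

def resolve_function_calling_llm(value: Any, agents: Dict[str, Callable[[], Any]]) -> Any:
    # Mirrors the inline decorator logic: reject non-string values up front.
    if not isinstance(value, str):
        raise ValueError(f"function_calling_llm must be a string, got {type(value)}")
    try:
        # If the string names a registered @agent method, call its factory.
        return agents[value]()
    except KeyError:
        # Otherwise treat it as a direct model name and log the fallback.
        logging.debug(f"No agent found for function_calling_llm '{value}', using it as direct model name")
        return value

# No agent is registered under "gpt-4o-mini", so the string falls through unchanged.
assert resolve_function_calling_llm("gpt-4o-mini", {}) == "gpt-4o-mini"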


@@ -1,15 +1,24 @@
import os
import tempfile
from typing import Type

import pytest
import yaml
from pydantic import BaseModel, Field

from crewai import Agent, Crew, Process, Task
from crewai.project import CrewBase, agent, crew, task, tool
from crewai.tools import BaseTool


def test_function_calling_llm_in_yaml():
    """
    Test function_calling_llm YAML configuration.

    Tests:
    - Direct model name specification
    - Configuration persistence
    - Integration with Agent initialization
    """
    # Create temporary YAML files
    with tempfile.TemporaryDirectory() as temp_dir:
        # Create agents.yaml with function_calling_llm
@@ -77,3 +86,69 @@ def test_function_calling_llm_in_yaml():
        # Verify that function_calling_llm was properly set
        assert crew_instance.agents[0].function_calling_llm is not None
        assert crew_instance.agents[0].function_calling_llm.model == "gpt-4o-mini"


def test_invalid_function_calling_llm_type():
    """Test that function_calling_llm must be a string."""
    # Create temporary YAML files
    with tempfile.TemporaryDirectory() as temp_dir:
        # Create agents.yaml with invalid function_calling_llm type
        agents_yaml = os.path.join(temp_dir, "agents.yaml")
        with open(agents_yaml, "w") as f:
            yaml.dump(
                {
                    "test_agent": {
                        "role": "Test Agent",
                        "goal": "Test Goal",
                        "backstory": "Test Backstory",
                        "function_calling_llm": 123  # Invalid type
                    }
                },
                f
            )

        # Create tasks.yaml
        tasks_yaml = os.path.join(temp_dir, "tasks.yaml")
        with open(tasks_yaml, "w") as f:
            yaml.dump(
                {
                    "test_task": {
                        "description": "Test Task",
                        "expected_output": "Test Output",
                        "agent": "test_agent"
                    }
                },
                f
            )

        # Create a CrewBase class that uses the YAML files
        @CrewBase
        class TestCrew:
            """Test crew with invalid function_calling_llm type."""

            agents_config = agents_yaml
            tasks_config = tasks_yaml

            @agent
            def test_agent(self) -> Agent:
                return Agent(
                    config=self.agents_config["test_agent"],
                    verbose=True
                )

            @task
            def test_task(self) -> Task:
                return Task(
                    config=self.tasks_config["test_task"]
                )

            @crew
            def crew(self) -> Crew:
                return Crew(
                    agents=self.agents,
                    tasks=self.tasks,
                    process=Process.sequential,
                    verbose=True
                )

        # Initialize the crew - this should raise a ValueError
        with pytest.raises(ValueError, match="function_calling_llm must be a string"):
            test_crew = TestCrew()