Compare commits

1 Commit

Author: Devin AI
SHA1: c9159ea09a
Date: 2025-08-02 21:39:19 +00:00

Fix Agent.kickoff() to pass custom system_template and prompt_template to LiteAgent

- Add system_template and prompt_template fields to the LiteAgent class
- Update LiteAgent._get_default_system_prompt() to use custom templates when provided
- Pass template parameters from Agent.kickoff() and Agent.kickoff_async() to LiteAgent
- Add comprehensive tests to verify custom templates are passed and used correctly
- Maintain backward compatibility with default behavior when no templates are provided

Fixes #3261

Co-Authored-By: João <joao@crewai.com>
3 changed files with 161 additions and 2 deletions
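
For context, a minimal usage sketch of what this fix enables, assuming the usual top-level crewai imports; the role, goal, backstory, query, and template strings below are illustrative placeholders, not values from the PR:

from crewai import LLM, Agent

# Hypothetical Llama-style templates; any strings containing the
# {{ .System }} and {{ .Prompt }} markers are substituted the same way.
system_template = """<|start_header_id|>system<|end_header_id|>
{{ .System }}<|eot_id|>"""
prompt_template = """<|start_header_id|>user<|end_header_id|>
{{ .Prompt }}<|eot_id|>"""

agent = Agent(
    role="Research Assistant",
    goal="Help users find accurate information",
    backstory="You are a helpful research assistant",
    system_template=system_template,
    prompt_template=prompt_template,
    llm=LLM(model="gpt-4o-mini"),
)

# With this fix, kickoff() forwards the custom templates to the underlying
# LiteAgent instead of silently ignoring them.
result = agent.kickoff("Summarize the latest research on perovskite solar cells.")
print(result.raw)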

View File

@@ -817,6 +817,8 @@ class Agent(BaseAgent):
            original_agent=self,
            guardrail=self.guardrail,
            guardrail_max_retries=self.guardrail_max_retries,
            system_template=self.system_template,
            prompt_template=self.prompt_template,
        )
        return lite_agent.kickoff(messages)
@@ -853,6 +855,8 @@ class Agent(BaseAgent):
            response_format=response_format,
            i18n=self.i18n,
            original_agent=self,
            system_template=self.system_template,
            prompt_template=self.prompt_template,
        )
        return await lite_agent.kickoff_async(messages)
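
The second hunk applies the same change to the async path. A minimal sketch of that usage, assuming Agent.kickoff_async() is awaitable as the diff suggests; all values are placeholders:

import asyncio

from crewai import LLM, Agent

agent = Agent(
    role="Research Assistant",
    goal="Help users find accurate information",
    backstory="You are a helpful research assistant",
    system_template="CUSTOM SYSTEM: {{ .System }}",
    prompt_template="CUSTOM PROMPT: {{ .Prompt }}",
    llm=LLM(model="gpt-4o-mini"),
)

async def main() -> None:
    # kickoff_async() now passes system_template and prompt_template to LiteAgent
    # exactly as the synchronous kickoff() does.
    result = await agent.kickoff_async("Summarize the latest research on perovskite solar cells.")
    print(result.raw)

asyncio.run(main())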

View File

@@ -174,6 +174,14 @@ class LiteAgent(FlowTrackable, BaseModel):
        default=[], description="Callbacks to be used for the agent"
    )
    # Template Properties
    system_template: Optional[str] = Field(
        default=None, description="Custom system template for the agent"
    )
    prompt_template: Optional[str] = Field(
        default=None, description="Custom prompt template for the agent"
    )
    # Guardrail Properties
    guardrail: Optional[Union[Callable[[LiteAgentOutput], Tuple[bool, Any]], str]] = (
        Field(
@@ -446,6 +454,26 @@ class LiteAgent(FlowTrackable, BaseModel):
    def _get_default_system_prompt(self) -> str:
        """Get the default system prompt for the agent."""
        if self.system_template and self.prompt_template:
            system_components = ["role_playing"]
            if self._parsed_tools:
                system_components.append("tools")
            else:
                system_components.append("no_tools")

            system_parts = [self.i18n.slice(component) for component in system_components]
            system_content = "".join(system_parts)

            system_prompt = self.system_template.replace("{{ .System }}", system_content)
            system_prompt = (
                system_prompt.replace("{goal}", self.goal)
                .replace("{role}", self.role)
                .replace("{backstory}", self.backstory)
            )
            return system_prompt

        base_prompt = ""
        if self._parsed_tools:
            # Use the prompt template for agents with tools
@@ -622,4 +650,4 @@ class LiteAgent(FlowTrackable, BaseModel):
    def _append_message(self, text: str, role: str = "assistant") -> None:
        """Append a message to the message list with the given role."""
        self._messages.append(format_message_for_llm(text, role=role))
        self._messages.append(format_message_for_llm(text, role=role))
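
To make the substitution above concrete, here is a standalone sketch that mirrors the added _get_default_system_prompt() logic; the i18n slices are replaced by a hard-coded stand-in string and all values are illustrative:

# Stand-in for "".join(self.i18n.slice(c) for c in ["role_playing", "no_tools"])
system_content = "You are {role}. Your goal: {goal}. {backstory}"

system_template = "CUSTOM SYSTEM: {{ .System }}"

# {{ .System }} is filled in first, so placeholders that come from the i18n
# slices are resolved by the second pass as well.
system_prompt = system_template.replace("{{ .System }}", system_content)
system_prompt = (
    system_prompt.replace("{goal}", "Help users find accurate information")
    .replace("{role}", "Research Assistant")
    .replace("{backstory}", "You are a helpful research assistant")
)
print(system_prompt)
# CUSTOM SYSTEM: You are Research Assistant. Your goal: Help users find accurate
# information. You are a helpful research assistant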

View File

@@ -118,6 +118,8 @@ def test_lite_agent_created_with_correct_parameters(monkeypatch, verbose):
    assert created_lite_agent["max_iterations"] == max_iter
    assert created_lite_agent["max_execution_time"] == max_execution_time
    assert created_lite_agent["verbose"] == verbose
    assert created_lite_agent["system_template"] is None
    assert created_lite_agent["prompt_template"] is None
    assert created_lite_agent["response_format"] is None

    # Test with a response_format
@@ -492,4 +494,129 @@ def test_lite_agent_with_invalid_llm():
            backstory="Test backstory",
            llm="invalid-model"
        )
    assert "Expected LLM instance of type BaseLLM" in str(exc_info.value)
    assert "Expected LLM instance of type BaseLLM" in str(exc_info.value)


@pytest.mark.vcr(filter_headers=["authorization"])
def test_agent_kickoff_with_custom_templates(monkeypatch):
    """Test that Agent.kickoff() passes custom templates to LiteAgent and they are used correctly."""
    custom_system_template = """<|start_header_id|>system<|end_header_id|>
{{ .System }}<|eot_id|>"""
    custom_prompt_template = """<|start_header_id|>user<|end_header_id|>
{{ .Prompt }}<|eot_id|>"""

    agent = Agent(
        role="Test Agent",
        goal="Test Goal",
        backstory="Test Backstory",
        system_template=custom_system_template,
        prompt_template=custom_prompt_template,
        llm=LLM(model="gpt-4o-mini"),
    )

    created_lite_agent = None
    original_lite_agent = LiteAgent

    class MockLiteAgent(original_lite_agent):
        def __init__(self, **kwargs):
            nonlocal created_lite_agent
            created_lite_agent = kwargs
            super().__init__(**kwargs)

        def kickoff(self, messages):
            return LiteAgentOutput(
                raw="Test response",
                agent_role=self.role
            )

    monkeypatch.setattr("crewai.agent.LiteAgent", MockLiteAgent)

    result = agent.kickoff("Test query")

    assert created_lite_agent is not None
    assert created_lite_agent["system_template"] == custom_system_template
    assert created_lite_agent["prompt_template"] == custom_prompt_template
    assert isinstance(result, LiteAgentOutput)
    assert result.raw == "Test response"


@pytest.mark.vcr(filter_headers=["authorization"])
def test_agent_kickoff_without_custom_templates(monkeypatch):
    """Test that Agent.kickoff() works normally when no custom templates are provided."""
    agent = Agent(
        role="Test Agent",
        goal="Test Goal",
        backstory="Test Backstory",
        llm=LLM(model="gpt-4o-mini"),
    )

    created_lite_agent = None
    original_lite_agent = LiteAgent

    class MockLiteAgent(original_lite_agent):
        def __init__(self, **kwargs):
            nonlocal created_lite_agent
            created_lite_agent = kwargs
            super().__init__(**kwargs)

        def kickoff(self, messages):
            return LiteAgentOutput(
                raw="Test response",
                agent_role=self.role
            )

    monkeypatch.setattr("crewai.agent.LiteAgent", MockLiteAgent)

    result = agent.kickoff("Test query")

    assert created_lite_agent is not None
    assert created_lite_agent["system_template"] is None
    assert created_lite_agent["prompt_template"] is None
    assert isinstance(result, LiteAgentOutput)
    assert result.raw == "Test response"


@pytest.mark.vcr(filter_headers=["authorization"])
def test_lite_agent_uses_custom_templates_in_system_prompt():
    """Test that LiteAgent actually uses custom templates when building system prompts."""
    custom_system_template = "CUSTOM SYSTEM: {{ .System }}"
    custom_prompt_template = "CUSTOM PROMPT: {{ .Prompt }}"

    lite_agent = LiteAgent(
        role="Research Assistant",
        goal="Help users find accurate information",
        backstory="You are a helpful research assistant",
        system_template=custom_system_template,
        prompt_template=custom_prompt_template,
        llm=LLM(model="gpt-4o-mini"),
    )

    system_prompt = lite_agent._get_default_system_prompt()

    assert "CUSTOM SYSTEM:" in system_prompt
    assert "Research Assistant" in system_prompt
    assert "Help users find accurate information" in system_prompt
    assert "You are a helpful research assistant" in system_prompt


@pytest.mark.vcr(filter_headers=["authorization"])
def test_lite_agent_falls_back_to_default_without_templates():
    """Test that LiteAgent falls back to default behavior when no custom templates are provided."""
    lite_agent = LiteAgent(
        role="Research Assistant",
        goal="Help users find accurate information",
        backstory="You are a helpful research assistant",
        llm=LLM(model="gpt-4o-mini"),
    )

    system_prompt = lite_agent._get_default_system_prompt()

    assert "CUSTOM SYSTEM:" not in system_prompt
    assert "Research Assistant" in system_prompt
    assert "Help users find accurate information" in system_prompt
    assert "You are a helpful research assistant" in system_prompt