Fix: Apply additional_drop_params filtering across all LLM providers

- Added centralized _apply_additional_drop_params helper method in BaseLLM
- Applied filtering in LiteLLM fallback path (_prepare_completion_params in llm.py)
- Applied filtering in Azure native provider (azure/completion.py)
- Applied filtering in OpenAI native provider (openai/completion.py)
- Applied filtering in Anthropic native provider (anthropic/completion.py)
- Added comprehensive tests covering both direct LLM usage and Agent usage
- Tests verify filtering works for single and multiple parameters
- Tests verify backwards compatibility with misspelled drop_additionnal_params

Fixes #3814

Co-Authored-By: João <joao@crewai.com>
commit 26906113fe
parent 2e9eb8c32d
Author: Devin AI
Date:   2025-10-30 11:04:53 +00:00

6 changed files with 162 additions and 2 deletions
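For orientation, here is a minimal usage sketch of the behavior this commit enables; the parameter values are illustrative and mirror the tests below:

    from crewai import LLM

    # o1-style models reject "stop", so the caller asks for it to be dropped
    # before the request is sent, whichever provider path handles it.
    llm = LLM(
        model="o1-mini",
        stop=["stop_sequence"],
        additional_drop_params=["stop"],
    )
    # Both the LiteLLM fallback and the native providers now strip "stop",
    # along with the additional_drop_params key itself, from the final params.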


@@ -498,7 +498,11 @@ class LLM(BaseLLM):
         }
         # Remove None values from params
-        return {k: v for k, v in params.items() if v is not None}
+        params = {k: v for k, v in params.items() if v is not None}
+        params = self._apply_additional_drop_params(params)
+        return params
 
     def _handle_streaming_response(
         self,
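Note the ordering in this LiteLLM path: None-valued params are pruned first, then the user-requested drops are applied. A small sketch of the resulting flow, with invented values:

    # Illustrative only -- invented values, not lines from the diff.
    params = {"model": "o1-mini", "stop": ["\n"], "seed": None}
    params = {k: v for k, v in params.items() if v is not None}  # removes "seed"
    params = llm._apply_additional_drop_params(params)           # removes "stop"
    # params is now {"model": "o1-mini"}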


@@ -223,6 +223,49 @@ class BaseLLM(ABC):
         return content
 
+    def _apply_additional_drop_params(self, params: dict[str, Any]) -> dict[str, Any]:
+        """Apply additional_drop_params filtering to remove unwanted parameters.
+
+        This method provides consistent parameter filtering across all LLM providers.
+        It should be called after building the final params dict and before making
+        the provider API call.
+
+        Args:
+            params: The parameters dictionary to filter.
+
+        Returns:
+            Filtered parameters dictionary with the specified params removed.
+
+        Example:
+            >>> llm = LLM(model="o1-mini", additional_drop_params=["stop"])
+            >>> params = {"model": "o1-mini", "messages": [...], "stop": ["\\n"]}
+            >>> filtered = llm._apply_additional_drop_params(params)
+            >>> "stop" in filtered
+            False
+        """
+        drop_params = (
+            self.additional_params.get("additional_drop_params")
+            or self.additional_params.get("drop_additionnal_params")
+            or []
+        )
+        if not drop_params:
+            return params
+
+        filtered_params = params.copy()
+        for param_name in drop_params:
+            if param_name in filtered_params:
+                logging.debug(
+                    f"Dropping parameter '{param_name}' as specified in additional_drop_params"
+                )
+                filtered_params.pop(param_name)
+
+        filtered_params.pop("additional_drop_params", None)
+        filtered_params.pop("drop_additionnal_params", None)
+        return filtered_params
+
     def get_context_window_size(self) -> int:
         """Get the context window size for the LLM.


@@ -201,6 +201,8 @@ class AnthropicCompletion(BaseLLM):
         if tools and self.supports_tools:
             params["tools"] = self._convert_tools_for_interference(tools)
 
+        params = self._apply_additional_drop_params(params)
+
         return params
 
     def _convert_tools_for_interference(self, tools: list[dict]) -> list[dict]:


@@ -273,6 +273,8 @@ class AzureCompletion(BaseLLM):
             params["tools"] = self._convert_tools_for_interference(tools)
             params["tool_choice"] = "auto"
 
+        params = self._apply_additional_drop_params(params)
+
         return params
 
     def _convert_tools_for_interference(self, tools: list[dict]) -> list[dict]:


@@ -249,7 +249,11 @@ class OpenAICompletion(BaseLLM):
             "timeout",
         }
-        return {k: v for k, v in params.items() if k not in crewai_specific_params}
+        params = {k: v for k, v in params.items() if k not in crewai_specific_params}
+        params = self._apply_additional_drop_params(params)
+        return params
 
     def _convert_tools_for_interference(self, tools: list[dict]) -> list[dict]:
         """Convert CrewAI tool format to OpenAI function calling format."""


@@ -725,3 +725,108 @@ def test_native_provider_falls_back_to_litellm_when_not_in_supported_list():
     # Should fall back to LiteLLM
     assert llm.is_litellm is True
     assert llm.model == "groq/llama-3.1-70b-versatile"
+
+
+def test_additional_drop_params_filters_parameters_in_litellm():
+    """Test that additional_drop_params correctly filters out specified parameters in the LiteLLM path."""
+    with patch("crewai.llm.LITELLM_AVAILABLE", True), patch("crewai.llm.litellm"):
+        llm = LLM(
+            model="o1-mini",
+            stop=["stop_sequence"],
+            additional_drop_params=["stop"],
+            is_litellm=True,
+        )
+        messages = [{"role": "user", "content": "Hello"}]
+        params = llm._prepare_completion_params(messages)
+
+        assert "stop" not in params
+        assert "additional_drop_params" not in params
+        assert params["model"] == "o1-mini"
+
+
+def test_additional_drop_params_with_agent():
+    """Test that additional_drop_params works when the LLM is used with an Agent."""
+    from unittest.mock import MagicMock, patch
+
+    from crewai import Agent, Crew, Task
+
+    with patch("crewai.llm.LITELLM_AVAILABLE", True), patch("crewai.llm.litellm") as mock_litellm:
+        llm = LLM(
+            model="o1-mini",
+            stop=["stop_sequence"],
+            additional_drop_params=["stop"],
+            is_litellm=True,
+        )
+        agent = Agent(
+            role="Test Agent",
+            goal="Test the LLM response format functionality.",
+            backstory="An AI developer testing LLM integrations.",
+            llm=llm,
+        )
+        task = Task(
+            description="Say hello",
+            expected_output="A greeting",
+            agent=agent,
+        )
+
+        mock_response = MagicMock()
+        mock_response.choices = [MagicMock()]
+        mock_response.choices[0].message.content = "Hello!"
+        mock_response.choices[0].message.tool_calls = None
+        mock_response.usage = MagicMock()
+        mock_response.usage.prompt_tokens = 10
+        mock_response.usage.completion_tokens = 5
+        mock_response.usage.total_tokens = 15
+        mock_litellm.completion.return_value = mock_response
+
+        crew = Crew(agents=[agent], tasks=[task], verbose=False)
+        crew.kickoff()
+
+        # Verify that litellm.completion was called
+        assert mock_litellm.completion.called
+        # Get the kwargs passed to litellm.completion
+        call_kwargs = mock_litellm.completion.call_args[1]
+        assert "stop" not in call_kwargs
+        assert "additional_drop_params" not in call_kwargs
+
+
+def test_additional_drop_params_supports_misspelled_variant():
+    """Test that drop_additionnal_params (misspelled) is also supported for backwards compatibility."""
+    with patch("crewai.llm.LITELLM_AVAILABLE", True), patch("crewai.llm.litellm"):
+        llm = LLM(
+            model="o1-mini",
+            stop=["stop_sequence"],
+            drop_additionnal_params=["stop"],
+            is_litellm=True,
+        )
+        messages = [{"role": "user", "content": "Hello"}]
+        params = llm._prepare_completion_params(messages)
+
+        assert "stop" not in params
+        assert "drop_additionnal_params" not in params
+        assert params["model"] == "o1-mini"
+
+
+def test_additional_drop_params_filters_multiple_parameters():
+    """Test that additional_drop_params can filter multiple parameters."""
+    with patch("crewai.llm.LITELLM_AVAILABLE", True), patch("crewai.llm.litellm"):
+        llm = LLM(
+            model="o1-mini",
+            stop=["stop_sequence"],
+            temperature=0.7,
+            additional_drop_params=["stop", "temperature"],
+            is_litellm=True,
+        )
+        messages = [{"role": "user", "content": "Hello"}]
+        params = llm._prepare_completion_params(messages)
+
+        assert "stop" not in params
+        assert "temperature" not in params
+        assert "additional_drop_params" not in params
+        assert params["model"] == "o1-mini"