Fix #2729: Add force_structured_output parameter to bypass response schema check for OpenRouter models
Co-Authored-By: Joe Moura <joao@crewai.com>
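In practice the new flag is used like this; the sketch below is illustrative only, assuming nothing beyond the constructor signature and validation behavior shown in the diffs that follow (the model id is just an example):

```python
from pydantic import BaseModel

from crewai import LLM


class Answer(BaseModel):
    a: int


# litellm's supports_response_schema() does not recognize many OpenRouter
# model ids, so before this change passing response_format to such a model
# raised ValueError during validation even when the underlying model can
# emit structured output. force_structured_output=True skips that check
# for the openrouter provider only.
llm = LLM(
    model="openrouter/deepseek/deepseek-chat",
    response_format=Answer,
    force_structured_output=True,
)
llm._validate_call_params()  # passes instead of raising ValueError
```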
@@ -270,6 +270,7 @@ class LLM(BaseLLM):
         callbacks: List[Any] = [],
         reasoning_effort: Optional[Literal["none", "low", "medium", "high"]] = None,
         stream: bool = False,
+        force_structured_output: bool = False,
         **kwargs,
     ):
         self.model = model
@@ -296,6 +297,7 @@ class LLM(BaseLLM):
         self.additional_params = kwargs
         self.is_anthropic = self._is_anthropic_model(model)
         self.stream = stream
+        self.force_structured_output = force_structured_output

         litellm.drop_params = True
@@ -992,9 +994,11 @@ class LLM(BaseLLM):
         - If no slash is present, "openai" is assumed.
         """
         provider = self._get_custom_llm_provider()
-        if self.response_format is not None and not supports_response_schema(
-            model=self.model,
-            custom_llm_provider=provider,
+        if self.response_format is not None and not (
+            supports_response_schema(
+                model=self.model,
+                custom_llm_provider=provider,
+            ) or (provider == "openrouter" and self.force_structured_output)
         ):
             raise ValueError(
                 f"The model {self.model} does not support response_format for provider '{provider}'. "
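Isolated from the class, the reworked predicate reads roughly as follows. This is a sketch of the boolean logic for illustration, not code from the patch; the function name is made up:

```python
def should_reject_response_format(
    response_format: object | None,
    schema_supported: bool,
    provider: str,
    force_structured_output: bool,
) -> bool:
    """Mirror of the new check in _validate_call_params (illustrative only)."""
    if response_format is None:
        return False  # nothing to validate
    # The bypass applies only to openrouter; every other provider still
    # depends on litellm's supports_response_schema() result.
    return not (
        schema_supported or (provider == "openrouter" and force_structured_output)
    )


assert should_reject_response_format(object(), False, "openrouter", True) is False
assert should_reject_response_format(object(), False, "openrouter", False) is True
assert should_reject_response_format(object(), False, "openai", True) is True
```

Note the bypass is deliberately scoped to `provider == "openrouter"`; all other providers keep the pre-existing ValueError behavior.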
||||
@@ -256,6 +256,32 @@ def test_validate_call_params_no_response_format():
     llm._validate_call_params()


+def test_validate_call_params_openrouter_force_structured_output():
+    class DummyResponse(BaseModel):
+        a: int
+
+    # Test with OpenRouter and force_structured_output=True
+    llm = LLM(
+        model="openrouter/deepseek/deepseek-chat",
+        response_format=DummyResponse,
+        force_structured_output=True,
+    )
+    # Should not raise any error with force_structured_output=True
+    llm._validate_call_params()
+
+    # Test with OpenRouter and force_structured_output=False (default).
+    # Patch supports_response_schema to simulate an unsupported model.
+    with patch("crewai.llm.supports_response_schema", return_value=False):
+        llm = LLM(
+            model="openrouter/deepseek/deepseek-chat",
+            response_format=DummyResponse,
+            force_structured_output=False,
+        )
+        with pytest.raises(ValueError) as excinfo:
+            llm._validate_call_params()
+        assert "does not support response_format" in str(excinfo.value)
+
+
 @pytest.mark.vcr(filter_headers=["authorization"], filter_query_parameters=["key"])
 @pytest.mark.parametrize(
     "model",
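To exercise just the new test locally, `pytest -k test_validate_call_params_openrouter_force_structured_output` should select it by name (the exact test-file path is not shown in this diff).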