fix: Remove stop parameter for o4-mini model

Co-Authored-By: Joe Moura <joao@crewai.com>
Devin AI
2025-04-22 16:00:41 +00:00
parent 6d0039b117
commit 130ed3481f
2 changed files with 22 additions and 1 deletion


@@ -358,7 +358,12 @@ class LLM(BaseLLM):
         }
         # Remove None values from params
-        return {k: v for k, v in params.items() if v is not None}
+        params = {k: v for k, v in params.items() if v is not None}
+
+        # o4-mini does not accept a stop parameter, so drop it before the call
+        if "o4-mini" in self.model:
+            params.pop("stop", None)
+        return params
 
     def _handle_streaming_response(
         self,
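
The change replaces the early return: after None values are filtered out, any parameter the target model rejects is popped before the dict is handed back. A minimal standalone sketch of that pattern follows; UNSUPPORTED_BY_MODEL and strip_unsupported_params are illustrative names, not crewai APIs, and the only fact taken from the commit is that o4-mini should not receive "stop".

# Illustrative sketch, not crewai code: strip unsupported params per model.
UNSUPPORTED_BY_MODEL = {
    "o4-mini": {"stop"},  # the case this commit handles
}

def strip_unsupported_params(model: str, params: dict) -> dict:
    # Drop None values first, mirroring the existing filtering step.
    params = {k: v for k, v in params.items() if v is not None}
    # Then remove any keys the target model does not accept.
    for fragment, banned in UNSUPPORTED_BY_MODEL.items():
        if fragment in model:
            for key in banned:
                params.pop(key, None)
    return params

print(strip_unsupported_params("o4-mini", {"stop": ["END"], "temperature": None}))
# {}  -- "stop" is stripped for o4-mini, and the None-valued "temperature" is dropped

Matching on a substring mirrors the `"o4-mini" in self.model` check in the diff, so provider-prefixed names such as "openai/o4-mini" would also be caught.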


@@ -533,3 +533,19 @@ def test_handle_streaming_tool_calls_no_tools(mock_emit):
         expected_completed_llm_call=1,
         expected_final_chunk_result=response,
     )
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_llm_o4_mini_stop_parameter():
+    """Test that the o4-mini model works correctly without the stop parameter."""
+    llm = LLM(model="o4-mini", stop=["STOP", "END"])
+
+    # The stop words are still stored on the LLM instance
+    assert llm.stop == ["STOP", "END"]
+
+    # ...but they are stripped from the completion params for o4-mini
+    params = llm._prepare_completion_params(messages=[{"role": "user", "content": "Hello"}])
+    assert "stop" not in params
+
+    response = llm.call(messages=[{"role": "user", "content": "Hello, world!"}])
+    assert response is not None
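
From the caller's side, the behavior this test locks in looks roughly like the sketch below. It assumes crewai exposes LLM at the package root and that credentials for the underlying provider are available; in the test suite the VCR cassette stands in for the live request.

from crewai import LLM

# Passing stop words is still allowed; for o4-mini they are simply not
# forwarded to the provider, so the request no longer fails.
llm = LLM(model="o4-mini", stop=["STOP", "END"])
print(llm.call(messages=[{"role": "user", "content": "Say hello."}]))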