From 130ed3481f7ebf6ca05f4f6d8974d89c4efe0859 Mon Sep 17 00:00:00 2001
From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Tue, 22 Apr 2025 16:00:41 +0000
Subject: [PATCH] fix: Remove stop parameter for o4-mini model

Co-Authored-By: Joe Moura <joao@crewai.com>
---
 src/crewai/llm.py | 7 ++++++-
 tests/llm_test.py | 16 ++++++++++++++++
 2 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/src/crewai/llm.py b/src/crewai/llm.py
index 332582744..8d8b5993f 100644
--- a/src/crewai/llm.py
+++ b/src/crewai/llm.py
@@ -358,7 +358,12 @@ class LLM(BaseLLM):
         }
 
         # Remove None values from params
-        return {k: v for k, v in params.items() if v is not None}
+        params = {k: v for k, v in params.items() if v is not None}
+
+        if "o4-mini" in self.model:
+            params.pop("stop", None)
+
+        return params
 
     def _handle_streaming_response(
         self,
diff --git a/tests/llm_test.py b/tests/llm_test.py
index 2bd03306b..de55b98a5 100644
--- a/tests/llm_test.py
+++ b/tests/llm_test.py
@@ -533,3 +533,19 @@ def test_handle_streaming_tool_calls_no_tools(mock_emit):
         expected_completed_llm_call=1,
         expected_final_chunk_result=response,
     )
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_llm_o4_mini_stop_parameter():
+    """Test that o4-mini model works correctly without stop parameter."""
+    llm = LLM(model="o4-mini", stop=["STOP", "END"])
+
+    # Check that stop parameter is set
+    assert llm.stop == ["STOP", "END"]
+
+    params = llm._prepare_completion_params(messages=[{"role": "user", "content": "Hello"}])
+
+    assert "stop" not in params
+
+    response = llm.call(messages=[{"role": "user", "content": "Hello, world!"}])
+    assert response is not None