Fix issue #2843: Exclude stop parameter for models that don't support it

Co-Authored-By: Joe Moura <joao@crewai.com>
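In short: models such as o3, o3-mini, and o4-mini do not accept OpenAI's stop parameter, so it is now only forwarded when the model supports it. The change in one line, as it appears in the first hunk below:

    # Before: "stop" was always included in the call parameters
    "stop": self.stop,
    # After: a real value only when the model supports stop sequences, otherwise None
    "stop": self.stop if self.supports_stop_words() else None,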
@@ -155,7 +155,7 @@ class LLM:
             "temperature": self.temperature,
             "top_p": self.top_p,
             "n": self.n,
-            "stop": self.stop,
+            "stop": self.stop if self.supports_stop_words() else None,
             "max_tokens": self.max_tokens or self.max_completion_tokens,
             "presence_penalty": self.presence_penalty,
             "frequency_penalty": self.frequency_penalty,
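For context, a rough sketch of how this conditional plays out when the request kwargs are assembled. The helper name _prepare_completion_params and the None-filtering step are assumptions, not the actual crewAI code; the test further down, which asserts that 'stop' is absent from the kwargs, suggests None-valued entries are dropped before litellm.completion is called.

    import litellm

    def _prepare_completion_params(llm) -> dict:
        # Hypothetical helper: collect call options and drop None-valued
        # entries so unsupported keys never reach the provider.
        params = {
            "model": llm.model,
            "temperature": llm.temperature,
            "stop": llm.stop if llm.supports_stop_words() else None,
            "max_tokens": llm.max_tokens or llm.max_completion_tokens,
        }
        return {k: v for k, v in params.items() if v is not None}

    # For an o3 model this yields kwargs with no "stop" key at all:
    # response = litellm.completion(messages=messages, **_prepare_completion_params(llm))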
@@ -193,6 +193,12 @@ class LLM:
         return False
 
+    def supports_stop_words(self) -> bool:
+        models_without_stop_support = ["o3", "o3-mini", "o4-mini"]
+
+        for model in models_without_stop_support:
+            if self.model.startswith(model):
+                return False
+
+        try:
+            params = get_supported_openai_params(model=self.model)
+            return "stop" in params
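The hunk above is cut off after the try block. As a self-contained sketch, the full helper might read as follows; get_supported_openai_params does come from litellm, but the except branch and its fallback return value are assumptions, since they are not visible in this diff.

    from litellm import get_supported_openai_params

    def supports_stop_words(model: str) -> bool:
        """Return False for models known to reject the `stop` parameter."""
        models_without_stop_support = ["o3", "o3-mini", "o4-mini"]
        if any(model.startswith(prefix) for prefix in models_without_stop_support):
            return False
        try:
            params = get_supported_openai_params(model=model)
            return params is not None and "stop" in params
        except Exception:
            # Assumed fallback: keep stop words when the capability lookup fails.
            return True

    # supports_stop_words("o3-mini")  -> False
    # supports_stop_words("gpt-4o")   -> True (assuming litellm reports "stop" support)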
@@ -28,3 +28,33 @@ def test_llm_callback_replacement():
     assert usage_metrics_1.successful_requests == 1
     assert usage_metrics_2.successful_requests == 1
     assert usage_metrics_1 == calc_handler_1.token_cost_process.get_summary()
+
+
+def test_supports_stop_words_for_o3_model():
+    """Test that supports_stop_words returns False for o3 model."""
+    llm = LLM(model="o3")
+    assert not llm.supports_stop_words()
+
+
+def test_supports_stop_words_for_o4_mini_model():
+    """Test that supports_stop_words returns False for o4-mini model."""
+    llm = LLM(model="o4-mini")
+    assert not llm.supports_stop_words()
+
+
+@pytest.mark.vcr(filter_headers=["authorization"])
+def test_llm_call_excludes_stop_parameter_for_unsupported_models(monkeypatch):
+    """Test that the LLM.call method excludes the stop parameter for models that don't support it."""
+    def mock_completion(**kwargs):
+        assert 'stop' not in kwargs, "Stop parameter should be excluded for o3 model"
+        return {"choices": [{"message": {"content": "Hello, World!"}}]}
+
+    monkeypatch.setattr("litellm.completion", mock_completion)
+
+    llm = LLM(model="o3")
+    llm.stop = ["STOP"]
+
+    messages = [{"role": "user", "content": "Say 'Hello, World!'"}]
+    response = llm.call(messages)
+
+    assert response == "Hello, World!"