Update litellm dependency to v1.66.3 to fix #2640

Co-Authored-By: Joe Moura <joao@crewai.com>
Author: Devin AI
Date: 2025-04-18 08:44:20 +00:00
Parent: 371f19f3cd
Commit: 989ef138fc

2 changed files with 48 additions and 1 deletion


@@ -11,7 +11,7 @@ dependencies = [
     # Core Dependencies
     "pydantic>=2.4.2",
     "openai>=1.13.3",
-    "litellm==1.60.2",
+    "litellm==1.66.3",
     "instructor>=1.3.3",
     # Text Processing
     "pdfplumber>=0.11.4",


@@ -0,0 +1,47 @@
import pytest
from unittest.mock import MagicMock, patch

from crewai.llm import LLM


def test_llm_call_with_litellm_1_66_3():
    """Test that the LLM class works with litellm v1.66.3+."""
    llm = LLM(
        model="gpt-3.5-turbo",
        temperature=0.7,
        max_tokens=50,
        stop=["STOP"],
        presence_penalty=0.1,
        frequency_penalty=0.1,
    )
    messages = [{"role": "user", "content": "Say 'Hello, World!' and then say STOP"}]

    with patch("litellm.completion") as mocked_completion:
        # Build a minimal mocked litellm response: one choice carrying a message,
        # plus token usage metadata.
        mock_message = MagicMock()
        mock_message.content = "Hello, World! I won't say the stop word."
        mock_choice = MagicMock()
        mock_choice.message = mock_message
        mock_response = MagicMock()
        mock_response.choices = [mock_choice]
        mock_response.usage = {
            "prompt_tokens": 10,
            "completion_tokens": 10,
            "total_tokens": 20,
        }
        mocked_completion.return_value = mock_response

        response = llm.call(messages)

        # The call should go through litellm.completion and surface the mocked content.
        mocked_completion.assert_called_once()
        assert "Hello, World!" in response
        assert "STOP" not in response

        # Every configured parameter should be forwarded to litellm.completion unchanged.
        _, kwargs = mocked_completion.call_args
        assert kwargs["model"] == "gpt-3.5-turbo"
        assert kwargs["temperature"] == 0.7
        assert kwargs["max_tokens"] == 50
        assert kwargs["stop"] == ["STOP"]
        assert kwargs["presence_penalty"] == 0.1
        assert kwargs["frequency_penalty"] == 0.1
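
The new test can be run on its own with pytest's keyword filter, for example:

pytest -k test_llm_call_with_litellm_1_66_3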