mirror of https://github.com/crewAIInc/crewAI.git
synced 2026-01-27 17:18:13 +00:00
Update litellm dependency to v1.66.3 to fix #2640
Co-Authored-By: Joe Moura <joao@crewai.com>
@@ -11,7 +11,7 @@ dependencies = [
     # Core Dependencies
     "pydantic>=2.4.2",
     "openai>=1.13.3",
-    "litellm==1.60.2",
+    "litellm==1.66.3",
     "instructor>=1.3.3",
     # Text Processing
     "pdfplumber>=0.11.4",
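The only change to the dependency list is the litellm pin, bumped from the exact version 1.60.2 to 1.66.3. As a minimal sanity check (not part of the commit; assumes the project environment has been re-synced), the resolved version can be confirmed with the standard library:

from importlib.metadata import version

# Reads the version of the installed litellm distribution; it should match the new pin.
assert version("litellm") == "1.66.3", version("litellm")
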
tests/litellm_update_test.py (new file, 47 lines)
@@ -0,0 +1,47 @@
import pytest
from unittest.mock import MagicMock, patch

from crewai.llm import LLM


def test_llm_call_with_litellm_1_66_3():
    """Test that the LLM class works with litellm v1.66.3+"""
    llm = LLM(
        model="gpt-3.5-turbo",
        temperature=0.7,
        max_tokens=50,
        stop=["STOP"],
        presence_penalty=0.1,
        frequency_penalty=0.1,
    )
    messages = [{"role": "user", "content": "Say 'Hello, World!' and then say STOP"}]

    with patch("litellm.completion") as mocked_completion:
        mock_message = MagicMock()
        mock_message.content = "Hello, World! I won't say the stop word."
        mock_choice = MagicMock()
        mock_choice.message = mock_message
        mock_response = MagicMock()
        mock_response.choices = [mock_choice]
        mock_response.usage = {
            "prompt_tokens": 10,
            "completion_tokens": 10,
            "total_tokens": 20,
        }

        mocked_completion.return_value = mock_response

        response = llm.call(messages)

        mocked_completion.assert_called_once()

        assert "Hello, World!" in response
        assert "STOP" not in response

        _, kwargs = mocked_completion.call_args
        assert kwargs["model"] == "gpt-3.5-turbo"
        assert kwargs["temperature"] == 0.7
        assert kwargs["max_tokens"] == 50
        assert kwargs["stop"] == ["STOP"]
        assert kwargs["presence_penalty"] == 0.1
        assert kwargs["frequency_penalty"] == 0.1
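
The kwargs assertions spell out the contract the test relies on: LLM.call is expected to forward the configured generation parameters to litellm.completion and return the text of the first choice. A rough sketch of that call path, inferred from the test's expectations rather than taken from the crewAI implementation:

import litellm

def forward_call_sketch(params: dict, messages: list[dict]) -> str:
    # Pass the configured kwargs (model, temperature, stop, ...) through unchanged,
    # then return the generated text of the first choice, as the assertions expect.
    response = litellm.completion(messages=messages, **params)
    return response.choices[0].message.content

The test runs in isolation with pytest, e.g. pytest tests/litellm_update_test.py -q.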