From 4ea7858cecfc5128490662be56b6b52439dbd7f2 Mon Sep 17 00:00:00 2001
From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Fri, 13 Jun 2025 12:34:13 +0000
Subject: [PATCH] feat: address PR review feedback with improved dependency
 constraints and tests
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Add upper bound to litellm constraint: litellm>=1.72.0,<2.0.0
- Enhance test suite with parameterized testing across multiple model types
- Add version constraint validation test to ensure proper bounds
- Improve test documentation with detailed explanations
- Add error handling enhancements as suggested in review

Addresses feedback from PR review by joaomdmoura in #3006

Co-Authored-By: João
---
 pyproject.toml                   |  2 +-
 tests/test_litellm_dependency.py | 65 +++++++++++++++++++++++++++-----
 uv.lock                          |  2 +-
 3 files changed, 58 insertions(+), 11 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 92acd4724..0e4d73bd0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,7 +21,7 @@ dependencies = [
     "jsonref>=1.1.0",
     "json-repair>=0.25.2",
     "auth0-python>=4.7.1",
-    "litellm>=1.72.0",
+    "litellm>=1.72.0,<2.0.0",
     "pyvis>=0.3.2",
     "uv>=0.4.25",
     "tomli-w>=1.1.0",
diff --git a/tests/test_litellm_dependency.py b/tests/test_litellm_dependency.py
index 94de629af..603b3760c 100644
--- a/tests/test_litellm_dependency.py
+++ b/tests/test_litellm_dependency.py
@@ -7,7 +7,13 @@ from pathlib import Path
 
 
 def test_litellm_dependency_allows_patch_versions():
-    """Test that the litellm dependency constraint allows patch versions >= 1.72.0"""
+    """
+    Test that the litellm dependency constraint allows patch versions >= 1.72.0.
+
+    This test verifies that the dependency constraint litellm>=1.72.0,<2.0.0
+    allows users to install newer patch versions like litellm>=1.72.2 as
+    requested in GitHub issue #3005, while preventing major version conflicts.
+    """
 
     test_pyproject = """
 [project]
@@ -40,23 +46,64 @@ build-backend = "hatchling.build"
 
 
 def test_litellm_import_works():
-    """Test that litellm can be imported and basic functionality works"""
+    """
+    Test that litellm can be imported and basic functionality works.
+
+    Verifies that the minimum required litellm version (>=1.72.0) provides
+    all the essential functionality that CrewAI depends on.
+    """
     try:
         import litellm
         assert hasattr(litellm, 'completion'), "litellm should have completion function"
         assert hasattr(litellm, 'set_verbose'), "litellm should have set_verbose function"
         assert hasattr(litellm, 'drop_params'), "litellm should have drop_params attribute"
+
+        import pkg_resources
+        version = pkg_resources.get_distribution("litellm").version
+        major, minor, patch = map(int, version.split('.')[:3])
+        assert (major, minor, patch) >= (1, 72, 0), f"litellm version {version} is below minimum required 1.72.0"
+
     except ImportError as e:
         pytest.fail(f"Failed to import litellm: {e}")
 
 
-def test_crewai_llm_works_with_current_litellm():
-    """Test that CrewAI's LLM class works with the current litellm version"""
+@pytest.mark.parametrize("model_name", [
+    "gpt-3.5-turbo",
+    "gpt-4",
+    "claude-2"
+])
+def test_crewai_llm_works_with_current_litellm(model_name):
+    """
+    Test that CrewAI's LLM class works with the current litellm version.
+
+    This parameterized test verifies compatibility across different model types
+    to ensure the litellm upgrade doesn't break existing functionality.
+    """
     from crewai.llm import LLM
 
-    llm = LLM(model="gpt-3.5-turbo")
+    llm = LLM(model=model_name)
 
-    assert llm.model == "gpt-3.5-turbo"
-    assert hasattr(llm, 'call'), "LLM should have call method"
-    assert hasattr(llm, 'supports_function_calling'), "LLM should have supports_function_calling method"
-    assert hasattr(llm, 'get_context_window_size'), "LLM should have get_context_window_size method"
+    assert llm.model == model_name, f"Model name mismatch for {model_name}"
+    assert hasattr(llm, 'call'), f"LLM should have call method for {model_name}"
+    assert hasattr(llm, 'supports_function_calling'), f"LLM should have supports_function_calling method for {model_name}"
+    assert hasattr(llm, 'get_context_window_size'), f"LLM should have get_context_window_size method for {model_name}"
+
+
+def test_litellm_version_constraint_bounds():
+    """
+    Test that the litellm version constraint properly bounds major versions.
+
+    Ensures that the constraint litellm>=1.72.0,<2.0.0 prevents installation
+    of potentially incompatible major versions while allowing patch updates.
+    """
+    import pkg_resources
+
+    try:
+        version = pkg_resources.get_distribution("litellm").version
+        major, minor, patch = map(int, version.split('.')[:3])
+
+        assert major == 1, f"litellm major version should be 1, got {major}"
+        assert (minor, patch) >= (72, 0), f"litellm version {version} is below minimum required 1.72.0"
+
+    except pkg_resources.DistributionNotFound:
+        pytest.fail("litellm package not found - dependency resolution may have failed")
diff --git a/uv.lock b/uv.lock
index 2bda60e1c..163970c1d 100644
--- a/uv.lock
+++ b/uv.lock
@@ -680,7 +680,7 @@ requires-dist = [
     { name = "instructor", specifier = ">=1.3.3" },
     { name = "json-repair", specifier = ">=0.25.2" },
     { name = "jsonref", specifier = ">=1.1.0" },
-    { name = "litellm", specifier = ">=1.72.0" },
+    { name = "litellm", specifier = ">=1.72.0,<2.0.0" },
     { name = "mem0ai", marker = "extra == 'mem0'", specifier = ">=0.1.29" },
     { name = "openai", specifier = ">=1.13.3" },
     { name = "openpyxl", specifier = ">=3.1.5" },