Compare commits

..

5 Commits

Author SHA1 Message Date
Lucas Gomide
c9689b8878 Merge branch 'main' into lg-downgrade-litellm 2025-04-29 12:49:23 -03:00
Greyson LaLonde
25c8155609 chore: add missing __init__.py files (#2719)
Add `__init__.py` files to 20 directories so they conform to Python package conventions. This ensures the directories are recognized as regular packages, enabling cleaner imports (a short sketch follows this entry).
2025-04-29 07:35:26 -07:00
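
Each added file is a one-line docstring module, as the hunks further down show. A minimal sketch of how to confirm a directory now resolves as a regular package rather than an implicit namespace package; the `crewai.knowledge` path is an assumption taken from the docstrings below:

```python
import importlib

# A regular package (directory containing __init__.py) exposes __file__;
# an implicit namespace package reports __file__ as None instead.
pkg = importlib.import_module("crewai.knowledge")  # assumed package path
print(pkg.__file__)  # .../crewai/knowledge/__init__.py
```
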
Lucas Gomide
73b15b5d32 Merge branch 'main' into lg-downgrade-litellm 2025-04-29 11:23:51 -03:00
Vini Brasil
55b07506c2 Remove logging setting from global context (#2720)
This commit fixes a bug where changing the logging level would be overridden
by `src/crewai/project/crew_base.py`. For example, the following snippet
at the top of a crew or flow would not work:

```python
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
```

Crews and flows should be able to set their own log level without being
overridden by CrewAI library code (see the sketch after this entry).
2025-04-29 11:21:41 -03:00
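
With the global `logging.basicConfig(level=logging.WARNING)` call removed from `crew_base.py` (see the hunk further down), configuration follows the usual split of responsibilities. A minimal sketch, assuming crewAI modules obtain their loggers via `logging.getLogger(__name__)`:

```python
import logging

# Application code configures logging once, at the entry point; the
# library no longer overrides this with its own basicConfig call.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)

# Optionally tune only the library's loggers, which sit under the
# "crewai" namespace when modules call logging.getLogger(__name__).
logging.getLogger("crewai").setLevel(logging.DEBUG)
```
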
Lucas Gomide
4efa935c1a build: downgrade litellm to 1.67.1
Version 1.67.2 is not compatible with Windows.
2025-04-28 19:44:41 -03:00
28 changed files with 2285 additions and 2323 deletions

View File

@@ -11,7 +11,7 @@ dependencies = [
    # Core Dependencies
    "pydantic>=2.4.2",
    "openai>=1.13.3",
-   "litellm==1.67.2",
+   "litellm==1.67.1",
    "instructor>=1.3.3",
    # Text Processing
    "pdfplumber>=0.11.4",

View File

@@ -1,8 +1,5 @@
 import warnings
-from crewai.patches.litellm_patch import apply_patches
-apply_patches()
 from crewai.agent import Agent
 from crewai.crew import Crew
 from crewai.crews.crew_output import CrewOutput

View File

@@ -0,0 +1 @@
"""LangGraph adapter for crewAI."""

View File

@@ -0,0 +1 @@
"""OpenAI agent adapters for crewAI."""

View File

@@ -0,0 +1 @@
"""Poem crew template."""

View File

@@ -0,0 +1 @@
"""Knowledge utilities for crewAI."""

View File

@@ -0,0 +1 @@
"""LLM implementations for crewAI."""

View File

@@ -0,0 +1 @@
"""Third-party LLM implementations for crewAI."""

View File

@@ -0,0 +1 @@
"""Memory storage implementations for crewAI."""

View File

@@ -1 +0,0 @@
# This file is intentionally left empty to make the directory a package

View File

@@ -1,73 +0,0 @@
"""
Patch for litellm to fix UnicodeDecodeError on Windows systems.
This patch ensures that all file open operations in litellm use UTF-8 encoding,
which prevents UnicodeDecodeError when loading JSON files on Windows systems
where the default encoding is cp1252 or cp1254.
WARNING: This patch monkey-patches the built-in open() function globally on Windows.
It forces UTF-8 encoding on all text-mode file opens, which could affect third-party
libraries expecting default platform encodings. Apply with caution and test comprehensively.
"""
import builtins
import functools
import io
import json
import logging
import os
import sys
from importlib import resources
from typing import Any, Optional, Union
logger = logging.getLogger(__name__)
def apply_patches():
"""
Apply patches to fix litellm encoding issues on Windows systems.
This function only applies the patch on Windows platforms where the issue occurs.
It stores the original open function for proper restoration later.
"""
# Only apply patch on Windows systems
if sys.platform != "win32":
logger.debug("Skipping litellm encoding patches on non-Windows platform")
return
if hasattr(builtins, '_original_open'):
logger.debug("Litellm encoding patches already applied")
return
logger.debug("Applying litellm encoding patches on Windows")
builtins._original_open = builtins.open
@functools.wraps(builtins._original_open)
def patched_open(
file, mode='r', buffering=-1, encoding=None,
errors=None, newline=None, closefd=True, opener=None
):
if 'r' in mode and encoding is None and 'b' not in mode:
encoding = 'utf-8'
return builtins._original_open(
file, mode, buffering, encoding,
errors, newline, closefd, opener
)
builtins.open = patched_open
logger.debug("Successfully applied litellm encoding patches")
def remove_patches():
"""
Remove all patches (for testing purposes).
This function properly restores the original open function if it was patched.
"""
if hasattr(builtins, '_original_open'):
builtins.open = builtins._original_open
delattr(builtins, '_original_open')
logger.debug("Removed litellm encoding patches")

View File

@@ -8,8 +8,6 @@ from dotenv import load_dotenv
 load_dotenv()
-logging.basicConfig(level=logging.WARNING)
 T = TypeVar("T", bound=type)
 """Base decorator for creating crew classes with configuration and function management."""

View File

@@ -0,0 +1 @@
"""Agent tools for crewAI."""

View File

@@ -0,0 +1 @@
"""Evaluators for crewAI."""

View File

@@ -0,0 +1 @@
"""Event utilities for crewAI."""

View File

@@ -0,0 +1 @@
"""Exceptions for crewAI."""

View File

@@ -0,0 +1 @@
"""Tests for agent adapters."""

View File

@@ -0,0 +1 @@
"""Tests for agent builder."""

View File

@@ -0,0 +1 @@
"""Tests for CLI deploy."""

View File

@@ -1,51 +0,0 @@
import json
import os
import sys
import unittest
from unittest.mock import mock_open, patch

from crewai.llm import LLM
from crewai.patches.litellm_patch import apply_patches, remove_patches


class TestLitellmEncoding(unittest.TestCase):
    """Test that the litellm encoding patch works correctly."""

    def setUp(self):
        """Set up the test environment by applying the patch."""
        apply_patches()

    def tearDown(self):
        """Clean up the test environment by removing the patch."""
        remove_patches()

    def test_json_load_with_utf8_encoding(self):
        """Test that json.load is called with UTF-8 encoding."""
        mock_content = '{"test": "日本語テキスト"}'  # Japanese text that would fail with cp1252
        with patch('builtins.open', mock_open(read_data=mock_content)):
            import litellm
            self.assertTrue(hasattr(litellm.utils, 'json_data'))
            with open('test.json', 'r') as f:
                data = json.load(f)
            self.assertEqual(data['test'], '日本語テキスト')

    def test_without_patch(self):
        """Test that demonstrates the issue without the patch."""
        remove_patches()
        mock_content = '{"test": "日本語テキスト"}'  # Japanese text that would fail with cp1252
        with patch('sys.platform', 'win32'):
            mock_open_without_encoding = mock_open(read_data=mock_content)
            mock_open_without_encoding.side_effect = UnicodeDecodeError('cp1252', b'\x81', 0, 1, 'invalid start byte')
            with patch('builtins.open', mock_open_without_encoding):
                with self.assertRaises(UnicodeDecodeError):
                    with open('test.json', 'r') as f:
                        json.load(f)
        apply_patches()

tests/memory/__init__.py Normal file
View File

@@ -0,0 +1 @@
"""Tests for memory."""

View File

@@ -0,0 +1 @@
"""Tests for storage."""

tests/tools/__init__.py Normal file
View File

@@ -0,0 +1 @@
"""Tests for tools."""

View File

@@ -0,0 +1 @@
"""Tests for utilities."""

View File

@@ -0,0 +1 @@
"""Tests for evaluators."""

View File

@@ -0,0 +1 @@
"""Tests for events."""

uv.lock generated (4456 lines changed)

File diff suppressed because it is too large.