Feat/poetry to uv migration (#1406)

* feat: Start migrating to UV

* feat: add uv to flows

* feat: update docs on Poetry -> uv

* feat: update docs and uv.lock

* feat: update tests and github CI

* feat: run ruff format

* feat: update typechecking

* feat: fix type checking

* feat: update python version

* feat: type checking in CI

* feat: adapt uv command to run the tool repo

* Adapt tool build command to uv

* feat: update logic to allow only projects with a crew to be deployed

* feat: add uv to tools

* fix: tests

* fix: remove breakpoint

* fix: test

* feat: add crewai update to migrate from poetry to uv

* fix: tests

* feat: add validation for ^ character in pyproject

* feat: add run_crew to pyproject if it doesn't exist

* feat: add validation for poetry migration (see the sketch after the commit metadata below)

* fix: warning

---------

Co-authored-by: Vinicius Brasil <vini@hey.com>
Author: Eduardo Chiarotti
Date: 2024-10-11 19:11:27 -03:00 (committed by GitHub)
Parent: 6534a909d6
Commit: 7f387dd7c3
33 changed files with 5753 additions and 314 deletions
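
The validation bullets above describe checks against the project's pyproject.toml during the Poetry-to-uv migration. Below is a minimal, hypothetical sketch of that kind of check; the helper names and the exact rules are assumptions for illustration, not the actual `crewai update` implementation.

import tomllib  # stdlib on Python 3.11+; the tomli backport offers the same API on 3.10
from pathlib import Path


def find_caret_constraints(pyproject_path: str = "pyproject.toml") -> list[str]:
    # Hypothetical helper: report dependency strings that still use Poetry-style "^"
    # constraints, which PEP 508 dependency lists (and uv) do not accept.
    data = tomllib.loads(Path(pyproject_path).read_text())
    deps = data.get("project", {}).get("dependencies", [])
    return [dep for dep in deps if "^" in dep]


def has_run_crew_script(pyproject_path: str = "pyproject.toml") -> bool:
    # Hypothetical helper: check whether a [project.scripts] run_crew entry exists.
    data = tomllib.loads(Path(pyproject_path).read_text())
    return "run_crew" in data.get("project", {}).get("scripts", {})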


@@ -1,21 +1,21 @@
"""Test Agent creation and execution basic functionality."""
import os
from unittest import mock
from unittest.mock import patch
import os
import pytest
from crewai_tools import tool
from crewai import Agent, Crew, Task
from crewai.agents.cache import CacheHandler
from crewai.agents.crew_agent_executor import CrewAgentExecutor
from crewai.agents.parser import AgentAction, CrewAgentParser, OutputParserException
from crewai.llm import LLM
from crewai.agents.parser import CrewAgentParser, OutputParserException
from crewai.tools.tool_calling import InstructorToolCalling
from crewai.tools.tool_usage import ToolUsage
from crewai.tools.tool_usage_events import ToolUsageFinished
from crewai.utilities import RPMController
from crewai_tools import tool
from crewai.agents.parser import AgentAction
from crewai.utilities.events import Emitter
@@ -73,7 +73,7 @@ def test_agent_creation():
def test_agent_default_values():
agent = Agent(role="test role", goal="test goal", backstory="test backstory")
assert agent.llm.model == "gpt-4o"
assert agent.llm.model == "gpt-4o-mini"
assert agent.allow_delegation is False
@@ -116,6 +116,7 @@ def test_custom_llm_temperature_preservation():
@pytest.mark.vcr(filter_headers=["authorization"])
def test_agent_execute_task():
from langchain_openai import ChatOpenAI
from crewai import Task
agent = Agent(
@@ -206,7 +207,7 @@ def test_logging_tool_usage():
verbose=True,
)
assert agent.llm.model == "gpt-4o"
assert agent.llm.model == "gpt-4o-mini"
assert agent.tools_handler.last_used_tool == {}
task = Task(
description="What is 3 times 4?",
@@ -602,6 +603,7 @@ def test_agent_respect_the_max_rpm_set(capsys):
@pytest.mark.vcr(filter_headers=["authorization"])
def test_agent_respect_the_max_rpm_set_over_crew_rpm(capsys):
from unittest.mock import patch
from crewai_tools import tool
@tool
@@ -693,6 +695,7 @@ def test_agent_without_max_rpm_respet_crew_rpm(capsys):
@pytest.mark.vcr(filter_headers=["authorization"])
def test_agent_error_on_parsing_tool(capsys):
from unittest.mock import patch
from crewai_tools import tool
@tool
@@ -855,7 +858,9 @@ def test_agent_function_calling_llm():
tasks = [essay]
crew = Crew(agents=[agent1], tasks=tasks)
from unittest.mock import patch
import instructor
from crewai.tools.tool_usage import ToolUsage
with patch.object(
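
The substantive change in this hunk, beyond import reordering, is the default model assertion: agents created without an explicit LLM are now expected to report "gpt-4o-mini" rather than "gpt-4o". A minimal check mirroring the updated assertions:

from crewai import Agent

agent = Agent(role="test role", goal="test goal", backstory="test backstory")
assert agent.llm.model == "gpt-4o-mini"  # updated default, per the assertions above
assert agent.allow_delegation is False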


@@ -1,12 +1,12 @@
import pytest
import requests
import sys
import unittest
from io import StringIO
from requests.exceptions import JSONDecodeError
from unittest.mock import MagicMock, Mock, patch
import pytest
import requests
from requests.exceptions import JSONDecodeError
from crewai.cli.deploy.main import DeployCommand
from crewai.cli.utils import parse_toml
@@ -228,13 +228,11 @@ class TestDeployCommand(unittest.TestCase):
"builtins.open",
new_callable=unittest.mock.mock_open,
read_data="""
[tool.poetry]
[project]
name = "test_project"
version = "0.1.0"
[tool.poetry.dependencies]
python = "^3.10"
crewai = { extras = ["tools"], version = ">=0.51.0,<1.0.0" }
requires-python = ">=3.10,<=3.13"
dependencies = ["crewai"]
""",
)
def test_get_project_name_python_310(self, mock_open):
@@ -248,13 +246,11 @@ class TestDeployCommand(unittest.TestCase):
"builtins.open",
new_callable=unittest.mock.mock_open,
read_data="""
[tool.poetry]
[project]
name = "test_project"
version = "0.1.0"
[tool.poetry.dependencies]
python = "^3.11"
crewai = { extras = ["tools"], version = ">=0.51.0,<1.0.0" }
requires-python = ">=3.10,<=3.13"
dependencies = ["crewai"]
""",
)
def test_get_project_name_python_311_plus(self, mock_open):
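
These deploy tests swap the Poetry-specific tables for the standard PEP 621 [project] table, so the project name and Python range are now read from [project] instead of [tool.poetry]. A small sketch of parsing that layout with the standard library (the real code goes through crewai.cli.utils.parse_toml, whose internals are not shown here):

import tomllib  # stdlib on Python 3.11+

PYPROJECT = """
[project]
name = "test_project"
version = "0.1.0"
requires-python = ">=3.10,<=3.13"
dependencies = ["crewai"]
"""

data = tomllib.loads(PYPROJECT)
assert data["project"]["name"] == "test_project"  # what get_project_name() must return
assert data["project"]["requires-python"] == ">=3.10,<=3.13"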


@@ -18,12 +18,12 @@ from crewai.cli import evaluate_crew
def test_crew_success(mock_subprocess_run, n_iterations, model):
"""Test the crew function for successful execution."""
mock_subprocess_run.return_value = subprocess.CompletedProcess(
args=f"poetry run test {n_iterations} {model}", returncode=0
args=f"uv run test {n_iterations} {model}", returncode=0
)
result = evaluate_crew.evaluate_crew(n_iterations, model)
mock_subprocess_run.assert_called_once_with(
["poetry", "run", "test", str(n_iterations), model],
["uv", "run", "test", str(n_iterations), model],
capture_output=False,
text=True,
check=True,
@@ -55,14 +55,14 @@ def test_test_crew_called_process_error(mock_subprocess_run, click):
n_iterations = 5
mock_subprocess_run.side_effect = subprocess.CalledProcessError(
returncode=1,
cmd=["poetry", "run", "test", str(n_iterations), "gpt-4o"],
cmd=["uv", "run", "test", str(n_iterations), "gpt-4o"],
output="Error",
stderr="Some error occurred",
)
evaluate_crew.evaluate_crew(n_iterations, "gpt-4o")
mock_subprocess_run.assert_called_once_with(
["poetry", "run", "test", "5", "gpt-4o"],
["uv", "run", "test", "5", "gpt-4o"],
capture_output=False,
text=True,
check=True,
@@ -70,7 +70,7 @@ def test_test_crew_called_process_error(mock_subprocess_run, click):
click.echo.assert_has_calls(
[
mock.call.echo(
"An error occurred while testing the crew: Command '['poetry', 'run', 'test', '5', 'gpt-4o']' returned non-zero exit status 1.",
"An error occurred while testing the crew: Command '['uv', 'run', 'test', '5', 'gpt-4o']' returned non-zero exit status 1.",
err=True,
),
mock.call.echo("Error", err=True),
@@ -87,7 +87,7 @@ def test_test_crew_unexpected_exception(mock_subprocess_run, click):
evaluate_crew.evaluate_crew(n_iterations, "gpt-4o")
mock_subprocess_run.assert_called_once_with(
["poetry", "run", "test", "5", "gpt-4o"],
["uv", "run", "test", "5", "gpt-4o"],
capture_output=False,
text=True,
check=True,
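
The evaluation tests now expect the CLI to shell out to `uv run test <iterations> <model>` with the same subprocess flags as before. A rough sketch of the call shape these assertions imply (the actual evaluate_crew implementation may differ):

import subprocess

import click


def evaluate_crew_sketch(n_iterations: int, model: str) -> None:
    # Command shape asserted by the tests above: uv run test <n> <model>
    command = ["uv", "run", "test", str(n_iterations), model]
    try:
        subprocess.run(command, capture_output=False, text=True, check=True)
    except subprocess.CalledProcessError as e:
        # str(e) yields "Command '[...]' returned non-zero exit status 1.",
        # which matches the message the test expects.
        click.echo(f"An error occurred while testing the crew: {e}", err=True)
        click.echo(e.output, err=True)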


@@ -1,13 +1,16 @@
import os
import tempfile
import unittest
import unittest.mock
import os
from contextlib import contextmanager
from io import StringIO
from unittest import mock
from unittest.mock import MagicMock, patch
from pytest import raises
from crewai.cli.tools.main import ToolCommand
from io import StringIO
from unittest.mock import patch, MagicMock
@contextmanager
def in_temp_dir():
@@ -19,6 +22,7 @@ def in_temp_dir():
finally:
os.chdir(original_dir)
@patch("crewai.cli.tools.main.subprocess.run")
def test_create_success(mock_subprocess):
with in_temp_dir():
@@ -38,9 +42,7 @@ def test_create_success(mock_subprocess):
)
assert os.path.isfile(os.path.join("test_tool", "src", "test_tool", "tool.py"))
with open(
os.path.join("test_tool", "src", "test_tool", "tool.py"), "r"
) as f:
with open(os.path.join("test_tool", "src", "test_tool", "tool.py"), "r") as f:
content = f.read()
assert "class TestTool" in content
@@ -49,6 +51,7 @@ def test_create_success(mock_subprocess):
assert "Creating custom tool test_tool..." in output
@patch("crewai.cli.tools.main.subprocess.run")
@patch("crewai.cli.plus_api.PlusAPI.get_tool")
def test_install_success(mock_get, mock_subprocess_run):
@@ -67,9 +70,15 @@ def test_install_success(mock_get, mock_subprocess_run):
tool_command.install("sample-tool")
output = fake_out.getvalue()
mock_get.assert_called_once_with("sample-tool")
mock_get.assert_has_calls([mock.call("sample-tool"), mock.call().json()])
mock_subprocess_run.assert_any_call(
["poetry", "add", "--source", "crewai-sample-repo", "sample-tool"],
[
"uv",
"add",
"--extra-index-url",
"https://app.crewai.com/pypi/sample-repo",
"sample-tool",
],
capture_output=False,
text=True,
check=True,
@@ -77,6 +86,7 @@ def test_install_success(mock_get, mock_subprocess_run):
assert "Succesfully installed sample-tool" in output
@patch("crewai.cli.plus_api.PlusAPI.get_tool")
def test_install_tool_not_found(mock_get):
mock_get_response = MagicMock()
@@ -95,6 +105,7 @@ def test_install_tool_not_found(mock_get):
mock_get.assert_called_once_with("non-existent-tool")
assert "No tool found with this name" in output
@patch("crewai.cli.plus_api.PlusAPI.get_tool")
def test_install_api_error(mock_get):
mock_get_response = MagicMock()
@@ -113,15 +124,16 @@ def test_install_api_error(mock_get):
mock_get.assert_called_once_with("error-tool")
assert "Failed to get tool details" in output
@patch("crewai.cli.tools.main.git.Repository.is_synced", return_value=False)
def test_publish_when_not_in_sync(mock_is_synced):
with patch("sys.stdout", new=StringIO()) as fake_out, \
raises(SystemExit):
with patch("sys.stdout", new=StringIO()) as fake_out, raises(SystemExit):
tool_command = ToolCommand()
tool_command.publish(is_public=True)
assert "Local changes need to be resolved before publishing" in fake_out.getvalue()
@patch("crewai.cli.tools.main.get_project_name", return_value="sample-tool")
@patch("crewai.cli.tools.main.get_project_version", return_value="1.0.0")
@patch("crewai.cli.tools.main.get_project_description", return_value="A sample tool")
@@ -156,7 +168,7 @@ def test_publish_when_not_in_sync_and_force(
mock_get_project_version.assert_called_with(require=True)
mock_get_project_description.assert_called_with(require=False)
mock_subprocess_run.assert_called_with(
["poetry", "build", "-f", "sdist", "--output", unittest.mock.ANY],
["uv", "build", "--sdist", "--out-dir", unittest.mock.ANY],
check=True,
capture_output=False,
)
@@ -169,6 +181,7 @@ def test_publish_when_not_in_sync_and_force(
encoded_file=unittest.mock.ANY,
)
@patch("crewai.cli.tools.main.get_project_name", return_value="sample-tool")
@patch("crewai.cli.tools.main.get_project_version", return_value="1.0.0")
@patch("crewai.cli.tools.main.get_project_description", return_value="A sample tool")
@@ -203,7 +216,7 @@ def test_publish_success(
mock_get_project_version.assert_called_with(require=True)
mock_get_project_description.assert_called_with(require=False)
mock_subprocess_run.assert_called_with(
["poetry", "build", "-f", "sdist", "--output", unittest.mock.ANY],
["uv", "build", "--sdist", "--out-dir", unittest.mock.ANY],
check=True,
capture_output=False,
)
@@ -216,6 +229,7 @@ def test_publish_success(
encoded_file=unittest.mock.ANY,
)
@patch("crewai.cli.tools.main.get_project_name", return_value="sample-tool")
@patch("crewai.cli.tools.main.get_project_version", return_value="1.0.0")
@patch("crewai.cli.tools.main.get_project_description", return_value="A sample tool")
@@ -254,6 +268,7 @@ def test_publish_failure(
assert "Failed to complete operation" in output
assert "Name is already taken" in output
@patch("crewai.cli.tools.main.get_project_name", return_value="sample-tool")
@patch("crewai.cli.tools.main.get_project_version", return_value="1.0.0")
@patch("crewai.cli.tools.main.get_project_description", return_value="A sample tool")
@@ -291,54 +306,3 @@ def test_publish_api_error(
mock_publish.assert_called_once()
assert "Request to Enterprise API failed" in output
@patch("crewai.cli.plus_api.PlusAPI.login_to_tool_repository")
@patch("crewai.cli.tools.main.subprocess.run")
def test_login_success(mock_subprocess_run, mock_login):
mock_login_response = MagicMock()
mock_login_response.status_code = 200
mock_login_response.json.return_value = {
"repositories": [
{
"handle": "tools",
"url": "https://example.com/repo",
}
],
"credential": {"username": "user", "password": "pass"},
}
mock_login.return_value = mock_login_response
mock_subprocess_run.return_value = MagicMock(stderr=None)
tool_command = ToolCommand()
with patch("sys.stdout", new=StringIO()) as fake_out:
tool_command.login()
output = fake_out.getvalue()
mock_login.assert_called_once()
mock_subprocess_run.assert_any_call(
[
"poetry",
"source",
"add",
"--priority=explicit",
"crewai-tools",
"https://example.com/repo",
],
text=True,
check=True,
)
mock_subprocess_run.assert_any_call(
[
"poetry",
"config",
"http-basic.crewai-tools",
"user",
"pass",
],
capture_output=False,
text=True,
check=True,
)
assert "Succesfully authenticated to the tool repository" in output


@@ -8,7 +8,7 @@ from crewai.cli.train_crew import train_crew
def test_train_crew_positive_iterations(mock_subprocess_run):
n_iterations = 5
mock_subprocess_run.return_value = subprocess.CompletedProcess(
args=["poetry", "run", "train", str(n_iterations)],
args=["uv", "run", "train", str(n_iterations)],
returncode=0,
stdout="Success",
stderr="",
@@ -17,7 +17,7 @@ def test_train_crew_positive_iterations(mock_subprocess_run):
train_crew(n_iterations, "trained_agents_data.pkl")
mock_subprocess_run.assert_called_once_with(
["poetry", "run", "train", str(n_iterations), "trained_agents_data.pkl"],
["uv", "run", "train", str(n_iterations), "trained_agents_data.pkl"],
capture_output=False,
text=True,
check=True,
@@ -48,14 +48,14 @@ def test_train_crew_called_process_error(mock_subprocess_run, click):
n_iterations = 5
mock_subprocess_run.side_effect = subprocess.CalledProcessError(
returncode=1,
cmd=["poetry", "run", "train", str(n_iterations)],
cmd=["uv", "run", "train", str(n_iterations)],
output="Error",
stderr="Some error occurred",
)
train_crew(n_iterations, "trained_agents_data.pkl")
mock_subprocess_run.assert_called_once_with(
["poetry", "run", "train", str(n_iterations), "trained_agents_data.pkl"],
["uv", "run", "train", str(n_iterations), "trained_agents_data.pkl"],
capture_output=False,
text=True,
check=True,
@@ -63,7 +63,7 @@ def test_train_crew_called_process_error(mock_subprocess_run, click):
click.echo.assert_has_calls(
[
mock.call.echo(
"An error occurred while training the crew: Command '['poetry', 'run', 'train', '5']' returned non-zero exit status 1.",
"An error occurred while training the crew: Command '['uv', 'run', 'train', '5']' returned non-zero exit status 1.",
err=True,
),
mock.call.echo("Error", err=True),
@@ -79,7 +79,7 @@ def test_train_crew_unexpected_exception(mock_subprocess_run, click):
train_crew(n_iterations, "trained_agents_data.pkl")
mock_subprocess_run.assert_called_once_with(
["poetry", "run", "train", str(n_iterations), "trained_agents_data.pkl"],
["uv", "run", "train", str(n_iterations), "trained_agents_data.pkl"],
capture_output=False,
text=True,
check=True,
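
Training follows the same pattern as evaluation: the CLI is expected to invoke `uv run train <iterations> <filename>` and surface subprocess failures as a friendly message. A minimal reproduction of the failure path the test simulates; note that str(CalledProcessError) produces exactly the "returned non-zero exit status 1." wording asserted above:

import subprocess

# Assumes uv is on PATH; inside a crew project "train" maps to a [project.scripts] entry.
cmd = ["uv", "run", "train", "5", "trained_agents_data.pkl"]
try:
    subprocess.run(cmd, capture_output=False, text=True, check=True)
except subprocess.CalledProcessError as exc:
    print(f"An error occurred while training the crew: {exc}")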