feat: add experimental folder for beta features

Author: Lucas Gomide
Date: 2025-07-11 14:27:13 -03:00
Parent: e3b044c044
Commit: a810c31ced
26 changed files with 125 additions and 70 deletions
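The change moves the evaluation package under the new crewai.experimental namespace, so the test imports and mock patch targets below are updated to the new paths. A minimal sketch of the import change that downstream code would mirror, assuming the same public names are exposed under the new location:

# Before (pre-experimental layout):
# from crewai.evaluation.experiment.runner import ExperimentRunner

# After this commit, the evaluation modules live under the experimental namespace:
from crewai.experimental.evaluation.experiment.runner import ExperimentRunner
from crewai.experimental.evaluation.experiment.result import ExperimentResults
from crewai.experimental.evaluation.base_evaluator import MetricCategory, EvaluationScore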


@@ -2,10 +2,10 @@ import pytest
 from unittest.mock import MagicMock, patch
 from crewai.crew import Crew
-from crewai.evaluation.experiment.runner import ExperimentRunner
-from crewai.evaluation.experiment.result import ExperimentResults
-from crewai.evaluation.evaluation_display import AgentAggregatedEvaluationResult
-from crewai.evaluation.base_evaluator import MetricCategory, EvaluationScore
+from crewai.experimental.evaluation.experiment.runner import ExperimentRunner
+from crewai.experimental.evaluation.experiment.result import ExperimentResults
+from crewai.experimental.evaluation.evaluation_display import AgentAggregatedEvaluationResult
+from crewai.experimental.evaluation.base_evaluator import MetricCategory, EvaluationScore

 class TestExperimentRunner:
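The @patch targets in the hunks below change for the same reason: unittest.mock patches a name where it is looked up, so the target string must follow the module's new location. A minimal sketch, assuming the runner module imports create_default_evaluator at module level:

from unittest.mock import MagicMock, patch

# Patch the name in the module that uses it (the runner), not where it is defined.
@patch('crewai.experimental.evaluation.experiment.runner.create_default_evaluator')
def test_runner_uses_default_evaluator(mock_create_evaluator):
    mock_create_evaluator.return_value = MagicMock()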
@@ -44,7 +44,7 @@ class TestExperimentRunner:
         return {"Test Agent": agent_evaluation}

-    @patch('crewai.evaluation.experiment.runner.create_default_evaluator')
+    @patch('crewai.experimental.evaluation.experiment.runner.create_default_evaluator')
     def test_run_success(self, mock_create_evaluator, mock_crew, mock_evaluator_results):
         dataset = [
             {
@@ -102,7 +102,7 @@ class TestExperimentRunner:
         assert mock_evaluator.get_agent_evaluation.call_count == 3

-    @patch('crewai.evaluation.experiment.runner.create_default_evaluator')
+    @patch('crewai.experimental.evaluation.experiment.runner.create_default_evaluator')
     def test_run_success_with_unknown_metric(self, mock_create_evaluator, mock_crew, mock_evaluator_results):
         dataset = [
             {
@@ -130,7 +130,7 @@ class TestExperimentRunner:
         assert "unknown_metric" in result.expected_score.keys()
         assert result.passed is True

-    @patch('crewai.evaluation.experiment.runner.create_default_evaluator')
+    @patch('crewai.experimental.evaluation.experiment.runner.create_default_evaluator')
     def test_run_success_with_single_metric_evaluator_and_expected_specific_metric(self, mock_create_evaluator, mock_crew, mock_evaluator_results):
         dataset = [
             {
@@ -163,7 +163,7 @@ class TestExperimentRunner:
         assert "goal_alignment" in result.expected_score.keys()
         assert result.passed is True

-    @patch('crewai.evaluation.experiment.runner.create_default_evaluator')
+    @patch('crewai.experimental.evaluation.experiment.runner.create_default_evaluator')
     def test_run_success_when_expected_metric_is_not_available(self, mock_create_evaluator, mock_crew, mock_evaluator_results):
         dataset = [
             {