Add Record/Replay functionality for offline processing (Issue #2759)

Co-Authored-By: Joe Moura <joao@crewai.com>
Author: Devin AI
Date: 2025-05-05 22:23:03 +00:00
parent dabf02a90d
commit d5dfd5a1f5
8 changed files with 463 additions and 10 deletions
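The two new test files below exercise the feature end to end: an LLM-level response cache handler that records and replays completions, and Crew-level record_mode/replay_mode flags that wire the handler into each agent's LLM. As a rough illustration of the intended workflow, a crew can be run once against the real provider with recording enabled and then re-run offline in replay mode. The sketch below is inferred from the tests; only the record_mode and replay_mode keyword arguments come from this change, and the Agent/Task setup is illustrative:

# Illustrative sketch only; record_mode/replay_mode are the flags added by
# this commit, everything else is generic crewAI setup.
from crewai import Agent, Crew, Process, Task

agent = Agent(role="Test Agent", goal="Answer a question", backstory="Example agent")
task = Task(description="Return a simple response", expected_output="A simple response", agent=agent)

# First run: call the real LLM and record every response locally.
Crew(agents=[agent], tasks=[task], process=Process.sequential, record_mode=True).kickoff()

# Later, offline: replay the recorded responses instead of calling the provider.
Crew(agents=[agent], tasks=[task], process=Process.sequential, replay_mode=True).kickoff()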


@@ -0,0 +1,78 @@
import pytest
from unittest.mock import MagicMock, patch

from crewai.llm import LLM
from crewai.utilities.llm_response_cache_handler import LLMResponseCacheHandler


@pytest.fixture
def handler():
    """Cache handler with mocked storage so tests never touch disk."""
    handler = LLMResponseCacheHandler()
    handler.storage.add = MagicMock()
    handler.storage.get = MagicMock()
    return handler


@pytest.mark.vcr(filter_headers=["authorization"])
def test_llm_recording(handler):
    """In recording mode, each LLM response is written to cache storage."""
    handler.start_recording()

    llm = LLM(model="gpt-4o-mini")
    llm.set_response_cache_handler(handler)
    messages = [{"role": "user", "content": "Hello, world!"}]

    with patch('litellm.completion') as mock_completion:
        mock_completion.return_value = {
            "choices": [{"message": {"content": "Hello, human!"}}]
        }
        response = llm.call(messages)

    assert response == "Hello, human!"
    handler.storage.add.assert_called_once_with(
        "gpt-4o-mini", messages, "Hello, human!"
    )


@pytest.mark.vcr(filter_headers=["authorization"])
def test_llm_replaying(handler):
    """In replay mode, the cached response is returned and litellm is never called."""
    handler.start_replaying()
    handler.storage.get.return_value = "Cached response"

    llm = LLM(model="gpt-4o-mini")
    llm.set_response_cache_handler(handler)
    messages = [{"role": "user", "content": "Hello, world!"}]

    with patch('litellm.completion') as mock_completion:
        response = llm.call(messages)

    assert response == "Cached response"
    mock_completion.assert_not_called()
    handler.storage.get.assert_called_once_with("gpt-4o-mini", messages)


@pytest.mark.vcr(filter_headers=["authorization"])
def test_llm_replay_fallback(handler):
    """If no cached response exists in replay mode, the call falls back to litellm."""
    handler.start_replaying()
    handler.storage.get.return_value = None

    llm = LLM(model="gpt-4o-mini")
    llm.set_response_cache_handler(handler)
    messages = [{"role": "user", "content": "Hello, world!"}]

    with patch('litellm.completion') as mock_completion:
        mock_completion.return_value = {
            "choices": [{"message": {"content": "Hello, human!"}}]
        }
        response = llm.call(messages)

    assert response == "Hello, human!"
    mock_completion.assert_called_once()
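For context, these tests pin down a small handler interface: start_recording() and start_replaying() switch the mode, storage.add(model, messages, response) persists a response, and storage.get(model, messages) returns the cached text or None. A minimal in-memory sketch of that shape follows; the real LLMResponseCacheHandler added by this commit may be implemented differently (for example, with persistent storage):

# Minimal sketch of the interface exercised above; the shipped
# LLMResponseCacheHandler may differ (e.g. persistent rather than in-memory).
import json
from typing import Optional


class _InMemoryResponseStorage:
    def __init__(self) -> None:
        self._items: dict[str, str] = {}

    def _key(self, model: str, messages: list) -> str:
        # Messages are dicts, so serialize them into a stable lookup key.
        return json.dumps({"model": model, "messages": messages}, sort_keys=True)

    def add(self, model: str, messages: list, response: str) -> None:
        self._items[self._key(model, messages)] = response

    def get(self, model: str, messages: list) -> Optional[str]:
        return self._items.get(self._key(model, messages))


class ResponseCacheHandlerSketch:
    def __init__(self) -> None:
        self.storage = _InMemoryResponseStorage()
        self.recording = False
        self.replaying = False

    def start_recording(self) -> None:
        self.recording, self.replaying = True, False

    def start_replaying(self) -> None:
        self.recording, self.replaying = False, True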


@@ -0,0 +1,90 @@
import pytest
from unittest.mock import MagicMock, patch

from crewai.agent import Agent
from crewai.crew import Crew
from crewai.process import Process
from crewai.task import Task


@pytest.mark.vcr(filter_headers=["authorization"])
def test_crew_recording_mode():
    """Crew(record_mode=True) starts recording and attaches the handler to the agent's LLM."""
    agent = Agent(
        role="Test Agent",
        goal="Test the recording functionality",
        backstory="A test agent for recording LLM responses",
    )
    task = Task(
        description="Return a simple response",
        expected_output="A simple response",
        agent=agent,
    )
    crew = Crew(
        agents=[agent],
        tasks=[task],
        process=Process.sequential,
        record_mode=True,
    )

    mock_handler = MagicMock()
    crew._llm_response_cache_handler = mock_handler
    mock_llm = MagicMock()
    agent.llm = mock_llm

    with patch(
        'crewai.utilities.llm_response_cache_handler.LLMResponseCacheHandler',
        return_value=mock_handler,
    ):
        crew.kickoff()

    mock_handler.start_recording.assert_called_once()
    mock_llm.set_response_cache_handler.assert_called_once_with(mock_handler)


@pytest.mark.vcr(filter_headers=["authorization"])
def test_crew_replay_mode():
    """Crew(replay_mode=True) starts replaying and attaches the handler to the agent's LLM."""
    agent = Agent(
        role="Test Agent",
        goal="Test the replay functionality",
        backstory="A test agent for replaying LLM responses",
    )
    task = Task(
        description="Return a simple response",
        expected_output="A simple response",
        agent=agent,
    )
    crew = Crew(
        agents=[agent],
        tasks=[task],
        process=Process.sequential,
        replay_mode=True,
    )

    mock_handler = MagicMock()
    crew._llm_response_cache_handler = mock_handler
    mock_llm = MagicMock()
    agent.llm = mock_llm

    with patch(
        'crewai.utilities.llm_response_cache_handler.LLMResponseCacheHandler',
        return_value=mock_handler,
    ):
        crew.kickoff()

    mock_handler.start_replaying.assert_called_once()
    mock_llm.set_response_cache_handler.assert_called_once_with(mock_handler)


@pytest.mark.vcr(filter_headers=["authorization"])
def test_record_replay_flags_conflict():
    """record_mode and replay_mode are mutually exclusive and raise ValueError."""
    with pytest.raises(ValueError):
        crew = Crew(
            agents=[],
            tasks=[],
            process=Process.sequential,
            record_mode=True,
            replay_mode=True,
        )
        crew.kickoff()
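The last test fixes the conflict behaviour: a Crew configured with both flags raises ValueError before any work is done. A hedged sketch of that guard (the exact location and wording inside Crew may differ):

# Sketch of the mutual-exclusion guard implied by
# test_record_replay_flags_conflict; the actual check in Crew may differ.
def _validate_record_replay_flags(record_mode: bool, replay_mode: bool) -> None:
    if record_mode and replay_mode:
        raise ValueError(
            "record_mode and replay_mode cannot both be enabled; "
            "record a run first, then replay it."
        )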