mirror of
https://github.com/crewAIInc/crewAI.git
synced 2026-05-03 00:02:36 +00:00
feat: improve event bus thread safety and async support
Add thread-safe, async-compatible event bus with read–write locking and handler dependency ordering. Remove blinker dependency and implement direct dispatch. Improve type safety, error handling, and deterministic event synchronization. Refactor tests to auto-wait for async handlers, ensure clean teardown, and add comprehensive concurrency coverage. Replace thread-local state in AgentEvaluator with instance-based locking for correct cross-thread access. Enhance tracing reliability and event finalization.
This commit is contained in:
206
lib/crewai/tests/utilities/events/test_async_event_bus.py
Normal file
206
lib/crewai/tests/utilities/events/test_async_event_bus.py
Normal file
@@ -0,0 +1,206 @@
|
||||
"""Tests for async event handling in CrewAI event bus.
|
||||
|
||||
This module tests async handler registration, execution, and the aemit method.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
|
||||
import pytest
|
||||
|
||||
from crewai.events.base_events import BaseEvent
|
||||
from crewai.events.event_bus import crewai_event_bus
|
||||
|
||||
|
||||
class AsyncTestEvent(BaseEvent):
    """Minimal event type used to exercise the async handler paths."""
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_async_handler_execution():
    """An async handler registered via @on runs after a fire-and-forget emit()."""
    captured: list[BaseEvent] = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(AsyncTestEvent)
        async def async_handler(source: object, event: BaseEvent) -> None:
            await asyncio.sleep(0.01)
            captured.append(event)

        evt = AsyncTestEvent(type="async_test")
        crewai_event_bus.emit("test_source", evt)

        # emit() schedules async handlers without awaiting them; give them time.
        await asyncio.sleep(0.1)

        assert captured == [evt]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_aemit_with_async_handlers():
    """aemit() awaits async handlers, so the result is visible immediately after."""
    captured: list[BaseEvent] = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(AsyncTestEvent)
        async def async_handler(source: object, event: BaseEvent) -> None:
            await asyncio.sleep(0.01)
            captured.append(event)

        evt = AsyncTestEvent(type="async_test")
        await crewai_event_bus.aemit("test_source", evt)

        # No extra sleep needed: aemit() only returns once handlers are done.
        assert captured == [evt]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_multiple_async_handlers():
    """Every async handler registered for an event type receives the event."""
    seen_first: list[BaseEvent] = []
    seen_second: list[BaseEvent] = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(AsyncTestEvent)
        async def handler_1(source: object, event: BaseEvent) -> None:
            await asyncio.sleep(0.01)
            seen_first.append(event)

        @crewai_event_bus.on(AsyncTestEvent)
        async def handler_2(source: object, event: BaseEvent) -> None:
            await asyncio.sleep(0.02)
            seen_second.append(event)

        evt = AsyncTestEvent(type="async_test")
        await crewai_event_bus.aemit("test_source", evt)

        # aemit() waited for both handlers despite their different delays.
        assert len(seen_first) == 1
        assert len(seen_second) == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_mixed_sync_and_async_handlers():
    """Sync and async handlers may coexist on one event type; both fire on emit()."""
    sync_seen: list[BaseEvent] = []
    async_seen: list[BaseEvent] = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(AsyncTestEvent)
        def sync_handler(source: object, event: BaseEvent) -> None:
            sync_seen.append(event)

        @crewai_event_bus.on(AsyncTestEvent)
        async def async_handler(source: object, event: BaseEvent) -> None:
            await asyncio.sleep(0.01)
            async_seen.append(event)

        evt = AsyncTestEvent(type="mixed_test")
        crewai_event_bus.emit("test_source", evt)

        # The async side is fire-and-forget under emit(); let it complete.
        await asyncio.sleep(0.1)

        assert len(sync_seen) == 1
        assert len(async_seen) == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_async_handler_error_handling():
    """A raising async handler must not prevent other handlers from running."""
    ok_calls: list[bool] = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(AsyncTestEvent)
        async def failing_handler(source: object, event: BaseEvent) -> None:
            raise ValueError("Async handler error")

        @crewai_event_bus.on(AsyncTestEvent)
        async def successful_handler(source: object, event: BaseEvent) -> None:
            await asyncio.sleep(0.01)
            ok_calls.append(True)

        await crewai_event_bus.aemit("test_source", AsyncTestEvent(type="error_test"))

        # The second handler still ran even though the first one raised.
        assert len(ok_calls) == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_aemit_with_no_handlers():
    """aemit() on an event type with no registered handlers is a harmless no-op."""
    with crewai_event_bus.scoped_handlers():
        await crewai_event_bus.aemit("test_source", AsyncTestEvent(type="no_handlers"))
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_async_handler_registration_via_register_handler():
    """register_handler() accepts coroutine functions, not only plain callables."""
    captured: list[BaseEvent] = []

    with crewai_event_bus.scoped_handlers():

        async def custom_async_handler(source: object, event: BaseEvent) -> None:
            await asyncio.sleep(0.01)
            captured.append(event)

        crewai_event_bus.register_handler(AsyncTestEvent, custom_async_handler)

        evt = AsyncTestEvent(type="register_test")
        await crewai_event_bus.aemit("test_source", evt)

        assert captured == [evt]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_emit_async_handlers_fire_and_forget():
    """emit() returns before slow async handlers finish (fire-and-forget)."""
    captured: list[BaseEvent] = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(AsyncTestEvent)
        async def slow_async_handler(source: object, event: BaseEvent) -> None:
            await asyncio.sleep(0.05)
            captured.append(event)

        crewai_event_bus.emit("test_source", AsyncTestEvent(type="fire_forget_test"))

        # The handler sleeps 0.05s, so nothing can have landed yet.
        assert len(captured) == 0

        await asyncio.sleep(0.1)

        # ...but it does land once the event loop gets a chance to run it.
        assert len(captured) == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_scoped_handlers_with_async():
    """Nested scoped_handlers() isolate async handlers; outer ones resume afterwards."""
    before_seen: list[BaseEvent] = []
    during_seen: list[BaseEvent] = []
    after_seen: list[BaseEvent] = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(AsyncTestEvent)
        async def before_handler(source: object, event: BaseEvent) -> None:
            before_seen.append(event)

        with crewai_event_bus.scoped_handlers():

            @crewai_event_bus.on(AsyncTestEvent)
            async def scoped_handler(source: object, event: BaseEvent) -> None:
                during_seen.append(event)

            event1 = AsyncTestEvent(type="during_scope")
            await crewai_event_bus.aemit("test_source", event1)

            # Inside the inner scope only the inner handler is active.
            assert len(before_seen) == 0
            assert len(during_seen) == 1

        @crewai_event_bus.on(AsyncTestEvent)
        async def after_handler(source: object, event: BaseEvent) -> None:
            after_seen.append(event)

        event2 = AsyncTestEvent(type="after_scope")
        await crewai_event_bus.aemit("test_source", event2)

        # Outer + newly added handler fire; the inner one is gone.
        assert len(before_seen) == 1
        assert len(during_seen) == 1
        assert len(after_seen) == 1
|
||||
@@ -1,3 +1,4 @@
|
||||
import threading
|
||||
from unittest.mock import Mock
|
||||
|
||||
from crewai.events.base_events import BaseEvent
|
||||
@@ -21,27 +22,42 @@ def test_specific_event_handler():
|
||||
mock_handler.assert_called_once_with("source_object", event)
|
||||
|
||||
|
||||
def test_wildcard_event_handler():
|
||||
mock_handler = Mock()
|
||||
def test_multiple_handlers_same_event():
|
||||
"""Test that multiple handlers can be registered for the same event type."""
|
||||
mock_handler1 = Mock()
|
||||
mock_handler2 = Mock()
|
||||
|
||||
@crewai_event_bus.on(BaseEvent)
|
||||
def handler(source, event):
|
||||
mock_handler(source, event)
|
||||
@crewai_event_bus.on(TestEvent)
|
||||
def handler1(source, event):
|
||||
mock_handler1(source, event)
|
||||
|
||||
@crewai_event_bus.on(TestEvent)
|
||||
def handler2(source, event):
|
||||
mock_handler2(source, event)
|
||||
|
||||
event = TestEvent(type="test_event")
|
||||
crewai_event_bus.emit("source_object", event)
|
||||
|
||||
mock_handler.assert_called_once_with("source_object", event)
|
||||
mock_handler1.assert_called_once_with("source_object", event)
|
||||
mock_handler2.assert_called_once_with("source_object", event)
|
||||
|
||||
|
||||
def test_event_bus_error_handling(capfd):
|
||||
@crewai_event_bus.on(BaseEvent)
|
||||
def test_event_bus_error_handling():
|
||||
"""Test that handler exceptions are caught and don't break the event bus."""
|
||||
called = threading.Event()
|
||||
error_caught = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(TestEvent)
|
||||
def broken_handler(source, event):
|
||||
called.set()
|
||||
raise ValueError("Simulated handler failure")
|
||||
|
||||
@crewai_event_bus.on(TestEvent)
|
||||
def working_handler(source, event):
|
||||
error_caught.set()
|
||||
|
||||
event = TestEvent(type="test_event")
|
||||
crewai_event_bus.emit("source_object", event)
|
||||
|
||||
out, err = capfd.readouterr()
|
||||
assert "Simulated handler failure" in out
|
||||
assert "Handler 'broken_handler' failed" in out
|
||||
assert called.wait(timeout=2), "Broken handler was never called"
|
||||
assert error_caught.wait(timeout=2), "Working handler was never called after error"
|
||||
|
||||
264
lib/crewai/tests/utilities/events/test_rw_lock.py
Normal file
264
lib/crewai/tests/utilities/events/test_rw_lock.py
Normal file
@@ -0,0 +1,264 @@
|
||||
"""Tests for read-write lock implementation.
|
||||
|
||||
This module tests the RWLock class for correct concurrent read and write behavior.
|
||||
"""
|
||||
|
||||
import threading
|
||||
import time
|
||||
|
||||
from crewai.events.utils.rw_lock import RWLock
|
||||
|
||||
|
||||
def test_multiple_readers_concurrent():
    """Five readers must be able to hold the read lock at the same time."""
    rw = RWLock()
    counters_guard = threading.Lock()
    active = [0]
    peak = [0]

    def reader(reader_id: int) -> None:
        with rw.r_locked():
            with counters_guard:
                active[0] += 1
                peak[0] = max(peak[0], active[0])

            time.sleep(0.1)

            with counters_guard:
                active[0] -= 1

    workers = [threading.Thread(target=reader, args=(i,)) for i in range(5)]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

    # All five readers overlapped inside the 0.1s hold window.
    assert peak[0] == 5
|
||||
|
||||
|
||||
def test_writer_blocks_readers():
    """A reader must not enter its critical section while a writer holds the lock."""
    rw = RWLock()
    writer_active = [False]
    overlap_detected = [False]

    def writer() -> None:
        with rw.w_locked():
            writer_active[0] = True
            time.sleep(0.2)
            writer_active[0] = False

    def reader() -> None:
        # Start after the writer has had time to grab the lock.
        time.sleep(0.05)
        with rw.r_locked():
            if writer_active[0]:
                overlap_detected[0] = True

    w_thread = threading.Thread(target=writer)
    r_thread = threading.Thread(target=reader)

    w_thread.start()
    r_thread.start()
    w_thread.join()
    r_thread.join()

    assert not overlap_detected[0]
|
||||
|
||||
|
||||
def test_writer_blocks_other_writers():
    """Writers are mutually exclusive: each runs its critical section alone."""
    rw = RWLock()
    order_guard = threading.Lock()
    order: list[int] = []

    def writer(writer_id: int) -> None:
        with rw.w_locked():
            with order_guard:
                order.append(writer_id)
            time.sleep(0.1)

    workers = [threading.Thread(target=writer, args=(i,)) for i in range(3)]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

    # Every writer ran exactly once, each with a distinct id.
    assert len(order) == 3
    assert len(set(order)) == 3
|
||||
|
||||
|
||||
def test_readers_block_writers():
    """A writer must wait until all active readers have released the lock."""
    rw = RWLock()
    counters_guard = threading.Lock()
    readers_active = [0]
    overlap_detected = [False]

    def reader() -> None:
        with rw.r_locked():
            with counters_guard:
                readers_active[0] += 1
            time.sleep(0.2)
            with counters_guard:
                readers_active[0] -= 1

    def writer() -> None:
        # Let the reader acquire first.
        time.sleep(0.05)
        with rw.w_locked():
            with counters_guard:
                if readers_active[0] > 0:
                    overlap_detected[0] = True

    r_thread = threading.Thread(target=reader)
    w_thread = threading.Thread(target=writer)

    r_thread.start()
    w_thread.start()
    r_thread.join()
    w_thread.join()

    assert not overlap_detected[0]
|
||||
|
||||
|
||||
def test_alternating_readers_and_writers():
    """A mix of readers and writers all complete and log start/end markers."""
    rw = RWLock()
    log_guard = threading.Lock()
    log: list[str] = []

    def reader(reader_id: int) -> None:
        with rw.r_locked():
            with log_guard:
                log.append(f"r{reader_id}_start")
            time.sleep(0.05)
            with log_guard:
                log.append(f"r{reader_id}_end")

    def writer(writer_id: int) -> None:
        with rw.w_locked():
            with log_guard:
                log.append(f"w{writer_id}_start")
            time.sleep(0.05)
            with log_guard:
                log.append(f"w{writer_id}_end")

    workers = [
        threading.Thread(target=reader, args=(0,)),
        threading.Thread(target=writer, args=(0,)),
        threading.Thread(target=reader, args=(1,)),
        threading.Thread(target=writer, args=(1,)),
        threading.Thread(target=reader, args=(2,)),
    ]

    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

    # 5 participants x (start + end) = 10 log entries.
    assert len(log) == 10
    assert len([entry for entry in log if "_start" in entry]) == 5
    assert len([entry for entry in log if "_end" in entry]) == 5
|
||||
|
||||
|
||||
def test_context_manager_releases_on_exception():
    """r_locked() releases the read lock even when its body raises."""
    rw = RWLock()
    raised = False

    try:
        with rw.r_locked():
            raise ValueError("Test exception")
    except ValueError:
        raised = True

    assert raised

    # If the read lock leaked, taking the write lock here would deadlock.
    entered = False
    with rw.w_locked():
        entered = True

    assert entered
|
||||
|
||||
|
||||
def test_write_lock_releases_on_exception():
    """w_locked() releases the write lock even when its body raises."""
    rw = RWLock()
    raised = False

    try:
        with rw.w_locked():
            raise ValueError("Test exception")
    except ValueError:
        raised = True

    assert raised

    # If the write lock leaked, taking the read lock here would deadlock.
    entered = False
    with rw.r_locked():
        entered = True

    assert entered
|
||||
|
||||
|
||||
def test_stress_many_readers_few_writers():
    """Ten readers x10 iterations and two writers x5 all complete under contention."""
    rw = RWLock()
    counters_guard = threading.Lock()
    reads = [0]
    writes = [0]

    def reader() -> None:
        for _ in range(10):
            with rw.r_locked():
                with counters_guard:
                    reads[0] += 1
                time.sleep(0.001)

    def writer() -> None:
        for _ in range(5):
            with rw.w_locked():
                with counters_guard:
                    writes[0] += 1
                time.sleep(0.01)

    workers = [threading.Thread(target=reader) for _ in range(10)]
    workers += [threading.Thread(target=writer) for _ in range(2)]

    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

    # 10 readers * 10 reads; 2 writers * 5 writes.
    assert reads[0] == 100
    assert writes[0] == 10
|
||||
|
||||
|
||||
def test_nested_read_locks_same_thread():
    """Nested r_locked() on a single thread does not deadlock."""
    rw = RWLock()
    inner_reached = False

    with rw.r_locked():
        with rw.r_locked():
            inner_reached = True

    assert inner_reached
|
||||
|
||||
|
||||
def test_manual_acquire_release():
    """Explicit acquire/release pairs work and leave the lock reusable."""
    rw = RWLock()

    rw.r_acquire()
    rw.r_release()

    rw.w_acquire()
    rw.w_release()

    # The lock is still usable through its context-manager API afterwards.
    with rw.r_locked():
        pass
|
||||
247
lib/crewai/tests/utilities/events/test_shutdown.py
Normal file
247
lib/crewai/tests/utilities/events/test_shutdown.py
Normal file
@@ -0,0 +1,247 @@
|
||||
"""Tests for event bus shutdown and cleanup behavior.
|
||||
|
||||
This module tests graceful shutdown, task completion, and cleanup operations.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import threading
|
||||
import time
|
||||
|
||||
import pytest
|
||||
|
||||
from crewai.events.base_events import BaseEvent
|
||||
from crewai.events.event_bus import CrewAIEventsBus
|
||||
|
||||
|
||||
class ShutdownTestEvent(BaseEvent):
    """Minimal event type used by the shutdown/cleanup tests."""
|
||||
|
||||
|
||||
def test_shutdown_prevents_new_events():
    """Once _shutting_down is set, emit() drops events instead of dispatching."""
    bus = CrewAIEventsBus()
    captured: list[BaseEvent] = []

    with bus.scoped_handlers():

        @bus.on(ShutdownTestEvent)
        def handler(source: object, event: BaseEvent) -> None:
            captured.append(event)

        # NOTE(review): reaches into the private _shutting_down flag directly.
        bus._shutting_down = True

        bus.emit("test_source", ShutdownTestEvent(type="after_shutdown"))

        time.sleep(0.1)

        assert len(captured) == 0

        bus._shutting_down = False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_aemit_during_shutdown():
    """aemit() is also a no-op while the bus is shutting down."""
    bus = CrewAIEventsBus()
    captured: list[BaseEvent] = []

    with bus.scoped_handlers():

        @bus.on(ShutdownTestEvent)
        async def handler(source: object, event: BaseEvent) -> None:
            captured.append(event)

        bus._shutting_down = True

        await bus.aemit("test_source", ShutdownTestEvent(type="aemit_during_shutdown"))

        assert len(captured) == 0

        bus._shutting_down = False
|
||||
|
||||
|
||||
def test_shutdown_flag_prevents_emit():
    """Events are delivered before shutdown and silently dropped afterwards."""
    bus = CrewAIEventsBus()
    delivered = [0]

    with bus.scoped_handlers():

        @bus.on(ShutdownTestEvent)
        def handler(source: object, event: BaseEvent) -> None:
            delivered[0] += 1

        bus.emit("test_source", ShutdownTestEvent(type="before_shutdown"))
        time.sleep(0.1)
        assert delivered[0] == 1

        bus._shutting_down = True

        bus.emit("test_source", ShutdownTestEvent(type="during_shutdown"))
        time.sleep(0.1)
        # Count unchanged: the second emit was dropped.
        assert delivered[0] == 1

        bus._shutting_down = False
|
||||
|
||||
|
||||
def test_concurrent_access_during_shutdown_flag():
    """Flipping the shutdown flag mid-stream stops some, but not all, deliveries."""
    bus = CrewAIEventsBus()
    guard = threading.Lock()
    captured: list[BaseEvent] = []

    with bus.scoped_handlers():

        @bus.on(ShutdownTestEvent)
        def handler(source: object, event: BaseEvent) -> None:
            with guard:
                captured.append(event)

        def emit_events() -> None:
            for i in range(10):
                bus.emit("source", ShutdownTestEvent(type=f"event_{i}"))
                time.sleep(0.01)

        def set_shutdown_flag() -> None:
            # Trip the flag roughly halfway through the emit loop.
            time.sleep(0.05)
            bus._shutting_down = True

        producer = threading.Thread(target=emit_events)
        stopper = threading.Thread(target=set_shutdown_flag)

        producer.start()
        stopper.start()
        producer.join()
        stopper.join()

        time.sleep(0.2)

        # Some events got through before the flag flipped; the rest were dropped.
        # NOTE(review): timing-based, so this could flake on a heavily loaded machine.
        assert len(captured) < 10
        assert len(captured) > 0

        bus._shutting_down = False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_async_handlers_complete_before_shutdown_flag():
    """All in-flight async handlers finish while the bus stays open."""
    bus = CrewAIEventsBus()
    finished: list[BaseEvent] = []

    with bus.scoped_handlers():

        @bus.on(ShutdownTestEvent)
        async def async_handler(source: object, event: BaseEvent) -> None:
            await asyncio.sleep(0.05)
            if not bus._shutting_down:
                finished.append(event)

        for i in range(5):
            bus.emit("source", ShutdownTestEvent(type=f"event_{i}"))

        await asyncio.sleep(0.3)

        assert len(finished) == 5
|
||||
|
||||
|
||||
def test_scoped_handlers_cleanup():
    """Handlers registered in a scope are removed once that scope exits."""
    bus = CrewAIEventsBus()
    before_seen: list[BaseEvent] = []
    during_seen: list[BaseEvent] = []
    # NOTE(review): after_seen is never appended to anywhere, so the final
    # assertion on it is vacuous — confirm whether an "after" handler was intended.
    after_seen: list[BaseEvent] = []

    with bus.scoped_handlers():

        @bus.on(ShutdownTestEvent)
        def before_handler(source: object, event: BaseEvent) -> None:
            before_seen.append(event)

        with bus.scoped_handlers():

            @bus.on(ShutdownTestEvent)
            def during_handler(source: object, event: BaseEvent) -> None:
                during_seen.append(event)

            bus.emit("source", ShutdownTestEvent(type="during"))
            time.sleep(0.1)

            # The inner scope isolates: only the inner handler fires.
            assert len(before_seen) == 0
            assert len(during_seen) == 1

        bus.emit("source", ShutdownTestEvent(type="after_inner_scope"))
        time.sleep(0.1)

        # Inner handler gone; the outer handler is active again.
        assert len(before_seen) == 1
        assert len(during_seen) == 1

    bus.emit("source", ShutdownTestEvent(type="after_outer_scope"))
    time.sleep(0.1)

    # Outer scope closed too: nothing is delivered any more.
    assert len(before_seen) == 1
    assert len(during_seen) == 1
    assert len(after_seen) == 0
|
||||
|
||||
|
||||
def test_handler_registration_thread_safety():
    """Concurrent @on registrations from several threads are not lost."""
    bus = CrewAIEventsBus()
    guard = threading.Lock()
    registered = [0]

    with bus.scoped_handlers():

        def register_handlers() -> None:
            for _ in range(20):

                @bus.on(ShutdownTestEvent)
                def handler(source: object, event: BaseEvent) -> None:
                    pass

                with guard:
                    registered[0] += 1

                time.sleep(0.001)

        workers = [threading.Thread(target=register_handlers) for _ in range(3)]

        for worker in workers:
            worker.start()
        for worker in workers:
            worker.join()

        # 3 threads x 20 registrations each.
        assert registered[0] == 60
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_mixed_sync_async_handler_execution():
    """Five events each reach both a blocking sync handler and an async handler."""
    bus = CrewAIEventsBus()
    sync_seen: list[BaseEvent] = []
    async_seen: list[BaseEvent] = []

    with bus.scoped_handlers():

        @bus.on(ShutdownTestEvent)
        def sync_handler(source: object, event: BaseEvent) -> None:
            time.sleep(0.01)
            sync_seen.append(event)

        @bus.on(ShutdownTestEvent)
        async def async_handler(source: object, event: BaseEvent) -> None:
            await asyncio.sleep(0.01)
            async_seen.append(event)

        for i in range(5):
            bus.emit("source", ShutdownTestEvent(type=f"event_{i}"))

        # Give fire-and-forget handlers time to drain.
        await asyncio.sleep(0.2)

        assert len(sync_seen) == 5
        assert len(async_seen) == 5
|
||||
189
lib/crewai/tests/utilities/events/test_thread_safety.py
Normal file
189
lib/crewai/tests/utilities/events/test_thread_safety.py
Normal file
@@ -0,0 +1,189 @@
|
||||
"""Tests for thread safety in CrewAI event bus.
|
||||
|
||||
This module tests concurrent event emission and handler registration.
|
||||
"""
|
||||
|
||||
import threading
|
||||
import time
|
||||
from collections.abc import Callable
|
||||
|
||||
from crewai.events.base_events import BaseEvent
|
||||
from crewai.events.event_bus import crewai_event_bus
|
||||
|
||||
|
||||
class ThreadSafetyTestEvent(BaseEvent):
    """Minimal event type used by the thread-safety tests."""
|
||||
|
||||
|
||||
def test_concurrent_emit_from_multiple_threads():
    """100 events emitted from 10 threads are all delivered exactly once."""
    guard = threading.Lock()
    captured: list[BaseEvent] = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(ThreadSafetyTestEvent)
        def handler(source: object, event: BaseEvent) -> None:
            with guard:
                captured.append(event)

        num_threads = 10
        events_per_thread = 10

        def emit_events(thread_id: int) -> None:
            for i in range(events_per_thread):
                crewai_event_bus.emit(
                    f"source_{thread_id}",
                    ThreadSafetyTestEvent(type=f"thread_{thread_id}_event_{i}"),
                )

        workers: list[threading.Thread] = []
        for i in range(num_threads):
            worker = threading.Thread(target=emit_events, args=(i,))
            workers.append(worker)
            worker.start()

        for worker in workers:
            worker.join()

        # Allow any asynchronously dispatched handlers to drain.
        time.sleep(0.5)

        assert len(captured) == num_threads * events_per_thread
|
||||
|
||||
|
||||
def test_concurrent_handler_registration():
    """register_handler() from 20 threads keeps every handler registered."""
    guard = threading.Lock()
    executed: list[int] = []

    def create_handler(handler_id: int) -> Callable[[object, BaseEvent], None]:
        # Factory binds handler_id early, avoiding the late-binding closure trap.
        def handler(source: object, event: BaseEvent) -> None:
            with guard:
                executed.append(handler_id)

        return handler

    with crewai_event_bus.scoped_handlers():
        num_handlers = 20

        def register_handler(handler_id: int) -> None:
            crewai_event_bus.register_handler(
                ThreadSafetyTestEvent, create_handler(handler_id)
            )

        workers: list[threading.Thread] = []
        for i in range(num_handlers):
            worker = threading.Thread(target=register_handler, args=(i,))
            workers.append(worker)
            worker.start()

        for worker in workers:
            worker.join()

        crewai_event_bus.emit(
            "test_source", ThreadSafetyTestEvent(type="registration_test")
        )

        time.sleep(0.5)

        # Each registered handler ran exactly once.
        assert len(executed) == num_handlers
        assert set(executed) == set(range(num_handlers))
|
||||
|
||||
|
||||
def test_concurrent_emit_and_registration():
    """Emitting while handlers register concurrently neither crashes nor drops all events."""
    guard = threading.Lock()
    captured: list[BaseEvent] = []

    with crewai_event_bus.scoped_handlers():

        def emit_continuously() -> None:
            for i in range(50):
                crewai_event_bus.emit(
                    "emitter", ThreadSafetyTestEvent(type=f"emit_event_{i}")
                )
                time.sleep(0.001)

        def register_continuously() -> None:
            for _ in range(10):

                @crewai_event_bus.on(ThreadSafetyTestEvent)
                def handler(source: object, event: BaseEvent) -> None:
                    with guard:
                        captured.append(event)

                time.sleep(0.005)

        producer = threading.Thread(target=emit_continuously)
        registrar = threading.Thread(target=register_continuously)

        producer.start()
        registrar.start()
        producer.join()
        registrar.join()

        time.sleep(0.5)

        # Exact count depends on interleaving; at least some events were seen.
        assert len(captured) > 0
|
||||
|
||||
|
||||
def test_stress_test_rapid_emit():
    """1000 back-to-back emits from one thread are all delivered."""
    guard = threading.Lock()
    delivered = [0]

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(ThreadSafetyTestEvent)
        def counter_handler(source: object, event: BaseEvent) -> None:
            with guard:
                delivered[0] += 1

        num_events = 1000
        for i in range(num_events):
            crewai_event_bus.emit(
                "rapid_source", ThreadSafetyTestEvent(type=f"rapid_event_{i}")
            )

        # Generous drain window for any queued dispatch.
        time.sleep(1.0)

        assert delivered[0] == num_events
|
||||
|
||||
|
||||
def test_multiple_event_types_concurrent():
    """Concurrent emits of two event types are routed to the correct handlers."""

    class EventTypeA(BaseEvent):
        pass

    class EventTypeB(BaseEvent):
        pass

    guard = threading.Lock()
    seen_a: list[BaseEvent] = []
    seen_b: list[BaseEvent] = []

    with crewai_event_bus.scoped_handlers():

        @crewai_event_bus.on(EventTypeA)
        def handler_a(source: object, event: BaseEvent) -> None:
            with guard:
                seen_a.append(event)

        @crewai_event_bus.on(EventTypeB)
        def handler_b(source: object, event: BaseEvent) -> None:
            with guard:
                seen_b.append(event)

        def emit_type_a() -> None:
            for i in range(50):
                crewai_event_bus.emit("source_a", EventTypeA(type=f"type_a_{i}"))

        def emit_type_b() -> None:
            for i in range(50):
                crewai_event_bus.emit("source_b", EventTypeB(type=f"type_b_{i}"))

        thread_a = threading.Thread(target=emit_type_a)
        thread_b = threading.Thread(target=emit_type_b)

        thread_a.start()
        thread_b.start()
        thread_a.join()
        thread_b.join()

        time.sleep(0.5)

        # No cross-talk: each handler saw only its own event type's 50 events.
        assert len(seen_a) == 50
        assert len(seen_b) == 50
|
||||
@@ -1,3 +1,4 @@
|
||||
import threading
|
||||
from datetime import datetime
|
||||
import os
|
||||
from unittest.mock import Mock, patch
|
||||
@@ -49,6 +50,8 @@ from crewai.tools.base_tool import BaseTool
|
||||
from pydantic import Field
|
||||
import pytest
|
||||
|
||||
from ..utils import wait_for_event_handlers
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def vcr_config(request) -> dict:
|
||||
@@ -118,6 +121,7 @@ def test_crew_emits_start_kickoff_event(
|
||||
# Now when Crew creates EventListener, it will use our mocked telemetry
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
crew.kickoff()
|
||||
wait_for_event_handlers()
|
||||
|
||||
mock_telemetry.crew_execution_span.assert_called_once_with(crew, None)
|
||||
mock_telemetry.end_crew.assert_called_once_with(crew, "hi")
|
||||
@@ -131,15 +135,20 @@ def test_crew_emits_start_kickoff_event(
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_crew_emits_end_kickoff_event(base_agent, base_task):
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(CrewKickoffCompletedEvent)
|
||||
def handle_crew_end(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
|
||||
crew.kickoff()
|
||||
|
||||
assert event_received.wait(timeout=5), (
|
||||
"Timeout waiting for crew kickoff completed event"
|
||||
)
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].crew_name == "TestCrew"
|
||||
assert isinstance(received_events[0].timestamp, datetime)
|
||||
@@ -165,6 +174,7 @@ def test_crew_emits_test_kickoff_type_event(base_agent, base_task):
|
||||
eval_llm = LLM(model="gpt-4o-mini")
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
crew.test(n_iterations=1, eval_llm=eval_llm)
|
||||
wait_for_event_handlers()
|
||||
|
||||
assert len(received_events) == 3
|
||||
assert received_events[0].crew_name == "TestCrew"
|
||||
@@ -181,40 +191,44 @@ def test_crew_emits_test_kickoff_type_event(base_agent, base_task):
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_crew_emits_kickoff_failed_event(base_agent, base_task):
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
@crewai_event_bus.on(CrewKickoffFailedEvent)
|
||||
def handle_crew_failed(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
@crewai_event_bus.on(CrewKickoffFailedEvent)
|
||||
def handle_crew_failed(source, event):
|
||||
received_events.append(event)
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
with patch.object(Crew, "_execute_tasks") as mock_execute:
|
||||
error_message = "Simulated crew kickoff failure"
|
||||
mock_execute.side_effect = Exception(error_message)
|
||||
|
||||
with patch.object(Crew, "_execute_tasks") as mock_execute:
|
||||
error_message = "Simulated crew kickoff failure"
|
||||
mock_execute.side_effect = Exception(error_message)
|
||||
with pytest.raises(Exception): # noqa: B017
|
||||
crew.kickoff()
|
||||
|
||||
with pytest.raises(Exception): # noqa: B017
|
||||
crew.kickoff()
|
||||
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].error == error_message
|
||||
assert isinstance(received_events[0].timestamp, datetime)
|
||||
assert received_events[0].type == "crew_kickoff_failed"
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for failed event"
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].error == error_message
|
||||
assert isinstance(received_events[0].timestamp, datetime)
|
||||
assert received_events[0].type == "crew_kickoff_failed"
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_crew_emits_start_task_event(base_agent, base_task):
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(TaskStartedEvent)
|
||||
def handle_task_start(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
|
||||
crew.kickoff()
|
||||
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for task started event"
|
||||
assert len(received_events) == 1
|
||||
assert isinstance(received_events[0].timestamp, datetime)
|
||||
assert received_events[0].type == "task_started"
|
||||
@@ -225,10 +239,12 @@ def test_crew_emits_end_task_event(
|
||||
base_agent, base_task, reset_event_listener_singleton
|
||||
):
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(TaskCompletedEvent)
|
||||
def handle_task_end(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
mock_span = Mock()
|
||||
|
||||
@@ -246,6 +262,7 @@ def test_crew_emits_end_task_event(
|
||||
mock_telemetry.task_started.assert_called_once_with(crew=crew, task=base_task)
|
||||
mock_telemetry.task_ended.assert_called_once_with(mock_span, base_task, crew)
|
||||
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for task completed event"
|
||||
assert len(received_events) == 1
|
||||
assert isinstance(received_events[0].timestamp, datetime)
|
||||
assert received_events[0].type == "task_completed"
|
||||
@@ -255,11 +272,13 @@ def test_crew_emits_end_task_event(
|
||||
def test_task_emits_failed_event_on_execution_error(base_agent, base_task):
|
||||
received_events = []
|
||||
received_sources = []
|
||||
event_received = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(TaskFailedEvent)
|
||||
def handle_task_failed(source, event):
|
||||
received_events.append(event)
|
||||
received_sources.append(source)
|
||||
event_received.set()
|
||||
|
||||
with patch.object(
|
||||
Task,
|
||||
@@ -281,6 +300,9 @@ def test_task_emits_failed_event_on_execution_error(base_agent, base_task):
|
||||
with pytest.raises(Exception): # noqa: B017
|
||||
agent.execute_task(task=task)
|
||||
|
||||
assert event_received.wait(timeout=5), (
|
||||
"Timeout waiting for task failed event"
|
||||
)
|
||||
assert len(received_events) == 1
|
||||
assert received_sources[0] == task
|
||||
assert received_events[0].error == error_message
|
||||
@@ -291,17 +313,27 @@ def test_task_emits_failed_event_on_execution_error(base_agent, base_task):
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_agent_emits_execution_started_and_completed_events(base_agent, base_task):
|
||||
received_events = []
|
||||
lock = threading.Lock()
|
||||
all_events_received = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(AgentExecutionStartedEvent)
|
||||
def handle_agent_start(source, event):
|
||||
received_events.append(event)
|
||||
with lock:
|
||||
received_events.append(event)
|
||||
|
||||
@crewai_event_bus.on(AgentExecutionCompletedEvent)
|
||||
def handle_agent_completed(source, event):
|
||||
received_events.append(event)
|
||||
with lock:
|
||||
received_events.append(event)
|
||||
if len(received_events) >= 2:
|
||||
all_events_received.set()
|
||||
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
crew.kickoff()
|
||||
|
||||
assert all_events_received.wait(timeout=5), (
|
||||
"Timeout waiting for agent execution events"
|
||||
)
|
||||
assert len(received_events) == 2
|
||||
assert received_events[0].agent == base_agent
|
||||
assert received_events[0].task == base_task
|
||||
@@ -320,10 +352,12 @@ def test_agent_emits_execution_started_and_completed_events(base_agent, base_tas
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_agent_emits_execution_error_event(base_agent, base_task):
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(AgentExecutionErrorEvent)
|
||||
def handle_agent_start(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
error_message = "Error happening while sending prompt to model."
|
||||
base_agent.max_retry_limit = 0
|
||||
@@ -337,6 +371,9 @@ def test_agent_emits_execution_error_event(base_agent, base_task):
|
||||
task=base_task,
|
||||
)
|
||||
|
||||
assert event_received.wait(timeout=5), (
|
||||
"Timeout waiting for agent execution error event"
|
||||
)
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].agent == base_agent
|
||||
assert received_events[0].task == base_task
|
||||
@@ -358,10 +395,12 @@ class SayHiTool(BaseTool):
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_tools_emits_finished_events():
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(ToolUsageFinishedEvent)
|
||||
def handle_tool_end(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
agent = Agent(
|
||||
role="base_agent",
|
||||
@@ -377,6 +416,10 @@ def test_tools_emits_finished_events():
|
||||
)
|
||||
crew = Crew(agents=[agent], tasks=[task], name="TestCrew")
|
||||
crew.kickoff()
|
||||
|
||||
assert event_received.wait(timeout=5), (
|
||||
"Timeout waiting for tool usage finished event"
|
||||
)
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].agent_key == agent.key
|
||||
assert received_events[0].agent_role == agent.role
|
||||
@@ -389,10 +432,15 @@ def test_tools_emits_finished_events():
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_tools_emits_error_events():
|
||||
received_events = []
|
||||
lock = threading.Lock()
|
||||
all_events_received = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(ToolUsageErrorEvent)
|
||||
def handle_tool_end(source, event):
|
||||
received_events.append(event)
|
||||
with lock:
|
||||
received_events.append(event)
|
||||
if len(received_events) >= 48:
|
||||
all_events_received.set()
|
||||
|
||||
class ErrorTool(BaseTool):
|
||||
name: str = Field(
|
||||
@@ -423,6 +471,9 @@ def test_tools_emits_error_events():
|
||||
crew = Crew(agents=[agent], tasks=[task], name="TestCrew")
|
||||
crew.kickoff()
|
||||
|
||||
assert all_events_received.wait(timeout=5), (
|
||||
"Timeout waiting for tool usage error events"
|
||||
)
|
||||
assert len(received_events) == 48
|
||||
assert received_events[0].agent_key == agent.key
|
||||
assert received_events[0].agent_role == agent.role
|
||||
@@ -435,11 +486,13 @@ def test_tools_emits_error_events():
|
||||
|
||||
def test_flow_emits_start_event(reset_event_listener_singleton):
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
mock_span = Mock()
|
||||
|
||||
@crewai_event_bus.on(FlowStartedEvent)
|
||||
def handle_flow_start(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
class TestFlow(Flow[dict]):
|
||||
@start()
|
||||
@@ -458,6 +511,7 @@ def test_flow_emits_start_event(reset_event_listener_singleton):
|
||||
flow = TestFlow()
|
||||
flow.kickoff()
|
||||
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for flow started event"
|
||||
mock_telemetry.flow_execution_span.assert_called_once_with("TestFlow", ["begin"])
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].flow_name == "TestFlow"
|
||||
@@ -466,6 +520,7 @@ def test_flow_emits_start_event(reset_event_listener_singleton):
|
||||
|
||||
def test_flow_name_emitted_to_event_bus():
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
class MyFlowClass(Flow):
|
||||
name = "PRODUCTION_FLOW"
|
||||
@@ -477,118 +532,133 @@ def test_flow_name_emitted_to_event_bus():
|
||||
@crewai_event_bus.on(FlowStartedEvent)
|
||||
def handle_flow_start(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
flow = MyFlowClass()
|
||||
flow.kickoff()
|
||||
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for flow started event"
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].flow_name == "PRODUCTION_FLOW"
|
||||
|
||||
|
||||
def test_flow_emits_finish_event():
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
@crewai_event_bus.on(FlowFinishedEvent)
|
||||
def handle_flow_finish(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
@crewai_event_bus.on(FlowFinishedEvent)
|
||||
def handle_flow_finish(source, event):
|
||||
received_events.append(event)
|
||||
class TestFlow(Flow[dict]):
|
||||
@start()
|
||||
def begin(self):
|
||||
return "completed"
|
||||
|
||||
class TestFlow(Flow[dict]):
|
||||
@start()
|
||||
def begin(self):
|
||||
return "completed"
|
||||
flow = TestFlow()
|
||||
result = flow.kickoff()
|
||||
|
||||
flow = TestFlow()
|
||||
result = flow.kickoff()
|
||||
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].flow_name == "TestFlow"
|
||||
assert received_events[0].type == "flow_finished"
|
||||
assert received_events[0].result == "completed"
|
||||
assert result == "completed"
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for finish event"
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].flow_name == "TestFlow"
|
||||
assert received_events[0].type == "flow_finished"
|
||||
assert received_events[0].result == "completed"
|
||||
assert result == "completed"
|
||||
|
||||
|
||||
def test_flow_emits_method_execution_started_event():
|
||||
received_events = []
|
||||
lock = threading.Lock()
|
||||
second_event_received = threading.Event()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
|
||||
@crewai_event_bus.on(MethodExecutionStartedEvent)
|
||||
def handle_method_start(source, event):
|
||||
@crewai_event_bus.on(MethodExecutionStartedEvent)
|
||||
async def handle_method_start(source, event):
|
||||
with lock:
|
||||
received_events.append(event)
|
||||
if event.method_name == "second_method":
|
||||
second_event_received.set()
|
||||
|
||||
class TestFlow(Flow[dict]):
|
||||
@start()
|
||||
def begin(self):
|
||||
return "started"
|
||||
class TestFlow(Flow[dict]):
|
||||
@start()
|
||||
def begin(self):
|
||||
return "started"
|
||||
|
||||
@listen("begin")
|
||||
def second_method(self):
|
||||
return "executed"
|
||||
@listen("begin")
|
||||
def second_method(self):
|
||||
return "executed"
|
||||
|
||||
flow = TestFlow()
|
||||
flow.kickoff()
|
||||
flow = TestFlow()
|
||||
flow.kickoff()
|
||||
|
||||
assert len(received_events) == 2
|
||||
assert second_event_received.wait(timeout=5), (
|
||||
"Timeout waiting for second_method event"
|
||||
)
|
||||
assert len(received_events) == 2
|
||||
|
||||
assert received_events[0].method_name == "begin"
|
||||
assert received_events[0].flow_name == "TestFlow"
|
||||
assert received_events[0].type == "method_execution_started"
|
||||
# Events may arrive in any order due to async handlers, so check both are present
|
||||
method_names = {event.method_name for event in received_events}
|
||||
assert method_names == {"begin", "second_method"}
|
||||
|
||||
assert received_events[1].method_name == "second_method"
|
||||
assert received_events[1].flow_name == "TestFlow"
|
||||
assert received_events[1].type == "method_execution_started"
|
||||
for event in received_events:
|
||||
assert event.flow_name == "TestFlow"
|
||||
assert event.type == "method_execution_started"
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_register_handler_adds_new_handler(base_agent, base_task):
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
def custom_handler(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
crewai_event_bus.register_handler(CrewKickoffStartedEvent, custom_handler)
|
||||
crewai_event_bus.register_handler(CrewKickoffStartedEvent, custom_handler)
|
||||
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
crew.kickoff()
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
crew.kickoff()
|
||||
|
||||
assert len(received_events) == 1
|
||||
assert isinstance(received_events[0].timestamp, datetime)
|
||||
assert received_events[0].type == "crew_kickoff_started"
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for handler event"
|
||||
assert len(received_events) == 1
|
||||
assert isinstance(received_events[0].timestamp, datetime)
|
||||
assert received_events[0].type == "crew_kickoff_started"
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_multiple_handlers_for_same_event(base_agent, base_task):
|
||||
received_events_1 = []
|
||||
received_events_2 = []
|
||||
event_received = threading.Event()
|
||||
|
||||
def handler_1(source, event):
|
||||
received_events_1.append(event)
|
||||
|
||||
def handler_2(source, event):
|
||||
received_events_2.append(event)
|
||||
event_received.set()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
crewai_event_bus.register_handler(CrewKickoffStartedEvent, handler_1)
|
||||
crewai_event_bus.register_handler(CrewKickoffStartedEvent, handler_2)
|
||||
crewai_event_bus.register_handler(CrewKickoffStartedEvent, handler_1)
|
||||
crewai_event_bus.register_handler(CrewKickoffStartedEvent, handler_2)
|
||||
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
crew.kickoff()
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task], name="TestCrew")
|
||||
crew.kickoff()
|
||||
|
||||
assert len(received_events_1) == 1
|
||||
assert len(received_events_2) == 1
|
||||
assert received_events_1[0].type == "crew_kickoff_started"
|
||||
assert received_events_2[0].type == "crew_kickoff_started"
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for handler events"
|
||||
assert len(received_events_1) == 1
|
||||
assert len(received_events_2) == 1
|
||||
assert received_events_1[0].type == "crew_kickoff_started"
|
||||
assert received_events_2[0].type == "crew_kickoff_started"
|
||||
|
||||
|
||||
def test_flow_emits_created_event():
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(FlowCreatedEvent)
|
||||
def handle_flow_created(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
class TestFlow(Flow[dict]):
|
||||
@start()
|
||||
@@ -598,6 +668,7 @@ def test_flow_emits_created_event():
|
||||
flow = TestFlow()
|
||||
flow.kickoff()
|
||||
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for flow created event"
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].flow_name == "TestFlow"
|
||||
assert received_events[0].type == "flow_created"
|
||||
@@ -605,11 +676,13 @@ def test_flow_emits_created_event():
|
||||
|
||||
def test_flow_emits_method_execution_failed_event():
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
error = Exception("Simulated method failure")
|
||||
|
||||
@crewai_event_bus.on(MethodExecutionFailedEvent)
|
||||
def handle_method_failed(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
class TestFlow(Flow[dict]):
|
||||
@start()
|
||||
@@ -620,6 +693,9 @@ def test_flow_emits_method_execution_failed_event():
|
||||
with pytest.raises(Exception): # noqa: B017
|
||||
flow.kickoff()
|
||||
|
||||
assert event_received.wait(timeout=5), (
|
||||
"Timeout waiting for method execution failed event"
|
||||
)
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].method_name == "begin"
|
||||
assert received_events[0].flow_name == "TestFlow"
|
||||
@@ -641,6 +717,7 @@ def test_llm_emits_call_started_event():
|
||||
|
||||
llm = LLM(model="gpt-4o-mini")
|
||||
llm.call("Hello, how are you?")
|
||||
wait_for_event_handlers()
|
||||
|
||||
assert len(received_events) == 2
|
||||
assert received_events[0].type == "llm_call_started"
|
||||
@@ -656,10 +733,12 @@ def test_llm_emits_call_started_event():
|
||||
@pytest.mark.isolated
|
||||
def test_llm_emits_call_failed_event():
|
||||
received_events = []
|
||||
event_received = threading.Event()
|
||||
|
||||
@crewai_event_bus.on(LLMCallFailedEvent)
|
||||
def handle_llm_call_failed(source, event):
|
||||
received_events.append(event)
|
||||
event_received.set()
|
||||
|
||||
error_message = "OpenAI API call failed: Simulated API failure"
|
||||
|
||||
@@ -673,6 +752,7 @@ def test_llm_emits_call_failed_event():
|
||||
llm.call("Hello, how are you?")
|
||||
|
||||
assert str(exc_info.value) == "Simulated API failure"
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for failed event"
|
||||
assert len(received_events) == 1
|
||||
assert received_events[0].type == "llm_call_failed"
|
||||
assert received_events[0].error == error_message
|
||||
@@ -686,24 +766,28 @@ def test_llm_emits_call_failed_event():
|
||||
def test_llm_emits_stream_chunk_events():
|
||||
"""Test that LLM emits stream chunk events when streaming is enabled."""
|
||||
received_chunks = []
|
||||
event_received = threading.Event()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_stream_chunk(source, event):
|
||||
received_chunks.append(event.chunk)
|
||||
if len(received_chunks) >= 1:
|
||||
event_received.set()
|
||||
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_stream_chunk(source, event):
|
||||
received_chunks.append(event.chunk)
|
||||
# Create an LLM with streaming enabled
|
||||
llm = LLM(model="gpt-4o", stream=True)
|
||||
|
||||
# Create an LLM with streaming enabled
|
||||
llm = LLM(model="gpt-4o", stream=True)
|
||||
# Call the LLM with a simple message
|
||||
response = llm.call("Tell me a short joke")
|
||||
|
||||
# Call the LLM with a simple message
|
||||
response = llm.call("Tell me a short joke")
|
||||
# Wait for at least one chunk
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for stream chunks"
|
||||
|
||||
# Verify that we received chunks
|
||||
assert len(received_chunks) > 0
|
||||
# Verify that we received chunks
|
||||
assert len(received_chunks) > 0
|
||||
|
||||
# Verify that concatenating all chunks equals the final response
|
||||
assert "".join(received_chunks) == response
|
||||
# Verify that concatenating all chunks equals the final response
|
||||
assert "".join(received_chunks) == response
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
@@ -711,23 +795,21 @@ def test_llm_no_stream_chunks_when_streaming_disabled():
|
||||
"""Test that LLM doesn't emit stream chunk events when streaming is disabled."""
|
||||
received_chunks = []
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_stream_chunk(source, event):
|
||||
received_chunks.append(event.chunk)
|
||||
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_stream_chunk(source, event):
|
||||
received_chunks.append(event.chunk)
|
||||
# Create an LLM with streaming disabled
|
||||
llm = LLM(model="gpt-4o", stream=False)
|
||||
|
||||
# Create an LLM with streaming disabled
|
||||
llm = LLM(model="gpt-4o", stream=False)
|
||||
# Call the LLM with a simple message
|
||||
response = llm.call("Tell me a short joke")
|
||||
|
||||
# Call the LLM with a simple message
|
||||
response = llm.call("Tell me a short joke")
|
||||
# Verify that we didn't receive any chunks
|
||||
assert len(received_chunks) == 0
|
||||
|
||||
# Verify that we didn't receive any chunks
|
||||
assert len(received_chunks) == 0
|
||||
|
||||
# Verify we got a response
|
||||
assert response and isinstance(response, str)
|
||||
# Verify we got a response
|
||||
assert response and isinstance(response, str)
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
@@ -735,98 +817,105 @@ def test_streaming_fallback_to_non_streaming():
|
||||
"""Test that streaming falls back to non-streaming when there's an error."""
|
||||
received_chunks = []
|
||||
fallback_called = False
|
||||
event_received = threading.Event()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_stream_chunk(source, event):
|
||||
received_chunks.append(event.chunk)
|
||||
if len(received_chunks) >= 2:
|
||||
event_received.set()
|
||||
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_stream_chunk(source, event):
|
||||
received_chunks.append(event.chunk)
|
||||
# Create an LLM with streaming enabled
|
||||
llm = LLM(model="gpt-4o", stream=True)
|
||||
|
||||
# Create an LLM with streaming enabled
|
||||
llm = LLM(model="gpt-4o", stream=True)
|
||||
# Store original methods
|
||||
original_call = llm.call
|
||||
|
||||
# Store original methods
|
||||
original_call = llm.call
|
||||
# Create a mock call method that handles the streaming error
|
||||
def mock_call(messages, tools=None, callbacks=None, available_functions=None):
|
||||
nonlocal fallback_called
|
||||
# Emit a couple of chunks to simulate partial streaming
|
||||
crewai_event_bus.emit(llm, event=LLMStreamChunkEvent(chunk="Test chunk 1"))
|
||||
crewai_event_bus.emit(llm, event=LLMStreamChunkEvent(chunk="Test chunk 2"))
|
||||
|
||||
# Create a mock call method that handles the streaming error
|
||||
def mock_call(messages, tools=None, callbacks=None, available_functions=None):
|
||||
nonlocal fallback_called
|
||||
# Emit a couple of chunks to simulate partial streaming
|
||||
crewai_event_bus.emit(llm, event=LLMStreamChunkEvent(chunk="Test chunk 1"))
|
||||
crewai_event_bus.emit(llm, event=LLMStreamChunkEvent(chunk="Test chunk 2"))
|
||||
# Mark that fallback would be called
|
||||
fallback_called = True
|
||||
|
||||
# Mark that fallback would be called
|
||||
fallback_called = True
|
||||
# Return a response as if fallback succeeded
|
||||
return "Fallback response after streaming error"
|
||||
|
||||
# Return a response as if fallback succeeded
|
||||
return "Fallback response after streaming error"
|
||||
# Replace the call method with our mock
|
||||
llm.call = mock_call
|
||||
|
||||
# Replace the call method with our mock
|
||||
llm.call = mock_call
|
||||
try:
|
||||
# Call the LLM
|
||||
response = llm.call("Tell me a short joke")
|
||||
wait_for_event_handlers()
|
||||
|
||||
try:
|
||||
# Call the LLM
|
||||
response = llm.call("Tell me a short joke")
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for stream chunks"
|
||||
|
||||
# Verify that we received some chunks
|
||||
assert len(received_chunks) == 2
|
||||
assert received_chunks[0] == "Test chunk 1"
|
||||
assert received_chunks[1] == "Test chunk 2"
|
||||
# Verify that we received some chunks
|
||||
assert len(received_chunks) == 2
|
||||
assert received_chunks[0] == "Test chunk 1"
|
||||
assert received_chunks[1] == "Test chunk 2"
|
||||
|
||||
# Verify fallback was triggered
|
||||
assert fallback_called
|
||||
# Verify fallback was triggered
|
||||
assert fallback_called
|
||||
|
||||
# Verify we got the fallback response
|
||||
assert response == "Fallback response after streaming error"
|
||||
# Verify we got the fallback response
|
||||
assert response == "Fallback response after streaming error"
|
||||
|
||||
finally:
|
||||
# Restore the original method
|
||||
llm.call = original_call
|
||||
finally:
|
||||
# Restore the original method
|
||||
llm.call = original_call
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_streaming_empty_response_handling():
|
||||
"""Test that streaming handles empty responses correctly."""
|
||||
received_chunks = []
|
||||
event_received = threading.Event()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_stream_chunk(source, event):
|
||||
received_chunks.append(event.chunk)
|
||||
if len(received_chunks) >= 3:
|
||||
event_received.set()
|
||||
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_stream_chunk(source, event):
|
||||
received_chunks.append(event.chunk)
|
||||
# Create an LLM with streaming enabled
|
||||
llm = LLM(model="gpt-3.5-turbo", stream=True)
|
||||
|
||||
# Create an LLM with streaming enabled
|
||||
llm = LLM(model="gpt-3.5-turbo", stream=True)
|
||||
# Store original methods
|
||||
original_call = llm.call
|
||||
|
||||
# Store original methods
|
||||
original_call = llm.call
|
||||
# Create a mock call method that simulates empty chunks
|
||||
def mock_call(messages, tools=None, callbacks=None, available_functions=None):
|
||||
# Emit a few empty chunks
|
||||
for _ in range(3):
|
||||
crewai_event_bus.emit(llm, event=LLMStreamChunkEvent(chunk=""))
|
||||
|
||||
# Create a mock call method that simulates empty chunks
|
||||
def mock_call(messages, tools=None, callbacks=None, available_functions=None):
|
||||
# Emit a few empty chunks
|
||||
for _ in range(3):
|
||||
crewai_event_bus.emit(llm, event=LLMStreamChunkEvent(chunk=""))
|
||||
# Return the default message for empty responses
|
||||
return "I apologize, but I couldn't generate a proper response. Please try again or rephrase your request."
|
||||
|
||||
# Return the default message for empty responses
|
||||
return "I apologize, but I couldn't generate a proper response. Please try again or rephrase your request."
|
||||
# Replace the call method with our mock
|
||||
llm.call = mock_call
|
||||
|
||||
# Replace the call method with our mock
|
||||
llm.call = mock_call
|
||||
try:
|
||||
# Call the LLM - this should handle empty response
|
||||
response = llm.call("Tell me a short joke")
|
||||
|
||||
try:
|
||||
# Call the LLM - this should handle empty response
|
||||
response = llm.call("Tell me a short joke")
|
||||
assert event_received.wait(timeout=5), "Timeout waiting for empty chunks"
|
||||
|
||||
# Verify that we received empty chunks
|
||||
assert len(received_chunks) == 3
|
||||
assert all(chunk == "" for chunk in received_chunks)
|
||||
# Verify that we received empty chunks
|
||||
assert len(received_chunks) == 3
|
||||
assert all(chunk == "" for chunk in received_chunks)
|
||||
|
||||
# Verify the response is the default message for empty responses
|
||||
assert "I apologize" in response and "couldn't generate" in response
|
||||
# Verify the response is the default message for empty responses
|
||||
assert "I apologize" in response and "couldn't generate" in response
|
||||
|
||||
finally:
|
||||
# Restore the original method
|
||||
llm.call = original_call
|
||||
finally:
|
||||
# Restore the original method
|
||||
llm.call = original_call
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
@@ -835,41 +924,49 @@ def test_stream_llm_emits_event_with_task_and_agent_info():
|
||||
failed_event = []
|
||||
started_event = []
|
||||
stream_event = []
|
||||
event_received = threading.Event()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
@crewai_event_bus.on(LLMCallFailedEvent)
|
||||
def handle_llm_failed(source, event):
|
||||
failed_event.append(event)
|
||||
|
||||
@crewai_event_bus.on(LLMCallFailedEvent)
|
||||
def handle_llm_failed(source, event):
|
||||
failed_event.append(event)
|
||||
@crewai_event_bus.on(LLMCallStartedEvent)
|
||||
def handle_llm_started(source, event):
|
||||
started_event.append(event)
|
||||
|
||||
@crewai_event_bus.on(LLMCallStartedEvent)
|
||||
def handle_llm_started(source, event):
|
||||
started_event.append(event)
|
||||
@crewai_event_bus.on(LLMCallCompletedEvent)
|
||||
def handle_llm_completed(source, event):
|
||||
completed_event.append(event)
|
||||
if len(started_event) >= 1 and len(stream_event) >= 12:
|
||||
event_received.set()
|
||||
|
||||
@crewai_event_bus.on(LLMCallCompletedEvent)
|
||||
def handle_llm_completed(source, event):
|
||||
completed_event.append(event)
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_llm_stream_chunk(source, event):
|
||||
stream_event.append(event)
|
||||
if (
|
||||
len(completed_event) >= 1
|
||||
and len(started_event) >= 1
|
||||
and len(stream_event) >= 12
|
||||
):
|
||||
event_received.set()
|
||||
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_llm_stream_chunk(source, event):
|
||||
stream_event.append(event)
|
||||
agent = Agent(
|
||||
role="TestAgent",
|
||||
llm=LLM(model="gpt-4o-mini", stream=True),
|
||||
goal="Just say hi",
|
||||
backstory="You are a helpful assistant that just says hi",
|
||||
)
|
||||
task = Task(
|
||||
description="Just say hi",
|
||||
expected_output="hi",
|
||||
llm=LLM(model="gpt-4o-mini", stream=True),
|
||||
agent=agent,
|
||||
)
|
||||
|
||||
agent = Agent(
|
||||
role="TestAgent",
|
||||
llm=LLM(model="gpt-4o-mini", stream=True),
|
||||
goal="Just say hi",
|
||||
backstory="You are a helpful assistant that just says hi",
|
||||
)
|
||||
task = Task(
|
||||
description="Just say hi",
|
||||
expected_output="hi",
|
||||
llm=LLM(model="gpt-4o-mini", stream=True),
|
||||
agent=agent,
|
||||
)
|
||||
|
||||
crew = Crew(agents=[agent], tasks=[task])
|
||||
crew.kickoff()
|
||||
crew = Crew(agents=[agent], tasks=[task])
|
||||
crew.kickoff()
|
||||
|
||||
assert event_received.wait(timeout=10), "Timeout waiting for LLM events"
|
||||
assert len(completed_event) == 1
|
||||
assert len(failed_event) == 0
|
||||
assert len(started_event) == 1
|
||||
@@ -899,28 +996,30 @@ def test_llm_emits_event_with_task_and_agent_info(base_agent, base_task):
|
||||
failed_event = []
|
||||
started_event = []
|
||||
stream_event = []
|
||||
event_received = threading.Event()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
@crewai_event_bus.on(LLMCallFailedEvent)
|
||||
def handle_llm_failed(source, event):
|
||||
failed_event.append(event)
|
||||
|
||||
@crewai_event_bus.on(LLMCallFailedEvent)
|
||||
def handle_llm_failed(source, event):
|
||||
failed_event.append(event)
|
||||
@crewai_event_bus.on(LLMCallStartedEvent)
|
||||
def handle_llm_started(source, event):
|
||||
started_event.append(event)
|
||||
|
||||
@crewai_event_bus.on(LLMCallStartedEvent)
|
||||
def handle_llm_started(source, event):
|
||||
started_event.append(event)
|
||||
@crewai_event_bus.on(LLMCallCompletedEvent)
|
||||
def handle_llm_completed(source, event):
|
||||
completed_event.append(event)
|
||||
if len(started_event) >= 1:
|
||||
event_received.set()
|
||||
|
||||
@crewai_event_bus.on(LLMCallCompletedEvent)
|
||||
def handle_llm_completed(source, event):
|
||||
completed_event.append(event)
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_llm_stream_chunk(source, event):
|
||||
stream_event.append(event)
|
||||
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_llm_stream_chunk(source, event):
|
||||
stream_event.append(event)
|
||||
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task])
|
||||
crew.kickoff()
|
||||
crew = Crew(agents=[base_agent], tasks=[base_task])
|
||||
crew.kickoff()
|
||||
|
||||
assert event_received.wait(timeout=10), "Timeout waiting for LLM events"
|
||||
assert len(completed_event) == 1
|
||||
assert len(failed_event) == 0
|
||||
assert len(started_event) == 1
|
||||
@@ -950,32 +1049,41 @@ def test_llm_emits_event_with_lite_agent():
|
||||
failed_event = []
|
||||
started_event = []
|
||||
stream_event = []
|
||||
all_events_received = threading.Event()
|
||||
|
||||
with crewai_event_bus.scoped_handlers():
|
||||
@crewai_event_bus.on(LLMCallFailedEvent)
|
||||
def handle_llm_failed(source, event):
|
||||
failed_event.append(event)
|
||||
|
||||
@crewai_event_bus.on(LLMCallFailedEvent)
|
||||
def handle_llm_failed(source, event):
|
||||
failed_event.append(event)
|
||||
@crewai_event_bus.on(LLMCallStartedEvent)
|
||||
def handle_llm_started(source, event):
|
||||
started_event.append(event)
|
||||
|
||||
@crewai_event_bus.on(LLMCallStartedEvent)
|
||||
def handle_llm_started(source, event):
|
||||
started_event.append(event)
|
||||
@crewai_event_bus.on(LLMCallCompletedEvent)
|
||||
def handle_llm_completed(source, event):
|
||||
completed_event.append(event)
|
||||
if len(started_event) >= 1 and len(stream_event) >= 15:
|
||||
all_events_received.set()
|
||||
|
||||
@crewai_event_bus.on(LLMCallCompletedEvent)
|
||||
def handle_llm_completed(source, event):
|
||||
completed_event.append(event)
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_llm_stream_chunk(source, event):
|
||||
stream_event.append(event)
|
||||
if (
|
||||
len(completed_event) >= 1
|
||||
and len(started_event) >= 1
|
||||
and len(stream_event) >= 15
|
||||
):
|
||||
all_events_received.set()
|
||||
|
||||
@crewai_event_bus.on(LLMStreamChunkEvent)
|
||||
def handle_llm_stream_chunk(source, event):
|
||||
stream_event.append(event)
|
||||
agent = Agent(
|
||||
role="Speaker",
|
||||
llm=LLM(model="gpt-4o-mini", stream=True),
|
||||
goal="Just say hi",
|
||||
backstory="You are a helpful assistant that just says hi",
|
||||
)
|
||||
agent.kickoff(messages=[{"role": "user", "content": "say hi!"}])
|
||||
|
||||
agent = Agent(
|
||||
role="Speaker",
|
||||
llm=LLM(model="gpt-4o-mini", stream=True),
|
||||
goal="Just say hi",
|
||||
backstory="You are a helpful assistant that just says hi",
|
||||
)
|
||||
agent.kickoff(messages=[{"role": "user", "content": "say hi!"}])
|
||||
assert all_events_received.wait(timeout=10), "Timeout waiting for all events"
|
||||
|
||||
assert len(completed_event) == 1
|
||||
assert len(failed_event) == 0
|
||||
|
||||
Reference in New Issue
Block a user