Ensure @start methods emit MethodExecutionStartedEvent (#2114)

Previously, `@start` methods triggered a `FlowStartedEvent` but did not
emit a `MethodExecutionStartedEvent`. With a single entry point this was
harmless, but when a flow defined multiple `@start` methods, event
consumers could not tell which start method was actually executing.

This commit (1) emits method execution events for `@start` methods by
centralizing emission in `_execute_method`, (2) adds tests that assert
event ordering, and (3) adds more fields to the events (`inputs`,
`params`, `state`, `result`).
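
For illustration only (not part of this commit): a minimal sketch of the behaviour this enables, assuming `Flow` and `start` are importable from `crewai.flow.flow` and that `event_emitter` is a blinker-style signal on the flow, as the diff below suggests.

```python
# Sketch only; assumes the crewai.flow.flow import path and a
# blinker-style event_emitter, as suggested by the diff below.
from crewai.flow.flow import Flow, start


class TwoEntryFlow(Flow):
    @start()
    def begin_a(self):
        return "a"

    @start()
    def begin_b(self):
        return "b"


seen = []


def on_event(sender, event, **kwargs):
    # The emitter passes the event object as a keyword argument.
    seen.append((event.type, getattr(event, "method_name", None)))


flow = TwoEntryFlow()
flow.event_emitter.connect(on_event)
flow.kickoff()

# With this change each @start method produces its own
# method_execution_started/finished pair, so begin_a and begin_b
# are distinguishable in `seen`.
print(seen)
```

Because `params` and `state` are now carried on each `MethodExecutionStartedEvent`, a consumer like `on_event` can also inspect the flow state at the moment a method started.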
Author:    Vini Brasil
Date:      2025-02-12 14:19:41 -06:00
Committer: GitHub
Parent:    2fd7506ed9
Commit:    d6800d8957

3 changed files with 259 additions and 22 deletions


@@ -1,4 +1,5 @@
 import asyncio
+import copy
 import inspect
 import logging
 from typing import (
@@ -394,7 +395,6 @@ class FlowMeta(type):
                 or hasattr(attr_value, "__trigger_methods__")
                 or hasattr(attr_value, "__is_router__")
             ):
                 # Register start methods
                 if hasattr(attr_value, "__is_start_method__"):
                     start_methods.append(attr_name)
@@ -569,6 +569,9 @@ class Flow(Generic[T], metaclass=FlowMeta):
                 f"Initial state must be dict or BaseModel, got {type(self.initial_state)}"
             )
 
+    def _copy_state(self) -> T:
+        return copy.deepcopy(self._state)
+
     @property
     def state(self) -> T:
         return self._state
@@ -740,6 +743,7 @@ class Flow(Generic[T], metaclass=FlowMeta):
             event=FlowStartedEvent(
                 type="flow_started",
                 flow_name=self.__class__.__name__,
+                inputs=inputs,
             ),
         )
         self._log_flow_event(
@@ -803,6 +807,18 @@ class Flow(Generic[T], metaclass=FlowMeta):
     async def _execute_method(
         self, method_name: str, method: Callable, *args: Any, **kwargs: Any
     ) -> Any:
+        dumped_params = {f"_{i}": arg for i, arg in enumerate(args)} | (kwargs or {})
+        self.event_emitter.send(
+            self,
+            event=MethodExecutionStartedEvent(
+                type="method_execution_started",
+                method_name=method_name,
+                flow_name=self.__class__.__name__,
+                params=dumped_params,
+                state=self._copy_state(),
+            ),
+        )
+
         result = (
             await method(*args, **kwargs)
             if asyncio.iscoroutinefunction(method)
@@ -812,6 +828,18 @@ class Flow(Generic[T], metaclass=FlowMeta):
         self._method_execution_counts[method_name] = (
             self._method_execution_counts.get(method_name, 0) + 1
         )
+
+        self.event_emitter.send(
+            self,
+            event=MethodExecutionFinishedEvent(
+                type="method_execution_finished",
+                method_name=method_name,
+                flow_name=self.__class__.__name__,
+                state=self._copy_state(),
+                result=result,
+            ),
+        )
+
         return result
 
     async def _execute_listeners(self, trigger_method: str, result: Any) -> None:
@@ -950,16 +978,6 @@ class Flow(Generic[T], metaclass=FlowMeta):
         """
         try:
             method = self._methods[listener_name]
-
-            self.event_emitter.send(
-                self,
-                event=MethodExecutionStartedEvent(
-                    type="method_execution_started",
-                    method_name=listener_name,
-                    flow_name=self.__class__.__name__,
-                ),
-            )
-
             sig = inspect.signature(method)
             params = list(sig.parameters.values())
             method_params = [p for p in params if p.name != "self"]
@@ -971,15 +989,6 @@ class Flow(Generic[T], metaclass=FlowMeta):
             else:
                 listener_result = await self._execute_method(listener_name, method)
 
-            self.event_emitter.send(
-                self,
-                event=MethodExecutionFinishedEvent(
-                    type="method_execution_finished",
-                    method_name=listener_name,
-                    flow_name=self.__class__.__name__,
-                ),
-            )
-
             # Execute listeners (and possibly routers) of this listener
             await self._execute_listeners(listener_name, listener_result)
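
The test file referenced by "3 changed files" is not shown above. As a rough, hypothetical sketch of the kind of ordering assertion the commit message describes (not the repository's actual test code, and assuming the same import path and emitter behaviour as in the sketch above):

```python
# Hypothetical sketch of an ordering check; not the commit's actual tests.
from crewai.flow.flow import Flow, start


class TwoStartFlow(Flow):
    @start()
    def first(self):
        return 1

    @start()
    def second(self):
        return 2


def test_start_methods_emit_started_events():
    events = []

    def capture(sender, event, **kwargs):
        events.append(event)

    flow = TwoStartFlow()
    flow.event_emitter.connect(capture)
    flow.kickoff()

    types = [e.type for e in events]
    # flow_started is emitted before any method execution event
    assert types[0] == "flow_started"

    started = {e.method_name for e in events if e.type == "method_execution_started"}
    # Both @start methods now emit a MethodExecutionStartedEvent
    assert {"first", "second"} <= started
```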