feat: add capability to track LLM calls by task and agent (#3087)

* feat: add capability to track LLM calls by task and agent

This makes it possible to filter or scope LLM events by specific agents or tasks, which can be very useful for debugging or analytics in real-time applications

* feat: add docs about LLM tracking by Agents and Tasks

* fix incompatible BaseLLM.call method signature

* feat: support filtering LLM events from Lite Agent
This commit is contained in:
Lucas Gomide
2025-07-01 10:30:16 -03:00
committed by GitHub
parent af9c01f5d3
commit b7bf15681e
14 changed files with 788 additions and 44 deletions

View File

@@ -1,5 +1,5 @@
from abc import ABC, abstractmethod
from typing import Any, Callable, Dict, List, Optional, Union
from typing import Any, Dict, List, Optional, Union
class BaseLLM(ABC):
@@ -47,6 +47,8 @@ class BaseLLM(ABC):
tools: Optional[List[dict]] = None,
callbacks: Optional[List[Any]] = None,
available_functions: Optional[Dict[str, Any]] = None,
from_task: Optional[Any] = None,
from_agent: Optional[Any] = None,
) -> Union[str, Any]:
"""Call the LLM with the given messages.
@@ -61,6 +63,7 @@ class BaseLLM(ABC):
during and after the LLM call.
available_functions: Optional dict mapping function names to callables
that can be invoked by the LLM.
from_task: Optional task caller to be used for the LLM call.
Returns:
Either a text response from the LLM (str) or

View File

@@ -16,6 +16,8 @@ class AISuiteLLM(BaseLLM):
tools: Optional[List[dict]] = None,
callbacks: Optional[List[Any]] = None,
available_functions: Optional[Dict[str, Any]] = None,
from_task: Optional[Any] = None,
from_agent: Optional[Any] = None,
) -> Union[str, Any]:
completion_params = self._prepare_completion_params(messages, tools)
response = self.client.chat.completions.create(**completion_params)