chore: modernize LLM interface typing and add constants (#3483)

* chore: update LLM interfaces to Python 3.10+ typing

* fix: add missing stop attribute to mock LLM and improve test infrastructure

* fix: correct type ignore comment for aisuite import
Greyson LaLonde authored 2025-09-10 08:30:49 -04:00, committed by GitHub
parent 6676d94ba1
commit 83682d511f
5 changed files with 126 additions and 58 deletions
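
In practice, the modernization swaps typing-module generics (Optional, Union, Dict, List) for PEP 604 unions and builtin generics, which require Python 3.10+. A minimal before/after sketch of the pattern applied throughout the diff below; the function names are illustrative:

from typing import Any, Dict, List, Optional, Union

# Before: typing-module generics
def call_old(
    messages: Union[str, List[Dict[str, str]]],
    tools: Optional[List[dict]] = None,
) -> Union[str, Any]: ...

# After: builtin generics and | unions (Python 3.10+)
def call_new(
    messages: str | list[dict[str, str]],
    tools: list[dict] | None = None,
) -> str | Any: ...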

View File

@@ -1,5 +1,14 @@
+"""Base LLM abstract class for CrewAI.
+
+This module provides the abstract base class for all LLM implementations
+in CrewAI.
+"""
+
 from abc import ABC, abstractmethod
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Final
+
+DEFAULT_CONTEXT_WINDOW_SIZE: Final[int] = 4096
+DEFAULT_SUPPORTS_STOP_WORDS: Final[bool] = True
 
 
 class BaseLLM(ABC):
@@ -15,41 +24,38 @@ class BaseLLM(ABC):
     messages when things go wrong.
 
     Attributes:
-        stop (list): A list of stop sequences that the LLM should use to stop generation.
-            This is used by the CrewAgentExecutor and other components.
+        model: The model identifier/name.
+        temperature: Optional temperature setting for response generation.
+        stop: A list of stop sequences that the LLM should use to stop generation.
     """
 
-    model: str
-    temperature: Optional[float] = None
-    stop: Optional[List[str]] = None
-
     def __init__(
         self,
         model: str,
-        temperature: Optional[float] = None,
-    ):
+        temperature: float | None = None,
+        stop: list[str] | None = None,
+    ) -> None:
         """Initialize the BaseLLM with default attributes.
 
         This constructor sets default values for attributes that are expected
         by the CrewAgentExecutor and other components.
 
         All custom LLM implementations should call super().__init__() to ensure
         that these default attributes are properly initialized.
 
         Args:
             model: The model identifier/name.
             temperature: Optional temperature setting for response generation.
+            stop: Optional list of stop sequences for generation.
         """
         self.model = model
         self.temperature = temperature
-        self.stop = []
+        self.stop: list[str] = stop or []
 
     @abstractmethod
     def call(
         self,
-        messages: Union[str, List[Dict[str, str]]],
-        tools: Optional[List[dict]] = None,
-        callbacks: Optional[List[Any]] = None,
-        available_functions: Optional[Dict[str, Any]] = None,
-        from_task: Optional[Any] = None,
-        from_agent: Optional[Any] = None,
-    ) -> Union[str, Any]:
+        messages: str | list[dict[str, str]],
+        tools: list[dict] | None = None,
+        callbacks: list[Any] | None = None,
+        available_functions: dict[str, Any] | None = None,
+        from_task: Any | None = None,
+        from_agent: Any | None = None,
+    ) -> str | Any:
         """Call the LLM with the given messages.
 
         Args:
@@ -64,6 +70,7 @@ class BaseLLM(ABC):
             available_functions: Optional dict mapping function names to callables
                 that can be invoked by the LLM.
             from_task: Optional task caller to be used for the LLM call.
+            from_agent: Optional agent caller to be used for the LLM call.
 
         Returns:
             Either a text response from the LLM (str) or
@@ -74,21 +81,20 @@ class BaseLLM(ABC):
             TimeoutError: If the LLM request times out.
             RuntimeError: If the LLM request fails for other reasons.
         """
         pass
 
     def supports_stop_words(self) -> bool:
         """Check if the LLM supports stop words.
 
         Returns:
-            bool: True if the LLM supports stop words, False otherwise.
+            True if the LLM supports stop words, False otherwise.
         """
-        return True  # Default implementation assumes support for stop words
+        return DEFAULT_SUPPORTS_STOP_WORDS
 
     def get_context_window_size(self) -> int:
         """Get the context window size for the LLM.
 
         Returns:
-            int: The number of tokens/characters the model can handle.
+            The number of tokens/characters the model can handle.
         """
-        # Default implementation - subclasses should override with model-specific values
-        return 4096
+        return DEFAULT_CONTEXT_WINDOW_SIZE
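
With the changes above, any implementation that routes through super().__init__() gets the stop attribute initialized consistently. A minimal sketch of a custom LLM against the updated interface; the EchoLLM class and its echo behavior are illustrative, not part of this commit:

from typing import Any

from crewai.llms.base_llm import BaseLLM


class EchoLLM(BaseLLM):
    """Toy LLM that echoes the last user message (illustrative only)."""

    def __init__(self, model: str = "echo", stop: list[str] | None = None) -> None:
        # super().__init__() initializes model, temperature, and the stop
        # attribute that CrewAgentExecutor reads.
        super().__init__(model=model, temperature=None, stop=stop)

    def call(
        self,
        messages: str | list[dict[str, str]],
        tools: list[dict] | None = None,
        callbacks: list[Any] | None = None,
        available_functions: dict[str, Any] | None = None,
        from_task: Any | None = None,
        from_agent: Any | None = None,
    ) -> str | Any:
        # Echo the prompt back; a real implementation would call a provider here.
        if isinstance(messages, str):
            return messages
        return messages[-1]["content"]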

View File

@@ -1,24 +1,62 @@
-from typing import Any, Dict, List, Optional, Union
+"""AI Suite LLM integration for CrewAI.
+
+This module provides integration with AI Suite for LLM capabilities.
+"""
+
+from typing import Any
 
-import aisuite as ai
+import aisuite as ai  # type: ignore
 
 from crewai.llms.base_llm import BaseLLM
 
 
 class AISuiteLLM(BaseLLM):
-    def __init__(self, model: str, temperature: Optional[float] = None, **kwargs):
-        super().__init__(model, temperature, **kwargs)
+    """AI Suite LLM implementation.
+
+    This class provides integration with AI Suite models through the BaseLLM interface.
+    """
+
+    def __init__(
+        self,
+        model: str,
+        temperature: float | None = None,
+        stop: list[str] | None = None,
+        **kwargs: Any,
+    ) -> None:
+        """Initialize the AI Suite LLM.
+
+        Args:
+            model: The model identifier for AI Suite.
+            temperature: Optional temperature setting for response generation.
+            stop: Optional list of stop sequences for generation.
+            **kwargs: Additional keyword arguments passed to the AI Suite client.
+        """
+        super().__init__(model, temperature, stop)
         self.client = ai.Client()
         self.kwargs = kwargs
 
     def call(
         self,
-        messages: Union[str, List[Dict[str, str]]],
-        tools: Optional[List[dict]] = None,
-        callbacks: Optional[List[Any]] = None,
-        available_functions: Optional[Dict[str, Any]] = None,
-        from_task: Optional[Any] = None,
-        from_agent: Optional[Any] = None,
-    ) -> Union[str, Any]:
+        messages: str | list[dict[str, str]],
+        tools: list[dict] | None = None,
+        callbacks: list[Any] | None = None,
+        available_functions: dict[str, Any] | None = None,
+        from_task: Any | None = None,
+        from_agent: Any | None = None,
+    ) -> str | Any:
+        """Call the AI Suite LLM with the given messages.
+
+        Args:
+            messages: Input messages for the LLM.
+            tools: Optional list of tool schemas for function calling.
+            callbacks: Optional list of callback functions.
+            available_functions: Optional dict mapping function names to callables.
+            from_task: Optional task caller.
+            from_agent: Optional agent caller.
+
+        Returns:
+            The text response from the LLM.
+        """
         completion_params = self._prepare_completion_params(messages, tools)
         response = self.client.chat.completions.create(**completion_params)
@@ -26,15 +64,35 @@ class AISuiteLLM(BaseLLM):
     def _prepare_completion_params(
         self,
-        messages: Union[str, List[Dict[str, str]]],
-        tools: Optional[List[dict]] = None,
-    ) -> Dict[str, Any]:
-        return {
+        messages: str | list[dict[str, str]],
+        tools: list[dict] | None = None,
+    ) -> dict[str, Any]:
+        """Prepare parameters for the AI Suite completion call.
+
+        Args:
+            messages: Input messages for the LLM.
+            tools: Optional list of tool schemas.
+
+        Returns:
+            Dictionary of parameters for the completion API.
+        """
+        params: dict[str, Any] = {
             "model": self.model,
             "messages": messages,
             "temperature": self.temperature,
             "tools": tools,
             **self.kwargs,
         }
+        if self.stop:
+            params["stop"] = self.stop
+        return params
 
     def supports_function_calling(self) -> bool:
+        """Check if the LLM supports function calling.
+
+        Returns:
+            False, as AI Suite does not currently support function calling.
+        """
         return False
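
A short usage sketch of the new stop plumbing; the provider-prefixed model id, temperature, and prompt are placeholders, and aisuite needs provider credentials configured:

# Illustrative usage; values below are not from this commit.
llm = AISuiteLLM(model="openai:gpt-4o", temperature=0.2, stop=["Observation:"])

# stop is forwarded into the completion params only when it is non-empty.
params = llm._prepare_completion_params([{"role": "user", "content": "Say hello."}])
assert params["stop"] == ["Observation:"]

print(llm.call([{"role": "user", "content": "Say hello."}]))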