Add AISuite LLM support and update dependencies

- Integrate AISuite as a new third-party LLM option
- Update pyproject.toml and uv.lock to include aisuite package
- Modify BaseLLM to support more flexible initialization
- Replace direct LLM imports with the BaseLLM interface across multiple files
- Implement AISuiteLLM with basic chat completion functionality

Author: Lorenze Jay
Date:   2025-03-11 15:48:49 -07:00
Parent: 0cece5fd59
Commit: 25c64ae86d

8 changed files with 95 additions and 9 deletions


@@ -64,6 +64,9 @@ mem0 = ["mem0ai>=0.1.29"]
 docling = [
     "docling>=2.12.0",
 ]
+aisuite = [
+    "aisuite>=0.1.10",
+]
 [tool.uv]
 dev-dependencies = [
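
Since the integration ships as an optional extra, downstream code should not assume aisuite is importable. A hedged sketch of a guard (the helper below is illustrative, not part of this commit):

    # Illustrative guard for the optional "aisuite" extra; not part of this commit.
    try:
        import aisuite
    except ImportError:
        aisuite = None

    def require_aisuite() -> None:
        """Raise a clear error when the optional dependency is missing."""
        if aisuite is None:
            raise ImportError(
                "AISuite support requires installing crewai with the 'aisuite' extra."
            )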


@@ -11,7 +11,7 @@ from crewai.agents.crew_agent_executor import CrewAgentExecutor
 from crewai.knowledge.knowledge import Knowledge
 from crewai.knowledge.source.base_knowledge_source import BaseKnowledgeSource
 from crewai.knowledge.utils.knowledge_utils import extract_knowledge_context
-from crewai.llm import LLM, BaseLLM
+from crewai.llm import BaseLLM
 from crewai.memory.contextual.contextual_memory import ContextualMemory
 from crewai.task import Task
 from crewai.tools import BaseTool
@@ -117,7 +117,9 @@ class Agent(BaseAgent):
         self.agent_ops_agent_name = self.role
         self.llm = create_llm(self.llm)
-        if self.function_calling_llm and not isinstance(self.function_calling_llm, BaseLLM):
+        if self.function_calling_llm and not isinstance(
+            self.function_calling_llm, BaseLLM
+        ):
             self.function_calling_llm = create_llm(self.function_calling_llm)
         if not self.agent_executor:
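
The reformatted check only routes function_calling_llm through create_llm when it is not already a BaseLLM, so a plain model string and a custom BaseLLM instance should both be accepted. A rough sketch (field values are examples only):

    # Sketch: a model string is coerced by create_llm; a BaseLLM instance is kept as-is.
    from crewai import Agent

    agent = Agent(
        role="Researcher",
        goal="Summarize findings",
        backstory="Keeps answers short.",
        function_calling_llm="gpt-4o-mini",  # example model name, wrapped by create_llm
    )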


@@ -13,7 +13,7 @@ from crewai.agents.parser import (
     OutputParserException,
 )
 from crewai.agents.tools_handler import ToolsHandler
-from crewai.llm import LLM
+from crewai.llm import BaseLLM
 from crewai.tools.base_tool import BaseTool
 from crewai.tools.tool_usage import ToolUsage, ToolUsageErrorException
 from crewai.utilities import I18N, Printer
@@ -61,7 +61,7 @@ class CrewAgentExecutor(CrewAgentExecutorMixin):
         callbacks: List[Any] = [],
     ):
         self._i18n: I18N = I18N()
-        self.llm: LLM = llm
+        self.llm: BaseLLM = llm
         self.task = task
         self.agent = agent
         self.crew = crew


@@ -14,7 +14,7 @@ from packaging import version
 from crewai.cli.utils import read_toml
 from crewai.cli.version import get_crewai_version
 from crewai.crew import Crew
-from crewai.llm import LLM, BaseLLM
+from crewai.llm import BaseLLM
 from crewai.types.crew_chat import ChatInputField, ChatInputs
 from crewai.utilities.llm_utils import create_llm


@@ -19,7 +19,10 @@ class BaseLLM(ABC):
     This is used by the CrewAgentExecutor and other components.
     """
 
+    model: str
+    temperature: Optional[float] = None
+
-    def __init__(self):
+    def __init__(self, model: str, temperature: Optional[float] = None):
         """Initialize the BaseLLM with default attributes.
 
         This constructor sets default values for attributes that are expected
@@ -29,6 +32,8 @@ class BaseLLM(ABC):
         that these default attributes are properly initialized.
         """
         self.stop = []
+        self.model = model
+        self.temperature = temperature
 
     @abstractmethod
     def call(
@@ -102,3 +107,13 @@ class BaseLLM(ABC):
             The context window size as an integer.
         """
         pass
+
+    @abstractmethod
+    def set_callbacks(self, callbacks: List[Any]) -> None:
+        """Set callback functions for the LLM.
+
+        Args:
+            callbacks: List of callback functions to be executed during
+                and after LLM calls.
+        """
+        pass
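
Taken together, the BaseLLM changes mean a custom provider now forwards model and temperature to the base constructor and implements set_callbacks alongside the other abstract methods. A minimal conforming subclass, sketched with purely illustrative echo behaviour:

    # Minimal BaseLLM subclass; the echo behaviour is illustrative only.
    from typing import Any, Dict, List, Optional

    from crewai.llms.base_llm import BaseLLM

    class EchoLLM(BaseLLM):
        def __init__(self, model: str = "echo", temperature: Optional[float] = None):
            super().__init__(model=model, temperature=temperature)

        def call(
            self,
            messages: List[Dict[str, str]],
            tools: Optional[List[dict]] = None,
            callbacks: Optional[List[Any]] = None,
            available_functions: Optional[Dict[str, Any]] = None,
        ) -> str:
            # Echo the last message instead of contacting a provider.
            return messages[-1]["content"]

        def supports_function_calling(self) -> bool:
            return False

        def supports_stop_words(self) -> bool:
            return False

        def get_context_window_size(self) -> int:
            return 4096

        def set_callbacks(self, callbacks: List[Any]) -> None:
            self._callbacks = callbacks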

src/crewai/llms/third_party/ai_suite.py (new file, +50)

@@ -0,0 +1,50 @@
+from typing import Any, Dict, List, Optional
+
+import aisuite as ai
+
+from crewai.llms.base_llm import BaseLLM
+
+
+class AISuiteLLM(BaseLLM):
+    def __init__(self, model: str, temperature: Optional[float] = None, **kwargs):
+        super().__init__(model, temperature, **kwargs)
+        self.client = ai.Client()
+
+    def call(
+        self,
+        messages: List[Dict[str, str]],
+        tools: Optional[List[dict]] = None,
+        callbacks: Optional[List[Any]] = None,
+        available_functions: Optional[Dict[str, Any]] = None,
+    ) -> str:
+        completion_params = self._prepare_completion_params(messages)
+        # print(f"Completion params: {completion_params}")
+        response = self.client.chat.completions.create(**completion_params)
+        print(f"Response: {response}")
+
+        tool_calls = getattr(response.choices[0].message, "tool_calls", [])
+        print(f"Tool calls: {tool_calls}")
+
+        return response.choices[0].message.content
+
+    def _prepare_completion_params(
+        self, messages: List[Dict[str, str]]
+    ) -> Dict[str, Any]:
+        print(f"Preparing completion params for {self.model}")
+        # print(f"Messages: {messages}")
+        print(f"Temperature: {self.temperature}")
+        return {
+            "model": self.model,
+            "messages": messages,
+            "temperature": self.temperature,
+        }
+
+    def supports_function_calling(self) -> bool:
+        return False
+
+    def supports_stop_words(self) -> bool:
+        return False
+
+    def get_context_window_size(self):
+        pass
+
+    def set_callbacks(self, callbacks: List[Any]) -> None:
+        pass
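
A quick usage sketch for the new class. aisuite addresses models as "provider:model" strings; the identifier and agent fields below are examples only, and a provider API key is assumed to be configured in the environment:

    # Usage sketch; model id and agent fields are examples only.
    from crewai import Agent
    from crewai.llms.third_party.ai_suite import AISuiteLLM

    llm = AISuiteLLM(model="openai:gpt-4o", temperature=0.2)

    # Direct call with chat-style messages.
    answer = llm.call([{"role": "user", "content": "Say hello in one word."}])

    # Or hand it to an agent like any other BaseLLM.
    agent = Agent(
        role="Assistant",
        goal="Answer questions briefly",
        backstory="A terse helper.",
        llm=llm,
    )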


@@ -6,7 +6,7 @@ from rich.console import Console
 from rich.table import Table
 
 from crewai.agent import Agent
-from crewai.llm import LLM
+from crewai.llm import BaseLLM
 from crewai.task import Task
 from crewai.tasks.task_output import TaskOutput
 from crewai.telemetry import Telemetry
@@ -24,7 +24,7 @@ class CrewEvaluator:
     Attributes:
         crew (Crew): The crew of agents to evaluate.
-        eval_llm (LLM): Language model instance to use for evaluations
+        eval_llm (BaseLLM): Language model instance to use for evaluations
         tasks_scores (defaultdict): A dictionary to store the scores of the agents for each task.
         iteration (int): The current iteration of the evaluation.
     """
@@ -33,7 +33,7 @@ class CrewEvaluator:
     run_execution_times: defaultdict = defaultdict(list)
     iteration: int = 0
 
-    def __init__(self, crew, eval_llm: InstanceOf[LLM]):
+    def __init__(self, crew, eval_llm: InstanceOf[BaseLLM]):
         self.crew = crew
         self.llm = eval_llm
         self._telemetry = Telemetry()
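
Because the evaluator now only requires a BaseLLM, the judge model can be any conforming implementation, including the new AISuite one. A sketch, with the CrewEvaluator import path and model id treated as assumptions:

    # Sketch: any BaseLLM can act as the evaluation judge.
    # Import path and model id are assumptions, not confirmed by this commit.
    from crewai.llms.third_party.ai_suite import AISuiteLLM
    from crewai.utilities.evaluators.crew_evaluator_handler import CrewEvaluator

    eval_llm = AISuiteLLM(model="openai:gpt-4o-mini")
    evaluator = CrewEvaluator(crew, eval_llm)  # `crew` is an existing Crew instance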

uv.lock (generated, +16)

@@ -139,6 +139,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/76/ac/a7305707cb852b7e16ff80eaf5692309bde30e2b1100a1fcacdc8f731d97/aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17", size = 7617 }, { url = "https://files.pythonhosted.org/packages/76/ac/a7305707cb852b7e16ff80eaf5692309bde30e2b1100a1fcacdc8f731d97/aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17", size = 7617 },
] ]
[[package]]
name = "aisuite"
version = "0.1.10"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6a/9d/c7a8a76abb9011dd2bc9a5cb8ffa8231640e20bbdae177ce9ab6cb67c66c/aisuite-0.1.10.tar.gz", hash = "sha256:170e62d4c91fecb22e82a04e058154a111cef473681171e5df7346272e77f414", size = 29052 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/c2/9a34a01516de107e5f9406dbfd319b6004340708101d67fa107373da4058/aisuite-0.1.10-py3-none-any.whl", hash = "sha256:c8510ebe38d6546b6a06819171e201fcaf0bf9ae020ffcfe19b6bd90430781ad", size = 43984 },
]
[[package]] [[package]]
name = "alembic" name = "alembic"
version = "1.13.3" version = "1.13.3"
@@ -651,6 +663,9 @@ dependencies = [
 agentops = [
     { name = "agentops" },
 ]
+aisuite = [
+    { name = "aisuite" },
+]
 docling = [
     { name = "docling" },
 ]
@@ -698,6 +713,7 @@ dev = [
 [package.metadata]
 requires-dist = [
     { name = "agentops", marker = "extra == 'agentops'", specifier = ">=0.3.0" },
+    { name = "aisuite", marker = "extra == 'aisuite'", specifier = ">=0.1.10" },
     { name = "appdirs", specifier = ">=1.4.4" },
     { name = "auth0-python", specifier = ">=4.7.1" },
     { name = "blinker", specifier = ">=1.9.0" },