Mirror of https://github.com/crewAIInc/crewAI.git (synced 2026-01-27 17:18:13 +00:00)
Update AISuiteLLM and LLM utility type handling
- Modify AISuiteLLM to support more flexible input types for messages
- Update type hints in AISuiteLLM to allow a string or a list of message dictionaries
- Enhance the LLM utility function to support broader LLM type annotations
- Remove the default `self.stop` attribute from BaseLLM initialization
@@ -31,7 +31,6 @@ class BaseLLM(ABC):
         All custom LLM implementations should call super().__init__() to ensure
         that these default attributes are properly initialized.
         """
-        self.stop = []
         self.model = model
         self.temperature = temperature

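Note: with the default `self.stop = []` gone from BaseLLM.__init__, a custom implementation that relies on stop sequences has to define them itself. The snippet below is a minimal sketch of such a subclass; the EchoLLM name, the constructor arguments, and the assumption that call() is the only abstract method are illustrative, not taken from this diff.

from typing import Any, Dict, List, Optional, Union

from crewai.llm import BaseLLM  # import path as shown in the create_llm hunk below


class EchoLLM(BaseLLM):
    """Illustrative custom LLM that simply echoes the last user message."""

    def __init__(self, model: str, temperature: Optional[float] = None):
        # super().__init__() still initializes model and temperature,
        # but no longer provides a default `stop` list.
        super().__init__(model=model, temperature=temperature)
        self.stop: List[str] = []  # define it here if your implementation needs it

    def call(
        self,
        messages: Union[str, List[Dict[str, str]]],
        **kwargs: Any,
    ) -> str:
        if isinstance(messages, str):
            return messages
        return messages[-1]["content"]
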
src/crewai/llms/third_party/ai_suite.py (vendored): 8 lines changed
@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Union

 import aisuite as ai

@@ -12,11 +12,11 @@ class AISuiteLLM(BaseLLM):

     def call(
         self,
-        messages: List[Dict[str, str]],
+        messages: Union[str, List[Dict[str, str]]],
         tools: Optional[List[dict]] = None,
         callbacks: Optional[List[Any]] = None,
         available_functions: Optional[Dict[str, Any]] = None,
-    ) -> str:
+    ) -> Union[str, Any]:
         completion_params = self._prepare_completion_params(messages)
         # print(f"Completion params: {completion_params}")
         response = self.client.chat.completions.create(**completion_params)
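Note: after this change call() accepts either a plain prompt string or a list of chat messages. The usage sketch below is hypothetical; the AISuiteLLM constructor arguments and the model identifier are assumptions, since the diff does not show them.

from crewai.llms.third_party.ai_suite import AISuiteLLM

llm = AISuiteLLM(model="openai:gpt-4o")  # constructor signature assumed

# Both message forms are now valid arguments for call():
answer_from_string = llm.call("Summarize this repository in one sentence.")
answer_from_list = llm.call(
    [{"role": "user", "content": "Summarize this repository in one sentence."}]
)
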
@@ -26,7 +26,7 @@ class AISuiteLLM(BaseLLM):
         return response.choices[0].message.content

     def _prepare_completion_params(
-        self, messages: List[Dict[str, str]]
+        self, messages: Union[str, List[Dict[str, str]]]
     ) -> Dict[str, Any]:
         print(f"Preparing completion params for {self.model}")
         # print(f"Messages: {messages}")
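Note: the body of _prepare_completion_params is not part of this diff, so the following is only a plausible sketch of how the widened messages type might be handled: a bare string is wrapped into a single user message before the request parameters are assembled.

def _prepare_completion_params(
    self, messages: Union[str, List[Dict[str, str]]]
) -> Dict[str, Any]:
    # Sketch only; the real implementation is not shown in this diff.
    if isinstance(messages, str):
        messages = [{"role": "user", "content": messages}]
    params: Dict[str, Any] = {"model": self.model, "messages": messages}
    if self.temperature is not None:
        params["temperature"] = self.temperature
    return params
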
@@ -6,8 +6,8 @@ from crewai.llm import LLM, BaseLLM


 def create_llm(
-    llm_value: Union[str, BaseLLM, Any, None] = None,
+    llm_value: Union[str, BaseLLM, LLM, Any, None] = None,
-) -> Optional[BaseLLM]:
+) -> Optional[BaseLLM | LLM]:
     """
     Creates or returns an LLM instance based on the given llm_value.

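Note: with the widened annotations, create_llm can be fed a model string, an existing BaseLLM or LLM instance, or None. The calls below are a hypothetical usage sketch; the module path crewai.utilities.llm_utils and the fallback behavior for None are assumptions, not shown in this diff.

from crewai.llm import LLM
from crewai.utilities.llm_utils import create_llm  # module path assumed

llm_from_name = create_llm("gpt-4o")               # string -> new LLM instance (assumed)
llm_passthrough = create_llm(LLM(model="gpt-4o"))  # existing LLM passed through (assumed)
llm_default = create_llm(None)                     # falls back to a default model (assumed)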