Update LLM imports and type hints across multiple files

- Modify imports in crew_chat.py to use LLM instead of BaseLLM
- Update type hints in llm_utils.py to use LLM type
- Add optional `stop` parameter to BaseLLM initialization
- Refactor type handling for LLM creation and usage
Author: Lorenze Jay
Date: 2025-03-11 16:01:10 -07:00
Parent: a40abbf490
Commit: 7cb3c8bb4b
3 changed files with 14 additions and 7 deletions

crew_chat.py (View File)

@@ -14,7 +14,7 @@ from packaging import version
 from crewai.cli.utils import read_toml
 from crewai.cli.version import get_crewai_version
 from crewai.crew import Crew
-from crewai.llm import BaseLLM
+from crewai.llm import LLM
 from crewai.types.crew_chat import ChatInputField, ChatInputs
 from crewai.utilities.llm_utils import create_llm
@@ -116,7 +116,7 @@ def show_loading(event: threading.Event):
     print()
-def initialize_chat_llm(crew: Crew) -> Optional[BaseLLM]:
+def initialize_chat_llm(crew: Crew) -> Optional[LLM]:
     """Initializes the chat LLM and handles exceptions."""
     try:
         return create_llm(crew.chat_llm)
@@ -220,7 +220,7 @@ def get_user_input() -> str:
 def handle_user_input(
     user_input: str,
-    chat_llm: BaseLLM,
+    chat_llm: LLM,
     messages: List[Dict[str, str]],
     crew_tool_schema: Dict[str, Any],
     available_functions: Dict[str, Any],
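
For context, a hedged sketch of how the narrowed annotation reads at a call site. It assumes crew_chat.py is importable as crewai.cli.crew_chat and that the crew passed in already has a chat_llm configured; none of this code is part of the commit.

from typing import Optional

from crewai.cli.crew_chat import initialize_chat_llm  # assumed module path
from crewai.crew import Crew
from crewai.llm import LLM


def start_chat(crew: Crew) -> None:
    # After this commit the helper is annotated as returning Optional[LLM]
    # rather than Optional[BaseLLM], so type checkers see the concrete LLM type.
    chat_llm: Optional[LLM] = initialize_chat_llm(crew)
    if chat_llm is None:
        print("No chat LLM could be created for this crew.")
        return
    # LLM.call accepts a plain prompt string as well as a message list.
    print(chat_llm.call("Introduce yourself to the user."))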

BaseLLM definition (View File)

@@ -21,8 +21,14 @@ class BaseLLM(ABC):
     model: str
     temperature: Optional[float] = None
+    stop: Optional[Union[str, List[str]]] = None
-    def __init__(self, model: str, temperature: Optional[float] = None):
+    def __init__(
+        self,
+        model: str,
+        temperature: Optional[float] = None,
+        stop: Optional[Union[str, List[str]]] = None,
+    ):
         """Initialize the BaseLLM with default attributes.
         This constructor sets default values for attributes that are expected
@@ -33,6 +39,7 @@ class BaseLLM(ABC):
         """
         self.model = model
         self.temperature = temperature
+        self.stop = stop
     @abstractmethod
     def call(
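
To show how the widened constructor is meant to be used, here is a minimal sketch of a custom subclass that forwards the new optional stop parameter. The EchoLLM name, its trivial call body, and the defensively implemented helper methods are illustrative assumptions, not part of this commit.

from typing import Any, Dict, List, Optional, Union

from crewai.llm import BaseLLM


class EchoLLM(BaseLLM):
    """Toy subclass used only to illustrate the widened constructor."""

    def __init__(
        self,
        model: str,
        temperature: Optional[float] = None,
        stop: Optional[Union[str, List[str]]] = None,
    ):
        # Forward stop so BaseLLM.__init__ stores it as self.stop.
        super().__init__(model=model, temperature=temperature, stop=stop)

    def call(self, messages: Union[str, List[Dict[str, str]]], **kwargs: Any) -> str:
        # A real implementation would pass self.stop to its provider API;
        # echoing the input keeps the sketch self-contained.
        return f"[{self.model}] {messages}"

    # Implemented defensively in case these are abstract in this version of BaseLLM.
    def supports_function_calling(self) -> bool:
        return False

    def supports_stop_words(self) -> bool:
        return self.stop is not None

    def get_context_window_size(self) -> int:
        return 4096


llm = EchoLLM(model="echo-1", temperature=0.0, stop=["\nObservation:"])
print(llm.stop)  # ["\nObservation:"], stored by the updated BaseLLM.__init__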

llm_utils.py (View File)

@@ -6,8 +6,8 @@ from crewai.llm import LLM, BaseLLM
 def create_llm(
-    llm_value: Union[str, BaseLLM, LLM, Any, None] = None,
-) -> Optional[BaseLLM | LLM]:
+    llm_value: Union[str, LLM, Any, None] = None,
+) -> Optional[LLM]:
     """
     Creates or returns an LLM instance based on the given llm_value.
@@ -23,7 +23,7 @@ def create_llm(
     """
     # 1) If llm_value is already a BaseLLM object, return it directly
-    if isinstance(llm_value, BaseLLM):
+    if isinstance(llm_value, LLM):
         return llm_value
     # 2) If llm_value is a string (model name)
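
A hedged usage sketch of the two branches referenced above; the model name is a placeholder and none of this code is part of the commit.

from crewai.llm import LLM
from crewai.utilities.llm_utils import create_llm

# Branch 1: an existing LLM instance is returned unchanged.
existing = LLM(model="gpt-4o-mini")  # placeholder model name
assert create_llm(existing) is existing

# Branch 2: a plain string is treated as a model name and wrapped in a new LLM.
created = create_llm("gpt-4o-mini")
assert isinstance(created, LLM)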