diff --git a/src/crewai/llm.py b/src/crewai/llm.py
index 44dbe5186..085a0abdc 100644
--- a/src/crewai/llm.py
+++ b/src/crewai/llm.py
@@ -151,32 +151,6 @@ class LLM:
         self.set_callbacks(callbacks)
         self.set_env_callbacks()
 
-    def to_dict(self) -> dict:
-        """
-        Return a dict of all relevant parameters for serialization.
-        """
-        return {
-            "model": self.model,
-            "timeout": self.timeout,
-            "temperature": self.temperature,
-            "top_p": self.top_p,
-            "n": self.n,
-            "stop": self.stop,
-            "max_completion_tokens": self.max_completion_tokens,
-            "max_tokens": self.max_tokens,
-            "presence_penalty": self.presence_penalty,
-            "frequency_penalty": self.frequency_penalty,
-            "logit_bias": self.logit_bias,
-            "response_format": self.response_format,
-            "seed": self.seed,
-            "logprobs": self.logprobs,
-            "top_logprobs": self.top_logprobs,
-            "base_url": self.base_url,
-            "api_version": self.api_version,
-            "api_key": self.api_key,
-            "callbacks": self.callbacks,
-        }
-
     def call(
         self,
         messages: List[Dict[str, str]],
diff --git a/src/crewai/utilities/llm_utils.py b/src/crewai/utilities/llm_utils.py
index f2d09a667..d26d8fde9 100644
--- a/src/crewai/utilities/llm_utils.py
+++ b/src/crewai/utilities/llm_utils.py
@@ -74,35 +74,6 @@ def create_llm(
     return None
 
 
-def create_chat_llm() -> Optional[LLM]:
-    """
-    Creates a Chat LLM with additional checks, such as verifying crewAI version
-    or reading from pyproject.toml. Then calls `create_llm(None, default_model)`.
-
-    Args:
-        default_model (str): Fallback model if not set in environment.
-
-    Returns:
-        An instance of LLM or None if instantiation fails.
-    """
-    print("[create_chat_llm] Checking environment and version info...")
-
-    crewai_version = get_crewai_version()
-    min_required_version = "0.87.0"  # Update to latest if needed
-
-    pyproject_data = read_toml()
-    if pyproject_data.get("tool", {}).get("poetry") and (
-        version.parse(crewai_version) < version.parse(min_required_version)
-    ):
-        print(
-            f"You are running an older version of crewAI ({crewai_version}) that uses poetry.\n"
-            "Please run `crewai update` to switch to uv-based builds."
-        )
-
-    # After checks, simply call create_llm with None (meaning "use env or fallback"):
-    return create_llm(None)
-
-
 def _llm_via_environment_or_fallback() -> Optional[LLM]:
     """
     Helper function: if llm_value is None, we load environment variables or fallback default model.