Fix type-checker errors and linting issues

Co-Authored-By: Joe Moura <joao@crewai.com>
This commit is contained in:
Devin AI
2025-02-25 14:00:02 +00:00
parent e8d61d32db
commit c956588586
2 changed files with 10 additions and 3 deletions

View File

@@ -230,7 +230,10 @@ class LLM:
     def supports_function_calling(self) -> bool:
         try:
-            params = get_supported_openai_params(model=self.model)
+            # Handle None model case
+            if self.model is None:
+                return False
+            params = get_supported_openai_params(model=str(self.model))
             return "response_format" in params
         except Exception as e:
             logging.error(f"Failed to get supported params: {str(e)}")
@@ -238,7 +241,10 @@ class LLM:
     def supports_stop_words(self) -> bool:
         try:
-            params = get_supported_openai_params(model=self.model)
+            # Handle None model case
+            if self.model is None:
+                return False
+            params = get_supported_openai_params(model=str(self.model))
             return "stop" in params
         except Exception as e:
             logging.error(f"Failed to get supported params: {str(e)}")

View File

@@ -1,6 +1,7 @@
-from crewai.llm import LLM
 import pytest
+from crewai.llm import LLM

 @pytest.mark.parametrize(
     "invalid_model,error_message",