From 0173a3ceaf4e2c9ed7522f40fffb99ec5cf26132 Mon Sep 17 00:00:00 2001
From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Date: Wed, 30 Apr 2025 12:07:57 +0000
Subject: [PATCH] Fix segmentation fault in Python 3.11 tests by improving
 provider comparison and replacing logging with print

Co-Authored-By: Joe Moura
---
 src/crewai/llm.py | 18 ++++++++----------
 1 file changed, 8 insertions(+), 10 deletions(-)

diff --git a/src/crewai/llm.py b/src/crewai/llm.py
index d14f6710c..949871f60 100644
--- a/src/crewai/llm.py
+++ b/src/crewai/llm.py
@@ -1031,25 +1031,23 @@ class LLM(BaseLLM):
             return
 
         provider = self._get_custom_llm_provider()
-        provider_lower = provider.lower() if provider else ""
 
         # Check if we're bypassing validation for OpenRouter
-        is_openrouter_bypass = (
-            provider_lower == OPENROUTER_PROVIDER.lower() and self.force_structured_output
-        )
+        is_openrouter = provider and provider.lower() == OPENROUTER_PROVIDER.lower()
+        is_openrouter_bypass = is_openrouter and self.force_structured_output
 
-        if is_openrouter_bypass:
-            logging.warning(
-                f"Forcing structured output for OpenRouter model {self.model}. "
-                "Please ensure the model supports the expected response format."
-            )
-
         # Check if the model supports response schema
         is_schema_supported = supports_response_schema(
             model=self.model,
             custom_llm_provider=provider,
         )
 
+        if is_openrouter_bypass:
+            print(
+                f"Warning: Forcing structured output for OpenRouter model {self.model}. "
+                "Please ensure the model supports the expected response format."
+            )
+
         if not (is_schema_supported or is_openrouter_bypass):
             raise ValueError(
                 f"The model {self.model} does not support response_format for provider '{provider}'. "
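
Note (not part of the patch): below is a minimal standalone sketch of the
None-safe provider check this hunk introduces. The expression
"provider and provider.lower() == ..." short-circuits when provider is None
or empty, so .lower() is never called on None, matching what the removed
provider_lower line guaranteed but without the intermediate variable.
OPENROUTER_PROVIDER mirrors the constant name in src/crewai/llm.py, though
its value here is assumed; the free function and asserts are illustrative
only, not code from the repository.

    # Assumed value; only the constant's name appears in the patch.
    OPENROUTER_PROVIDER = "openrouter"

    def is_openrouter_bypass(provider: str | None, force_structured_output: bool) -> bool:
        # Short-circuit: if provider is None or "", .lower() is never reached,
        # avoiding the AttributeError the old code guarded against.
        is_openrouter = bool(provider and provider.lower() == OPENROUTER_PROVIDER.lower())
        return is_openrouter and force_structured_output

    assert is_openrouter_bypass(None, True) is False          # no AttributeError on None
    assert is_openrouter_bypass("OpenRouter", True) is True   # case-insensitive match
    assert is_openrouter_bypass("openrouter", False) is False # bypass also needs the flag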