Compare commits

...

3 Commits

Author      SHA1          Message                                                                           Date
Devin AI    81c8dd4e1c    Fix import sorting in integration tests                                           2025-03-13 08:48:50 +00:00
Devin AI    997462db6d    Add Azure credential validation and error handling                                2025-03-13 08:46:58 +00:00
Devin AI    1145900b39    Fix Azure OpenAI authentication for models without azure/ prefix (fixes #2358)   2025-03-13 08:41:04 +00:00

All three commits are Co-Authored-By: Joe Moura <joao@crewai.com>
4 changed files with 175 additions and 4 deletions
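Taken together, the commits change how the LLM class selects a provider: a model configured with Azure credentials no longer needs the azure/ prefix in its name to be routed to Azure. Below is a minimal sketch of the intended behavior, mirroring the tests added in this diff; the top-level import path and the placeholder credential values are assumptions, not taken from the diff itself.

from crewai import LLM  # top-level export assumed; the new tests import from src.crewai.llm

# Azure credentials supplied explicitly, but the model name carries no "azure/" prefix.
llm = LLM(
    model="gpt-4o-mini-2024-07-18",
    api_key="<azure-api-key>",
    api_base="<azure-endpoint>",
    api_version="<api-version>",
)

# With this change the provider is inferred from the credentials, so the request
# is handed to LiteLLM as an Azure call instead of falling back to the OpenAI default.
assert llm._get_custom_llm_provider() == "azure"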

src/crewai/llm.py

@@ -829,22 +829,57 @@ class LLM:
        Derives the custom_llm_provider from the model string.
        - For example, if the model is "openrouter/deepseek/deepseek-chat", returns "openrouter".
        - If the model is "gemini/gemini-1.5-pro", returns "gemini".
        - If there is no '/', defaults to "openai".
        - If the model is "azure/gpt-4", returns "azure".
        - If Azure-specific parameters are provided (api_key, api_base, api_version), returns "azure".
        - If there is no '/' and no Azure parameters, returns None.
        """
        # Check if model explicitly has a provider prefix
        if "/" in self.model:
            return self.model.split("/")[0]
        # Check if all Azure parameters are present
        if self.api_key and self.api_base and self.api_version:
            return "azure"
        return None

    def _validate_azure_credentials(self) -> bool:
        """
        Validates that all required Azure credentials are present and of the correct type.

        Returns:
            bool: True if all Azure credentials are valid, False otherwise.
        """
        return bool(
            self.api_key
            and self.api_base
            and self.api_version
            and all(isinstance(x, str) for x in [self.api_key, self.api_base, self.api_version])
        )

    def _validate_call_params(self) -> None:
        """
        Validate parameters before making a call. Currently this only checks if
        a response_format is provided and whether the model supports it.
        Validate parameters before making a call.

        The custom_llm_provider is dynamically determined from the model:
        - E.g., "openrouter/deepseek/deepseek-chat" yields "openrouter"
        - "gemini/gemini-1.5-pro" yields "gemini"
        - If no slash is present, "openai" is assumed.
        - "azure/gpt-4" yields "azure"
        - If Azure parameters (api_key, api_base, api_version) are present, "azure" is used
        - If no slash is present and no Azure parameters, None is returned

        Raises:
            ValueError: If response_format is not supported by the model or if Azure credentials are incomplete.
        """
        provider = self._get_custom_llm_provider()

        # Validate Azure credentials if provider is Azure
        if provider == "azure" and not self._validate_azure_credentials():
            raise ValueError(
                "Incomplete Azure credentials. Please provide api_key, api_base, and api_version as strings."
            )

        # Validate response_format
        if self.response_format is not None and not supports_response_schema(
            model=self.model,
            custom_llm_provider=provider,

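The other side of the change is the early failure mode added in _validate_call_params above: when the Azure route is selected but the credentials are incomplete, the call is rejected before anything is sent to LiteLLM. A small sketch of that path, under the same assumptions as the example above (placeholder values, top-level import assumed):

from crewai import LLM  # top-level export assumed

llm = LLM(
    model="azure/gpt-4",
    api_key="<azure-api-key>",
    # api_base and api_version intentionally omitted
)

try:
    llm._validate_call_params()
except ValueError as exc:
    # Expected message: "Incomplete Azure credentials. Please provide api_key, api_base, and api_version as strings."
    print(exc)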
test_azure_integration.py (new file)

@@ -0,0 +1,37 @@
# test_azure_integration.py
from src.crewai.llm import LLM
# Test with Azure parameters but without azure/ prefix
llm = LLM(
api_key='test_key',
api_base='test_base',
model='gpt-4o-mini-2024-07-18',
api_version='test_version'
)
# Print the detected provider
provider = llm._get_custom_llm_provider()
print(f"Detected provider: {provider}")
print(f"Is Azure detected correctly: {provider == 'azure'}")
# Prepare parameters that would be passed to LiteLLM
params = llm._prepare_completion_params(messages=[{"role": "user", "content": "test"}])
print(f"Parameters passed to LiteLLM: {params}")
# Test with Azure parameters and with azure/ prefix for comparison
llm_with_prefix = LLM(
api_key='test_key',
api_base='test_base',
model='azure/gpt-4o-mini-2024-07-18',
api_version='test_version'
)
# Print the detected provider
provider_with_prefix = llm_with_prefix._get_custom_llm_provider()
print(f"\nWith azure/ prefix:")
print(f"Detected provider: {provider_with_prefix}")
print(f"Is Azure detected correctly: {provider_with_prefix == 'azure'}")
# Prepare parameters that would be passed to LiteLLM
params_with_prefix = llm_with_prefix._prepare_completion_params(messages=[{"role": "user", "content": "test"}])
print(f"Parameters passed to LiteLLM: {params_with_prefix}")

New test file

@@ -0,0 +1,40 @@
import pytest
from src.crewai.llm import LLM


def test_azure_detection_with_credentials():
    """Test that Azure is detected correctly when credentials are provided but model lacks azure/ prefix."""
    # Create LLM instance with Azure parameters but without azure/ prefix
    llm = LLM(
        api_key='test_key',
        api_base='test_base',
        model='gpt-4o-mini-2024-07-18',  # Model from issue #2358
        api_version='test_version'
    )
    # Check if provider is detected correctly
    provider = llm._get_custom_llm_provider()
    assert provider == 'azure', "Azure provider should be detected based on credentials"
    # Prepare parameters that would be passed to LiteLLM
    params = llm._prepare_completion_params(messages=[{"role": "user", "content": "test"}])
    assert params.get('api_key') == 'test_key', "API key should be included in params"
    assert params.get('api_base') == 'test_base', "API base should be included in params"
    assert params.get('api_version') == 'test_version', "API version should be included in params"


def test_azure_validation_error():
    """Test that validation error is raised when Azure credentials are incomplete."""
    # Create LLM instance with incomplete Azure parameters
    llm = LLM(
        model='azure/gpt-4',
        api_key='test_key',
        # Missing api_base and api_version
    )
    # Validation should fail
    with pytest.raises(ValueError) as excinfo:
        llm._validate_call_params()
    assert "Incomplete Azure credentials" in str(excinfo.value)

Existing test file

@@ -221,6 +221,20 @@ def test_get_custom_llm_provider_openai():
    llm = LLM(model="gpt-4")
    assert llm._get_custom_llm_provider() == None


def test_get_custom_llm_provider_azure_with_prefix():
    llm = LLM(model="azure/gpt-4")
    assert llm._get_custom_llm_provider() == "azure"


def test_get_custom_llm_provider_azure_without_prefix():
    llm = LLM(
        model="gpt-4",
        api_key="test_key",
        api_base="test_base",
        api_version="test_version"
    )
    assert llm._get_custom_llm_provider() == "azure"


def test_validate_call_params_supported():
    class DummyResponse(BaseModel):

@@ -253,6 +267,51 @@ def test_validate_call_params_no_response_format():
    llm._validate_call_params()


def test_validate_azure_credentials_valid():
    llm = LLM(
        model="gpt-4",
        api_key="test_key",
        api_base="test_base",
        api_version="test_version"
    )
    assert llm._validate_azure_credentials() == True


def test_validate_azure_credentials_invalid():
    # Missing api_version
    llm = LLM(
        model="gpt-4",
        api_key="test_key",
        api_base="test_base"
    )
    assert llm._validate_azure_credentials() == False

    # Non-string value
    llm = LLM(
        model="gpt-4",
        api_key="test_key",
        api_base="test_base",
        api_version=123  # Not a string
    )
    assert llm._validate_azure_credentials() == False


def test_validate_call_params_azure_invalid():
    # Test with incomplete Azure credentials
    llm = LLM(
        model="azure/gpt-4",
        api_key="test_key",
        # Missing api_base and api_version
    )
    # Should raise ValueError due to incomplete credentials
    with pytest.raises(ValueError) as excinfo:
        llm._validate_call_params()
    # Check error message
    assert "Incomplete Azure credentials" in str(excinfo.value)


@pytest.mark.vcr(filter_headers=["authorization"])
def test_o3_mini_reasoning_effort_high():
    llm = LLM(