mirror of
https://github.com/crewAIInc/crewAI.git
synced 2026-04-17 16:32:36 +00:00
Fix Azure OpenAI authentication for models without azure/ prefix (fixes #2358)
Co-Authored-By: Joe Moura <joao@crewai.com>
This commit is contained in:
@@ -829,10 +829,18 @@ class LLM:
|
||||
Derives the custom_llm_provider from the model string.
|
||||
- For example, if the model is "openrouter/deepseek/deepseek-chat", returns "openrouter".
|
||||
- If the model is "gemini/gemini-1.5-pro", returns "gemini".
|
||||
- If there is no '/', defaults to "openai".
|
||||
- If the model is "azure/gpt-4", returns "azure".
|
||||
- If Azure-specific parameters are provided (api_key, api_base, api_version), returns "azure".
|
||||
- If there is no '/' and no Azure parameters, returns None.
|
||||
"""
|
||||
# Check if model explicitly has a provider prefix
|
||||
if "/" in self.model:
|
||||
return self.model.split("/")[0]
|
||||
|
||||
# Check if all Azure parameters are present
|
||||
if self.api_key and self.api_base and self.api_version:
|
||||
return "azure"
|
||||
|
||||
return None
|
||||
|
||||
def _validate_call_params(self) -> None:
|
||||
@@ -842,7 +850,9 @@ class LLM:
|
||||
The custom_llm_provider is dynamically determined from the model:
|
||||
- E.g., "openrouter/deepseek/deepseek-chat" yields "openrouter"
|
||||
- "gemini/gemini-1.5-pro" yields "gemini"
|
||||
- If no slash is present, "openai" is assumed.
|
||||
- "azure/gpt-4" yields "azure"
|
||||
- If Azure parameters (api_key, api_base, api_version) are present, "azure" is used
|
||||
- If no slash is present and no Azure parameters, None is returned
|
||||
"""
|
||||
provider = self._get_custom_llm_provider()
|
||||
if self.response_format is not None and not supports_response_schema(
|
||||
|
||||
37
test_azure_integration.py
Normal file
37
test_azure_integration.py
Normal file
@@ -0,0 +1,37 @@
|
||||
# test_azure_integration.py
#
# Manual smoke script: verifies that Azure provider detection works both
# with and without the "azure/" model prefix, and shows the parameters
# that would be forwarded to LiteLLM in each case.
from src.crewai.llm import LLM

# Scenario 1: Azure credentials supplied, but the model name carries no
# "azure/" prefix — detection must fall back to the credential check.
plain_client = LLM(
    model='gpt-4o-mini-2024-07-18',
    api_key='test_key',
    api_base='test_base',
    api_version='test_version',
)

detected = plain_client._get_custom_llm_provider()
print(f"Detected provider: {detected}")
print(f"Is Azure detected correctly: {detected == 'azure'}")

# Show what would actually be handed to LiteLLM for this configuration.
plain_params = plain_client._prepare_completion_params(
    messages=[{"role": "user", "content": "test"}]
)
print(f"Parameters passed to LiteLLM: {plain_params}")

# Scenario 2: same credentials, but the model name is explicitly
# prefixed with "azure/" — detection should come from the prefix.
prefixed_client = LLM(
    model='azure/gpt-4o-mini-2024-07-18',
    api_key='test_key',
    api_base='test_base',
    api_version='test_version',
)

prefixed_detected = prefixed_client._get_custom_llm_provider()
print(f"\nWith azure/ prefix:")
print(f"Detected provider: {prefixed_detected}")
print(f"Is Azure detected correctly: {prefixed_detected == 'azure'}")

prefixed_params = prefixed_client._prepare_completion_params(
    messages=[{"role": "user", "content": "test"}]
)
print(f"Parameters passed to LiteLLM: {prefixed_params}")
|
||||
@@ -221,6 +221,20 @@ def test_get_custom_llm_provider_openai():
|
||||
llm = LLM(model="gpt-4")
|
||||
assert llm._get_custom_llm_provider() == None
|
||||
|
||||
def test_get_custom_llm_provider_azure_with_prefix():
    """An explicit "azure/" prefix on the model name yields the "azure" provider."""
    prefixed = LLM(model="azure/gpt-4")
    detected = prefixed._get_custom_llm_provider()
    assert detected == "azure"
|
||||
|
||||
|
||||
def test_get_custom_llm_provider_azure_without_prefix():
    """Azure credentials alone (no "azure/" prefix) imply the "azure" provider."""
    azure_config = {
        "model": "gpt-4",
        "api_key": "test_key",
        "api_base": "test_base",
        "api_version": "test_version",
    }
    configured = LLM(**azure_config)
    assert configured._get_custom_llm_provider() == "azure"
|
||||
|
||||
|
||||
def test_validate_call_params_supported():
|
||||
class DummyResponse(BaseModel):
|
||||
|
||||
Reference in New Issue
Block a user