mirror of
https://github.com/crewAIInc/crewAI.git
synced 2026-01-27 09:08:14 +00:00
Fix issue #3609: Add URL validation for Ollama connections
- Add _validate_base_url method to LLM class to validate Ollama URLs
- Integrate URL validation into _validate_call_params for Ollama models
- Validate IP address format and reject invalid IPs like 192.168.0.300
- Provide clear, helpful error messages for invalid URLs
- Add comprehensive tests covering invalid IPs, malformed URLs, and valid URLs
- Only validate URLs for Ollama models to avoid breaking other providers
- Fixes litellm.APIConnectionError with unclear 'No route to host' messages

Co-Authored-By: João <joao@crewai.com>
This commit is contained in:
@@ -1179,8 +1179,9 @@ class LLM(BaseLLM):
|
|||||||
|
|
||||||
def _validate_call_params(self) -> None:
|
def _validate_call_params(self) -> None:
|
||||||
"""
|
"""
|
||||||
Validate parameters before making a call. Currently this only checks if
|
Validate parameters before making a call. Currently this checks if
|
||||||
a response_format is provided and whether the model supports it.
|
a response_format is provided and whether the model supports it, and
|
||||||
|
validates base_url for Ollama connections.
|
||||||
The custom_llm_provider is dynamically determined from the model:
|
The custom_llm_provider is dynamically determined from the model:
|
||||||
- E.g., "openrouter/deepseek/deepseek-chat" yields "openrouter"
|
- E.g., "openrouter/deepseek/deepseek-chat" yields "openrouter"
|
||||||
- "gemini/gemini-1.5-pro" yields "gemini"
|
- "gemini/gemini-1.5-pro" yields "gemini"
|
||||||
@@ -1195,6 +1196,56 @@ class LLM(BaseLLM):
|
|||||||
f"The model {self.model} does not support response_format for provider '{provider}'. "
|
f"The model {self.model} does not support response_format for provider '{provider}'. "
|
||||||
"Please remove response_format or use a supported model."
|
"Please remove response_format or use a supported model."
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Validate base_url for Ollama connections
|
||||||
|
if "ollama" in self.model.lower() and (self.base_url or self.api_base):
|
||||||
|
url_to_validate = self.base_url or self.api_base
|
||||||
|
if not self._validate_base_url(url_to_validate):
|
||||||
|
raise ValueError(
|
||||||
|
f"Invalid Ollama base_url: '{url_to_validate}'. "
|
||||||
|
"Please check that the URL format is correct and the IP address is valid. "
|
||||||
|
"Example: 'http://localhost:11434' or 'http://192.168.1.100:11434'"
|
||||||
|
)
|
||||||
|
|
||||||
|
def _validate_base_url(self, url: str) -> bool:
|
||||||
|
"""Validate base_url format and IP address for Ollama connections.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url: The base URL to validate
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if URL is valid, False otherwise
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
import ipaddress
|
||||||
|
|
||||||
|
result = urlparse(url)
|
||||||
|
|
||||||
|
if not all([result.scheme in ("http", "https"), result.netloc]):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Extract hostname/IP from netloc (remove port if present)
|
||||||
|
hostname = result.hostname
|
||||||
|
if not hostname:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Check if it looks like an IP address first
|
||||||
|
if all(part.isdigit() for part in hostname.split('.')) and len(hostname.split('.')) == 4:
|
||||||
|
try:
|
||||||
|
ipaddress.ip_address(hostname)
|
||||||
|
return True # Valid IP address
|
||||||
|
except ValueError:
|
||||||
|
return False # Invalid IP address
|
||||||
|
else:
|
||||||
|
if hostname == "localhost":
|
||||||
|
return True
|
||||||
|
if "." in hostname and all(c.isalnum() or c in ".-" for c in hostname):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
def supports_function_calling(self) -> bool:
|
def supports_function_calling(self) -> bool:
|
||||||
try:
|
try:
|
||||||
|
|||||||
@@ -711,3 +711,77 @@ def test_ollama_does_not_modify_when_last_is_user(ollama_llm):
|
|||||||
formatted = ollama_llm._format_messages_for_provider(original_messages)
|
formatted = ollama_llm._format_messages_for_provider(original_messages)
|
||||||
|
|
||||||
assert formatted == original_messages
|
assert formatted == original_messages
|
||||||
|
|
||||||
|
|
||||||
|
def test_ollama_invalid_ip_address_validation():
    """An out-of-range octet in an Ollama base_url must raise a clear ValueError."""
    with pytest.raises(ValueError) as exc:
        bad_llm = LLM(model="ollama/llama3.1", base_url="http://192.168.0.300:11434")
        bad_llm.call("Hello")

    message = str(exc.value)
    assert "Invalid Ollama base_url" in message
    assert "192.168.0.300" in message
    assert "IP address is valid" in message
|
||||||
|
|
||||||
|
|
||||||
|
def test_ollama_invalid_url_format_validation():
    """A base_url that is not a URL at all must be rejected with a helpful message."""
    with pytest.raises(ValueError) as exc:
        bad_llm = LLM(model="ollama/llama3.1", base_url="not-a-url")
        bad_llm.call("Hello")

    assert "Invalid Ollama base_url" in str(exc.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_ollama_valid_urls_pass_validation():
    """Well-formed Ollama URLs must not trigger the base_url validation error."""
    for url in (
        "http://localhost:11434",
        "http://127.0.0.1:11434",
        "http://192.168.1.100:11434",
        "https://ollama.example.com:11434",
    ):
        llm = LLM(model="ollama/llama3.1", base_url=url)
        try:
            llm._validate_call_params()
        except ValueError as e:
            # Only the URL-validation error is a failure; anything else
            # is unrelated and should surface normally.
            if "Invalid Ollama base_url" in str(e):
                pytest.fail(f"Valid URL {url} was incorrectly rejected: {e}")
            raise
|
||||||
|
|
||||||
|
|
||||||
|
def test_non_ollama_models_skip_url_validation():
    """base_url checking is Ollama-specific and must not fire for other providers."""
    non_ollama = LLM(model="gpt-4", base_url="http://192.168.0.300:11434")
    try:
        non_ollama._validate_call_params()
    except ValueError as e:
        # Any other ValueError is unrelated to URL validation; re-raise it.
        if "Invalid Ollama base_url" in str(e):
            pytest.fail(f"Non-Ollama model was incorrectly validated: {e}")
        raise
|
||||||
|
|
||||||
|
|
||||||
|
def test_ollama_api_base_validation():
    """The api_base parameter must be validated the same way as base_url."""
    with pytest.raises(ValueError) as exc:
        bad_llm = LLM(model="ollama/llama3.1", api_base="http://192.168.0.300:11434")
        bad_llm.call("Hello")

    message = str(exc.value)
    assert "Invalid Ollama base_url" in message
    assert "192.168.0.300" in message
|
||||||
|
|
||||||
|
|
||||||
|
def test_ollama_no_url_provided_passes():
    """With no base_url or api_base configured, Ollama models skip URL validation."""
    default_llm = LLM(model="ollama/llama3.1")
    try:
        default_llm._validate_call_params()
    except ValueError as e:
        # Any other ValueError is unrelated to URL validation; re-raise it.
        if "Invalid Ollama base_url" in str(e):
            pytest.fail(f"Ollama model without URL was incorrectly validated: {e}")
        raise
|
||||||
|
|||||||
Reference in New Issue
Block a user