Fix #2549: Improve error handling for Ollama connection errors
Co-Authored-By: Joe Moura <joao@crewai.com>
src/crewai/llm.py
@@ -177,6 +177,10 @@ class LLM:
             response = litellm.completion(**params)
             return response["choices"][0]["message"]["content"]
         except Exception as e:
+            if "ollama" in str(self.model).lower() and ("connection refused" in str(e).lower() or "ollamaexception" in str(e).lower()):
+                from crewai.utilities.exceptions.ollama_connection_exception import OllamaConnectionException
+                raise OllamaConnectionException(str(e))
+
             if not LLMContextLengthExceededException(
                 str(e)
             )._is_context_limit_error(str(e)):
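With this change, callers no longer have to string-match litellm errors themselves; they can catch the new exception type directly. A minimal sketch of what that looks like (assuming a local Ollama daemon that is not running, so the completion call fails):

    from crewai.llm import LLM
    from crewai.utilities.exceptions.ollama_connection_exception import OllamaConnectionException

    llm = LLM(model="ollama/llama3")
    try:
        print(llm.call([{"role": "user", "content": "Hello"}]))
    except OllamaConnectionException as e:
        # e carries the original litellm error plus setup guidance;
        # the raw message is also kept on e.original_error_message.
        print(e)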
18  src/crewai/utilities/exceptions/ollama_connection_exception.py  Normal file
@@ -0,0 +1,18 @@
+class OllamaConnectionException(Exception):
+    """Exception raised when there's a connection issue with Ollama.
+
+    This typically happens when Ollama is not running or is not accessible
+    at the expected URL.
+    """
+
+    def __init__(self, error_message: str):
+        self.original_error_message = error_message
+        super().__init__(self._get_error_message(error_message))
+
+    def _get_error_message(self, error_message: str):
+        return (
+            f"Failed to connect to Ollama. Original error: {error_message}\n"
+            "Please make sure Ollama is installed and running. "
+            "You can install Ollama from https://ollama.com/download and "
+            "start it by running 'ollama serve' in your terminal."
+        )
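Instantiating the exception shows how the message is composed; a quick sketch (the sample error string is only illustrative of what litellm reports when the daemon is down):

    from crewai.utilities.exceptions.ollama_connection_exception import OllamaConnectionException

    exc = OllamaConnectionException("[Errno 111] Connection refused")
    print(exc)
    # Failed to connect to Ollama. Original error: [Errno 111] Connection refused
    # Please make sure Ollama is installed and running. You can install Ollama
    # from https://ollama.com/download and start it by running 'ollama serve' in your terminal.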
17  tests/test_ollama_connection.py  Normal file
@@ -0,0 +1,17 @@
+import pytest
+from unittest.mock import patch, MagicMock
+from crewai.llm import LLM
+from crewai.utilities.exceptions.ollama_connection_exception import OllamaConnectionException
+
+class TestOllamaConnection:
+    def test_ollama_connection_error(self):
+        with patch('litellm.completion') as mock_completion:
+            mock_completion.side_effect = Exception("OllamaException - [Errno 111] Connection refused")
+
+            llm = LLM(model="ollama/llama3")
+
+            with pytest.raises(OllamaConnectionException) as exc_info:
+                llm.call([{"role": "user", "content": "Hello"}])
+
+            assert "Failed to connect to Ollama" in str(exc_info.value)
+            assert "Please make sure Ollama is installed and running" in str(exc_info.value)
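Because litellm.completion is patched, the test needs no running Ollama instance; from the repository root it should run in isolation with something like:

    pytest tests/test_ollama_connection.py -v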