fix: append user message when last message is from assistant when using Ollama models (#3200)

Ollama doesn't support the last message being from the 'assistant' role.
We can drop this commit after merging https://github.com/BerriAI/litellm/pull/10917
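
For context, a minimal standalone sketch of the workaround (the helper name here is hypothetical; in the actual change the logic lives inside LLM._format_messages_for_provider, as shown in the diff below):

def append_dummy_user_message(model: str, messages: list[dict]) -> list[dict]:
    """For Ollama models, make sure the conversation does not end on an assistant turn."""
    if "ollama" in model.lower() and messages and messages[-1]["role"] == "assistant":
        messages = messages.copy()  # avoid mutating the caller's list
        messages.append({"role": "user", "content": ""})
    return messages


history = [
    {"role": "user", "content": "Hi there"},
    {"role": "assistant", "content": "Hello!"},
]
print(append_dummy_user_message("ollama/llama3.2:3b", history)[-1])
# -> {'role': 'user', 'content': ''}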
Authored by Lucas Gomide on 2025-07-21 14:30:40 -03:00, committed by GitHub
parent 424433ff58
commit 3c55c8a22a
2 changed files with 36 additions and 0 deletions


@@ -1079,6 +1079,15 @@ class LLM(BaseLLM):
                messages.append({"role": "user", "content": "Please continue."})
            return messages

        # TODO: Remove this code after merging PR https://github.com/BerriAI/litellm/pull/10917
        # Ollama doesn't support the last message being from the 'assistant' role
        if "ollama" in self.model.lower() and messages and messages[-1]["role"] == "assistant":
            messages = messages.copy()
            messages.append(
                {"role": "user", "content": ""}
            )
            return messages

        # Handle Anthropic models
        if not self.is_anthropic:
            return messages
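
Roughly how the formatted messages would then reach the provider, assuming the downstream call is litellm.completion (a sketch, not the actual call site in llm.py; running it needs a local Ollama server with llama3.2:3b pulled):

import litellm

messages = [
    {"role": "user", "content": "Hi there"},
    {"role": "assistant", "content": "Hello!"},
    {"role": "user", "content": ""},  # dummy turn appended by the workaround above
]

# Per the commit description, Ollama does not support a conversation that ends
# on an assistant message, hence the empty user turn before the request is sent.
response = litellm.completion(model="ollama/llama3.2:3b", messages=messages)
print(response.choices[0].message.content)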


@@ -684,3 +684,30 @@ def test_llm_call_when_stop_is_unsupported_when_additional_drop_params_is_provid
assert "Retrying LLM call without the unsupported 'stop'" in caplog.text
assert isinstance(result, str)
assert "Paris" in result
@pytest.fixture
def ollama_llm():
return LLM(model="ollama/llama3.2:3b")
def test_ollama_appends_dummy_user_message_when_last_is_assistant(ollama_llm):
original_messages = [
{"role": "user", "content": "Hi there"},
{"role": "assistant", "content": "Hello!"},
]
formatted = ollama_llm._format_messages_for_provider(original_messages)
assert len(formatted) == len(original_messages) + 1
assert formatted[-1]["role"] == "user"
assert formatted[-1]["content"] == ""
def test_ollama_does_not_modify_when_last_is_user(ollama_llm):
original_messages = [
{"role": "user", "content": "Tell me a joke."},
]
formatted = ollama_llm._format_messages_for_provider(original_messages)
assert formatted == original_messages