From 3c55c8a22ac8f1b5cac0a79817b64d77fb76b288 Mon Sep 17 00:00:00 2001
From: Lucas Gomide
Date: Mon, 21 Jul 2025 14:30:40 -0300
Subject: [PATCH] fix: append user message when last message is from assistant
 when using Ollama models (#3200)

Ollama doesn't support the last message being from the 'assistant'.

We can drop this commit after merging
https://github.com/BerriAI/litellm/pull/10917
---
 src/crewai/llm.py |  9 +++++++++
 tests/llm_test.py | 27 +++++++++++++++++++++++++++
 2 files changed, 36 insertions(+)

diff --git a/src/crewai/llm.py b/src/crewai/llm.py
index 3b4f0b9f4..397d85481 100644
--- a/src/crewai/llm.py
+++ b/src/crewai/llm.py
@@ -1079,6 +1079,15 @@ class LLM(BaseLLM):
                 messages.append({"role": "user", "content": "Please continue."})
             return messages
 
+        # TODO: Remove this code after merging PR https://github.com/BerriAI/litellm/pull/10917
+        # Ollama doesn't support the last message being from the 'assistant'
+        if "ollama" in self.model.lower() and messages and messages[-1]["role"] == "assistant":
+            messages = messages.copy()
+            messages.append(
+                {"role": "user", "content": ""}
+            )
+            return messages
+
         # Handle Anthropic models
         if not self.is_anthropic:
             return messages
diff --git a/tests/llm_test.py b/tests/llm_test.py
index 07ab2df1e..fb78c4774 100644
--- a/tests/llm_test.py
+++ b/tests/llm_test.py
@@ -684,3 +684,30 @@ def test_llm_call_when_stop_is_unsupported_when_additional_drop_params_is_provid
     assert "Retrying LLM call without the unsupported 'stop'" in caplog.text
     assert isinstance(result, str)
     assert "Paris" in result
+
+
+@pytest.fixture
+def ollama_llm():
+    return LLM(model="ollama/llama3.2:3b")
+
+def test_ollama_appends_dummy_user_message_when_last_is_assistant(ollama_llm):
+    original_messages = [
+        {"role": "user", "content": "Hi there"},
+        {"role": "assistant", "content": "Hello!"},
+    ]
+
+    formatted = ollama_llm._format_messages_for_provider(original_messages)
+
+    assert len(formatted) == len(original_messages) + 1
+    assert formatted[-1]["role"] == "user"
+    assert formatted[-1]["content"] == ""
+
+
+def test_ollama_does_not_modify_when_last_is_user(ollama_llm):
+    original_messages = [
+        {"role": "user", "content": "Tell me a joke."},
+    ]
+
+    formatted = ollama_llm._format_messages_for_provider(original_messages)
+
+    assert formatted == original_messages
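
For reference, the normalization this patch adds can be exercised outside the test suite with a minimal, self-contained sketch. The helper name ensure_user_last below is hypothetical and not part of crewAI; it only mirrors the `if "ollama" in self.model.lower()` branch added to _format_messages_for_provider above, under the assumption that Ollama rejects a conversation whose final turn has role "assistant".

    # Minimal sketch of the workaround, assuming Ollama requires the last turn
    # to come from the user. Not crewAI code; it mirrors the patched branch.
    from typing import Dict, List


    def ensure_user_last(model: str, messages: List[Dict[str, str]]) -> List[Dict[str, str]]:
        """Append an empty user message when an Ollama model would otherwise
        receive a conversation ending with an assistant turn."""
        if "ollama" in model.lower() and messages and messages[-1]["role"] == "assistant":
            messages = messages.copy()  # copy so the caller's list is not mutated
            messages.append({"role": "user", "content": ""})
        return messages


    if __name__ == "__main__":
        history = [
            {"role": "user", "content": "Hi there"},
            {"role": "assistant", "content": "Hello!"},
        ]
        print(ensure_user_last("ollama/llama3.2:3b", history)[-1])
        # -> {'role': 'user', 'content': ''}

Copying the list before appending matches the patch's own design choice: the dummy user turn is only added to the payload sent to the provider, while the caller's original message history stays untouched.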