Mirror of https://github.com/crewAIInc/crewAI.git, synced 2026-01-08 23:58:34 +00:00
fix: append user message when last message is from assistant when using Ollama models (#3200)
Some checks failed
Notify Downstream / notify-downstream (push) Has been cancelled
Ollama doesn't support the last message being from the 'assistant' role. We can drop this commit after merging https://github.com/BerriAI/litellm/pull/10917
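In short, the change guards the message list right before it is handed to an Ollama model: if the conversation ends with an 'assistant' turn, a dummy empty 'user' turn is appended. A minimal sketch of that behaviour in isolation (the helper name ensure_user_last is illustrative only; the actual change lives inside LLM._format_messages_for_provider, as the diff below shows):

    # Illustrative sketch, not crewAI API: mirrors the guard added in this commit.
    def ensure_user_last(model: str, messages: list[dict[str, str]]) -> list[dict[str, str]]:
        # Per the commit message, Ollama doesn't accept a conversation whose last
        # message is from the assistant, so append an empty user message in that case.
        if "ollama" in model.lower() and messages and messages[-1]["role"] == "assistant":
            messages = messages.copy()  # leave the caller's list untouched
            messages.append({"role": "user", "content": ""})
        return messages

    history = [
        {"role": "user", "content": "Hi there"},
        {"role": "assistant", "content": "Hello!"},
    ]
    print(ensure_user_last("ollama/llama3.2:3b", history)[-1])  # {'role': 'user', 'content': ''}

The copy() before appending matters: without it the caller's list would grow as well, and the length check in the first test below (len(formatted) == len(original_messages) + 1) would fail.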
@@ -1079,6 +1079,15 @@ class LLM(BaseLLM):
                 messages.append({"role": "user", "content": "Please continue."})
             return messages
 
+        # TODO: Remove this code after merging PR https://github.com/BerriAI/litellm/pull/10917
+        # Ollama doesn't supports last message to be 'assistant'
+        if "ollama" in self.model.lower() and messages and messages[-1]["role"] == "assistant":
+            messages = messages.copy()
+            messages.append(
+                {"role": "user", "content": ""}
+            )
+            return messages
+
         # Handle Anthropic models
         if not self.is_anthropic:
             return messages
@@ -684,3 +684,30 @@ def test_llm_call_when_stop_is_unsupported_when_additional_drop_params_is_provid
     assert "Retrying LLM call without the unsupported 'stop'" in caplog.text
     assert isinstance(result, str)
     assert "Paris" in result
+
+
+@pytest.fixture
+def ollama_llm():
+    return LLM(model="ollama/llama3.2:3b")
+
+def test_ollama_appends_dummy_user_message_when_last_is_assistant(ollama_llm):
+    original_messages = [
+        {"role": "user", "content": "Hi there"},
+        {"role": "assistant", "content": "Hello!"},
+    ]
+
+    formatted = ollama_llm._format_messages_for_provider(original_messages)
+
+    assert len(formatted) == len(original_messages) + 1
+    assert formatted[-1]["role"] == "user"
+    assert formatted[-1]["content"] == ""
+
+
+def test_ollama_does_not_modify_when_last_is_user(ollama_llm):
+    original_messages = [
+        {"role": "user", "content": "Tell me a joke."},
+    ]
+
+    formatted = ollama_llm._format_messages_for_provider(original_messages)
+
+    assert formatted == original_messages