This patch addresses issue #2744 by adding bounds checking before accessing messages[msg_i].get('tool_calls') in the ollama_pt function. The issue occurs when an assistant message is the last message in the list, causing msg_i to go out of bounds. The fix is implemented as a monkey patch in CrewAI to avoid waiting for an upstream fix in litellm. Co-Authored-By: Joe Moura <joao@crewai.com>
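The patched helper itself is not part of this file; the test imports it from crewai.patches.litellm_patch. Below is a minimal sketch of how such a monkey patch could work. It is an assumption, not the actual crewAI or litellm code: the real patch mirrors litellm's own ollama_pt implementation and only adds the bounds check, whereas the prompt building here is simplified to match what the test asserts.

# Illustrative sketch only, not the shipped implementation.
from litellm.litellm_core_utils.prompt_templates import factory


def patch_litellm_ollama_pt() -> None:
    """Replace litellm's ollama_pt with a variant that bounds-checks msg_i."""

    def _patched_ollama_pt(model: str, messages: list) -> dict:
        # Simplified prompt building; the real function also handles system
        # and tool messages, images, and tool-call content.
        prompt = ""
        images: list = []
        msg_i = 0
        while msg_i < len(messages):
            msg = messages[msg_i]
            content = msg.get("content") or ""
            if msg.get("role") == "assistant":
                prompt += f"### Assistant:\n{content}\n\n"
                msg_i += 1
                # The bounds check from the fix: without "msg_i < len(messages)",
                # an assistant message at the end of the list makes
                # messages[msg_i] raise IndexError.
                if msg_i < len(messages) and messages[msg_i].get("tool_calls"):
                    pass  # tool-call handling omitted in this sketch
            else:
                prompt += f"### User:\n{content}\n\n"
                msg_i += 1
        return {"prompt": prompt, "images": images}

    # Swap the module-level attribute so callers that resolve ollama_pt through
    # the factory module at call time pick up the guarded version.
    factory.ollama_pt = _patched_ollama_pt

Note that code which imported the ollama_pt function object directly before the patch runs would still hold the original reference, so a real implementation may need to rebind that name in other modules as well; the sketch only swaps the factory module attribute.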
46 lines · 1.6 KiB · Python
"""
|
|
Test for the litellm patch that fixes the IndexError in ollama_pt function.
|
|
"""
|
|
|
|
import unittest
|
|
from unittest.mock import patch, MagicMock
|
|
import sys
|
|
|
|
import litellm
|
|
from litellm.litellm_core_utils.prompt_templates.factory import ollama_pt
|
|
|
|
from crewai.patches.litellm_patch import patch_litellm_ollama_pt
|
|
|
|
|
|
class TestLitellmPatch(unittest.TestCase):
|
|
def test_ollama_pt_patch_fixes_index_error(self):
|
|
"""Test that the patch fixes the IndexError in ollama_pt."""
|
|
# Create a message list where the assistant message is the last one
|
|
messages = [
|
|
{"role": "user", "content": "Hello"},
|
|
{"role": "assistant", "content": "Hi there"},
|
|
]
|
|
|
|
# Store the original function to restore it after the test
|
|
original_ollama_pt = litellm.litellm_core_utils.prompt_templates.factory.ollama_pt
|
|
|
|
try:
|
|
# Apply the patch
|
|
patch_litellm_ollama_pt()
|
|
|
|
# The patched function should not raise an IndexError
|
|
result = ollama_pt("qwen3:4b", messages)
|
|
|
|
# Verify the result is as expected
|
|
self.assertIn("prompt", result)
|
|
self.assertIn("images", result)
|
|
self.assertIn("### User:\nHello", result["prompt"])
|
|
self.assertIn("### Assistant:\nHi there", result["prompt"])
|
|
finally:
|
|
# Restore the original function to avoid affecting other tests
|
|
litellm.litellm_core_utils.prompt_templates.factory.ollama_pt = original_ollama_pt
|
|
|
|
|
|
if __name__ == "__main__":
|
|
unittest.main()
|