Fix output_json with custom OpenAI APIs by using PARALLEL_TOOLS mode

Co-Authored-By: Joe Moura <joao@crewai.com>
Devin AI
2025-04-17 17:27:47 +00:00
parent 870dffbb89
commit 3c1058ef7c
2 changed files with 84 additions and 1 deletion


@@ -0,0 +1,72 @@
import unittest
from unittest.mock import MagicMock, patch

import pytest
from pydantic import BaseModel

from crewai.utilities.internal_instructor import InternalInstructor


class TestOutput(BaseModel):
    value: str


class TestInternalInstructor(unittest.TestCase):
    @patch("instructor.from_litellm")
    def test_tools_mode_for_regular_models(self, mock_from_litellm):
        mock_llm = MagicMock()
        mock_llm.model = "gpt-4o"
        mock_instructor = MagicMock()
        mock_from_litellm.return_value = mock_instructor

        instructor = InternalInstructor(
            content="Test content",
            model=TestOutput,
            llm=mock_llm
        )

        import instructor

        mock_from_litellm.assert_called_once_with(
            unittest.mock.ANY,
            mode=instructor.Mode.TOOLS
        )

    @patch("instructor.from_litellm")
    def test_parallel_tools_mode_for_custom_openai(self, mock_from_litellm):
        mock_llm = MagicMock()
        mock_llm.model = "custom_openai/some-model"
        mock_instructor = MagicMock()
        mock_from_litellm.return_value = mock_instructor

        instructor = InternalInstructor(
            content="Test content",
            model=TestOutput,
            llm=mock_llm
        )

        import instructor

        mock_from_litellm.assert_called_once_with(
            unittest.mock.ANY,
            mode=instructor.Mode.PARALLEL_TOOLS
        )

    @patch("instructor.from_litellm")
    def test_handling_list_response_in_to_pydantic(self, mock_from_litellm):
        mock_llm = MagicMock()
        mock_llm.model = "custom_openai/some-model"
        mock_instructor = MagicMock()
        mock_chat = MagicMock()
        mock_instructor.chat.completions.create.return_value = [
            TestOutput(value="test value")
        ]
        mock_from_litellm.return_value = mock_instructor

        instructor = InternalInstructor(
            content="Test content",
            model=TestOutput,
            llm=mock_llm
        )

        result = instructor.to_pydantic()

        assert isinstance(result, TestOutput)
        assert result.value == "test value"
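
The diff to crewai.utilities.internal_instructor itself (the second changed file) is not shown in this view. The following is only a minimal sketch of the behavior the tests above pin down, assuming the instructor mode is chosen from the llm.model prefix and that to_pydantic unwraps a single-element list returned in PARALLEL_TOOLS mode; the constructor arguments, attribute names, and message construction are assumptions for illustration, not the committed implementation.

# Sketch (assumed from the tests above) of the mode selection and
# list handling that this commit appears to add to InternalInstructor.
import instructor
from litellm import completion


class InternalInstructor:
    def __init__(self, content, model, llm, agent=None):
        self.content = content
        self.model = model
        self.llm = llm
        self.agent = agent
        self.set_instructor()

    def set_instructor(self):
        # Custom OpenAI-compatible endpoints use litellm's "custom_openai/"
        # prefix; instructor's default TOOLS mode fails against them, so
        # fall back to PARALLEL_TOOLS for those models (assumption).
        mode = (
            instructor.Mode.PARALLEL_TOOLS
            if self.llm and str(self.llm.model).startswith("custom_openai/")
            else instructor.Mode.TOOLS
        )
        self._client = instructor.from_litellm(completion, mode=mode)

    def to_pydantic(self):
        messages = [{"role": "user", "content": self.content}]
        result = self._client.chat.completions.create(
            model=self.llm.model,
            response_model=self.model,
            messages=messages,
        )
        # PARALLEL_TOOLS responses can come back as a list of response_model
        # instances; unwrap the first so callers still get a single model.
        if isinstance(result, list) and result:
            return result[0]
        return result

With that shape, the third test's mocked client returning [TestOutput(value="test value")] exercises exactly the list-unwrapping branch in to_pydantic, while the first two tests only assert which mode is passed to instructor.from_litellm.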