fix: run last llm call as structured output if passed

This commit is contained in:
Greyson Lalonde
2025-11-05 07:17:23 -05:00
parent 7380f7b794
commit 07ac8fb088
41 changed files with 17859 additions and 16272 deletions

View File

@@ -518,7 +518,9 @@ def test_openai_streaming_with_response_model():
     result = llm.call("Test question", response_model=TestResponse)
     assert result is not None
-    assert isinstance(result, str)
+    assert isinstance(result, TestResponse)
+    assert result.answer == "test"
+    assert result.confidence == 0.95
     assert mock_create.called
     call_kwargs = mock_create.call_args[1]