Fix #2659: Add litellm ContextWindowExceededError detection

Co-Authored-By: Joe Moura <joao@crewai.com>
Author: Devin AI
Date: 2025-04-22 14:18:16 +00:00
Parent: 6d0039b117
Commit: dbb5d725ce
2 changed files with 22 additions and 0 deletions


@@ -8,6 +8,7 @@ class LLMContextLengthExceededException(Exception):
         "too many tokens",
         "input is too long",
         "exceeds token limit",
+        "ContextWindowExceededError",
     ]
 
     def __init__(self, error_message: str):
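For reference, the pattern list above presumably feeds a case-insensitive substring matcher; a minimal sketch of that method, assuming the list attribute is named CONTEXT_LIMIT_ERRORS (the name is inferred from usage in the test below, not shown in this hunk):

def _is_context_limit_error(self, error_message: str) -> bool:
    # Assumed implementation: case-insensitive substring match against
    # each known context-limit phrase, including the new
    # "ContextWindowExceededError" pattern added by this commit.
    return any(
        phrase.lower() in error_message.lower()
        for phrase in self.CONTEXT_LIMIT_ERRORS
    )

Because matching is substring-based, the bare "ContextWindowExceededError" pattern also catches the fully qualified "litellm.ContextWindowExceededError" prefix seen in wrapped litellm exceptions.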


@@ -0,0 +1,21 @@
+import pytest
+
+from crewai.utilities.exceptions.context_window_exceeding_exception import (
+    LLMContextLengthExceededException,
+)
+
+
+def test_context_window_error_detection():
+    """Test detection of different context window error formats."""
+
+    assert LLMContextLengthExceededException("maximum context length exceeded")._is_context_limit_error(
+        "maximum context length exceeded"
+    )
+    assert LLMContextLengthExceededException("expected a string with maximum length")._is_context_limit_error(
+        "expected a string with maximum length"
+    )
+
+    litellm_error = "litellm.ContextWindowExceededError: litellm.BadRequestError: ContextWindowExceededError: MistralException - Error code: 400 - {'object': 'error', 'message': \"This model's maximum context length is 15000 tokens. However, you requested 15018 tokens (12970 in the messages, 2048 in the completion). Please reduce the length of the messages or completion.\", 'type': 'BadRequestError', 'param': None, 'code': 400}"
+    assert LLMContextLengthExceededException(litellm_error)._is_context_limit_error(
+        litellm_error
+    )
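A hedged sketch of how calling code might use this detection when an LLM call fails; the llm.call name and the wrapper itself are illustrative assumptions, not part of this commit:

from crewai.utilities.exceptions.context_window_exceeding_exception import (
    LLMContextLengthExceededException,
)

def call_with_context_check(llm, messages):
    # Hypothetical wrapper: llm.call and the re-raise strategy are assumed.
    try:
        return llm.call(messages)
    except Exception as e:
        exc = LLMContextLengthExceededException(str(e))
        if exc._is_context_limit_error(str(e)):
            # litellm's ContextWindowExceededError string now matches, so
            # callers can surface a typed exception (and, e.g., trim the
            # messages before retrying).
            raise exc from e
        raise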