Compare commits

...

2 Commits

Author                         SHA1        Message                                                                                    Date
Brandon Hancock (bhancock_ai)  018c7420eb  Merge branch 'main' into brandon/cre-414-litellm-returns-none-during-errors-which-breaks  2024-11-01 16:44:43 -04:00
Brandon Hancock                6df541962f  Raise an error if an LLM doesnt return a response                                          2024-11-01 11:13:07 -04:00
2 changed files with 14 additions and 5 deletions


@@ -117,6 +117,15 @@ class CrewAgentExecutor(CrewAgentExecutorMixin):
                 callbacks=self.callbacks,
             )
+            if answer is None or answer == "":
+                self._printer.print(
+                    content="Received None or empty response from LLM call.",
+                    color="red",
+                )
+                raise ValueError(
+                    "Invalid response from LLM call - None or empty."
+                )
+
             if not self.use_stop_words:
                 try:
                     self._format_answer(answer)
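
For context, the added guard is equivalent to this standalone sketch (check_llm_answer is a hypothetical helper name, not part of the diff): a None or empty string from the LLM now fails fast with a ValueError instead of propagating into answer parsing.

def check_llm_answer(answer):
    # Mirrors the guard above: treat None and "" as hard failures.
    if answer is None or answer == "":
        raise ValueError("Invalid response from LLM call - None or empty.")
    return answer

try:
    check_llm_answer(None)  # simulate an LLM call that returns nothing
except ValueError as exc:
    print(f"LLM call failed: {exc}")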


@@ -1,7 +1,10 @@
+import io
+import logging
+import sys
+import warnings
 from contextlib import contextmanager
 from typing import Any, Dict, List, Optional, Union
-import logging
-import warnings
 import litellm
 from litellm import get_supported_openai_params
@@ -9,9 +12,6 @@ from crewai.utilities.exceptions.context_window_exceeding_exception import (
     LLMContextLengthExceededException,
 )
-import sys
-import io
 class FilteredStream(io.StringIO):
     def write(self, s):
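
The hunk is truncated inside FilteredStream.write. As a rough illustration of the pattern (the filtered substring and the suppress_noise helper below are assumptions for this sketch, not taken from the diff), such a stream drops known-noisy output before delegating to io.StringIO, and a context manager temporarily routes stdout/stderr through it:

import io
import sys
from contextlib import contextmanager

class FilteredStream(io.StringIO):
    def write(self, s):
        # Drop lines matching known noise; keep everything else.
        # The exact substrings crewAI filters are not visible in this
        # hunk, so this check is illustrative only.
        if "LiteLLM.Info" in s:
            return 0
        return super().write(s)

@contextmanager
def suppress_noise():
    # Temporarily replace stdout/stderr with filtering streams.
    old_out, old_err = sys.stdout, sys.stderr
    sys.stdout, sys.stderr = FilteredStream(), FilteredStream()
    try:
        yield
    finally:
        sys.stdout, sys.stderr = old_out, old_err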