Mirror of https://github.com/crewAIInc/crewAI.git, synced 2026-01-27 17:18:13 +00:00
Fix issue when stream is True
@@ -153,7 +153,13 @@ class LLM:
             params = {k: v for k, v in params.items() if v is not None}
 
             response = litellm.completion(**params)
-            return response["choices"][0]["message"]["content"]
+            if params.get("stream", False):
+                content = ""
+                for chunk in response:
+                    content += chunk.choices[0].delta.content or ""
+                return content
+            else:
+                return response["choices"][0]["message"]["content"]
         except Exception as e:
             if not LLMContextLengthExceededException(
                 str(e)
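For context, a minimal self-contained sketch of the pattern this commit introduces: when stream=True, litellm.completion returns an iterator of chunks whose delta.content must be concatenated, while the non-streaming response carries the full message on the first choice. The helper name call_llm and the trimmed parameter set below are illustrative assumptions, not crewAI's actual LLM.call signature.

# Sketch only: `call_llm` and its reduced parameter set are hypothetical,
# shown to illustrate the stream-handling fix from the diff above.
from typing import Any, Dict, List

import litellm


def call_llm(model: str, messages: List[Dict[str, str]], stream: bool = False) -> str:
    params: Dict[str, Any] = {
        "model": model,
        "messages": messages,
        "stream": stream,
    }
    # Drop unset parameters, mirroring the dict comprehension in the diff.
    params = {k: v for k, v in params.items() if v is not None}

    response = litellm.completion(**params)

    if params.get("stream", False):
        # Streaming: response is an iterator of chunks; delta.content may be
        # None for some chunks, so fall back to an empty string.
        content = ""
        for chunk in response:
            content += chunk.choices[0].delta.content or ""
        return content
    else:
        # Non-streaming: the complete message is available on the first choice.
        return response["choices"][0]["message"]["content"]


# Example usage (model name is illustrative):
# text = call_llm("gpt-4o-mini", [{"role": "user", "content": "Hello"}], stream=True)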