Mirror of https://github.com/crewAIInc/crewAI.git
use langchain callback handler to support all LLMs
@@ -25,7 +25,9 @@ from crewai.memory.contextual.contextual_memory import ContextualMemory
 from crewai.utilities import I18N, Logger, Prompts, RPMController
 from crewai.utilities.token_counter_callback import TokenCalcHandler, TokenProcess
 
+agentops = None
 try:
+    import agentops
     from agentops import track_agent
 except ImportError:
 
@@ -195,6 +197,12 @@ class Agent(BaseModel):
         ):
             self.llm.callbacks.append(token_handler)
 
+        if agentops and not any(
+            isinstance(handler, agentops.LangchainCallbackHandler) for handler in self.llm.callbacks
+        ):
+            agentops.stop_instrumenting()
+            self.llm.callbacks.append(agentops.LangchainCallbackHandler())
+
         if not self.agent_executor:
             if not self.cache_handler:
                 self.cache_handler = CacheHandler()
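
For context, here is a minimal, self-contained sketch (not part of the commit) of the two patterns the diff relies on: an optional agentops import that degrades to None when the package is missing, and appending a LangChain callback handler only when no handler of that type is already registered. ExampleTokenHandler and the bare callbacks list are illustrative stand-ins, not crewAI code; agentops.LangchainCallbackHandler and agentops.stop_instrumenting() are the calls used in the diff above.

# Sketch only: ExampleTokenHandler is a hypothetical stand-in for crewAI's
# TokenCalcHandler, and the bare `callbacks` list stands in for self.llm.callbacks.
from langchain_core.callbacks import BaseCallbackHandler

# Optional-import guard, as in the first hunk: agentops stays None when the
# package is not installed, so the later `if agentops and ...` check is safe.
agentops = None
try:
    import agentops
except ImportError:
    pass


class ExampleTokenHandler(BaseCallbackHandler):
    """Hypothetical stand-in for crewAI's TokenCalcHandler."""

    def __init__(self) -> None:
        self.total_tokens = 0

    def on_llm_end(self, response, **kwargs) -> None:
        # LLMResult.llm_output may carry provider token usage; default to 0.
        usage = (response.llm_output or {}).get("token_usage", {})
        self.total_tokens += usage.get("total_tokens", 0)


callbacks: list[BaseCallbackHandler] = []

# Append each handler only if one of its type is not present yet, mirroring
# the guard in the second hunk so repeated runs do not stack duplicates.
if not any(isinstance(h, ExampleTokenHandler) for h in callbacks):
    callbacks.append(ExampleTokenHandler())

if agentops and not any(
    isinstance(h, agentops.LangchainCallbackHandler) for h in callbacks
):
    # Disable agentops' own instrumentation and rely on its LangChain
    # callback handler instead, which is what lets the commit support all
    # LangChain-compatible LLMs rather than only OpenAI.
    agentops.stop_instrumenting()
    callbacks.append(agentops.LangchainCallbackHandler())

In the diff these checks run against self.llm.callbacks inside the Agent class, so the isinstance guard prevents duplicate handlers when the same LLM object is configured more than once.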