Mirror of https://github.com/crewAIInc/crewAI.git (synced 2026-01-08 23:58:34 +00:00)
use langchain callback handler to support all LLMs
@@ -25,7 +25,9 @@ from crewai.memory.contextual.contextual_memory import ContextualMemory
from crewai.utilities import I18N, Logger, Prompts, RPMController
from crewai.utilities.token_counter_callback import TokenCalcHandler, TokenProcess

agentops = None
try:
    import agentops
    from agentops import track_agent
except ImportError:
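The block above treats agentops as an optional dependency: the module-level name stays None when the package is missing, so later code can simply check whether agentops is truthy before calling into it. Below is a minimal, hypothetical sketch of that guard pattern; the no-op fallback decorator and the maybe_instrument helper are assumptions for illustration, not part of this diff.

# Sketch: optional-import guard. The name agentops stays None unless the package is installed.
agentops = None
try:
    import agentops
    from agentops import track_agent
except ImportError:
    # Hypothetical no-op fallback so @track_agent() still applies cleanly when
    # agentops is absent (assumed for illustration; not shown in the diff above).
    def track_agent(*args, **kwargs):
        def decorator(cls):
            return cls
        return decorator

def maybe_instrument(llm) -> None:
    # Downstream code only touches the agentops API when the import succeeded.
    if agentops:
        llm.callbacks.append(agentops.LangchainCallbackHandler())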
@@ -195,6 +197,12 @@ class Agent(BaseModel):
        ):
            self.llm.callbacks.append(token_handler)

        if agentops and not any(
            isinstance(handler, agentops.LangchainCallbackHandler) for handler in self.llm.callbacks
        ):
            agentops.stop_instrumenting()
            self.llm.callbacks.append(agentops.LangchainCallbackHandler())

        if not self.agent_executor:
            if not self.cache_handler:
                self.cache_handler = CacheHandler()
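Per the commit message, token counting and agentops instrumentation now run through LangChain callback handlers attached to the agent's LLM, so they work with any LangChain-compatible model. The following is a rough, self-contained sketch of the de-duplication guard used in the hunk above, written against a generic LangChain handler; TokenCountingHandler and attach_once are stand-ins for illustration, not crewAI's actual TokenCalcHandler API, and the import path assumes a recent langchain_core.

from langchain_core.callbacks import BaseCallbackHandler

class TokenCountingHandler(BaseCallbackHandler):
    # Stand-in for crewAI's TokenCalcHandler: counts completed LLM calls.
    def __init__(self) -> None:
        self.successful_requests = 0

    def on_llm_end(self, response, **kwargs) -> None:
        self.successful_requests += 1

def attach_once(llm, handler: BaseCallbackHandler) -> None:
    # Append the handler only if no handler of the same type is already
    # registered -- the same isinstance() guard the diff applies before
    # adding agentops.LangchainCallbackHandler.
    if llm.callbacks is None:
        llm.callbacks = []
    if not any(isinstance(h, type(handler)) for h in llm.callbacks):
        llm.callbacks.append(handler)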