Mirror of https://github.com/crewAIInc/crewAI.git, synced 2026-01-27 00:58:13 +00:00.
Check the right property
This commit is contained in:
@@ -188,7 +188,7 @@ class LLM:
|
|||||||
Returns:
|
Returns:
|
||||||
bool: True if the model is from Anthropic, False otherwise.
|
bool: True if the model is from Anthropic, False otherwise.
|
||||||
"""
|
"""
|
||||||
ANTHROPIC_PREFIXES = ('anthropic/', 'claude-', 'claude/')
|
ANTHROPIC_PREFIXES = ("anthropic/", "claude-", "claude/")
|
||||||
return any(prefix in model.lower() for prefix in ANTHROPIC_PREFIXES)
|
return any(prefix in model.lower() for prefix in ANTHROPIC_PREFIXES)
|
||||||
|
|
||||||
def call(
|
def call(
|
||||||
@@ -348,7 +348,9 @@ class LLM:
|
|||||||
logging.error(f"LiteLLM call failed: {str(e)}")
|
logging.error(f"LiteLLM call failed: {str(e)}")
|
||||||
raise
|
raise
|
||||||
|
|
||||||
def _format_messages_for_provider(self, messages: List[Dict[str, str]]) -> List[Dict[str, str]]:
|
def _format_messages_for_provider(
|
||||||
|
self, messages: List[Dict[str, str]]
|
||||||
|
) -> List[Dict[str, str]]:
|
||||||
"""Format messages according to provider requirements.
|
"""Format messages according to provider requirements.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -368,7 +370,9 @@ class LLM:
|
|||||||
# Validate message format first
|
# Validate message format first
|
||||||
for msg in messages:
|
for msg in messages:
|
||||||
if not isinstance(msg, dict) or "role" not in msg or "content" not in msg:
|
if not isinstance(msg, dict) or "role" not in msg or "content" not in msg:
|
||||||
raise TypeError("Invalid message format. Each message must be a dict with 'role' and 'content' keys")
|
raise TypeError(
|
||||||
|
"Invalid message format. Each message must be a dict with 'role' and 'content' keys"
|
||||||
|
)
|
||||||
|
|
||||||
if not self.is_anthropic:
|
if not self.is_anthropic:
|
||||||
return messages
|
return messages
|
||||||
@@ -413,7 +417,7 @@ class LLM:
|
|||||||
def supports_function_calling(self) -> bool:
    """Check whether the configured model supports function calling.

    Queries LiteLLM for the set of OpenAI-style parameters the model
    accepts and tests for "tools" — the parameter used to pass function
    definitions to the model. (The previous check used "response_format",
    which indicates structured-output support, a different capability.)

    Returns:
        bool: True if the model supports function calling (accepts the
            "tools" parameter), False otherwise or if the capability
            lookup fails for any reason.
    """
    try:
        params = get_supported_openai_params(model=self.model)
        # "tools" is the correct capability flag for function calling;
        # "response_format" would test structured-output support instead.
        return "tools" in params
    except Exception as e:
        # Best-effort capability probe: on any lookup failure, report
        # "not supported" rather than propagating the error to callers.
        logging.error(f"Failed to get supported params: {str(e)}")
        return False
||||||
|
|||||||
Reference in New Issue
Block a user