Mirror of https://github.com/crewAIInc/crewAI.git (synced 2026-01-07 15:18:29 +00:00)

Compare commits: lorenze/re...1.6.0 (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 52444ad390 | |
| | f070595e65 | |
| | 69c5eace2d | |
| | d88ac338d5 | |
@@ -326,7 +326,7 @@
           ]
         },
         {
-          "tab": "AMP",
+          "tab": "AOP",
           "icon": "briefcase",
           "groups": [
             {
@@ -753,7 +753,7 @@
           ]
         },
         {
-          "tab": "AMP",
+          "tab": "AOP",
           "icon": "briefcase",
           "groups": [
             {
@@ -7,7 +7,7 @@ mode: "wide"

 ## Introduction

-CrewAI AOP(Agent Management Platform) provides a platform for deploying, monitoring, and scaling your crews and agents in a production environment.
+CrewAI AOP(Agent Operations Platform) provides a platform for deploying, monitoring, and scaling your crews and agents in a production environment.

 <Frame>
   <img src="/images/enterprise/crewai-enterprise-dashboard.png" alt="CrewAI AOP Dashboard" />
@@ -7,7 +7,7 @@ mode: "wide"

 ## 소개

-CrewAI AOP(Agent Management Platform)는 프로덕션 환경에서 crew와 agent를 배포, 모니터링, 확장할 수 있는 플랫폼을 제공합니다.
+CrewAI AOP(Agent Operation Platform)는 프로덕션 환경에서 crew와 agent를 배포, 모니터링, 확장할 수 있는 플랫폼을 제공합니다.

 <Frame>
   <img src="/images/enterprise/crewai-enterprise-dashboard.png" alt="CrewAI AOP Dashboard" />
@@ -7,7 +7,7 @@ mode: "wide"

 ## Introdução

-CrewAI AOP(Agent Management Platform) fornece uma plataforma para implementar, monitorar e escalar seus crews e agentes em um ambiente de produção.
+CrewAI AOP(Agent Operation Platform) fornece uma plataforma para implementar, monitorar e escalar seus crews e agentes em um ambiente de produção.

 <Frame>
   <img src="/images/enterprise/crewai-enterprise-dashboard.png" alt="CrewAI AOP Dashboard" />
@@ -12,7 +12,7 @@ dependencies = [
     "pytube>=15.0.0",
     "requests>=2.32.5",
     "docker>=7.1.0",
-    "crewai==1.5.0",
+    "crewai==1.6.0",
     "lancedb>=0.5.4",
     "tiktoken>=0.8.0",
     "beautifulsoup4>=4.13.4",
@@ -291,4 +291,4 @@ __all__ = [
     "ZapierActionTools",
 ]

-__version__ = "1.5.0"
+__version__ = "1.6.0"
@@ -48,7 +48,7 @@ Repository = "https://github.com/crewAIInc/crewAI"

 [project.optional-dependencies]
 tools = [
-    "crewai-tools==1.5.0",
+    "crewai-tools==1.6.0",
 ]
 embeddings = [
     "tiktoken~=0.8.0"
@@ -40,7 +40,7 @@ def _suppress_pydantic_deprecation_warnings() -> None:

 _suppress_pydantic_deprecation_warnings()

-__version__ = "1.5.0"
+__version__ = "1.6.0"
 _telemetry_submitted = False

@@ -5,7 +5,7 @@ description = "{{name}} using crewAI"
 authors = [{ name = "Your Name", email = "you@example.com" }]
 requires-python = ">=3.10,<3.14"
 dependencies = [
-    "crewai[tools]==1.5.0"
+    "crewai[tools]==1.6.0"
 ]

 [project.scripts]
@@ -5,7 +5,7 @@ description = "{{name}} using crewAI"
 authors = [{ name = "Your Name", email = "you@example.com" }]
 requires-python = ">=3.10,<3.14"
 dependencies = [
-    "crewai[tools]==1.5.0"
+    "crewai[tools]==1.6.0"
 ]

 [project.scripts]
@@ -310,6 +310,14 @@ class AzureCompletion(BaseLLM):
             params["tools"] = self._convert_tools_for_interference(tools)
             params["tool_choice"] = "auto"

+        additional_params = self.additional_params
+        additional_drop_params = additional_params.get('additional_drop_params')
+        drop_params = additional_params.get('drop_params')
+
+        if drop_params and isinstance(additional_drop_params, list):
+            for drop_param in additional_drop_params:
+                params.pop(drop_param, None)
+
         return params

     def _convert_tools_for_interference(
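For context, a minimal sketch of how this new drop-params handling is exercised from the public `LLM` interface. It mirrors the test added further down in this compare; the model name, environment variables, and the dropped `"stop"` key come from that test, and the `from crewai import LLM` entry point is assumed rather than shown in this diff.

```python
from crewai import LLM

# Assumes AZURE_API_KEY and AZURE_ENDPOINT are set, as in the test below.
# drop_params / additional_drop_params end up in self.additional_params,
# so each listed key is popped from the prepared completion payload.
llm = LLM(
    model="azure/o4-mini",
    drop_params=True,
    additional_drop_params=["stop"],
    max_tokens=1000,
)

params = llm._prepare_completion_params([{"role": "user", "content": "Hello"}])
assert params.get("stop") is None  # "stop" was dropped before the API call
```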
@@ -16,6 +16,7 @@ from crewai.utilities.paths import db_storage_path


 if TYPE_CHECKING:
+    from crewai.crew import Crew
     from crewai.rag.core.base_client import BaseClient
     from crewai.rag.core.base_embeddings_provider import BaseEmbeddingsProvider
     from crewai.rag.embeddings.types import ProviderSpec
@@ -32,16 +33,16 @@ class RAGStorage(BaseRAGStorage):
         self,
         type: str,
         allow_reset: bool = True,
-        embedder_config: ProviderSpec | BaseEmbeddingsProvider | None = None,
-        crew: Any = None,
+        embedder_config: ProviderSpec | BaseEmbeddingsProvider[Any] | None = None,
+        crew: Crew | None = None,
+        path: str | None = None,
     ) -> None:
         super().__init__(type, allow_reset, embedder_config, crew)
-        agents = crew.agents if crew else []
-        agents = [self._sanitize_role(agent.role) for agent in agents]
-        agents = "_".join(agents)
-        self.agents = agents
-        self.storage_file_name = self._build_storage_file_name(type, agents)
+        crew_agents = crew.agents if crew else []
+        sanitized_roles = [self._sanitize_role(agent.role) for agent in crew_agents]
+        agents_str = "_".join(sanitized_roles)
+        self.agents = agents_str
+        self.storage_file_name = self._build_storage_file_name(type, agents_str)

         self.type = type
         self._client: BaseClient | None = None
@@ -96,6 +97,10 @@ class RAGStorage(BaseRAGStorage):
                     ChromaEmbeddingFunctionWrapper, embedding_function
                 )
             )
+
+        if self.path:
+            config.settings.persist_directory = self.path
+
         self._client = create_client(config)

     def _get_client(self) -> BaseClient:
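A minimal usage sketch of the new `path` parameter, assembled from the new test file added later in this compare; the embedder provider/model values and the example path are taken from that test rather than being required settings.

```python
from crewai.memory.storage.rag_storage import RAGStorage

# With path=None the storage keeps its default db_storage_path-based location;
# a custom path is applied as the ChromaDB persist_directory when the client
# configuration is built.
storage = RAGStorage(
    type="short_term",
    crew=None,
    path="/custom/memory/path",
    embedder_config={"provider": "openai", "config": {"model": "text-embedding-3-small"}},
)
```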
@@ -381,6 +381,7 @@ def test_azure_raises_error_when_endpoint_missing():
    with pytest.raises(ValueError, match="Azure endpoint is required"):
        AzureCompletion(model="gpt-4", api_key="test-key")


def test_azure_raises_error_when_api_key_missing():
    """Test that AzureCompletion raises ValueError when API key is missing"""
    from crewai.llms.providers.azure.completion import AzureCompletion
@@ -389,6 +390,8 @@ def test_azure_raises_error_when_api_key_missing():
    with patch.dict(os.environ, {}, clear=True):
        with pytest.raises(ValueError, match="Azure API key is required"):
            AzureCompletion(model="gpt-4", endpoint="https://test.openai.azure.com")


def test_azure_endpoint_configuration():
    """
    Test that Azure endpoint configuration works with multiple environment variable names
@@ -1086,3 +1089,27 @@ def test_azure_mistral_and_other_models():
         )
         assert "model" in params
         assert params["model"] == model_name
+
+
+def test_azure_completion_params_preparation_with_drop_params():
+    """
+    Test that completion parameters are properly prepared with drop paramaeters attribute respected
+    """
+    with patch.dict(os.environ, {
+        "AZURE_API_KEY": "test-key",
+        "AZURE_ENDPOINT": "https://models.inference.ai.azure.com"
+    }):
+        llm = LLM(
+            model="azure/o4-mini",
+            drop_params=True,
+            additional_drop_params=["stop"],
+            max_tokens=1000
+        )
+
+        from crewai.llms.providers.azure.completion import AzureCompletion
+        assert isinstance(llm, AzureCompletion)
+
+        messages = [{"role": "user", "content": "Hello"}]
+        params = llm._prepare_completion_params(messages)
+
+        assert params.get('stop') == None
lib/crewai/tests/rag/test_rag_storage_path.py (new file, +82 lines)
@@ -0,0 +1,82 @@
"""Tests for RAGStorage custom path functionality."""

from unittest.mock import MagicMock, patch

from crewai.memory.storage.rag_storage import RAGStorage


@patch("crewai.memory.storage.rag_storage.create_client")
@patch("crewai.memory.storage.rag_storage.build_embedder")
def test_rag_storage_custom_path(
    mock_build_embedder: MagicMock,
    mock_create_client: MagicMock,
) -> None:
    """Test RAGStorage uses custom path when provided."""
    mock_build_embedder.return_value = MagicMock(return_value=[[0.1, 0.2, 0.3]])
    mock_create_client.return_value = MagicMock()

    custom_path = "/custom/memory/path"
    embedder_config = {"provider": "openai", "config": {"model": "text-embedding-3-small"}}

    RAGStorage(
        type="short_term",
        crew=None,
        path=custom_path,
        embedder_config=embedder_config,
    )

    mock_create_client.assert_called_once()
    config_arg = mock_create_client.call_args[0][0]
    assert config_arg.settings.persist_directory == custom_path


@patch("crewai.memory.storage.rag_storage.create_client")
@patch("crewai.memory.storage.rag_storage.build_embedder")
def test_rag_storage_default_path_when_none(
    mock_build_embedder: MagicMock,
    mock_create_client: MagicMock,
) -> None:
    """Test RAGStorage uses default path when no custom path is provided."""
    mock_build_embedder.return_value = MagicMock(return_value=[[0.1, 0.2, 0.3]])
    mock_create_client.return_value = MagicMock()

    embedder_config = {"provider": "openai", "config": {"model": "text-embedding-3-small"}}

    storage = RAGStorage(
        type="short_term",
        crew=None,
        path=None,
        embedder_config=embedder_config,
    )

    mock_create_client.assert_called_once()
    assert storage.path is None


@patch("crewai.memory.storage.rag_storage.create_client")
@patch("crewai.memory.storage.rag_storage.build_embedder")
def test_rag_storage_custom_path_with_batch_size(
    mock_build_embedder: MagicMock,
    mock_create_client: MagicMock,
) -> None:
    """Test RAGStorage uses custom path with batch_size in config."""
    mock_build_embedder.return_value = MagicMock(return_value=[[0.1, 0.2, 0.3]])
    mock_create_client.return_value = MagicMock()

    custom_path = "/custom/batch/path"
    embedder_config = {
        "provider": "openai",
        "config": {"model": "text-embedding-3-small", "batch_size": 100},
    }

    RAGStorage(
        type="long_term",
        crew=None,
        path=custom_path,
        embedder_config=embedder_config,
    )

    mock_create_client.assert_called_once()
    config_arg = mock_create_client.call_args[0][0]
    assert config_arg.settings.persist_directory == custom_path
    assert config_arg.batch_size == 100
@@ -1,3 +1,3 @@
 """CrewAI development tools."""

-__version__ = "1.5.0"
+__version__ = "1.6.0"