Mirror of https://github.com/crewAIInc/crewAI.git, synced 2026-01-29 10:08:13 +00:00

Compare commits: 1.9.1...tm-push-cu (5 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 7c29545afb | |
| | 028db9dbbc | |
| | 08fc6ac6f9 | |
| | 31092293e5 | |
| | 0600843299 | |
@@ -60,6 +60,7 @@ class PlusAPI:
         description: str | None,
         encoded_file: str,
         available_exports: list[dict[str, Any]] | None = None,
+        tools_metadata: list[dict[str, Any]] | None = None,
     ) -> requests.Response:
         params = {
             "handle": handle,
@@ -68,6 +69,7 @@ class PlusAPI:
             "file": encoded_file,
             "description": description,
             "available_exports": available_exports,
+            "tools_metadata": {"package": handle, "tools": tools_metadata} if tools_metadata else None,
         }
         return self._make_request("POST", f"{self.TOOLS_RESOURCE}", json=params)
 
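The hunk above extends `PlusAPI.publish_tool` so the publish request can carry per-tool metadata, nested under the package handle. A minimal sketch of the resulting JSON body, with made-up handle and tool values (only the key layout comes from the diff and its tests):

```python
# Hypothetical payload for POST /crewai_plus/api/v1/tools; every value here is illustrative.
params = {
    "handle": "my-tool-package",
    "public": True,
    "version": "0.1.0",
    "file": "data:application/x-gzip;base64,<encoded sdist>",
    "description": "Example tool package",
    "available_exports": [{"name": "MyTool"}],
    # New in this change: a dict keyed by the package handle, or None when
    # no metadata could be extracted.
    "tools_metadata": {
        "package": "my-tool-package",
        "tools": [{"name": "MyTool", "humanized_name": "my_tool", "description": "A sample tool"}],
    },
}
```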
@@ -16,6 +16,7 @@ from crewai.cli.constants import DEFAULT_CREWAI_ENTERPRISE_URL
 from crewai.cli.utils import (
     build_env_with_tool_repository_credentials,
     extract_available_exports,
+    extract_tools_metadata,
     get_project_description,
     get_project_name,
     get_project_version,
@@ -94,6 +95,18 @@ class ToolCommand(BaseCommand, PlusAPIMixin):
         console.print(
             f"[green]Found these tools to publish: {', '.join([e['name'] for e in available_exports])}[/green]"
         )
+
+        console.print("[bold blue]Extracting tool metadata...[/bold blue]")
+        try:
+            tools_metadata = extract_tools_metadata()
+        except Exception as e:
+            console.print(
+                f"[yellow]Warning: Could not extract tool metadata: {e}[/yellow]\n"
+                f"Publishing will continue without detailed metadata."
+            )
+            tools_metadata = []
+
+        self._print_tools_preview(tools_metadata)
         self._print_current_organization()
 
         with tempfile.TemporaryDirectory() as temp_build_dir:
@@ -111,7 +124,7 @@ class ToolCommand(BaseCommand, PlusAPIMixin):
                 "Project build failed. Please ensure that the command `uv build --sdist` completes successfully.",
                 style="bold red",
             )
-            raise SystemExit
+            raise SystemExit(1)
 
         tarball_path = os.path.join(temp_build_dir, tarball_filename)
         with open(tarball_path, "rb") as file:
@@ -127,6 +140,7 @@ class ToolCommand(BaseCommand, PlusAPIMixin):
                 description=project_description,
                 encoded_file=f"data:application/x-gzip;base64,{encoded_tarball}",
                 available_exports=available_exports,
+                tools_metadata=tools_metadata,
             )
 
         self._validate_response(publish_response)
@@ -237,6 +251,41 @@ class ToolCommand(BaseCommand, PlusAPIMixin):
             )
             raise SystemExit
 
+    def _print_tools_preview(self, tools_metadata: list[dict[str, Any]]) -> None:
+        if not tools_metadata:
+            console.print("[yellow]No tool metadata extracted.[/yellow]")
+            return
+
+        console.print(f"\n[bold]Tools to be published ({len(tools_metadata)}):[/bold]\n")
+
+        for tool in tools_metadata:
+            console.print(f" [bold cyan]{tool.get('name', 'Unknown')}[/bold cyan]")
+            if tool.get("module"):
+                console.print(f" Module: {tool.get('module')}")
+            console.print(f" Name: {tool.get('humanized_name', 'N/A')}")
+            console.print(f" Description: {tool.get('description', 'N/A')[:80]}{'...' if len(tool.get('description', '')) > 80 else ''}")
+
+            init_params = tool.get("init_params_schema", {}).get("properties", {})
+            if init_params:
+                required = tool.get("init_params_schema", {}).get("required", [])
+                console.print(" Init parameters:")
+                for param_name, param_info in init_params.items():
+                    param_type = param_info.get("type", "any")
+                    is_required = param_name in required
+                    req_marker = "[red]*[/red]" if is_required else ""
+                    default = f" = {param_info['default']}" if "default" in param_info else ""
+                    console.print(f" - {param_name}: {param_type}{default} {req_marker}")
+
+            env_vars = tool.get("env_vars", [])
+            if env_vars:
+                console.print(" Environment variables:")
+                for env_var in env_vars:
+                    req_marker = "[red]*[/red]" if env_var.get("required") else ""
+                    default = f" (default: {env_var['default']})" if env_var.get("default") else ""
+                    console.print(f" - {env_var['name']}: {env_var.get('description', 'N/A')}{default} {req_marker}")
+
+            console.print()
+
     def _print_current_organization(self) -> None:
         settings = Settings()
         if settings.org_uuid:
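For reference, a single metadata entry in the shape `_print_tools_preview` reads (a hypothetical example; every value is invented, the keys mirror what the method above accesses):

```python
# Hypothetical entry consumed by the preview; the comments map keys to the printed sections.
tool_entry = {
    "name": "MyTool",                       # bold cyan header line
    "module": "my_tool_pkg.tools",          # printed as "Module:" when present
    "humanized_name": "my_tool",            # printed as "Name:"
    "description": "Searches the example API and returns matching records.",  # truncated at 80 chars
    "run_params_schema": {"type": "object", "properties": {"query": {"type": "string"}}},
    "init_params_schema": {                 # feeds the "Init parameters:" section
        "properties": {
            "api_key": {"type": "string"},
            "timeout": {"type": "integer", "default": 30},
        },
        "required": ["api_key"],            # required params get a red "*" marker
    },
    "env_vars": [                           # feeds the "Environment variables:" section
        {"name": "MY_API_KEY", "description": "API key", "required": True, "default": None},
    ],
}
```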
@@ -1,5 +1,8 @@
-from functools import reduce
+from collections.abc import Mapping
+from functools import lru_cache, reduce
+import hashlib
 import importlib.util
+import inspect
 from inspect import getmro, isclass, isfunction, ismethod
 import os
 from pathlib import Path
@@ -511,3 +514,217 @@ def _print_no_tools_warning() -> None:
         " # ... implementation\n"
         " return result\n"
     )
+
+
+def extract_tools_metadata(dir_path: str = "src") -> list[dict[str, Any]]:
+    """
+    Extract rich metadata from tool classes in the project.
+
+    Returns a list of tool metadata dictionaries containing:
+    - name: Class name
+    - humanized_name: From name field default
+    - description: From description field default
+    - run_params_schema: JSON Schema for _run() params (from args_schema)
+    - init_params_schema: JSON Schema for __init__ params (filtered)
+    - env_vars: List of environment variable dicts
+    """
+    tools_metadata: list[dict[str, Any]] = []
+
+    for init_file in Path(dir_path).glob("**/__init__.py"):
+        tools = _extract_tool_metadata_from_init(init_file)
+        tools_metadata.extend(tools)
+
+    return tools_metadata
+
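A minimal usage sketch for the new helper, assuming it is run from a tool project root with a `src/` layout and imported the way the tests further down import it:

```python
# Minimal sketch; assumes a project with tool packages under src/.
from crewai.cli import utils

metadata = utils.extract_tools_metadata(dir_path="src")
for tool in metadata:
    print(tool["name"], "->", tool["humanized_name"])
    # Each entry also carries "module", "description", "run_params_schema",
    # "init_params_schema" and "env_vars", as listed in the docstring above.
```

The remaining helpers added by this hunk follow.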
+
+def _extract_tool_metadata_from_init(init_file: Path) -> list[dict[str, Any]]:
+    """
+    Load module from init file and extract metadata from valid tool classes.
+    """
+    from crewai.tools.base_tool import BaseTool
+
+    module_name = f"temp_metadata_{hashlib.md5(str(init_file).encode()).hexdigest()[:8]}"
+    spec = importlib.util.spec_from_file_location(module_name, init_file)
+
+    if not spec or not spec.loader:
+        return []
+
+    module = importlib.util.module_from_spec(spec)
+    sys.modules[module_name] = module
+
+    try:
+        spec.loader.exec_module(module)
+
+        exported_names = getattr(module, "__all__", None)
+        if not exported_names:
+            return []
+
+        tools_metadata = []
+        for name in exported_names:
+            obj = getattr(module, name, None)
+            if obj is None or not (inspect.isclass(obj) and issubclass(obj, BaseTool)):
+                continue
+            if tool_info := _extract_single_tool_metadata(obj):
+                tools_metadata.append(tool_info)
+
+        return tools_metadata
+
+    except Exception as e:
+        console.print(
+            f"[yellow]Warning: Could not extract metadata from {init_file}: {e}[/yellow]"
+        )
+        return []
+
+    finally:
+        sys.modules.pop(module_name, None)
+
+
+def _extract_single_tool_metadata(tool_class: type) -> dict[str, Any] | None:
+    """
+    Extract metadata from a single tool class.
+    """
+    try:
+        core_schema = tool_class.__pydantic_core_schema__
+        if not core_schema:
+            return None
+
+        schema = _unwrap_schema(core_schema)
+        fields = schema.get("schema", {}).get("fields", {})
+
+        try:
+            file_path = inspect.getfile(tool_class)
+            relative_path = Path(file_path).relative_to(Path.cwd())
+            module_path = relative_path.with_suffix("")
+            if module_path.parts[0] == "src":
+                module_path = Path(*module_path.parts[1:])
+            module = ".".join(module_path.parts)
+        except (TypeError, ValueError):
+            module = tool_class.__module__
+
+        return {
+            "name": tool_class.__name__,
+            "module": module,
+            "humanized_name": _extract_field_default(
+                fields.get("name"), fallback=tool_class.__name__
+            ),
+            "description": str(
+                _extract_field_default(fields.get("description"))
+            ).strip(),
+            "run_params_schema": _extract_run_params_schema(fields.get("args_schema")),
+            "init_params_schema": _extract_init_params_schema(tool_class),
+            "env_vars": _extract_env_vars(fields.get("env_vars")),
+        }
+
+    except Exception:
+        return None
+
+
+def _unwrap_schema(schema: Mapping[str, Any] | dict[str, Any]) -> dict[str, Any]:
+    """
+    Unwrap nested schema structures to get to the actual schema definition.
+    """
+    result: dict[str, Any] = dict(schema)
+    while result.get("type") in {"function-after", "default"} and "schema" in result:
+        result = dict(result["schema"])
+    return result
+
+
+def _extract_field_default(
+    field: dict[str, Any] | None, fallback: str | list[Any] = ""
+) -> str | list[Any] | int:
+    """
+    Extract the default value from a field schema.
+    """
+    if not field:
+        return fallback
+
+    schema = field.get("schema", {})
+    default = schema.get("default")
+    return default if isinstance(default, (list, str, int)) else fallback
+
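The two helpers above navigate pydantic v2 core schemas. A small sketch of the nesting they unwrap, reading everything defensively because the exact wrapper layout can differ between pydantic versions (the `Demo` model is hypothetical):

```python
# Minimal sketch of the core-schema nesting; every .get has a fallback so nothing crashes
# if the structure differs in a given pydantic version.
from pydantic import BaseModel

class Demo(BaseModel):
    name: str = "my_tool"

core = Demo.__pydantic_core_schema__            # typically {'type': 'model', 'schema': {...}}
inner = dict(core)
while inner.get("type") in {"function-after", "default"} and "schema" in inner:
    inner = dict(inner["schema"])               # peel wrapper layers, as _unwrap_schema does
fields = inner.get("schema", {}).get("fields", {})
name_field = fields.get("name", {})             # a model-field dict wrapping a 'default' schema
print(name_field.get("schema", {}).get("default"))  # expected: "my_tool" (or None if layout differs)
```

The hunk continues below.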
+
+@lru_cache(maxsize=1)
+def _get_schema_generator() -> type:
+    """Get a SchemaGenerator that omits non-serializable defaults."""
+    from pydantic.json_schema import GenerateJsonSchema
+    from pydantic_core import PydanticOmit
+
+    class SchemaGenerator(GenerateJsonSchema):
+        def handle_invalid_for_json_schema(
+            self, schema: Any, error_info: Any
+        ) -> dict[str, Any]:
+            raise PydanticOmit
+
+    return SchemaGenerator
+
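`_get_schema_generator` relies on pydantic v2's documented hook for skipping fields that have no JSON Schema representation. A standalone sketch of the same idea (the `Example` model and its fields are made up):

```python
# Minimal sketch, assuming pydantic v2: fields that cannot be expressed in JSON Schema
# are dropped from the output instead of raising.
from typing import Any, Callable
from pydantic import BaseModel
from pydantic.json_schema import GenerateJsonSchema
from pydantic_core import PydanticOmit

class OmittingGenerator(GenerateJsonSchema):
    def handle_invalid_for_json_schema(self, schema: Any, error_info: Any) -> dict[str, Any]:
        # Tell pydantic to omit the offending field rather than fail schema generation.
        raise PydanticOmit

class Example(BaseModel):
    name: str = "demo"
    transform: Callable[[str], str] = lambda s: s  # has no JSON Schema representation

print(Example.model_json_schema(schema_generator=OmittingGenerator))
# Expected: a schema containing "name" but no "transform" entry.
```

The hunk continues below.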
+
+def _extract_run_params_schema(args_schema_field: dict[str, Any] | None) -> dict[str, Any]:
+    """
+    Extract JSON Schema for the tool's run parameters from args_schema field.
+    """
+    from pydantic import BaseModel
+
+    if not args_schema_field:
+        return {}
+
+    args_schema_class = args_schema_field.get("schema", {}).get("default")
+    if not (inspect.isclass(args_schema_class) and issubclass(args_schema_class, BaseModel)):
+        return {}
+
+    try:
+        return args_schema_class.model_json_schema(schema_generator=_get_schema_generator())
+    except Exception:
+        return {}
+
+
+_IGNORED_INIT_PARAMS = frozenset({
+    "name",
+    "description",
+    "env_vars",
+    "args_schema",
+    "description_updated",
+    "cache_function",
+    "result_as_answer",
+    "max_usage_count",
+    "current_usage_count",
+    "package_dependencies",
+})
+
+
+def _extract_init_params_schema(tool_class: type) -> dict[str, Any]:
+    """
+    Extract JSON Schema for the tool's __init__ parameters, filtering out base fields.
+    """
+    try:
+        json_schema = tool_class.model_json_schema(
+            schema_generator=_get_schema_generator(), mode="serialization"
+        )
+        json_schema["properties"] = {
+            key: value
+            for key, value in json_schema.get("properties", {}).items()
+            if key not in _IGNORED_INIT_PARAMS
+        }
+        return json_schema
+    except Exception:
+        return {}
+
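A rough standalone sketch of the filtering `_extract_init_params_schema` performs, mirroring the test fixtures further down (assumes crewAI's `BaseTool` and pydantic v2; `MyTool` is hypothetical, and the base-field set is copied from `_IGNORED_INIT_PARAMS` above):

```python
# Minimal sketch under the stated assumptions; not the library's own implementation.
from typing import Any
from crewai.tools import BaseTool
from pydantic.json_schema import GenerateJsonSchema
from pydantic_core import PydanticOmit

class _OmitUnserializable(GenerateJsonSchema):
    def handle_invalid_for_json_schema(self, schema: Any, error_info: Any) -> dict[str, Any]:
        raise PydanticOmit  # drop fields that cannot be expressed in JSON Schema

class MyTool(BaseTool):
    name: str = "my_tool"
    description: str = "A test tool"
    api_endpoint: str = "https://api.example.com"  # custom constructor parameter
    timeout: int = 30                               # custom constructor parameter

BASE_FIELDS = {
    "name", "description", "env_vars", "args_schema", "description_updated",
    "cache_function", "result_as_answer", "max_usage_count",
    "current_usage_count", "package_dependencies",
}

schema = MyTool.model_json_schema(schema_generator=_OmitUnserializable, mode="serialization")
custom = {k: v for k, v in schema.get("properties", {}).items() if k not in BASE_FIELDS}
print(sorted(custom))  # expected to include "api_endpoint" and "timeout"
```

The hunk continues below.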
+
+def _extract_env_vars(env_vars_field: dict[str, Any] | None) -> list[dict[str, Any]]:
+    """
+    Extract environment variable definitions from env_vars field.
+    """
+    from crewai.tools.base_tool import EnvVar
+
+    if not env_vars_field:
+        return []
+
+    return [
+        {
+            "name": env_var.name,
+            "description": env_var.description,
+            "required": env_var.required,
+            "default": env_var.default,
+        }
+        for env_var in env_vars_field.get("schema", {}).get("default", [])
+        if isinstance(env_var, EnvVar)
+    ]
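For context, a hypothetical tool declaring its environment variables the way the tests below do, together with the dict shape `_extract_env_vars` is expected to produce for it:

```python
# Hypothetical tool; mirrors the EnvVar usage in the test fixtures below.
from crewai.tools import BaseTool
from crewai.tools.base_tool import EnvVar

class WeatherTool(BaseTool):
    name: str = "weather_tool"
    description: str = "Fetches the weather"
    env_vars: list[EnvVar] = [
        EnvVar(name="WEATHER_API_KEY", description="API key for the weather service", required=True),
    ]

# Expected extraction result (per the helper above):
# [{"name": "WEATHER_API_KEY", "description": "API key for the weather service",
#   "required": True, "default": None}]
```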
@@ -152,6 +152,7 @@ class TestPlusAPI(unittest.TestCase):
             "file": encoded_file,
             "description": description,
             "available_exports": None,
+            "tools_metadata": None,
         }
         mock_make_request.assert_called_once_with(
             "POST", "/crewai_plus/api/v1/tools", json=params
@@ -190,6 +191,7 @@ class TestPlusAPI(unittest.TestCase):
             "file": encoded_file,
             "description": description,
             "available_exports": None,
+            "tools_metadata": None,
         }
 
         self.assert_request_with_org_id(
@@ -218,6 +220,48 @@ class TestPlusAPI(unittest.TestCase):
             "file": encoded_file,
             "description": description,
             "available_exports": None,
+            "tools_metadata": None,
+        }
+        mock_make_request.assert_called_once_with(
+            "POST", "/crewai_plus/api/v1/tools", json=params
+        )
+        self.assertEqual(response, mock_response)
+
+    @patch("crewai.cli.plus_api.PlusAPI._make_request")
+    def test_publish_tool_with_tools_metadata(self, mock_make_request):
+        mock_response = MagicMock()
+        mock_make_request.return_value = mock_response
+        handle = "test_tool_handle"
+        public = True
+        version = "1.0.0"
+        description = "Test tool description"
+        encoded_file = "encoded_test_file"
+        available_exports = [{"name": "MyTool"}]
+        tools_metadata = [
+            {
+                "name": "MyTool",
+                "humanized_name": "my_tool",
+                "description": "A test tool",
+                "run_params_schema": {"type": "object", "properties": {}},
+                "init_params_schema": {"type": "object", "properties": {}},
+                "env_vars": [{"name": "API_KEY", "description": "API key", "required": True, "default": None}],
+            }
+        ]
+
+        response = self.api.publish_tool(
+            handle, public, version, description, encoded_file,
+            available_exports=available_exports,
+            tools_metadata=tools_metadata,
+        )
+
+        params = {
+            "handle": handle,
+            "public": public,
+            "version": version,
+            "file": encoded_file,
+            "description": description,
+            "available_exports": available_exports,
+            "tools_metadata": {"package": handle, "tools": tools_metadata},
         }
         mock_make_request.assert_called_once_with(
             "POST", "/crewai_plus/api/v1/tools", json=params
@@ -363,3 +363,261 @@ def test_get_crews_ignores_template_directories(
         utils.get_crews()
 
     assert not template_crew_detected
+
+
+# Tests for extract_tools_metadata
+
+
+def test_extract_tools_metadata_empty_project(temp_project_dir):
+    """Test that extract_tools_metadata returns empty list for empty project."""
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert metadata == []
+
+
+def test_extract_tools_metadata_no_init_file(temp_project_dir):
+    """Test that extract_tools_metadata returns empty list when no __init__.py exists."""
+    (temp_project_dir / "some_file.py").write_text("print('hello')")
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert metadata == []
+
+
+def test_extract_tools_metadata_empty_init_file(temp_project_dir):
+    """Test that extract_tools_metadata returns empty list for empty __init__.py."""
+    create_init_file(temp_project_dir, "")
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert metadata == []
+
+
+def test_extract_tools_metadata_no_all_variable(temp_project_dir):
+    """Test that extract_tools_metadata returns empty list when __all__ is not defined."""
+    create_init_file(
+        temp_project_dir,
+        "from crewai.tools import BaseTool\n\nclass MyTool(BaseTool):\n    pass",
+    )
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert metadata == []
+
+
+def test_extract_tools_metadata_valid_base_tool_class(temp_project_dir):
+    """Test that extract_tools_metadata extracts metadata from a valid BaseTool class."""
+    create_init_file(
+        temp_project_dir,
+        """from crewai.tools import BaseTool
+
+class MyTool(BaseTool):
+    name: str = "my_tool"
+    description: str = "A test tool"
+
+__all__ = ['MyTool']
+""",
+    )
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert len(metadata) == 1
+    assert metadata[0]["name"] == "MyTool"
+    assert metadata[0]["humanized_name"] == "my_tool"
+    assert metadata[0]["description"] == "A test tool"
+
+
+def test_extract_tools_metadata_with_args_schema(temp_project_dir):
+    """Test that extract_tools_metadata extracts run_params_schema from args_schema."""
+    create_init_file(
+        temp_project_dir,
+        """from crewai.tools import BaseTool
+from pydantic import BaseModel
+
+class MyToolInput(BaseModel):
+    query: str
+    limit: int = 10
+
+class MyTool(BaseTool):
+    name: str = "my_tool"
+    description: str = "A test tool"
+    args_schema: type[BaseModel] = MyToolInput
+
+__all__ = ['MyTool']
+""",
+    )
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert len(metadata) == 1
+    assert metadata[0]["name"] == "MyTool"
+    run_params = metadata[0]["run_params_schema"]
+    assert "properties" in run_params
+    assert "query" in run_params["properties"]
+    assert "limit" in run_params["properties"]
+
+
+def test_extract_tools_metadata_with_env_vars(temp_project_dir):
+    """Test that extract_tools_metadata extracts env_vars."""
+    create_init_file(
+        temp_project_dir,
+        """from crewai.tools import BaseTool
+from crewai.tools.base_tool import EnvVar
+
+class MyTool(BaseTool):
+    name: str = "my_tool"
+    description: str = "A test tool"
+    env_vars: list[EnvVar] = [
+        EnvVar(name="MY_API_KEY", description="API key for service", required=True),
+        EnvVar(name="MY_OPTIONAL_VAR", description="Optional var", required=False, default="default_value"),
+    ]
+
+__all__ = ['MyTool']
+""",
+    )
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert len(metadata) == 1
+    env_vars = metadata[0]["env_vars"]
+    assert len(env_vars) == 2
+    assert env_vars[0]["name"] == "MY_API_KEY"
+    assert env_vars[0]["description"] == "API key for service"
+    assert env_vars[0]["required"] is True
+    assert env_vars[1]["name"] == "MY_OPTIONAL_VAR"
+    assert env_vars[1]["required"] is False
+    assert env_vars[1]["default"] == "default_value"
+
+
+def test_extract_tools_metadata_with_custom_init_params(temp_project_dir):
+    """Test that extract_tools_metadata extracts init_params_schema with custom params."""
+    create_init_file(
+        temp_project_dir,
+        """from crewai.tools import BaseTool
+
+class MyTool(BaseTool):
+    name: str = "my_tool"
+    description: str = "A test tool"
+    api_endpoint: str = "https://api.example.com"
+    timeout: int = 30
+
+__all__ = ['MyTool']
+""",
+    )
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert len(metadata) == 1
+    init_params = metadata[0]["init_params_schema"]
+    assert "properties" in init_params
+    # Custom params should be included
+    assert "api_endpoint" in init_params["properties"]
+    assert "timeout" in init_params["properties"]
+    # Base params should be filtered out
+    assert "name" not in init_params["properties"]
+    assert "description" not in init_params["properties"]
+
+
+def test_extract_tools_metadata_multiple_tools(temp_project_dir):
+    """Test that extract_tools_metadata extracts metadata from multiple tools."""
+    create_init_file(
+        temp_project_dir,
+        """from crewai.tools import BaseTool
+
+class FirstTool(BaseTool):
+    name: str = "first_tool"
+    description: str = "First test tool"
+
+class SecondTool(BaseTool):
+    name: str = "second_tool"
+    description: str = "Second test tool"
+
+__all__ = ['FirstTool', 'SecondTool']
+""",
+    )
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert len(metadata) == 2
+    names = [m["name"] for m in metadata]
+    assert "FirstTool" in names
+    assert "SecondTool" in names
+
+
+def test_extract_tools_metadata_multiple_init_files(temp_project_dir):
+    """Test that extract_tools_metadata extracts metadata from multiple __init__.py files."""
+    # Create tool in root __init__.py
+    create_init_file(
+        temp_project_dir,
+        """from crewai.tools import BaseTool
+
+class RootTool(BaseTool):
+    name: str = "root_tool"
+    description: str = "Root tool"
+
+__all__ = ['RootTool']
+""",
+    )
+
+    # Create nested package with another tool
+    nested_dir = temp_project_dir / "nested"
+    nested_dir.mkdir()
+    create_init_file(
+        nested_dir,
+        """from crewai.tools import BaseTool
+
+class NestedTool(BaseTool):
+    name: str = "nested_tool"
+    description: str = "Nested tool"
+
+__all__ = ['NestedTool']
+""",
+    )
+
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert len(metadata) == 2
+    names = [m["name"] for m in metadata]
+    assert "RootTool" in names
+    assert "NestedTool" in names
+
+
+def test_extract_tools_metadata_ignores_non_tool_exports(temp_project_dir):
+    """Test that extract_tools_metadata ignores non-BaseTool exports."""
+    create_init_file(
+        temp_project_dir,
+        """from crewai.tools import BaseTool
+
+class MyTool(BaseTool):
+    name: str = "my_tool"
+    description: str = "A test tool"
+
+def not_a_tool():
+    pass
+
+SOME_CONSTANT = "value"
+
+__all__ = ['MyTool', 'not_a_tool', 'SOME_CONSTANT']
+""",
+    )
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert len(metadata) == 1
+    assert metadata[0]["name"] == "MyTool"
+
+
+def test_extract_tools_metadata_import_error_returns_empty(temp_project_dir):
+    """Test that extract_tools_metadata returns empty list on import error."""
+    create_init_file(
+        temp_project_dir,
+        """from nonexistent_module import something
+
+class MyTool(BaseTool):
+    pass
+
+__all__ = ['MyTool']
+""",
+    )
+    # Should not raise, just return empty list
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert metadata == []
+
+
+def test_extract_tools_metadata_syntax_error_returns_empty(temp_project_dir):
+    """Test that extract_tools_metadata returns empty list on syntax error."""
+    create_init_file(
+        temp_project_dir,
+        """from crewai.tools import BaseTool
+
+class MyTool(BaseTool):
+    # Missing closing parenthesis
+    def __init__(self, name:
+        pass
+
+__all__ = ['MyTool']
+""",
+    )
+    # Should not raise, just return empty list
+    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
+    assert metadata == []
@@ -185,9 +185,14 @@ def test_publish_when_not_in_sync(mock_is_synced, capsys, tool_command):
     "crewai.cli.tools.main.extract_available_exports",
     return_value=[{"name": "SampleTool"}],
 )
+@patch(
+    "crewai.cli.tools.main.extract_tools_metadata",
+    return_value=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
+)
 @patch("crewai.cli.tools.main.ToolCommand._print_current_organization")
 def test_publish_when_not_in_sync_and_force(
     mock_print_org,
+    mock_tools_metadata,
     mock_available_exports,
     mock_is_synced,
     mock_publish,
@@ -222,6 +227,7 @@ def test_publish_when_not_in_sync_and_force(
         description="A sample tool",
         encoded_file=unittest.mock.ANY,
         available_exports=[{"name": "SampleTool"}],
+        tools_metadata=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
     )
     mock_print_org.assert_called_once()
 
@@ -242,7 +248,12 @@ def test_publish_when_not_in_sync_and_force(
     "crewai.cli.tools.main.extract_available_exports",
     return_value=[{"name": "SampleTool"}],
 )
+@patch(
+    "crewai.cli.tools.main.extract_tools_metadata",
+    return_value=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
+)
 def test_publish_success(
+    mock_tools_metadata,
     mock_available_exports,
     mock_is_synced,
     mock_publish,
@@ -277,6 +288,7 @@ def test_publish_success(
         description="A sample tool",
         encoded_file=unittest.mock.ANY,
         available_exports=[{"name": "SampleTool"}],
+        tools_metadata=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
     )
 
 
@@ -295,7 +307,12 @@ def test_publish_success(
     "crewai.cli.tools.main.extract_available_exports",
     return_value=[{"name": "SampleTool"}],
 )
+@patch(
+    "crewai.cli.tools.main.extract_tools_metadata",
+    return_value=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
+)
 def test_publish_failure(
+    mock_tools_metadata,
     mock_available_exports,
     mock_publish,
     mock_open,
@@ -336,7 +353,12 @@ def test_publish_failure(
     "crewai.cli.tools.main.extract_available_exports",
     return_value=[{"name": "SampleTool"}],
 )
+@patch(
+    "crewai.cli.tools.main.extract_tools_metadata",
+    return_value=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
+)
 def test_publish_api_error(
+    mock_tools_metadata,
     mock_available_exports,
     mock_publish,
     mock_open,
@@ -362,6 +384,39 @@ def test_publish_api_error(
     mock_publish.assert_called_once()
 
 
+@patch("crewai.cli.tools.main.get_project_name", return_value="sample-tool")
+@patch("crewai.cli.tools.main.get_project_version", return_value="1.0.0")
+@patch("crewai.cli.tools.main.get_project_description", return_value="A sample tool")
+@patch("crewai.cli.tools.main.git.Repository.is_synced", return_value=True)
+@patch(
+    "crewai.cli.tools.main.extract_available_exports",
+    return_value=[{"name": "SampleTool"}],
+)
+@patch(
+    "crewai.cli.tools.main.extract_tools_metadata",
+    side_effect=Exception("Failed to extract metadata"),
+)
+def test_publish_metadata_extraction_failure_continues_with_warning(
+    mock_tools_metadata,
+    mock_available_exports,
+    mock_is_synced,
+    mock_get_project_description,
+    mock_get_project_version,
+    mock_get_project_name,
+    capsys,
+    tool_command,
+):
+    """Test that metadata extraction failure shows warning but continues publishing."""
+    try:
+        tool_command.publish(is_public=True)
+    except SystemExit:
+        pass  # May fail later due to API mock, but should get past metadata extraction
+    output = capsys.readouterr().out
+    assert "Warning: Could not extract tool metadata" in output
+    assert "Publishing will continue without detailed metadata" in output
+    assert "No tool metadata extracted" in output
+
+
 @patch("crewai.cli.tools.main.Settings")
 def test_print_current_organization_with_org(mock_settings, capsys, tool_command):
     mock_settings_instance = MagicMock()