Exporting tool's metadata to AMP - initial work
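
This commit adds a `tools_metadata` field to the tool publish flow: `PlusAPI.publish_tool` accepts it, and a new `extract_tools_metadata()` CLI util builds it from the project's exported `BaseTool` classes. A minimal sketch of the shape of one entry, assuming only the field names visible in the diff and test fixtures below (the concrete values are illustrative, not part of the commit):

    # Illustrative only: one tools_metadata entry as produced by the new
    # extract_tools_metadata() helper; field names taken from the diff below.
    example_entry = {
        "name": "MyTool",                # tool class name
        "humanized_name": "my_tool",     # default of the `name` field
        "description": "A test tool",    # default of the `description` field
        "run_params_schema": {"type": "object", "properties": {}},   # from args_schema
        "init_params_schema": {"type": "object", "properties": {}},  # __init__ params, base fields filtered out
        "env_vars": [
            {"name": "API_KEY", "description": "API key", "required": True, "default": None}
        ],
    }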
@@ -60,6 +60,7 @@ class PlusAPI:
        description: str | None,
        encoded_file: str,
        available_exports: list[dict[str, Any]] | None = None,
        tools_metadata: list[dict[str, Any]] | None = None,
    ) -> requests.Response:
        params = {
            "handle": handle,
@@ -68,6 +69,7 @@ class PlusAPI:
            "file": encoded_file,
            "description": description,
            "available_exports": available_exports,
            "tools_metadata": tools_metadata,
        }
        return self._make_request("POST", f"{self.TOOLS_RESOURCE}", json=params)

@@ -16,6 +16,7 @@ from crewai.cli.constants import DEFAULT_CREWAI_ENTERPRISE_URL
from crewai.cli.utils import (
    build_env_with_tool_repository_credentials,
    extract_available_exports,
    extract_tools_metadata,
    get_project_description,
    get_project_name,
    get_project_version,
@@ -94,6 +95,22 @@ class ToolCommand(BaseCommand, PlusAPIMixin):
        console.print(
            f"[green]Found these tools to publish: {', '.join([e['name'] for e in available_exports])}[/green]"
        )

        console.print("[bold blue]Extracting tool metadata...[/bold blue]")
        try:
            tools_metadata = extract_tools_metadata()
            console.print(f"[dim]DEBUG: Extracted metadata: {tools_metadata}[/dim]")
        except Exception as e:
            console.print(
                f"[bold red]Failed to extract tool metadata.[/bold red]\n"
                f"Error: {e}\n\n"
                f"Please ensure your tools:\n"
                f"* Inherit from BaseTool\n"
                f"* Are properly exported in __init__.py with __all__\n"
                f"* Have valid Pydantic field definitions"
            )
            raise SystemExit()

        self._print_current_organization()

        with tempfile.TemporaryDirectory() as temp_build_dir:
@@ -120,6 +137,7 @@ class ToolCommand(BaseCommand, PlusAPIMixin):
            encoded_tarball = base64.b64encode(tarball_contents).decode("utf-8")

            console.print("[bold blue]Publishing tool to repository...[/bold blue]")
            console.print(f"[dim]DEBUG: Payload (excluding encoded_file): handle={project_name}, is_public={is_public}, version={project_version}, description={project_description}, available_exports={available_exports}, tools_metadata={tools_metadata}[/dim]")
            publish_response = self.plus_api_client.publish_tool(
                handle=project_name,
                is_public=is_public,
@@ -127,6 +145,7 @@ class ToolCommand(BaseCommand, PlusAPIMixin):
                description=project_description,
                encoded_file=f"data:application/x-gzip;base64,{encoded_tarball}",
                available_exports=available_exports,
                tools_metadata=tools_metadata,
            )

            self._validate_response(publish_response)

@@ -1,5 +1,8 @@
from collections.abc import Mapping
from functools import reduce
import hashlib
import importlib.util
import inspect
from inspect import getmro, isclass, isfunction, ismethod
import os
from pathlib import Path
@@ -511,3 +514,210 @@ def _print_no_tools_warning() -> None:
        " # ... implementation\n"
        " return result\n"
    )


def extract_tools_metadata(dir_path: str = "src") -> list[dict[str, Any]]:
    """
    Extract rich metadata from tool classes in the project.

    Returns a list of tool metadata dictionaries containing:
    - name: Class name
    - humanized_name: From name field default
    - description: From description field default
    - run_params_schema: JSON Schema for _run() params (from args_schema)
    - init_params_schema: JSON Schema for __init__ params (filtered)
    - env_vars: List of environment variable dicts
    """
    tools_metadata: list[dict[str, Any]] = []

    for init_file in Path(dir_path).glob("**/__init__.py"):
        tools = _extract_tool_metadata_from_init(init_file)
        tools_metadata.extend(tools)

    return tools_metadata

def _extract_tool_metadata_from_init(init_file: Path) -> list[dict[str, Any]]:
    """
    Load module from init file and extract metadata from valid tool classes.
    """
    from crewai.tools.base_tool import BaseTool

    module_name = f"temp_metadata_{hashlib.md5(str(init_file).encode()).hexdigest()[:8]}"
    spec = importlib.util.spec_from_file_location(module_name, init_file)

    if not spec or not spec.loader:
        return []

    module = importlib.util.module_from_spec(spec)
    sys.modules[module_name] = module

    try:
        spec.loader.exec_module(module)

        if not hasattr(module, "__all__"):
            return []

        tools_metadata = []
        for name in module.__all__:
            if not hasattr(module, name):
                continue

            obj = getattr(module, name)

            if not (inspect.isclass(obj) and issubclass(obj, BaseTool)):
                continue

            tool_info = _extract_single_tool_metadata(obj)
            if tool_info:
                tools_metadata.append(tool_info)

        return tools_metadata

    except Exception as e:
        console.print(
            f"[yellow]Warning: Could not extract metadata from {init_file}: {e}[/yellow]"
        )
        return []

    finally:
        sys.modules.pop(module_name, None)

def _extract_single_tool_metadata(tool_class: type) -> dict[str, Any] | None:
    """
    Extract metadata from a single tool class.
    """
    try:
        core_schema = tool_class.__pydantic_core_schema__
        if not core_schema:
            return None

        schema = _unwrap_schema(core_schema)
        fields = schema.get("schema", {}).get("fields", {})

        return {
            "name": tool_class.__name__,
            "humanized_name": _extract_field_default(
                fields.get("name"), fallback=tool_class.__name__
            ),
            "description": str(
                _extract_field_default(fields.get("description"))
            ).strip(),
            "run_params_schema": _extract_run_params_schema(fields.get("args_schema")),
            "init_params_schema": _extract_init_params_schema(tool_class),
            "env_vars": _extract_env_vars(fields.get("env_vars")),
        }

    except Exception:
        return None

def _unwrap_schema(schema: Mapping[str, Any] | dict[str, Any]) -> dict[str, Any]:
    """
    Unwrap nested schema structures to get to the actual schema definition.
    """
    result: dict[str, Any] = dict(schema)
    while result.get("type") in {"function-after", "default"} and "schema" in result:
        result = dict(result["schema"])
    return result


def _extract_field_default(
    field: dict[str, Any] | None, fallback: str | list[Any] = ""
) -> str | list[Any] | int:
    """
    Extract the default value from a field schema.
    """
    if not field:
        return fallback

    schema = field.get("schema", {})
    default = schema.get("default")
    return default if isinstance(default, (list, str, int)) else fallback


def _get_schema_generator() -> type:
    """Get a SchemaGenerator that omits non-serializable defaults."""
    from pydantic.json_schema import GenerateJsonSchema
    from pydantic_core import PydanticOmit

    class SchemaGenerator(GenerateJsonSchema):
        def handle_invalid_for_json_schema(
            self, schema: Any, error_info: Any
        ) -> dict[str, Any]:
            raise PydanticOmit

    return SchemaGenerator

def _extract_run_params_schema(args_schema_field: dict[str, Any] | None) -> dict[str, Any]:
    """
    Extract JSON Schema for the tool's run parameters from args_schema field.
    """
    from pydantic import BaseModel

    if not args_schema_field:
        return {}

    args_schema_class = args_schema_field.get("schema", {}).get("default")
    if not (inspect.isclass(args_schema_class) and issubclass(args_schema_class, BaseModel)):
        return {}

    try:
        return args_schema_class.model_json_schema(schema_generator=_get_schema_generator())
    except Exception:
        return {}


def _extract_init_params_schema(tool_class: type) -> dict[str, Any]:
    """
    Extract JSON Schema for the tool's __init__ parameters, filtering out base fields.
    """
    ignored_init_params = [
        "name",
        "description",
        "env_vars",
        "args_schema",
        "description_updated",
        "cache_function",
        "result_as_answer",
        "max_usage_count",
        "current_usage_count",
        "package_dependencies",
    ]

    try:
        json_schema = tool_class.model_json_schema(
            schema_generator=_get_schema_generator(), mode="serialization"
        )

        json_schema["properties"] = {
            key: value
            for key, value in json_schema.get("properties", {}).items()
            if key not in ignored_init_params
        }
        return json_schema
    except Exception:
        return {}

def _extract_env_vars(env_vars_field: dict[str, Any] | None) -> list[dict[str, Any]]:
    """
    Extract environment variable definitions from env_vars field.
    """
    from crewai.tools.base_tool import EnvVar

    if not env_vars_field:
        return []

    return [
        {
            "name": env_var.name,
            "description": env_var.description,
            "required": env_var.required,
            "default": env_var.default,
        }
        for env_var in env_vars_field.get("schema", {}).get("default", [])
        if isinstance(env_var, EnvVar)
    ]

@@ -152,6 +152,7 @@ class TestPlusAPI(unittest.TestCase):
            "file": encoded_file,
            "description": description,
            "available_exports": None,
            "tools_metadata": None,
        }
        mock_make_request.assert_called_once_with(
            "POST", "/crewai_plus/api/v1/tools", json=params
@@ -190,6 +191,7 @@ class TestPlusAPI(unittest.TestCase):
            "file": encoded_file,
            "description": description,
            "available_exports": None,
            "tools_metadata": None,
        }

        self.assert_request_with_org_id(
@@ -218,6 +220,48 @@ class TestPlusAPI(unittest.TestCase):
            "file": encoded_file,
            "description": description,
            "available_exports": None,
            "tools_metadata": None,
        }
        mock_make_request.assert_called_once_with(
            "POST", "/crewai_plus/api/v1/tools", json=params
        )
        self.assertEqual(response, mock_response)

@patch("crewai.cli.plus_api.PlusAPI._make_request")
|
||||
def test_publish_tool_with_tools_metadata(self, mock_make_request):
|
||||
mock_response = MagicMock()
|
||||
mock_make_request.return_value = mock_response
|
||||
handle = "test_tool_handle"
|
||||
public = True
|
||||
version = "1.0.0"
|
||||
description = "Test tool description"
|
||||
encoded_file = "encoded_test_file"
|
||||
available_exports = [{"name": "MyTool"}]
|
||||
tools_metadata = [
|
||||
{
|
||||
"name": "MyTool",
|
||||
"humanized_name": "my_tool",
|
||||
"description": "A test tool",
|
||||
"run_params_schema": {"type": "object", "properties": {}},
|
||||
"init_params_schema": {"type": "object", "properties": {}},
|
||||
"env_vars": [{"name": "API_KEY", "description": "API key", "required": True, "default": None}],
|
||||
}
|
||||
]
|
||||
|
||||
response = self.api.publish_tool(
|
||||
handle, public, version, description, encoded_file,
|
||||
available_exports=available_exports,
|
||||
tools_metadata=tools_metadata,
|
||||
)
|
||||
|
||||
params = {
|
||||
"handle": handle,
|
||||
"public": public,
|
||||
"version": version,
|
||||
"file": encoded_file,
|
||||
"description": description,
|
||||
"available_exports": available_exports,
|
||||
"tools_metadata": tools_metadata,
|
||||
}
|
||||
mock_make_request.assert_called_once_with(
|
||||
"POST", "/crewai_plus/api/v1/tools", json=params
|
||||
|
||||
@@ -363,3 +363,261 @@ def test_get_crews_ignores_template_directories(
        utils.get_crews()

    assert not template_crew_detected


# Tests for extract_tools_metadata


def test_extract_tools_metadata_empty_project(temp_project_dir):
    """Test that extract_tools_metadata returns empty list for empty project."""
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert metadata == []


def test_extract_tools_metadata_no_init_file(temp_project_dir):
    """Test that extract_tools_metadata returns empty list when no __init__.py exists."""
    (temp_project_dir / "some_file.py").write_text("print('hello')")
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert metadata == []


def test_extract_tools_metadata_empty_init_file(temp_project_dir):
    """Test that extract_tools_metadata returns empty list for empty __init__.py."""
    create_init_file(temp_project_dir, "")
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert metadata == []


def test_extract_tools_metadata_no_all_variable(temp_project_dir):
    """Test that extract_tools_metadata returns empty list when __all__ is not defined."""
    create_init_file(
        temp_project_dir,
        "from crewai.tools import BaseTool\n\nclass MyTool(BaseTool):\n    pass",
    )
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert metadata == []


def test_extract_tools_metadata_valid_base_tool_class(temp_project_dir):
    """Test that extract_tools_metadata extracts metadata from a valid BaseTool class."""
    create_init_file(
        temp_project_dir,
        """from crewai.tools import BaseTool

class MyTool(BaseTool):
    name: str = "my_tool"
    description: str = "A test tool"

__all__ = ['MyTool']
""",
    )
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert len(metadata) == 1
    assert metadata[0]["name"] == "MyTool"
    assert metadata[0]["humanized_name"] == "my_tool"
    assert metadata[0]["description"] == "A test tool"

def test_extract_tools_metadata_with_args_schema(temp_project_dir):
    """Test that extract_tools_metadata extracts run_params_schema from args_schema."""
    create_init_file(
        temp_project_dir,
        """from crewai.tools import BaseTool
from pydantic import BaseModel

class MyToolInput(BaseModel):
    query: str
    limit: int = 10

class MyTool(BaseTool):
    name: str = "my_tool"
    description: str = "A test tool"
    args_schema: type[BaseModel] = MyToolInput

__all__ = ['MyTool']
""",
    )
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert len(metadata) == 1
    assert metadata[0]["name"] == "MyTool"
    run_params = metadata[0]["run_params_schema"]
    assert "properties" in run_params
    assert "query" in run_params["properties"]
    assert "limit" in run_params["properties"]


def test_extract_tools_metadata_with_env_vars(temp_project_dir):
    """Test that extract_tools_metadata extracts env_vars."""
    create_init_file(
        temp_project_dir,
        """from crewai.tools import BaseTool
from crewai.tools.base_tool import EnvVar

class MyTool(BaseTool):
    name: str = "my_tool"
    description: str = "A test tool"
    env_vars: list[EnvVar] = [
        EnvVar(name="MY_API_KEY", description="API key for service", required=True),
        EnvVar(name="MY_OPTIONAL_VAR", description="Optional var", required=False, default="default_value"),
    ]

__all__ = ['MyTool']
""",
    )
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert len(metadata) == 1
    env_vars = metadata[0]["env_vars"]
    assert len(env_vars) == 2
    assert env_vars[0]["name"] == "MY_API_KEY"
    assert env_vars[0]["description"] == "API key for service"
    assert env_vars[0]["required"] is True
    assert env_vars[1]["name"] == "MY_OPTIONAL_VAR"
    assert env_vars[1]["required"] is False
    assert env_vars[1]["default"] == "default_value"

def test_extract_tools_metadata_with_custom_init_params(temp_project_dir):
    """Test that extract_tools_metadata extracts init_params_schema with custom params."""
    create_init_file(
        temp_project_dir,
        """from crewai.tools import BaseTool

class MyTool(BaseTool):
    name: str = "my_tool"
    description: str = "A test tool"
    api_endpoint: str = "https://api.example.com"
    timeout: int = 30

__all__ = ['MyTool']
""",
    )
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert len(metadata) == 1
    init_params = metadata[0]["init_params_schema"]
    assert "properties" in init_params
    # Custom params should be included
    assert "api_endpoint" in init_params["properties"]
    assert "timeout" in init_params["properties"]
    # Base params should be filtered out
    assert "name" not in init_params["properties"]
    assert "description" not in init_params["properties"]


def test_extract_tools_metadata_multiple_tools(temp_project_dir):
    """Test that extract_tools_metadata extracts metadata from multiple tools."""
    create_init_file(
        temp_project_dir,
        """from crewai.tools import BaseTool

class FirstTool(BaseTool):
    name: str = "first_tool"
    description: str = "First test tool"

class SecondTool(BaseTool):
    name: str = "second_tool"
    description: str = "Second test tool"

__all__ = ['FirstTool', 'SecondTool']
""",
    )
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert len(metadata) == 2
    names = [m["name"] for m in metadata]
    assert "FirstTool" in names
    assert "SecondTool" in names

def test_extract_tools_metadata_multiple_init_files(temp_project_dir):
    """Test that extract_tools_metadata extracts metadata from multiple __init__.py files."""
    # Create tool in root __init__.py
    create_init_file(
        temp_project_dir,
        """from crewai.tools import BaseTool

class RootTool(BaseTool):
    name: str = "root_tool"
    description: str = "Root tool"

__all__ = ['RootTool']
""",
    )

    # Create nested package with another tool
    nested_dir = temp_project_dir / "nested"
    nested_dir.mkdir()
    create_init_file(
        nested_dir,
        """from crewai.tools import BaseTool

class NestedTool(BaseTool):
    name: str = "nested_tool"
    description: str = "Nested tool"

__all__ = ['NestedTool']
""",
    )

    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert len(metadata) == 2
    names = [m["name"] for m in metadata]
    assert "RootTool" in names
    assert "NestedTool" in names


def test_extract_tools_metadata_ignores_non_tool_exports(temp_project_dir):
    """Test that extract_tools_metadata ignores non-BaseTool exports."""
    create_init_file(
        temp_project_dir,
        """from crewai.tools import BaseTool

class MyTool(BaseTool):
    name: str = "my_tool"
    description: str = "A test tool"

def not_a_tool():
    pass

SOME_CONSTANT = "value"

__all__ = ['MyTool', 'not_a_tool', 'SOME_CONSTANT']
""",
    )
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert len(metadata) == 1
    assert metadata[0]["name"] == "MyTool"

def test_extract_tools_metadata_import_error_returns_empty(temp_project_dir):
    """Test that extract_tools_metadata returns empty list on import error."""
    create_init_file(
        temp_project_dir,
        """from nonexistent_module import something

class MyTool(BaseTool):
    pass

__all__ = ['MyTool']
""",
    )
    # Should not raise, just return empty list
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert metadata == []


def test_extract_tools_metadata_syntax_error_returns_empty(temp_project_dir):
    """Test that extract_tools_metadata returns empty list on syntax error."""
    create_init_file(
        temp_project_dir,
        """from crewai.tools import BaseTool

class MyTool(BaseTool):
    # Missing closing parenthesis
    def __init__(self, name:
        pass

__all__ = ['MyTool']
""",
    )
    # Should not raise, just return empty list
    metadata = utils.extract_tools_metadata(dir_path=str(temp_project_dir))
    assert metadata == []

@@ -185,9 +185,14 @@ def test_publish_when_not_in_sync(mock_is_synced, capsys, tool_command):
    "crewai.cli.tools.main.extract_available_exports",
    return_value=[{"name": "SampleTool"}],
)
@patch(
    "crewai.cli.tools.main.extract_tools_metadata",
    return_value=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
)
@patch("crewai.cli.tools.main.ToolCommand._print_current_organization")
def test_publish_when_not_in_sync_and_force(
    mock_print_org,
    mock_tools_metadata,
    mock_available_exports,
    mock_is_synced,
    mock_publish,
@@ -222,6 +227,7 @@ def test_publish_when_not_in_sync_and_force(
        description="A sample tool",
        encoded_file=unittest.mock.ANY,
        available_exports=[{"name": "SampleTool"}],
        tools_metadata=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
    )
    mock_print_org.assert_called_once()

@@ -242,7 +248,12 @@ def test_publish_when_not_in_sync_and_force(
    "crewai.cli.tools.main.extract_available_exports",
    return_value=[{"name": "SampleTool"}],
)
@patch(
    "crewai.cli.tools.main.extract_tools_metadata",
    return_value=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
)
def test_publish_success(
    mock_tools_metadata,
    mock_available_exports,
    mock_is_synced,
    mock_publish,
@@ -277,6 +288,7 @@ def test_publish_success(
    description="A sample tool",
    encoded_file=unittest.mock.ANY,
    available_exports=[{"name": "SampleTool"}],
    tools_metadata=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
    )

@@ -295,7 +307,12 @@ def test_publish_success(
    "crewai.cli.tools.main.extract_available_exports",
    return_value=[{"name": "SampleTool"}],
)
@patch(
    "crewai.cli.tools.main.extract_tools_metadata",
    return_value=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
)
def test_publish_failure(
    mock_tools_metadata,
    mock_available_exports,
    mock_publish,
    mock_open,
@@ -336,7 +353,12 @@ def test_publish_failure(
    "crewai.cli.tools.main.extract_available_exports",
    return_value=[{"name": "SampleTool"}],
)
@patch(
    "crewai.cli.tools.main.extract_tools_metadata",
    return_value=[{"name": "SampleTool", "humanized_name": "sample_tool", "description": "A sample tool", "run_params_schema": {}, "init_params_schema": {}, "env_vars": []}],
)
def test_publish_api_error(
    mock_tools_metadata,
    mock_available_exports,
    mock_publish,
    mock_open,
@@ -362,6 +384,35 @@ def test_publish_api_error(
    mock_publish.assert_called_once()


@patch("crewai.cli.tools.main.get_project_name", return_value="sample-tool")
@patch("crewai.cli.tools.main.get_project_version", return_value="1.0.0")
@patch("crewai.cli.tools.main.get_project_description", return_value="A sample tool")
@patch("crewai.cli.tools.main.git.Repository.is_synced", return_value=True)
@patch(
    "crewai.cli.tools.main.extract_available_exports",
    return_value=[{"name": "SampleTool"}],
)
@patch(
    "crewai.cli.tools.main.extract_tools_metadata",
    side_effect=Exception("Failed to extract metadata"),
)
def test_publish_metadata_extraction_failure(
    mock_tools_metadata,
    mock_available_exports,
    mock_is_synced,
    mock_get_project_description,
    mock_get_project_version,
    mock_get_project_name,
    capsys,
    tool_command,
):
    with raises(SystemExit):
        tool_command.publish(is_public=True)
    output = capsys.readouterr().out
    assert "Failed to extract tool metadata" in output
    assert "Inherit from BaseTool" in output


@patch("crewai.cli.tools.main.Settings")
def test_print_current_organization_with_org(mock_settings, capsys, tool_command):
    mock_settings_instance = MagicMock()