mirror of
https://github.com/crewAIInc/crewAI.git
synced 2026-05-13 13:08:14 +00:00
Compare commits
4 Commits
main
...
feat/open-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ff2fd74ee2 | ||
|
|
812468e1b9 | ||
|
|
0f78d824e9 | ||
|
|
9c981e175b |
@@ -142,6 +142,9 @@ contextual = [
|
||||
daytona = [
|
||||
"daytona~=0.140.0",
|
||||
]
|
||||
opensandbox = [
|
||||
"opensandbox>=0.1.8",
|
||||
]
|
||||
|
||||
e2b = [
|
||||
"e2b~=2.20.0",
|
||||
|
||||
@@ -119,6 +119,11 @@ from crewai_tools.tools.multion_tool.multion_tool import MultiOnTool
|
||||
from crewai_tools.tools.mysql_search_tool.mysql_search_tool import MySQLSearchTool
|
||||
from crewai_tools.tools.nl2sql.nl2sql_tool import NL2SQLTool
|
||||
from crewai_tools.tools.ocr_tool.ocr_tool import OCRTool
|
||||
from crewai_tools.tools.open_sandbox_tool import (
|
||||
OpenSandboxBaseTool,
|
||||
OpenSandboxExecTool,
|
||||
OpenSandboxFileTool,
|
||||
)
|
||||
from crewai_tools.tools.oxylabs_amazon_product_scraper_tool.oxylabs_amazon_product_scraper_tool import (
|
||||
OxylabsAmazonProductScraperTool,
|
||||
)
|
||||
@@ -282,6 +287,9 @@ __all__ = [
|
||||
"MySQLSearchTool",
|
||||
"NL2SQLTool",
|
||||
"OCRTool",
|
||||
"OpenSandboxBaseTool",
|
||||
"OpenSandboxExecTool",
|
||||
"OpenSandboxFileTool",
|
||||
"OxylabsAmazonProductScraperTool",
|
||||
"OxylabsAmazonSearchScraperTool",
|
||||
"OxylabsGoogleSearchScraperTool",
|
||||
|
||||
@@ -109,6 +109,11 @@ from crewai_tools.tools.multion_tool.multion_tool import MultiOnTool
|
||||
from crewai_tools.tools.mysql_search_tool.mysql_search_tool import MySQLSearchTool
|
||||
from crewai_tools.tools.nl2sql.nl2sql_tool import NL2SQLTool
|
||||
from crewai_tools.tools.ocr_tool.ocr_tool import OCRTool
|
||||
from crewai_tools.tools.open_sandbox_tool import (
|
||||
OpenSandboxBaseTool,
|
||||
OpenSandboxExecTool,
|
||||
OpenSandboxFileTool,
|
||||
)
|
||||
from crewai_tools.tools.oxylabs_amazon_product_scraper_tool.oxylabs_amazon_product_scraper_tool import (
|
||||
OxylabsAmazonProductScraperTool,
|
||||
)
|
||||
@@ -266,6 +271,9 @@ __all__ = [
|
||||
"MySQLSearchTool",
|
||||
"NL2SQLTool",
|
||||
"OCRTool",
|
||||
"OpenSandboxBaseTool",
|
||||
"OpenSandboxExecTool",
|
||||
"OpenSandboxFileTool",
|
||||
"OxylabsAmazonProductScraperTool",
|
||||
"OxylabsAmazonSearchScraperTool",
|
||||
"OxylabsGoogleSearchScraperTool",
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
from crewai_tools.tools.open_sandbox_tool.open_sandbox_base_tool import (
|
||||
OpenSandboxBaseTool,
|
||||
)
|
||||
from crewai_tools.tools.open_sandbox_tool.open_sandbox_exec_tool import (
|
||||
OpenSandboxExecTool,
|
||||
)
|
||||
from crewai_tools.tools.open_sandbox_tool.open_sandbox_file_tool import (
|
||||
OpenSandboxFileTool,
|
||||
)
|
||||
|
||||
|
||||
__all__ = [
|
||||
"OpenSandboxBaseTool",
|
||||
"OpenSandboxExecTool",
|
||||
"OpenSandboxFileTool",
|
||||
]
|
||||
@@ -0,0 +1,229 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import atexit
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
from typing import Any, ClassVar
|
||||
|
||||
from crewai.tools import BaseTool, EnvVar
|
||||
from pydantic import ConfigDict, Field, PrivateAttr
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class OpenSandboxBaseTool(BaseTool):
    """Shared base for tools that act on an Open Sandbox sandbox.

    Lifecycle modes:
    - persistent=False (default): create a fresh sandbox per `_run` call and
      kill it when the call returns. Safer and stateless — nothing leaks if
      the agent forgets cleanup.
    - persistent=True: lazily create a single sandbox on first use, cache it
      on the instance, and register an atexit hook to kill it at process
      exit. Cheaper across many calls and lets files/state carry over.
    - sandbox_id=<existing>: attach to a sandbox the caller already owns.
      Never killed by the tool.
    """

    # Sandbox/SDK handles are plain (non-pydantic) objects, so pydantic must
    # be told to accept arbitrary types.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    package_dependencies: list[str] = Field(default_factory=lambda: ["opensandbox"])

    api_key: str | None = Field(
        default_factory=lambda: os.getenv("OPEN_SANDBOX_API_KEY"),
        description="Open Sandbox API key. Falls back to OPEN_SANDBOX_API_KEY env var.",
        json_schema_extra={"required": False},
    )
    domain: str | None = Field(
        default_factory=lambda: os.getenv("OPEN_SANDBOX_DOMAIN"),
        description=(
            "Open Sandbox management API domain (e.g. 'api.opensandbox.io'). "
            "Falls back to OPEN_SANDBOX_DOMAIN env var."
        ),
        json_schema_extra={"required": False},
    )
    protocol: str | None = Field(
        default=None,
        description="Protocol for the management API ('http' or 'https').",
        json_schema_extra={"required": False},
    )

    persistent: bool = Field(
        default=False,
        description=(
            "If True, reuse one sandbox across all calls to this tool instance "
            "and kill it at process exit. Default False creates and kills a "
            "fresh sandbox per call."
        ),
    )
    sandbox_id: str | None = Field(
        default=None,
        description=(
            "Attach to an existing sandbox by id instead of creating a new one. "
            "The tool will never kill a sandbox it did not create."
        ),
    )
    create_params: dict[str, Any] | None = Field(
        default=None,
        description=(
            "Optional kwargs forwarded to SandboxSync.create when creating a "
            "sandbox (e.g. image, env, resource, metadata, entrypoint)."
        ),
    )
    sandbox_timeout: float = Field(
        default=60.0,
        description=(
            "Timeout in seconds to wait for sandbox readiness on create/connect."
        ),
    )

    env_vars: list[EnvVar] = Field(
        default_factory=lambda: [
            EnvVar(
                name="OPEN_SANDBOX_API_KEY",
                description="API key for Open Sandbox service",
                required=False,
            ),
            EnvVar(
                name="OPEN_SANDBOX_DOMAIN",
                description="Open Sandbox management API domain (optional)",
                required=False,
            ),
        ]
    )

    # Lazily built ConnectionConfigSync, cached per instance (see _get_client).
    _client: Any | None = PrivateAttr(default=None)
    # The one sandbox reused across calls when persistent=True.
    _persistent_sandbox: Any | None = PrivateAttr(default=None)
    # Serializes lazy creation/teardown of the persistent sandbox.
    _lock: threading.Lock = PrivateAttr(default_factory=threading.Lock)
    # Ensures close() is registered with atexit at most once per instance.
    _cleanup_registered: bool = PrivateAttr(default=False)

    # Class-level cache of lazily imported SDK symbols: the import cost (and
    # the ImportError check) is paid once per process, not per call.
    _sdk_cache: ClassVar[dict[str, Any]] = {}

    @classmethod
    def _import_sdk(cls) -> dict[str, Any]:
        """Import the optional 'opensandbox' SDK once and cache its symbols.

        Returns:
            Mapping of symbol name -> SDK class/function.

        Raises:
            ImportError: if the 'opensandbox' package is not installed, with
                install instructions attached.
        """
        if cls._sdk_cache:
            return cls._sdk_cache
        try:
            from opensandbox.config.connection_sync import ConnectionConfigSync
            from opensandbox.models.execd import RunCommandOpts
            from opensandbox.models.filesystem import SearchEntry, WriteEntry
            from opensandbox.sync.sandbox import SandboxSync
        except ImportError as exc:
            raise ImportError(
                "The 'opensandbox' package is required for Open Sandbox tools. "
                "Install it with: uv add opensandbox (or) pip install opensandbox"
            ) from exc
        cls._sdk_cache = {
            "SandboxSync": SandboxSync,
            "ConnectionConfigSync": ConnectionConfigSync,
            "RunCommandOpts": RunCommandOpts,
            "WriteEntry": WriteEntry,
            "SearchEntry": SearchEntry,
        }
        return cls._sdk_cache

    def _get_client(self) -> Any:
        """Return a cached ConnectionConfigSync built from this tool's fields.

        Open Sandbox has no separate "client" object — connection settings are
        carried by ConnectionConfigSync and passed into SandboxSync.create /
        SandboxSync.connect. We cache one config per tool instance.
        """
        if self._client is not None:
            return self._client
        sdk = self._import_sdk()
        config_kwargs: dict[str, Any] = {}
        # Only forward explicitly-set values so SDK defaults apply otherwise.
        if self.api_key:
            config_kwargs["api_key"] = self.api_key
        if self.domain:
            config_kwargs["domain"] = self.domain
        if self.protocol:
            config_kwargs["protocol"] = self.protocol
        self._client = sdk["ConnectionConfigSync"](**config_kwargs)
        return self._client

    def _build_create_kwargs(self) -> dict[str, Any]:
        # Copy so downstream consumers cannot mutate the create_params field.
        return dict(self.create_params) if self.create_params else {}

    def _acquire_sandbox(self) -> tuple[Any, bool]:
        """Return (sandbox, should_kill_after_use)."""
        sdk = self._import_sdk()
        config = self._get_client()

        # Caller-owned sandbox: attach only; the tool never kills it.
        if self.sandbox_id:
            sandbox = sdk["SandboxSync"].connect(
                self.sandbox_id,
                connection_config=config,
                connect_timeout=_seconds_to_timedelta(self.sandbox_timeout),
            )
            return sandbox, False

        # Persistent mode: create lazily under the lock; close() (registered
        # with atexit) is responsible for the kill, so should_kill is False.
        if self.persistent:
            with self._lock:
                if self._persistent_sandbox is None:
                    self._persistent_sandbox = sdk["SandboxSync"].create(
                        connection_config=config,
                        ready_timeout=_seconds_to_timedelta(self.sandbox_timeout),
                        **self._build_create_kwargs(),
                    )
                    # Register once; close() may be followed by a re-create,
                    # and we must not stack duplicate atexit hooks.
                    if not self._cleanup_registered:
                        atexit.register(self.close)
                        self._cleanup_registered = True
                return self._persistent_sandbox, False

        # Ephemeral mode: fresh sandbox; caller must kill it after use.
        sandbox = sdk["SandboxSync"].create(
            connection_config=config,
            ready_timeout=_seconds_to_timedelta(self.sandbox_timeout),
            **self._build_create_kwargs(),
        )
        return sandbox, True

    def _release_sandbox(self, sandbox: Any, should_kill: bool) -> None:
        # No-op for attached/persistent sandboxes (should_kill=False).
        if not should_kill:
            return
        # Both teardown steps are best-effort: a cleanup failure must not
        # mask the result of the tool call that just completed.
        try:
            sandbox.kill()
        except Exception:
            logger.debug(
                "Best-effort sandbox kill failed after ephemeral use; "
                "the sandbox may need manual termination.",
                exc_info=True,
            )
        try:
            sandbox.close()
        except Exception:
            logger.debug(
                "Best-effort sandbox local-resource close failed after ephemeral use.",
                exc_info=True,
            )

    def close(self) -> None:
        """Kill the cached persistent sandbox if one exists."""
        # Swap the reference out under the lock so concurrent/duplicate
        # close() calls tear down the sandbox at most once.
        with self._lock:
            sandbox = self._persistent_sandbox
            self._persistent_sandbox = None
        if sandbox is None:
            return
        try:
            sandbox.kill()
        except Exception:
            logger.debug(
                "Best-effort persistent sandbox kill failed at close(); "
                "the sandbox may need manual termination.",
                exc_info=True,
            )
        try:
            sandbox.close()
        except Exception:
            logger.debug(
                "Best-effort persistent sandbox local-resource close failed at close().",
                exc_info=True,
            )
||||
|
||||
|
||||
def _seconds_to_timedelta(seconds: float) -> timedelta:
    """Convert a plain seconds value into the `timedelta` the SDK expects.

    Tool fields stay simple floats for schema friendliness; the opensandbox
    API takes `timedelta` objects for its timeout parameters, so conversion
    happens at the call boundary.

    Args:
        seconds: Duration in seconds (may be fractional).

    Returns:
        The equivalent `datetime.timedelta`.
    """
    # Import is kept local, matching the module's lazy-import style for
    # call-time-only dependencies; the return annotation is resolved lazily
    # via `from __future__ import annotations`.
    from datetime import timedelta

    return timedelta(seconds=seconds)
||||
@@ -0,0 +1,102 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from builtins import type as type_
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from crewai_tools.tools.open_sandbox_tool.open_sandbox_base_tool import (
|
||||
OpenSandboxBaseTool,
|
||||
)
|
||||
|
||||
|
||||
class OpenSandboxExecToolSchema(BaseModel):
    """Input schema for OpenSandboxExecTool: one shell command plus optional
    working directory, environment overrides, and a per-command timeout."""

    command: str = Field(..., description="Shell command to execute in the sandbox.")
    cwd: str | None = Field(
        default=None,
        description="Working directory to run the command in. Defaults to the sandbox work dir.",
    )
    env: dict[str, str] | None = Field(
        default=None,
        description="Optional environment variables to set for this command.",
    )
    timeout: int | None = Field(
        default=None,
        description="Maximum seconds to wait for the command to finish.",
    )
|
||||
|
||||
|
||||
class OpenSandboxExecTool(OpenSandboxBaseTool):
    """Run a shell command inside an Open Sandbox sandbox."""

    name: str = "Open Sandbox Exec"
    description: str = (
        "Execute a shell command inside an Open Sandbox sandbox and return the "
        "exit code and combined output. Use this to run builds, package installs, "
        "git operations, or any one-off shell command."
    )
    args_schema: type_[BaseModel] = OpenSandboxExecToolSchema

    def _run(
        self,
        command: str,
        cwd: str | None = None,
        env: dict[str, str] | None = None,
        timeout: int | None = None,
    ) -> Any:
        """Execute `command` in a sandbox and return a result payload.

        The sandbox is acquired via the base-class lifecycle (ephemeral,
        persistent, or attached by id) and always released in `finally`.

        Returns:
            Dict with 'exit_code', 'result' (output text), and 'artifacts'
            (stderr/results/error details, or None when there are none).
        """
        sdk = self._import_sdk()
        sandbox, should_kill = self._acquire_sandbox()
        try:
            opts = self._build_run_opts(sdk, cwd=cwd, env=env, timeout=timeout)
            execution = sandbox.commands.run(command, opts=opts)
            # getattr guards keep the payload well-formed even if the SDK's
            # execution object lacks a field in some version.
            return {
                "exit_code": getattr(execution, "exit_code", None),
                "result": getattr(execution, "text", None),
                "artifacts": _collect_artifacts(execution),
            }
        finally:
            self._release_sandbox(sandbox, should_kill)

    @staticmethod
    def _build_run_opts(
        sdk: dict[str, Any],
        *,
        cwd: str | None,
        env: dict[str, str] | None,
        timeout: int | None,
    ) -> Any | None:
        """Build a RunCommandOpts from the optional args, or None if all unset.

        Returning None lets `commands.run` fall back to SDK defaults rather
        than passing an empty options object.
        """
        if cwd is None and env is None and timeout is None:
            return None
        kwargs: dict[str, Any] = {}
        if cwd is not None:
            kwargs["working_directory"] = cwd
        if env is not None:
            kwargs["envs"] = env
        if timeout is not None:
            # SDK expects a timedelta, not raw seconds.
            kwargs["timeout"] = timedelta(seconds=timeout)
        return sdk["RunCommandOpts"](**kwargs)
|
||||
|
||||
|
||||
def _collect_artifacts(execution: Any) -> dict[str, Any] | None:
    """Extract stderr lines, result texts, and error details from an execution.

    Returns None when there is nothing noteworthy, keeping the happy-path
    tool payload compact. All attribute access is duck-typed via getattr so
    differently-shaped execution objects degrade gracefully.
    """
    log_obj = getattr(execution, "logs", None)
    stderr_msgs = None if log_obj is None else getattr(log_obj, "stderr", None)
    result_items = getattr(execution, "result", None)
    err = getattr(execution, "error", None)

    nothing_to_report = not stderr_msgs and not result_items and err is None
    if nothing_to_report:
        return None

    stderr_texts = [getattr(msg, "text", str(msg)) for msg in (stderr_msgs or [])]
    result_texts = [getattr(item, "text", None) for item in (result_items or [])]
    return {
        "stderr": stderr_texts,
        "results": result_texts,
        "error": _serialize_error(err),
    }


def _serialize_error(error: Any) -> dict[str, Any] | None:
    """Flatten an SDK execution error into a plain dict (or None)."""
    if error is None:
        return None
    return {
        key: getattr(error, key, None)
        for key in ("name", "value", "traceback")
    }
|
||||
@@ -0,0 +1,228 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
from builtins import type as type_
|
||||
import logging
|
||||
import posixpath
|
||||
from typing import Any, Literal
|
||||
|
||||
from pydantic import BaseModel, Field, model_validator
|
||||
|
||||
from crewai_tools.tools.open_sandbox_tool.open_sandbox_base_tool import (
|
||||
OpenSandboxBaseTool,
|
||||
)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
FileAction = Literal["read", "write", "append", "list", "delete", "mkdir", "info"]
|
||||
|
||||
|
||||
class OpenSandboxFileToolSchema(BaseModel):
    """Input schema for OpenSandboxFileTool: an action verb, a target path,
    and action-specific options (content, binary, recursive, mode)."""

    action: FileAction = Field(
        ...,
        description=(
            "The filesystem action to perform: 'read' (returns file contents), "
            "'write' (create or replace a file with content), 'append' (append "
            "content to an existing file — use this for writing large files in "
            "chunks to avoid hitting tool-call size limits), 'list' (lists a "
            "directory), 'delete' (removes a file/dir), 'mkdir' (creates a "
            "directory), 'info' (returns file metadata)."
        ),
    )
    path: str = Field(..., description="Absolute path inside the sandbox.")
    content: str | None = Field(
        default=None,
        description=(
            "Content to write or append. If omitted for 'write', an empty file "
            "is created. For files larger than a few KB, prefer one 'write' "
            "with empty content followed by multiple 'append' calls of ~4KB "
            "each to stay within tool-call payload limits."
        ),
    )
    binary: bool = Field(
        default=False,
        description=(
            "For 'write': treat content as base64 and upload raw bytes. "
            "For 'read': return contents as base64 instead of decoded utf-8."
        ),
    )
    recursive: bool = Field(
        default=False,
        description="For action='delete': remove a directory recursively.",
    )
    # NOTE(review): the default is the decimal integer 755, not octal 0o755.
    # Confirm the opensandbox SDK interprets the literal digits as the mode.
    mode: int = Field(
        default=755,
        description="For action='mkdir': Unix file mode as an integer (default 755).",
    )

    @model_validator(mode="after")
    def _validate_action_args(self) -> OpenSandboxFileToolSchema:
        """Reject 'append' without content; all other actions tolerate None."""
        if self.action == "append" and self.content is None:
            raise ValueError(
                "action='append' requires 'content'. Pass the chunk to append "
                "in the 'content' field."
            )
        return self
|
||||
|
||||
|
||||
class OpenSandboxFileTool(OpenSandboxBaseTool):
    """Read, write, and manage files inside an Open Sandbox sandbox.

    Notes:
    - Most useful with `persistent=True` or an explicit `sandbox_id`. With the
      default ephemeral mode, files disappear when this tool call finishes.
    """

    name: str = "Open Sandbox File"
    description: str = (
        "Perform filesystem operations inside an Open Sandbox sandbox: read a "
        "file, write content to a path, append content to an existing file, "
        "list a directory, delete a path, make a directory, or fetch file "
        "metadata. For files larger than a few KB, create the file with "
        "action='write' and empty content, then send the body via multiple "
        "'append' calls of ~4KB each to stay within tool-call payload limits."
    )
    args_schema: type_[BaseModel] = OpenSandboxFileToolSchema

    def _run(
        self,
        action: FileAction,
        path: str,
        content: str | None = None,
        binary: bool = False,
        recursive: bool = False,
        mode: int = 755,
    ) -> Any:
        """Dispatch `action` to its helper; always release the sandbox.

        Raises:
            ValueError: if `action` is not one of the supported verbs.
        """
        sandbox, should_kill = self._acquire_sandbox()
        try:
            if action == "read":
                return self._read(sandbox, path, binary=binary)
            if action == "write":
                # Missing content means "create an empty file".
                return self._write(sandbox, path, content or "", binary=binary)
            if action == "append":
                return self._append(sandbox, path, content or "", binary=binary)
            if action == "list":
                return self._list(sandbox, path)
            if action == "delete":
                return self._delete(sandbox, path, recursive=recursive)
            if action == "mkdir":
                return self._mkdir(sandbox, path, mode=mode)
            if action == "info":
                return self._info(sandbox, path)
            raise ValueError(f"Unknown action: {action}")
        finally:
            self._release_sandbox(sandbox, should_kill)

    def _read(self, sandbox: Any, path: str, *, binary: bool) -> dict[str, Any]:
        """Read a file; base64 when binary=True, utf-8 text otherwise.

        Falls back to base64 (with an explanatory note) when a supposedly
        text file turns out not to be valid utf-8.
        """
        if binary:
            data: bytes = sandbox.files.read_bytes(path)
            return {
                "path": path,
                "encoding": "base64",
                "content": base64.b64encode(data).decode("ascii"),
            }
        try:
            text: str = sandbox.files.read_file(path)
            return {"path": path, "encoding": "utf-8", "content": text}
        except UnicodeDecodeError:
            data = sandbox.files.read_bytes(path)
            return {
                "path": path,
                "encoding": "base64",
                "content": base64.b64encode(data).decode("ascii"),
                "note": "File was not valid utf-8; returned as base64.",
            }

    def _write(
        self, sandbox: Any, path: str, content: str, *, binary: bool
    ) -> dict[str, Any]:
        """Create or replace the file at `path` with `content`."""
        # binary=True means `content` is base64-encoded raw bytes.
        payload = base64.b64decode(content) if binary else content.encode("utf-8")
        self._ensure_parent_dir(sandbox, path)
        sandbox.files.write_file(path, payload)
        return {"status": "written", "path": path, "bytes": len(payload)}

    def _append(
        self, sandbox: Any, path: str, content: str, *, binary: bool
    ) -> dict[str, Any]:
        """Append `content` to `path` via read-modify-write.

        If the file cannot be read (e.g. it does not exist yet), the append
        behaves like a fresh write. NOTE(review): this is not atomic — a
        concurrent writer between read and write would be clobbered.
        """
        chunk = base64.b64decode(content) if binary else content.encode("utf-8")
        self._ensure_parent_dir(sandbox, path)
        try:
            existing: bytes = sandbox.files.read_bytes(path)
        except Exception:
            existing = b""
        payload = existing + chunk
        sandbox.files.write_file(path, payload)
        return {
            "status": "appended",
            "path": path,
            "appended_bytes": len(chunk),
            "total_bytes": len(payload),
        }

    def _ensure_parent_dir(self, sandbox: Any, path: str) -> None:
        """Make sure the parent directory of `path` exists.

        Best-effort mkdir of the parent; any error (e.g. already exists) is
        swallowed because create_directories may not be idempotent.
        """
        parent = posixpath.dirname(path)
        # Root, current dir, or bare filename: nothing to create.
        if not parent or parent in ("/", "."):
            return
        sdk = self._import_sdk()
        try:
            # NOTE(review): mode is the decimal int 755, not 0o755 — confirm
            # the SDK expects the literal digit form.
            sandbox.files.create_directories([sdk["WriteEntry"](path=parent, mode=755)])
        except Exception:
            logger.debug(
                "Best-effort parent-directory create failed for %s; "
                "assuming it already exists and proceeding with the write.",
                parent,
                exc_info=True,
            )

    def _mkdir(self, sandbox: Any, path: str, *, mode: int) -> dict[str, Any]:
        """Create a directory at `path` with the given integer mode."""
        sdk = self._import_sdk()
        sandbox.files.create_directories([sdk["WriteEntry"](path=path, mode=mode)])
        return {"status": "created", "path": path, "mode": mode}

    def _delete(self, sandbox: Any, path: str, *, recursive: bool) -> dict[str, Any]:
        """Delete a file, or a whole directory when recursive=True."""
        if recursive:
            sandbox.files.delete_directories([path])
        else:
            sandbox.files.delete_files([path])
        return {"status": "deleted", "path": path}

    def _list(self, sandbox: Any, path: str) -> dict[str, Any]:
        """List directory entries at `path` via the SDK's search API."""
        sdk = self._import_sdk()
        # pattern="*" matches every entry directly under `path`.
        entries = sandbox.files.search(sdk["SearchEntry"](path=path, pattern="*"))
        return {
            "path": path,
            "entries": [self._entry_info_to_dict(entry) for entry in entries],
        }

    def _info(self, sandbox: Any, path: str) -> dict[str, Any]:
        """Return metadata for `path`, or {'found': False} if absent."""
        info_map = sandbox.files.get_file_info([path])
        # Duck-typed: only treat info_map as a mapping if it quacks like one.
        info = info_map.get(path) if hasattr(info_map, "get") else None
        if info is None:
            return {"path": path, "found": False}
        return self._entry_info_to_dict(info)

    @staticmethod
    def _entry_info_to_dict(info: Any) -> dict[str, Any]:
        """Project an SDK file-info object onto a plain JSON-friendly dict.

        Missing attributes become None; datetime-like values are rendered
        via isoformat().
        """
        fields = (
            "path",
            "mode",
            "owner",
            "group",
            "size",
            "modified_at",
            "created_at",
        )
        out: dict[str, Any] = {}
        for field in fields:
            value = getattr(info, field, None)
            if hasattr(value, "isoformat"):
                value = value.isoformat()
            out[field] = value
        return out
||||
@@ -16318,6 +16318,565 @@
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "",
|
||||
"env_vars": [
|
||||
{
|
||||
"default": null,
|
||||
"description": "API key for Open Sandbox service",
|
||||
"name": "OPEN_SANDBOX_API_KEY",
|
||||
"required": false
|
||||
},
|
||||
{
|
||||
"default": null,
|
||||
"description": "Open Sandbox management API domain (optional)",
|
||||
"name": "OPEN_SANDBOX_DOMAIN",
|
||||
"required": false
|
||||
}
|
||||
],
|
||||
"humanized_name": "OpenSandboxBaseTool",
|
||||
"init_params_schema": {
|
||||
"$defs": {
|
||||
"EnvVar": {
|
||||
"properties": {
|
||||
"default": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Default"
|
||||
},
|
||||
"description": {
|
||||
"title": "Description",
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"title": "Name",
|
||||
"type": "string"
|
||||
},
|
||||
"required": {
|
||||
"default": true,
|
||||
"title": "Required",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"title": "EnvVar",
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"description": "Shared base for tools that act on an Open Sandbox sandbox.\n\nLifecycle modes:\n - persistent=False (default): create a fresh sandbox per `_run` call and\n kill it when the call returns. Safer and stateless \u2014 nothing leaks if\n the agent forgets cleanup.\n - persistent=True: lazily create a single sandbox on first use, cache it\n on the instance, and register an atexit hook to kill it at process\n exit. Cheaper across many calls and lets files/state carry over.\n - sandbox_id=<existing>: attach to a sandbox the caller already owns.\n Never killed by the tool.",
|
||||
"properties": {
|
||||
"api_key": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Open Sandbox API key. Falls back to OPEN_SANDBOX_API_KEY env var.",
|
||||
"required": false,
|
||||
"title": "Api Key"
|
||||
},
|
||||
"create_params": {
|
||||
"anyOf": [
|
||||
{
|
||||
"additionalProperties": true,
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Optional kwargs forwarded to SandboxSync.create when creating a sandbox (e.g. image, env, resource, metadata, entrypoint).",
|
||||
"title": "Create Params"
|
||||
},
|
||||
"domain": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Open Sandbox management API domain (e.g. 'api.opensandbox.io'). Falls back to OPEN_SANDBOX_DOMAIN env var.",
|
||||
"required": false,
|
||||
"title": "Domain"
|
||||
},
|
||||
"persistent": {
|
||||
"default": false,
|
||||
"description": "If True, reuse one sandbox across all calls to this tool instance and kill it at process exit. Default False creates and kills a fresh sandbox per call.",
|
||||
"title": "Persistent",
|
||||
"type": "boolean"
|
||||
},
|
||||
"protocol": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Protocol for the management API ('http' or 'https').",
|
||||
"required": false,
|
||||
"title": "Protocol"
|
||||
},
|
||||
"sandbox_id": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Attach to an existing sandbox by id instead of creating a new one. The tool will never kill a sandbox it did not create.",
|
||||
"title": "Sandbox Id"
|
||||
},
|
||||
"sandbox_timeout": {
|
||||
"default": 60.0,
|
||||
"description": "Timeout in seconds to wait for sandbox readiness on create/connect.",
|
||||
"title": "Sandbox Timeout",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"required": [],
|
||||
"title": "OpenSandboxBaseTool",
|
||||
"type": "object"
|
||||
},
|
||||
"name": "OpenSandboxBaseTool",
|
||||
"package_dependencies": [
|
||||
"opensandbox"
|
||||
],
|
||||
"run_params_schema": {
|
||||
"properties": {},
|
||||
"title": "_ArgsSchemaPlaceholder",
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Execute a shell command inside an Open Sandbox sandbox and return the exit code and combined output. Use this to run builds, package installs, git operations, or any one-off shell command.",
|
||||
"env_vars": [
|
||||
{
|
||||
"default": null,
|
||||
"description": "API key for Open Sandbox service",
|
||||
"name": "OPEN_SANDBOX_API_KEY",
|
||||
"required": false
|
||||
},
|
||||
{
|
||||
"default": null,
|
||||
"description": "Open Sandbox management API domain (optional)",
|
||||
"name": "OPEN_SANDBOX_DOMAIN",
|
||||
"required": false
|
||||
}
|
||||
],
|
||||
"humanized_name": "Open Sandbox Exec",
|
||||
"init_params_schema": {
|
||||
"$defs": {
|
||||
"EnvVar": {
|
||||
"properties": {
|
||||
"default": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Default"
|
||||
},
|
||||
"description": {
|
||||
"title": "Description",
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"title": "Name",
|
||||
"type": "string"
|
||||
},
|
||||
"required": {
|
||||
"default": true,
|
||||
"title": "Required",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"title": "EnvVar",
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"description": "Run a shell command inside an Open Sandbox sandbox.",
|
||||
"properties": {
|
||||
"api_key": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Open Sandbox API key. Falls back to OPEN_SANDBOX_API_KEY env var.",
|
||||
"required": false,
|
||||
"title": "Api Key"
|
||||
},
|
||||
"create_params": {
|
||||
"anyOf": [
|
||||
{
|
||||
"additionalProperties": true,
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Optional kwargs forwarded to SandboxSync.create when creating a sandbox (e.g. image, env, resource, metadata, entrypoint).",
|
||||
"title": "Create Params"
|
||||
},
|
||||
"domain": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Open Sandbox management API domain (e.g. 'api.opensandbox.io'). Falls back to OPEN_SANDBOX_DOMAIN env var.",
|
||||
"required": false,
|
||||
"title": "Domain"
|
||||
},
|
||||
"persistent": {
|
||||
"default": false,
|
||||
"description": "If True, reuse one sandbox across all calls to this tool instance and kill it at process exit. Default False creates and kills a fresh sandbox per call.",
|
||||
"title": "Persistent",
|
||||
"type": "boolean"
|
||||
},
|
||||
"protocol": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Protocol for the management API ('http' or 'https').",
|
||||
"required": false,
|
||||
"title": "Protocol"
|
||||
},
|
||||
"sandbox_id": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Attach to an existing sandbox by id instead of creating a new one. The tool will never kill a sandbox it did not create.",
|
||||
"title": "Sandbox Id"
|
||||
},
|
||||
"sandbox_timeout": {
|
||||
"default": 60.0,
|
||||
"description": "Timeout in seconds to wait for sandbox readiness on create/connect.",
|
||||
"title": "Sandbox Timeout",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"required": [],
|
||||
"title": "OpenSandboxExecTool",
|
||||
"type": "object"
|
||||
},
|
||||
"name": "OpenSandboxExecTool",
|
||||
"package_dependencies": [
|
||||
"opensandbox"
|
||||
],
|
||||
"run_params_schema": {
|
||||
"properties": {
|
||||
"command": {
|
||||
"description": "Shell command to execute in the sandbox.",
|
||||
"title": "Command",
|
||||
"type": "string"
|
||||
},
|
||||
"cwd": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Working directory to run the command in. Defaults to the sandbox work dir.",
|
||||
"title": "Cwd"
|
||||
},
|
||||
"env": {
|
||||
"anyOf": [
|
||||
{
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Optional environment variables to set for this command.",
|
||||
"title": "Env"
|
||||
},
|
||||
"timeout": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "integer"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Maximum seconds to wait for the command to finish.",
|
||||
"title": "Timeout"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"command"
|
||||
],
|
||||
"title": "OpenSandboxExecToolSchema",
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Perform filesystem operations inside an Open Sandbox sandbox: read a file, write content to a path, append content to an existing file, list a directory, delete a path, make a directory, or fetch file metadata. For files larger than a few KB, create the file with action='write' and empty content, then send the body via multiple 'append' calls of ~4KB each to stay within tool-call payload limits.",
|
||||
"env_vars": [
|
||||
{
|
||||
"default": null,
|
||||
"description": "API key for Open Sandbox service",
|
||||
"name": "OPEN_SANDBOX_API_KEY",
|
||||
"required": false
|
||||
},
|
||||
{
|
||||
"default": null,
|
||||
"description": "Open Sandbox management API domain (optional)",
|
||||
"name": "OPEN_SANDBOX_DOMAIN",
|
||||
"required": false
|
||||
}
|
||||
],
|
||||
"humanized_name": "Open Sandbox File",
|
||||
"init_params_schema": {
|
||||
"$defs": {
|
||||
"EnvVar": {
|
||||
"properties": {
|
||||
"default": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"title": "Default"
|
||||
},
|
||||
"description": {
|
||||
"title": "Description",
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"title": "Name",
|
||||
"type": "string"
|
||||
},
|
||||
"required": {
|
||||
"default": true,
|
||||
"title": "Required",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"description"
|
||||
],
|
||||
"title": "EnvVar",
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"description": "Read, write, and manage files inside an Open Sandbox sandbox.\n\nNotes:\n - Most useful with `persistent=True` or an explicit `sandbox_id`. With the\n default ephemeral mode, files disappear when this tool call finishes.",
|
||||
"properties": {
|
||||
"api_key": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Open Sandbox API key. Falls back to OPEN_SANDBOX_API_KEY env var.",
|
||||
"required": false,
|
||||
"title": "Api Key"
|
||||
},
|
||||
"create_params": {
|
||||
"anyOf": [
|
||||
{
|
||||
"additionalProperties": true,
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Optional kwargs forwarded to SandboxSync.create when creating a sandbox (e.g. image, env, resource, metadata, entrypoint).",
|
||||
"title": "Create Params"
|
||||
},
|
||||
"domain": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Open Sandbox management API domain (e.g. 'api.opensandbox.io'). Falls back to OPEN_SANDBOX_DOMAIN env var.",
|
||||
"required": false,
|
||||
"title": "Domain"
|
||||
},
|
||||
"persistent": {
|
||||
"default": false,
|
||||
"description": "If True, reuse one sandbox across all calls to this tool instance and kill it at process exit. Default False creates and kills a fresh sandbox per call.",
|
||||
"title": "Persistent",
|
||||
"type": "boolean"
|
||||
},
|
||||
"protocol": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Protocol for the management API ('http' or 'https').",
|
||||
"required": false,
|
||||
"title": "Protocol"
|
||||
},
|
||||
"sandbox_id": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Attach to an existing sandbox by id instead of creating a new one. The tool will never kill a sandbox it did not create.",
|
||||
"title": "Sandbox Id"
|
||||
},
|
||||
"sandbox_timeout": {
|
||||
"default": 60.0,
|
||||
"description": "Timeout in seconds to wait for sandbox readiness on create/connect.",
|
||||
"title": "Sandbox Timeout",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"required": [],
|
||||
"title": "OpenSandboxFileTool",
|
||||
"type": "object"
|
||||
},
|
||||
"name": "OpenSandboxFileTool",
|
||||
"package_dependencies": [
|
||||
"opensandbox"
|
||||
],
|
||||
"run_params_schema": {
|
||||
"properties": {
|
||||
"action": {
|
||||
"description": "The filesystem action to perform: 'read' (returns file contents), 'write' (create or replace a file with content), 'append' (append content to an existing file \u2014 use this for writing large files in chunks to avoid hitting tool-call size limits), 'list' (lists a directory), 'delete' (removes a file/dir), 'mkdir' (creates a directory), 'info' (returns file metadata).",
|
||||
"enum": [
|
||||
"read",
|
||||
"write",
|
||||
"append",
|
||||
"list",
|
||||
"delete",
|
||||
"mkdir",
|
||||
"info"
|
||||
],
|
||||
"title": "Action",
|
||||
"type": "string"
|
||||
},
|
||||
"binary": {
|
||||
"default": false,
|
||||
"description": "For 'write': treat content as base64 and upload raw bytes. For 'read': return contents as base64 instead of decoded utf-8.",
|
||||
"title": "Binary",
|
||||
"type": "boolean"
|
||||
},
|
||||
"content": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"default": null,
|
||||
"description": "Content to write or append. If omitted for 'write', an empty file is created. For files larger than a few KB, prefer one 'write' with empty content followed by multiple 'append' calls of ~4KB each to stay within tool-call payload limits.",
|
||||
"title": "Content"
|
||||
},
|
||||
"mode": {
|
||||
"default": 755,
|
||||
"description": "For action='mkdir': Unix file mode as an integer (default 755).",
|
||||
"title": "Mode",
|
||||
"type": "integer"
|
||||
},
|
||||
"path": {
|
||||
"description": "Absolute path inside the sandbox.",
|
||||
"title": "Path",
|
||||
"type": "string"
|
||||
},
|
||||
"recursive": {
|
||||
"default": false,
|
||||
"description": "For action='delete': remove a directory recursively.",
|
||||
"title": "Recursive",
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"action",
|
||||
"path"
|
||||
],
|
||||
"title": "OpenSandboxFileToolSchema",
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Scrape Amazon product pages with Oxylabs Amazon Product Scraper",
|
||||
"env_vars": [
|
||||
|
||||
Reference in New Issue
Block a user