feat: explicitly map tool environment variables (#338)

Lucas Gomide
2025-06-20 09:06:11 -03:00
committed by GitHub
parent 2cca45b45a
commit 9e92b84bcc
11 changed files with 47 additions and 13 deletions
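
The change applies one pattern across all eleven tools: each tool now declares the environment variables it depends on through an `env_vars: List[EnvVar]` attribute, using the `EnvVar` model exported from `crewai.tools`. Below is a minimal sketch of that pattern on a custom tool; `MyServiceSearchTool`, `MyServiceSearchInput`, and `MY_SERVICE_API_KEY` are hypothetical names used only for illustration, and the `name`/`description`/`args_schema`/`_run` fields follow the usual `BaseTool` interface rather than anything introduced by this commit.

import os
from typing import Any, List, Type

from crewai.tools import BaseTool, EnvVar
from pydantic import BaseModel, Field


class MyServiceSearchInput(BaseModel):
    """Input schema for the hypothetical MyServiceSearchTool."""

    query: str = Field(..., description="Query to send to the service.")


class MyServiceSearchTool(BaseTool):
    """Hypothetical tool showing the explicit env_vars declaration."""

    name: str = "My Service Search"
    description: str = "Searches the hypothetical My Service API."
    args_schema: Type[BaseModel] = MyServiceSearchInput
    # The commit's pattern: declare the environment variables the tool
    # depends on as data on the class, instead of leaving them implicit
    # in os.environ lookups inside _run().
    env_vars: List[EnvVar] = [
        EnvVar(
            name="MY_SERVICE_API_KEY",
            description="API key for the hypothetical My Service API",
            required=True,
        ),
    ]

    def _run(self, query: str, **kwargs: Any) -> str:
        api_key = os.getenv("MY_SERVICE_API_KEY")
        if not api_key:
            raise ValueError("MY_SERVICE_API_KEY environment variable is not set.")
        # Real HTTP call omitted; only the declaration pattern is illustrated.
        return f"Would query My Service with: {query}"

Because the declaration is plain data (a list of EnvVar models) rather than ad hoc environment lookups, callers can presumably inspect a tool's env_vars to surface missing credentials before running it.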

View File

@@ -1,4 +1,4 @@
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import Field
 from typing import TYPE_CHECKING, Any, Dict, List
 import os
@@ -7,6 +7,9 @@ if TYPE_CHECKING:
     from langchain_apify import ApifyActorsTool as _ApifyActorsTool
 class ApifyActorsTool(BaseTool):
+    env_vars: List[EnvVar] = [
+        EnvVar(name="APIFY_API_TOKEN", description="API token for Apify platform access", required=True),
+    ]
     """Tool that runs Apify Actors.
     To use, you should have the environment variable `APIFY_API_TOKEN` set

View File

@@ -1,10 +1,10 @@
 import datetime
 import os
 import time
-from typing import Any, ClassVar, Optional, Type
+from typing import Any, ClassVar, List, Optional, Type
 import requests
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
@@ -48,6 +48,9 @@ class BraveSearchTool(BaseTool):
     save_file: bool = False
     _last_request_time: ClassVar[float] = 0
     _min_request_interval: ClassVar[float] = 1.0 # seconds
+    env_vars: List[EnvVar] = [
+        EnvVar(name="BRAVE_API_KEY", description="API key for Brave Search", required=True),
+    ]
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)

View File

@@ -1,7 +1,7 @@
 import os
 from typing import Any, Optional, Type, List
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
@@ -20,6 +20,10 @@ class BrowserbaseLoadTool(BaseTool):
     proxy: Optional[bool] = None
     browserbase: Optional[Any] = None
     package_dependencies: List[str] = ["browserbase"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="BROWSERBASE_API_KEY", description="API key for Browserbase services", required=False),
+        EnvVar(name="BROWSERBASE_PROJECT_ID", description="Project ID for Browserbase services", required=False),
+    ]
     def __init__(
         self,

View File

@@ -1,7 +1,7 @@
 import os
 from typing import Any, Optional, Type, Dict, Literal, Union, List
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
@@ -26,6 +26,9 @@ class HyperbrowserLoadTool(BaseTool):
     api_key: Optional[str] = None
     hyperbrowser: Optional[Any] = None
     package_dependencies: List[str] = ["hyperbrowser"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="HYPERBROWSER_API_KEY", description="API key for Hyperbrowser services", required=False),
+    ]
     def __init__(self, api_key: Optional[str] = None, **kwargs):
         super().__init__(**kwargs)

View File

@@ -4,7 +4,7 @@ import warnings
 from typing import Any, Dict, List, Optional
 import requests
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 class PatronusEvalTool(BaseTool):
@@ -13,6 +13,9 @@ class PatronusEvalTool(BaseTool):
     evaluators: List[Dict[str, str]] = []
     criteria: List[Dict[str, str]] = []
     description: str = ""
+    env_vars: List[EnvVar] = [
+        EnvVar(name="PATRONUS_API_KEY", description="API key for Patronus evaluation services", required=True),
+    ]
     def __init__(self, **kwargs: Any):
         super().__init__(**kwargs)

View File

@@ -2,7 +2,7 @@ import os
 from typing import TYPE_CHECKING, Any, Optional, Type, List
 from urllib.parse import urlparse
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, ConfigDict, Field, field_validator
 # Type checking import
@@ -68,6 +68,9 @@ class ScrapegraphScrapeTool(BaseTool):
     enable_logging: bool = False
     _client: Optional["Client"] = None
     package_dependencies: List[str] = ["scrapegraph-py"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="SCRAPEGRAPH_API_KEY", description="API key for Scrapegraph AI services", required=False),
+    ]
     def __init__(
         self,

View File

@@ -2,13 +2,16 @@ import os
 import re
 from typing import Any, Optional, Union, List
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 class SerpApiBaseTool(BaseTool):
     """Base class for SerpApi functionality with shared capabilities."""
     package_dependencies: List[str] = ["serpapi"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="SERPAPI_API_KEY", description="API key for SerpApi searches", required=True),
+    ]
     client: Optional[Any] = None

View File

@@ -1,9 +1,9 @@
 import os
-from typing import Any, Optional, Type
+from typing import Any, List, Optional, Type
 from urllib.parse import urlencode
 import requests
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
@@ -26,6 +26,9 @@ class SerplyWebSearchTool(BaseTool):
     proxy_location: Optional[str] = "US"
     query_payload: Optional[dict] = {}
     headers: Optional[dict] = {}
+    env_vars: List[EnvVar] = [
+        EnvVar(name="SERPLY_API_KEY", description="API key for Serply services", required=True),
+    ]
     def __init__(
         self,

View File

@@ -1,4 +1,4 @@
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
 from typing import Optional, Type, Any, Union, List, Literal
 from dotenv import load_dotenv
@@ -27,6 +27,9 @@ class TavilyExtractorToolSchema(BaseModel):
 class TavilyExtractorTool(BaseTool):
     package_dependencies: List[str] = ["tavily-python"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="TAVILY_API_KEY", description="API key for Tavily extraction service", required=True),
+    ]
     """
     Tool that uses the Tavily API to extract content from web pages.

View File

@@ -1,4 +1,4 @@
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
 from typing import Optional, Type, Any, Union, Literal, Sequence, List
 from dotenv import load_dotenv
@@ -102,6 +102,9 @@ class TavilySearchTool(BaseTool):
         description="Maximum length for the 'content' of each search result to avoid context window issues.",
     )
     package_dependencies: List[str] = ["tavily-python"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="TAVILY_API_KEY", description="API key for Tavily search service", required=True),
+    ]
     def __init__(self, **kwargs: Any):
         super().__init__(**kwargs)

View File

@@ -15,7 +15,7 @@ except ImportError:
     Vectorizers = Any
     Auth = Any
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
@@ -41,6 +41,9 @@ class WeaviateVectorSearchTool(BaseTool):
     collection_name: Optional[str] = None
     limit: Optional[int] = Field(default=3)
     headers: Optional[dict] = None
+    env_vars: List[EnvVar] = [
+        EnvVar(name="OPENAI_API_KEY", description="OpenAI API key for embedding generation and retrieval", required=True),
+    ]
     weaviate_cluster_url: str = Field(
         ...,
         description="The URL of the Weaviate cluster",