From 9e92b84bccf73e7cc113ef1083f6f2c1891789e0 Mon Sep 17 00:00:00 2001
From: Lucas Gomide
Date: Fri, 20 Jun 2025 09:06:11 -0300
Subject: [PATCH] feat: mapping explicitly tool environment variables (#338)

---
 .../tools/apify_actors_tool/apify_actors_tool.py           | 5 ++++-
 .../tools/brave_search_tool/brave_search_tool.py           | 7 +++++--
 .../tools/browserbase_load_tool/browserbase_load_tool.py   | 6 +++++-
 .../tools/hyperbrowser_load_tool/hyperbrowser_load_tool.py | 5 ++++-
 .../tools/patronus_eval_tool/patronus_eval_tool.py         | 5 ++++-
 .../scrapegraph_scrape_tool/scrapegraph_scrape_tool.py     | 5 ++++-
 src/crewai_tools/tools/serpapi_tool/serpapi_base_tool.py   | 5 ++++-
 .../tools/serply_api_tool/serply_web_search_tool.py        | 7 +++++--
 .../tools/tavily_extractor_tool/tavily_extractor_tool.py   | 5 ++++-
 .../tools/tavily_search_tool/tavily_search_tool.py         | 5 ++++-
 src/crewai_tools/tools/weaviate_tool/vector_search.py      | 5 ++++-
 11 files changed, 47 insertions(+), 13 deletions(-)

diff --git a/src/crewai_tools/tools/apify_actors_tool/apify_actors_tool.py b/src/crewai_tools/tools/apify_actors_tool/apify_actors_tool.py
index 44c4839e8..127169676 100644
--- a/src/crewai_tools/tools/apify_actors_tool/apify_actors_tool.py
+++ b/src/crewai_tools/tools/apify_actors_tool/apify_actors_tool.py
@@ -1,4 +1,4 @@
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import Field
 from typing import TYPE_CHECKING, Any, Dict, List
 import os
@@ -7,6 +7,9 @@ if TYPE_CHECKING:
     from langchain_apify import ApifyActorsTool as _ApifyActorsTool
 
 class ApifyActorsTool(BaseTool):
+    env_vars: List[EnvVar] = [
+        EnvVar(name="APIFY_API_TOKEN", description="API token for Apify platform access", required=True),
+    ]
     """Tool that runs Apify Actors.
 
     To use, you should have the environment variable `APIFY_API_TOKEN` set
diff --git a/src/crewai_tools/tools/brave_search_tool/brave_search_tool.py b/src/crewai_tools/tools/brave_search_tool/brave_search_tool.py
index 11035739d..1f96d452a 100644
--- a/src/crewai_tools/tools/brave_search_tool/brave_search_tool.py
+++ b/src/crewai_tools/tools/brave_search_tool/brave_search_tool.py
@@ -1,10 +1,10 @@
 import datetime
 import os
 import time
-from typing import Any, ClassVar, Optional, Type
+from typing import Any, ClassVar, List, Optional, Type
 
 import requests
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
 
 
@@ -48,6 +48,9 @@ class BraveSearchTool(BaseTool):
     save_file: bool = False
     _last_request_time: ClassVar[float] = 0
     _min_request_interval: ClassVar[float] = 1.0  # seconds
+    env_vars: List[EnvVar] = [
+        EnvVar(name="BRAVE_API_KEY", description="API key for Brave Search", required=True),
+    ]
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
diff --git a/src/crewai_tools/tools/browserbase_load_tool/browserbase_load_tool.py b/src/crewai_tools/tools/browserbase_load_tool/browserbase_load_tool.py
index f946baf73..b6b3612dc 100644
--- a/src/crewai_tools/tools/browserbase_load_tool/browserbase_load_tool.py
+++ b/src/crewai_tools/tools/browserbase_load_tool/browserbase_load_tool.py
@@ -1,7 +1,7 @@
 import os
 from typing import Any, Optional, Type, List
 
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
 
 
@@ -20,6 +20,10 @@ class BrowserbaseLoadTool(BaseTool):
     proxy: Optional[bool] = None
     browserbase: Optional[Any] = None
     package_dependencies: List[str] = ["browserbase"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="BROWSERBASE_API_KEY", description="API key for Browserbase services", required=False),
+        EnvVar(name="BROWSERBASE_PROJECT_ID", description="Project ID for Browserbase services", required=False),
+    ]
 
     def __init__(
         self,
diff --git a/src/crewai_tools/tools/hyperbrowser_load_tool/hyperbrowser_load_tool.py b/src/crewai_tools/tools/hyperbrowser_load_tool/hyperbrowser_load_tool.py
index 5359427b0..a2571b94b 100644
--- a/src/crewai_tools/tools/hyperbrowser_load_tool/hyperbrowser_load_tool.py
+++ b/src/crewai_tools/tools/hyperbrowser_load_tool/hyperbrowser_load_tool.py
@@ -1,7 +1,7 @@
 import os
 from typing import Any, Optional, Type, Dict, Literal, Union, List
 
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
 
 
@@ -26,6 +26,9 @@ class HyperbrowserLoadTool(BaseTool):
     api_key: Optional[str] = None
     hyperbrowser: Optional[Any] = None
     package_dependencies: List[str] = ["hyperbrowser"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="HYPERBROWSER_API_KEY", description="API key for Hyperbrowser services", required=False),
+    ]
 
     def __init__(self, api_key: Optional[str] = None, **kwargs):
         super().__init__(**kwargs)
diff --git a/src/crewai_tools/tools/patronus_eval_tool/patronus_eval_tool.py b/src/crewai_tools/tools/patronus_eval_tool/patronus_eval_tool.py
index be1f410e2..bc9a60aae 100644
--- a/src/crewai_tools/tools/patronus_eval_tool/patronus_eval_tool.py
+++ b/src/crewai_tools/tools/patronus_eval_tool/patronus_eval_tool.py
@@ -4,7 +4,7 @@ import warnings
 from typing import Any, Dict, List, Optional
 
 import requests
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 
 
 class PatronusEvalTool(BaseTool):
@@ -13,6 +13,9 @@ class PatronusEvalTool(BaseTool):
     evaluators: List[Dict[str, str]] = []
     criteria: List[Dict[str, str]] = []
     description: str = ""
+    env_vars: List[EnvVar] = [
+        EnvVar(name="PATRONUS_API_KEY", description="API key for Patronus evaluation services", required=True),
+    ]
 
     def __init__(self, **kwargs: Any):
         super().__init__(**kwargs)
diff --git a/src/crewai_tools/tools/scrapegraph_scrape_tool/scrapegraph_scrape_tool.py b/src/crewai_tools/tools/scrapegraph_scrape_tool/scrapegraph_scrape_tool.py
index bc3bd667b..04a544fa6 100644
--- a/src/crewai_tools/tools/scrapegraph_scrape_tool/scrapegraph_scrape_tool.py
+++ b/src/crewai_tools/tools/scrapegraph_scrape_tool/scrapegraph_scrape_tool.py
@@ -2,7 +2,7 @@ import os
 from typing import TYPE_CHECKING, Any, Optional, Type, List
 from urllib.parse import urlparse
 
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, ConfigDict, Field, field_validator
 
 # Type checking import
@@ -68,6 +68,9 @@ class ScrapegraphScrapeTool(BaseTool):
     enable_logging: bool = False
     _client: Optional["Client"] = None
     package_dependencies: List[str] = ["scrapegraph-py"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="SCRAPEGRAPH_API_KEY", description="API key for Scrapegraph AI services", required=False),
+    ]
 
     def __init__(
         self,
diff --git a/src/crewai_tools/tools/serpapi_tool/serpapi_base_tool.py b/src/crewai_tools/tools/serpapi_tool/serpapi_base_tool.py
index c0a5ca9c9..aa73d63d5 100644
--- a/src/crewai_tools/tools/serpapi_tool/serpapi_base_tool.py
+++ b/src/crewai_tools/tools/serpapi_tool/serpapi_base_tool.py
@@ -2,13 +2,16 @@ import os
 import re
 from typing import Any, Optional, Union, List
 
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 
 
 class SerpApiBaseTool(BaseTool):
     """Base class for SerpApi functionality with shared capabilities."""
 
     package_dependencies: List[str] = ["serpapi"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="SERPAPI_API_KEY", description="API key for SerpApi searches", required=True),
+    ]
 
     client: Optional[Any] = None
 
diff --git a/src/crewai_tools/tools/serply_api_tool/serply_web_search_tool.py b/src/crewai_tools/tools/serply_api_tool/serply_web_search_tool.py
index b4d1ae4b5..6801f4065 100644
--- a/src/crewai_tools/tools/serply_api_tool/serply_web_search_tool.py
+++ b/src/crewai_tools/tools/serply_api_tool/serply_web_search_tool.py
@@ -1,9 +1,9 @@
 import os
-from typing import Any, Optional, Type
+from typing import Any, List, Optional, Type
 from urllib.parse import urlencode
 
 import requests
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
 
 
@@ -26,6 +26,9 @@ class SerplyWebSearchTool(BaseTool):
     proxy_location: Optional[str] = "US"
     query_payload: Optional[dict] = {}
     headers: Optional[dict] = {}
+    env_vars: List[EnvVar] = [
+        EnvVar(name="SERPLY_API_KEY", description="API key for Serply services", required=True),
+    ]
 
     def __init__(
         self,
diff --git a/src/crewai_tools/tools/tavily_extractor_tool/tavily_extractor_tool.py b/src/crewai_tools/tools/tavily_extractor_tool/tavily_extractor_tool.py
index 043e01fac..5e8a760ee 100644
--- a/src/crewai_tools/tools/tavily_extractor_tool/tavily_extractor_tool.py
+++ b/src/crewai_tools/tools/tavily_extractor_tool/tavily_extractor_tool.py
@@ -1,4 +1,4 @@
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
 from typing import Optional, Type, Any, Union, List, Literal
 from dotenv import load_dotenv
@@ -27,6 +27,9 @@ class TavilyExtractorToolSchema(BaseModel):
 
 
 class TavilyExtractorTool(BaseTool):
     package_dependencies: List[str] = ["tavily-python"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="TAVILY_API_KEY", description="API key for Tavily extraction service", required=True),
+    ]
     """
     Tool that uses the Tavily API to extract content from web pages.
diff --git a/src/crewai_tools/tools/tavily_search_tool/tavily_search_tool.py b/src/crewai_tools/tools/tavily_search_tool/tavily_search_tool.py
index 16841c380..2f9d6dcca 100644
--- a/src/crewai_tools/tools/tavily_search_tool/tavily_search_tool.py
+++ b/src/crewai_tools/tools/tavily_search_tool/tavily_search_tool.py
@@ -1,4 +1,4 @@
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
 from typing import Optional, Type, Any, Union, Literal, Sequence, List
 from dotenv import load_dotenv
@@ -102,6 +102,9 @@ class TavilySearchTool(BaseTool):
         description="Maximum length for the 'content' of each search result to avoid context window issues.",
     )
     package_dependencies: List[str] = ["tavily-python"]
+    env_vars: List[EnvVar] = [
+        EnvVar(name="TAVILY_API_KEY", description="API key for Tavily search service", required=True),
+    ]
 
     def __init__(self, **kwargs: Any):
         super().__init__(**kwargs)
diff --git a/src/crewai_tools/tools/weaviate_tool/vector_search.py b/src/crewai_tools/tools/weaviate_tool/vector_search.py
index fa332f231..13efb018f 100644
--- a/src/crewai_tools/tools/weaviate_tool/vector_search.py
+++ b/src/crewai_tools/tools/weaviate_tool/vector_search.py
@@ -15,7 +15,7 @@ except ImportError:
     Vectorizers = Any
     Auth = Any
 
-from crewai.tools import BaseTool
+from crewai.tools import BaseTool, EnvVar
 from pydantic import BaseModel, Field
 
 
@@ -41,6 +41,9 @@ class WeaviateVectorSearchTool(BaseTool):
     collection_name: Optional[str] = None
     limit: Optional[int] = Field(default=3)
     headers: Optional[dict] = None
+    env_vars: List[EnvVar] = [
+        EnvVar(name="OPENAI_API_KEY", description="OpenAI API key for embedding generation and retrieval", required=True),
+    ]
     weaviate_cluster_url: str = Field(
         ...,
         description="The URL of the Weaviate cluster",
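
Every hunk in this patch applies the same pattern: import EnvVar alongside BaseTool from crewai.tools, then declare the environment variables a tool depends on in an env_vars: List[EnvVar] class attribute instead of leaving them implicit in os.environ lookups. A minimal sketch of the same pattern on a custom tool follows; MyApiSearchTool, its schema, and MY_API_KEY are hypothetical names used only for illustration, while the import path and the EnvVar(name=..., description=..., required=...) fields mirror the hunks above.

    import os
    from typing import Any, List, Optional, Type

    from crewai.tools import BaseTool, EnvVar
    from pydantic import BaseModel, Field


    class MyApiSearchToolSchema(BaseModel):
        query: str = Field(..., description="Search query to send to the API.")


    class MyApiSearchTool(BaseTool):
        name: str = "My API Search"
        description: str = "Searches a hypothetical API and returns raw results."
        args_schema: Type[BaseModel] = MyApiSearchToolSchema
        # Environment variables the tool depends on, declared explicitly rather
        # than discovered only when _run() reads os.environ.
        env_vars: List[EnvVar] = [
            EnvVar(name="MY_API_KEY", description="API key for the hypothetical search API", required=True),
        ]
        api_key: Optional[str] = None

        def __init__(self, api_key: Optional[str] = None, **kwargs: Any):
            super().__init__(**kwargs)
            # env_vars documents the dependency; the value is still read at runtime.
            self.api_key = api_key or os.environ.get("MY_API_KEY")

        def _run(self, query: str) -> str:
            if not self.api_key:
                raise ValueError("MY_API_KEY environment variable is not set.")
            return f"Would query the API for {query!r} using the configured key."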