diff --git a/src/crewai_tools/tools/browserbase_load_tool/browserbase_load_tool.py b/src/crewai_tools/tools/browserbase_load_tool/browserbase_load_tool.py
index 2ca1b95fc..6ac798df9 100644
--- a/src/crewai_tools/tools/browserbase_load_tool/browserbase_load_tool.py
+++ b/src/crewai_tools/tools/browserbase_load_tool/browserbase_load_tool.py
@@ -11,12 +11,10 @@ class BrowserbaseLoadToolSchema(BaseModel):
 class BrowserbaseLoadTool(BaseTool):
     name: str = "Browserbase web load tool"
-    description: str = (
-        "Load webpages url in a headless browser using Browserbase and return the contents"
-    )
+    description: str = "Load webpages url in a headless browser using Browserbase and return the contents"
     args_schema: Type[BaseModel] = BrowserbaseLoadToolSchema
-    api_key: Optional[str] = os.getenv('BROWSERBASE_API_KEY')
-    project_id: Optional[str] = os.getenv('BROWSERBASE_PROJECT_ID')
+    api_key: Optional[str] = os.getenv("BROWSERBASE_API_KEY")
+    project_id: Optional[str] = os.getenv("BROWSERBASE_PROJECT_ID")
     text_content: Optional[bool] = False
     session_id: Optional[str] = None
     proxy: Optional[bool] = None
@@ -33,13 +31,24 @@ class BrowserbaseLoadTool(BaseTool):
     ):
         super().__init__(**kwargs)
         if not self.api_key:
-            raise EnvironmentError("BROWSERBASE_API_KEY environment variable is required for initialization")
+            raise EnvironmentError(
+                "BROWSERBASE_API_KEY environment variable is required for initialization"
+            )
         try:
             from browserbase import Browserbase  # type: ignore
         except ImportError:
-            raise ImportError(
-                "`browserbase` package not found, please run `pip install browserbase`"
-            )
+            import click
+
+            if click.confirm(
+                "`browserbase` package not found, would you like to install it?"
+            ):
+                import subprocess
+
+                subprocess.run(["uv", "add", "browserbase"], check=True)
+            else:
+                raise ImportError(
+                    "`browserbase` package not found, please run `uv add browserbase`"
+                )
         self.browserbase = Browserbase(api_key=self.api_key)
         self.text_content = text_content
diff --git a/src/crewai_tools/tools/firecrawl_crawl_website_tool/firecrawl_crawl_website_tool.py b/src/crewai_tools/tools/firecrawl_crawl_website_tool/firecrawl_crawl_website_tool.py
index 0eafd6e4a..cc44e4b39 100644
--- a/src/crewai_tools/tools/firecrawl_crawl_website_tool/firecrawl_crawl_website_tool.py
+++ b/src/crewai_tools/tools/firecrawl_crawl_website_tool/firecrawl_crawl_website_tool.py
@@ -35,9 +35,21 @@ class FirecrawlCrawlWebsiteTool(BaseTool):
         try:
             from firecrawl import FirecrawlApp  # type: ignore
         except ImportError:
-            raise ImportError(
-                "`firecrawl` package not found, please run `pip install firecrawl-py`"
-            )
+            import click
+
+            if click.confirm(
+                "You are missing the 'firecrawl-py' package. Would you like to install it? (y/N)"
+            ):
+                import subprocess
+
+                subprocess.run(["uv", "add", "firecrawl-py"], check=True)
+                from firecrawl import (
+                    FirecrawlApp,
+                )
+            else:
+                raise ImportError(
+                    "`firecrawl-py` package not found, please run `uv add firecrawl-py`"
+                )
         if not self.firecrawl:
             client_api_key = api_key or os.getenv("FIRECRAWL_API_KEY")
diff --git a/src/crewai_tools/tools/firecrawl_scrape_website_tool/firecrawl_scrape_website_tool.py b/src/crewai_tools/tools/firecrawl_scrape_website_tool/firecrawl_scrape_website_tool.py
index 8b2a37185..7076ad263 100644
--- a/src/crewai_tools/tools/firecrawl_scrape_website_tool/firecrawl_scrape_website_tool.py
+++ b/src/crewai_tools/tools/firecrawl_scrape_website_tool/firecrawl_scrape_website_tool.py
@@ -31,9 +31,21 @@ class FirecrawlScrapeWebsiteTool(BaseTool):
         try:
             from firecrawl import FirecrawlApp  # type: ignore
         except ImportError:
-            raise ImportError(
-                "`firecrawl` package not found, please run `pip install firecrawl-py`"
-            )
+            import click
+
+            if click.confirm(
+                "You are missing the 'firecrawl-py' package. Would you like to install it? (y/N)"
+            ):
+                import subprocess
+
+                subprocess.run(["uv", "add", "firecrawl-py"], check=True)
+                from firecrawl import (
+                    FirecrawlApp,
+                )
+            else:
+                raise ImportError(
+                    "`firecrawl-py` package not found, please run `uv add firecrawl-py`"
+                )
         self.firecrawl = FirecrawlApp(api_key=api_key)
diff --git a/src/crewai_tools/tools/firecrawl_search_tool/firecrawl_search_tool.py b/src/crewai_tools/tools/firecrawl_search_tool/firecrawl_search_tool.py
index 36ba16391..c10f98c83 100644
--- a/src/crewai_tools/tools/firecrawl_search_tool/firecrawl_search_tool.py
+++ b/src/crewai_tools/tools/firecrawl_search_tool/firecrawl_search_tool.py
@@ -41,9 +41,21 @@ class FirecrawlSearchTool(BaseTool):
         try:
             from firecrawl import FirecrawlApp  # type: ignore
         except ImportError:
-            raise ImportError(
-                "`firecrawl` package not found, please run `pip install firecrawl-py`"
-            )
+            import click
+
+            if click.confirm(
+                "You are missing the 'firecrawl-py' package. Would you like to install it? (y/N)"
+            ):
+                import subprocess
+
+                subprocess.run(["uv", "add", "firecrawl-py"], check=True)
+                from firecrawl import (
+                    FirecrawlApp,
+                )
+            else:
+                raise ImportError(
+                    "`firecrawl-py` package not found, please run `uv add firecrawl-py`"
+                )
         self.firecrawl = FirecrawlApp(api_key=api_key)
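Note on the pattern above: the three Firecrawl tools prompt with click.confirm, install the missing distribution with `uv add` via subprocess, and then import FirecrawlApp again so the name is actually bound before it is used. As a rough standalone sketch of that flow — the ensure_package helper name and prompt wording are illustrative assumptions, not part of this diff:

import importlib
import subprocess

import click


def ensure_package(module_name: str, dist_name: str):
    """Import module_name, offering to install dist_name with `uv add` if it is missing."""
    try:
        return importlib.import_module(module_name)
    except ImportError:
        if click.confirm(f"You are missing the '{dist_name}' package. Would you like to install it?"):
            subprocess.run(["uv", "add", dist_name], check=True)
            importlib.invalidate_caches()
            # Re-import after installation so the caller gets a bound module object.
            return importlib.import_module(module_name)
        raise ImportError(f"`{dist_name}` package not found, please run `uv add {dist_name}`")


# Hypothetical usage mirroring the hunks above:
# firecrawl = ensure_package("firecrawl", "firecrawl-py")
# FirecrawlApp = firecrawl.FirecrawlApp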
" - "Please install it with: uv add linkup" - ) + super().__init__() + try: + from linkup import LinkupClient + except ImportError: + import click + + if click.confirm( + "You are missing the 'linkup-sdk' package. Would you like to install it? (y/N)" + ): + import subprocess + + subprocess.run(["uv", "add", "linkup-sdk"], check=True) + from linkup import LinkupClient + + else: + raise ImportError( + "The 'linkup-sdk' package is required to use the LinkupSearchTool. " + "Please install it with: uv add linkup-sdk" + ) self._client = LinkupClient(api_key=api_key) - def _run(self, query: str, depth: str = "standard", output_type: str = "searchResults") -> dict: + def _run( + self, query: str, depth: str = "standard", output_type: str = "searchResults" + ) -> dict: """ Executes a search using the Linkup API. @@ -36,9 +58,7 @@ class LinkupSearchTool: """ try: response = self._client.search( - query=query, - depth=depth, - output_type=output_type + query=query, depth=depth, output_type=output_type ) results = [ {"name": result.name, "url": result.url, "content": result.content} diff --git a/src/crewai_tools/tools/multion_tool/multion_tool.py b/src/crewai_tools/tools/multion_tool/multion_tool.py index a991074da..b525c4693 100644 --- a/src/crewai_tools/tools/multion_tool/multion_tool.py +++ b/src/crewai_tools/tools/multion_tool/multion_tool.py @@ -28,9 +28,18 @@ class MultiOnTool(BaseTool): try: from multion.client import MultiOn # type: ignore except ImportError: - raise ImportError( - "`multion` package not found, please run `pip install multion`" - ) + import click + + if click.confirm( + "You are missing the 'multion' package. Would you like to install it? (y/N)" + ): + import subprocess + + subprocess.run(["uv", "add", "multion"], check=True) + else: + raise ImportError( + "`multion` package not found, please run `uv add multion`" + ) self.session_id = None self.local = local self.multion = MultiOn(api_key=api_key) diff --git a/src/crewai_tools/tools/patronus_eval_tool/patronus_local_evaluator_tool.py b/src/crewai_tools/tools/patronus_eval_tool/patronus_local_evaluator_tool.py index e65cb342d..a1b63c790 100644 --- a/src/crewai_tools/tools/patronus_eval_tool/patronus_local_evaluator_tool.py +++ b/src/crewai_tools/tools/patronus_eval_tool/patronus_local_evaluator_tool.py @@ -1,7 +1,14 @@ from typing import Any, Type from crewai.tools import BaseTool from pydantic import BaseModel, Field -from patronus import Client + +try: + from patronus import Client + + PYPATRONUS_AVAILABLE = True +except ImportError: + PYPATRONUS_AVAILABLE = False + Client = Any class FixedLocalEvaluatorToolSchema(BaseModel): @@ -24,26 +31,44 @@ class PatronusLocalEvaluatorTool(BaseTool): name: str = "Patronus Local Evaluator Tool" evaluator: str = "The registered local evaluator" evaluated_model_gold_answer: str = "The agent's gold answer" - description: str = ( - "This tool is used to evaluate the model input and output using custom function evaluators." - ) + description: str = "This tool is used to evaluate the model input and output using custom function evaluators." 
client: Any = None args_schema: Type[BaseModel] = FixedLocalEvaluatorToolSchema class Config: arbitrary_types_allowed = True - def __init__(self, patronus_client: Client, evaluator: str, evaluated_model_gold_answer: str, **kwargs: Any): + def __init__( + self, + patronus_client: Client, + evaluator: str, + evaluated_model_gold_answer: str, + **kwargs: Any, + ): super().__init__(**kwargs) - self.client = patronus_client - if evaluator: - self.evaluator = evaluator - self.evaluated_model_gold_answer = evaluated_model_gold_answer + if PYPATRONUS_AVAILABLE: + self.client = patronus_client + if evaluator: + self.evaluator = evaluator + self.evaluated_model_gold_answer = evaluated_model_gold_answer self.description = f"This tool calls the Patronus Evaluation API that takes an additional argument in addition to the following new argument:\n evaluators={evaluator}, evaluated_model_gold_answer={evaluated_model_gold_answer}" self._generate_description() print( f"Updating judge evaluator, gold_answer to: {self.evaluator}, {self.evaluated_model_gold_answer}" ) + else: + import click + + if click.confirm( + "You are missing the 'patronus' package. Would you like to install it? (y/N)" + ): + import subprocess + + subprocess.run(["uv", "add", "patronus"], check=True) + else: + raise ImportError( + "You are missing the patronus package. Would you like to install it?" + ) def _run( self, @@ -79,7 +104,7 @@ class PatronusLocalEvaluatorTool(BaseTool): if isinstance(evaluated_model_gold_answer, str) else evaluated_model_gold_answer.get("description") ), - tags={}, # Optional metadata, supports arbitrary kv pairs + tags={}, # Optional metadata, supports arbitrary kv pairs ) output = f"Evaluation result: {result.pass_}, Explanation: {result.explanation}" return output diff --git a/src/crewai_tools/tools/scrapegraph_scrape_tool/scrapegraph_scrape_tool.py b/src/crewai_tools/tools/scrapegraph_scrape_tool/scrapegraph_scrape_tool.py index 906bf6376..92623e3e0 100644 --- a/src/crewai_tools/tools/scrapegraph_scrape_tool/scrapegraph_scrape_tool.py +++ b/src/crewai_tools/tools/scrapegraph_scrape_tool/scrapegraph_scrape_tool.py @@ -1,25 +1,30 @@ import os -from typing import Any, Optional, Type +from typing import Any, Optional, Type, TYPE_CHECKING from urllib.parse import urlparse from crewai.tools import BaseTool -from pydantic import BaseModel, Field, validator -from scrapegraph_py import Client -from scrapegraph_py.logger import sgai_logger +from pydantic import BaseModel, Field, validator, ConfigDict + +# Type checking import +if TYPE_CHECKING: + from scrapegraph_py import Client class ScrapegraphError(Exception): """Base exception for Scrapegraph-related errors""" + pass class RateLimitError(ScrapegraphError): """Raised when API rate limits are exceeded""" + pass class FixedScrapegraphScrapeToolSchema(BaseModel): """Input for ScrapegraphScrapeTool when website_url is fixed.""" + pass @@ -32,7 +37,7 @@ class ScrapegraphScrapeToolSchema(FixedScrapegraphScrapeToolSchema): description="Prompt to guide the extraction of content", ) - @validator('website_url') + @validator("website_url") def validate_url(cls, v): """Validate URL format""" try: @@ -41,25 +46,32 @@ class ScrapegraphScrapeToolSchema(FixedScrapegraphScrapeToolSchema): raise ValueError return v except Exception: - raise ValueError("Invalid URL format. URL must include scheme (http/https) and domain") + raise ValueError( + "Invalid URL format. 
+            )
 
 
 class ScrapegraphScrapeTool(BaseTool):
     """
     A tool that uses Scrapegraph AI to intelligently scrape website content.
-    
+
     Raises:
         ValueError: If API key is missing or URL format is invalid
         RateLimitError: If API rate limits are exceeded
         RuntimeError: If scraping operation fails
     """
 
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
     name: str = "Scrapegraph website scraper"
-    description: str = "A tool that uses Scrapegraph AI to intelligently scrape website content."
+    description: str = (
+        "A tool that uses Scrapegraph AI to intelligently scrape website content."
+    )
     args_schema: Type[BaseModel] = ScrapegraphScrapeToolSchema
     website_url: Optional[str] = None
     user_prompt: Optional[str] = None
     api_key: Optional[str] = None
+    _client: Optional["Client"] = None
 
     def __init__(
         self,
@@ -69,8 +81,31 @@
         **kwargs,
     ):
         super().__init__(**kwargs)
+        try:
+            from scrapegraph_py import Client
+            from scrapegraph_py.logger import sgai_logger
+
+        except ImportError:
+            import click
+
+            if click.confirm(
+                "You are missing the 'scrapegraph-py' package. Would you like to install it? (y/N)"
+            ):
+                import subprocess
+
+                subprocess.run(["uv", "add", "scrapegraph-py"], check=True)
+                from scrapegraph_py import Client
+                from scrapegraph_py.logger import sgai_logger
+
+            else:
+                raise ImportError(
+                    "`scrapegraph-py` package not found, please run `uv add scrapegraph-py`"
+                )
+
+        self._client = Client(api_key=api_key)
+
         self.api_key = api_key or os.getenv("SCRAPEGRAPH_API_KEY")
-        
+
         if not self.api_key:
             raise ValueError("Scrapegraph API key is required")
 
@@ -79,7 +114,7 @@ class ScrapegraphScrapeTool(BaseTool):
             self.website_url = website_url
             self.description = f"A tool that uses Scrapegraph AI to intelligently scrape {website_url}'s content."
             self.args_schema = FixedScrapegraphScrapeToolSchema
-        
+
         if user_prompt is not None:
             self.user_prompt = user_prompt
 
@@ -94,22 +129,24 @@ class ScrapegraphScrapeTool(BaseTool):
             if not all([result.scheme, result.netloc]):
                 raise ValueError
         except Exception:
-            raise ValueError("Invalid URL format. URL must include scheme (http/https) and domain")
+            raise ValueError(
+                "Invalid URL format. URL must include scheme (http/https) and domain"
+            )
 
     def _handle_api_response(self, response: dict) -> str:
         """Handle and validate API response"""
         if not response:
             raise RuntimeError("Empty response from Scrapegraph API")
-        
+
         if "error" in response:
             error_msg = response.get("error", {}).get("message", "Unknown error")
             if "rate limit" in error_msg.lower():
                 raise RateLimitError(f"Rate limit exceeded: {error_msg}")
             raise RuntimeError(f"API error: {error_msg}")
-        
+
         if "result" not in response:
             raise RuntimeError("Invalid response format from Scrapegraph API")
-        
+
         return response["result"]
 
     def _run(
         self,
@@ -117,7 +154,10 @@ class ScrapegraphScrapeTool(BaseTool):
         **kwargs: Any,
     ) -> Any:
         website_url = kwargs.get("website_url", self.website_url)
-        user_prompt = kwargs.get("user_prompt", self.user_prompt) or "Extract the main content of the webpage"
+        user_prompt = (
+            kwargs.get("user_prompt", self.user_prompt)
+            or "Extract the main content of the webpage"
+        )
 
         if not website_url:
             raise ValueError("website_url is required")
@@ -125,12 +165,9 @@ class ScrapegraphScrapeTool(BaseTool):
         # Validate URL format
         self._validate_url(website_url)
 
-        # Initialize the client
-        sgai_client = Client(api_key=self.api_key)
-
         try:
             # Make the SmartScraper request
-            response = sgai_client.smartscraper(
+            response = self._client.smartscraper(
                 website_url=website_url,
                 user_prompt=user_prompt,
             )
@@ -144,4 +181,4 @@ class ScrapegraphScrapeTool(BaseTool):
             raise RuntimeError(f"Scraping failed: {str(e)}")
         finally:
             # Always close the client
-            sgai_client.close()
+            self._client.close()
diff --git a/src/crewai_tools/tools/scrapfly_scrape_website_tool/scrapfly_scrape_website_tool.py b/src/crewai_tools/tools/scrapfly_scrape_website_tool/scrapfly_scrape_website_tool.py
index b47ce8e5b..dd071a61b 100644
--- a/src/crewai_tools/tools/scrapfly_scrape_website_tool/scrapfly_scrape_website_tool.py
+++ b/src/crewai_tools/tools/scrapfly_scrape_website_tool/scrapfly_scrape_website_tool.py
@@ -34,9 +34,18 @@ class ScrapflyScrapeWebsiteTool(BaseTool):
         try:
             from scrapfly import ScrapflyClient
         except ImportError:
-            raise ImportError(
-                "`scrapfly` package not found, please run `pip install scrapfly-sdk`"
-            )
+            import click
+
+            if click.confirm(
+                "You are missing the 'scrapfly-sdk' package. Would you like to install it? (y/N)"
+            ):
+                import subprocess
+
+                subprocess.run(["uv", "add", "scrapfly-sdk"], check=True)
+            else:
+                raise ImportError(
+                    "`scrapfly-sdk` package not found, please run `uv add scrapfly-sdk`"
+                )
         self.scrapfly = ScrapflyClient(key=api_key)
 
     def _run(
diff --git a/src/crewai_tools/tools/selenium_scraping_tool/selenium_scraping_tool.py b/src/crewai_tools/tools/selenium_scraping_tool/selenium_scraping_tool.py
index d7a55428d..e43a63828 100644
--- a/src/crewai_tools/tools/selenium_scraping_tool/selenium_scraping_tool.py
+++ b/src/crewai_tools/tools/selenium_scraping_tool/selenium_scraping_tool.py
@@ -5,9 +5,6 @@ from urllib.parse import urlparse
 
 from crewai.tools import BaseTool
 from pydantic import BaseModel, Field, validator
-from selenium import webdriver
-from selenium.webdriver.chrome.options import Options
-from selenium.webdriver.common.by import By
 
 
 class FixedSeleniumScrapingToolSchema(BaseModel):
@@ -17,33 +14,36 @@ class SeleniumScrapingToolSchema(FixedSeleniumScrapingToolSchema):
     """Input for SeleniumScrapingTool."""
 
-    website_url: str = Field(..., description="Mandatory website url to read the file. Must start with http:// or https://")
+    website_url: str = Field(
+        ...,
+        description="Mandatory website url to read the file. Must start with http:// or https://",
+    )
     css_element: str = Field(
         ...,
         description="Mandatory css reference for element to scrape from the website",
     )
 
-    @validator('website_url')
+    @validator("website_url")
     def validate_website_url(cls, v):
         if not v:
             raise ValueError("Website URL cannot be empty")
-        
+
         if len(v) > 2048:  # Common maximum URL length
             raise ValueError("URL is too long (max 2048 characters)")
-        
-        if not re.match(r'^https?://', v):
+
+        if not re.match(r"^https?://", v):
             raise ValueError("URL must start with http:// or https://")
-        
+
         try:
             result = urlparse(v)
             if not all([result.scheme, result.netloc]):
                 raise ValueError("Invalid URL format")
         except Exception as e:
             raise ValueError(f"Invalid URL: {str(e)}")
-        
-        if re.search(r'\s', v):
+
+        if re.search(r"\s", v):
             raise ValueError("URL cannot contain whitespace")
-        
+
         return v
@@ -52,7 +52,7 @@ class SeleniumScrapingTool(BaseTool):
     description: str = "A tool that can be used to read a website content."
     args_schema: Type[BaseModel] = SeleniumScrapingToolSchema
     website_url: Optional[str] = None
-    driver: Optional[Any] = webdriver.Chrome
+    driver: Optional[Any] = None
     cookie: Optional[dict] = None
     wait_time: Optional[int] = 3
     css_element: Optional[str] = None
@@ -66,6 +66,30 @@ class SeleniumScrapingTool(BaseTool):
         **kwargs,
     ):
         super().__init__(**kwargs)
+        try:
+            from selenium import webdriver
+            from selenium.webdriver.chrome.options import Options
+            from selenium.webdriver.common.by import By
+        except ImportError:
+            import click
+
+            if click.confirm(
+                "You are missing the 'selenium' and 'webdriver-manager' packages. Would you like to install it? (y/N)"
+            ):
+                import subprocess
+
+                subprocess.run(
+                    ["uv", "pip", "install", "selenium", "webdriver-manager"],
+                    check=True,
+                )
+                from selenium import webdriver
+                from selenium.webdriver.chrome.options import Options
+                from selenium.webdriver.common.by import By
+            else:
+                raise ImportError(
+                    "`selenium` and `webdriver-manager` package not found, please run `uv add selenium webdriver-manager`"
+                )
+
+        self.driver = webdriver.Chrome()
         if cookie is not None:
             self.cookie = cookie
@@ -130,11 +154,11 @@ class SeleniumScrapingTool(BaseTool):
     def _create_driver(self, url, cookie, wait_time):
         if not url:
             raise ValueError("URL cannot be empty")
-        
+
         # Validate URL format
-        if not re.match(r'^https?://', url):
+        if not re.match(r"^https?://", url):
             raise ValueError("URL must start with http:// or https://")
-        
+
         options = Options()
         options.add_argument("--headless")
         driver = self.driver(options=options)
diff --git a/src/crewai_tools/tools/serpapi_tool/serpapi_base_tool.py b/src/crewai_tools/tools/serpapi_tool/serpapi_base_tool.py
index 98491190c..f41f0a596 100644
--- a/src/crewai_tools/tools/serpapi_tool/serpapi_base_tool.py
+++ b/src/crewai_tools/tools/serpapi_tool/serpapi_base_tool.py
@@ -4,6 +4,7 @@
 from typing import Optional, Any, Union
 
 from crewai.tools import BaseTool
 
+
 class SerpApiBaseTool(BaseTool):
     """Base class for SerpApi functionality with shared capabilities."""
@@ -15,9 +16,18 @@ class SerpApiBaseTool(BaseTool):
         try:
             from serpapi import Client
         except ImportError:
-            raise ImportError(
-                "`serpapi` package not found, please install with `pip install serpapi`"
-            )
+            import click
+
+            if click.confirm(
+                "You are missing the 'serpapi' package. Would you like to install it? (y/N)"
+            ):
+                import subprocess
+
+                subprocess.run(["uv", "add", "serpapi"], check=True)
+            else:
+                raise ImportError(
+                    "`serpapi` package not found, please install with `uv add serpapi`"
+                )
         api_key = os.getenv("SERPAPI_API_KEY")
         if not api_key:
             raise ValueError(
diff --git a/src/crewai_tools/tools/spider_tool/spider_tool.py b/src/crewai_tools/tools/spider_tool/spider_tool.py
index 87726f0bc..170e691f9 100644
--- a/src/crewai_tools/tools/spider_tool/spider_tool.py
+++ b/src/crewai_tools/tools/spider_tool/spider_tool.py
@@ -87,13 +87,21 @@ class SpiderTool(BaseTool):
         try:
             from spider import Spider  # type: ignore
-            self.spider = Spider(api_key=api_key)
         except ImportError:
-            raise ImportError(
-                "`spider-client` package not found, please run `uv add spider-client`"
-            )
-        except Exception as e:
-            raise RuntimeError(f"Failed to initialize Spider client: {str(e)}")
+            import click
+
+            if click.confirm(
+                "You are missing the 'spider-client' package. Would you like to install it? (y/N)"
+            ):
+                import subprocess
+
+                subprocess.run(["uv", "pip", "install", "spider-client"], check=True)
+                from spider import Spider
+            else:
+                raise ImportError(
+                    "`spider-client` package not found, please run `uv add spider-client`"
+                )
+        self.spider = Spider(api_key=api_key)
 
     def _validate_url(self, url: str) -> bool:
         """Validate URL format and security constraints.
diff --git a/src/crewai_tools/tools/stagehand_tool/stagehand_tool.py b/src/crewai_tools/tools/stagehand_tool/stagehand_tool.py
index 07c76c8c3..0aac44e86 100644
--- a/src/crewai_tools/tools/stagehand_tool/stagehand_tool.py
+++ b/src/crewai_tools/tools/stagehand_tool/stagehand_tool.py
@@ -25,6 +25,7 @@ logger = logging.getLogger(__name__)
 STAGEHAND_AVAILABLE = False
 try:
     import stagehand
+
     STAGEHAND_AVAILABLE = True
 except ImportError:
     pass  # Keep STAGEHAND_AVAILABLE as False
@@ -32,33 +33,45 @@ except ImportError:
 
 class StagehandResult(BaseModel):
     """Result from a Stagehand operation.
-    
+
     Attributes:
         success: Whether the operation completed successfully
         data: The result data from the operation
         error: Optional error message if the operation failed
     """
-    success: bool = Field(..., description="Whether the operation completed successfully")
-    data: Union[str, Dict, List] = Field(..., description="The result data from the operation")
-    error: Optional[str] = Field(None, description="Optional error message if the operation failed")
+
+    success: bool = Field(
+        ..., description="Whether the operation completed successfully"
+    )
+    data: Union[str, Dict, List] = Field(
+        ..., description="The result data from the operation"
+    )
+    error: Optional[str] = Field(
+        None, description="Optional error message if the operation failed"
+    )
 
 
 class StagehandToolConfig(BaseModel):
     """Configuration for the StagehandTool.
-    
+
     Attributes:
         api_key: OpenAI API key for Stagehand authentication
         timeout: Maximum time in seconds to wait for operations (default: 30)
         retry_attempts: Number of times to retry failed operations (default: 3)
     """
+
     api_key: str = Field(..., description="OpenAI API key for Stagehand authentication")
-    timeout: int = Field(30, description="Maximum time in seconds to wait for operations")
-    retry_attempts: int = Field(3, description="Number of times to retry failed operations")
+    timeout: int = Field(
+        30, description="Maximum time in seconds to wait for operations"
+    )
+    retry_attempts: int = Field(
+        3, description="Number of times to retry failed operations"
+    )
 
 
 class StagehandToolSchema(BaseModel):
     """Schema for the StagehandTool input parameters.
-    
+
     Examples:
         ```python
         # Using the 'act' API to click a button
         tool.run(
             api_method="act",
             instruction="Click the 'Sign In' button"
         )
-        
+
         # Using the 'extract' API to get text
         tool.run(
             api_method="extract",
             instruction="Get the text content of the main article"
         )
-        
+
         # Using the 'observe' API to monitor changes
         tool.run(
             api_method="observe",
             instruction="Watch for changes in the shopping cart total"
         )
         ```
     """
+
     api_method: str = Field(
         ...,
         description="The Stagehand API to use: 'act' for interactions, 'extract' for getting content, or 'observe' for monitoring changes",
-        pattern="^(act|extract|observe)$"
+        pattern="^(act|extract|observe)$",
     )
     instruction: str = Field(
         ...,
         description="An atomic instruction for Stagehand to execute. Instructions should be simple and specific to increase reliability.",
         min_length=1,
-        max_length=500
+        max_length=500,
     )
 
 
 class StagehandTool(BaseTool):
     """A tool for using Stagehand's AI-powered web automation capabilities.
-    
+
     This tool provides access to Stagehand's three core APIs:
     - act: Perform web interactions (e.g., clicking buttons, filling forms)
     - extract: Extract information from web pages (e.g., getting text content)
     - observe: Monitor web page changes (e.g., watching for updates)
-    
+
     Each function takes atomic instructions to increase reliability.
-    
+
     Required Environment Variables:
         OPENAI_API_KEY: API key for OpenAI (required by Stagehand)
-    
+
     Examples:
         ```python
         tool = StagehandTool()
-        
+
         # Perform a web interaction
         result = tool.run(
             api_method="act",
             instruction="Click the 'Sign In' button"
         )
-        
+
         # Extract content from a page
         content = tool.run(
             api_method="extract",
             instruction="Get the text content of the main article"
         )
-        
+
         # Monitor for changes
         changes = tool.run(
             api_method="observe",
             instruction="Watch for changes in the shopping cart total"
         )
         ```
     """
-    
+
     name: str = "StagehandTool"
     description: str = (
         "A tool that uses Stagehand's AI-powered web automation to interact with websites. "
@@ -137,27 +151,33 @@ class StagehandTool(BaseTool):
         "Each instruction should be atomic (simple and specific) to increase reliability."
     )
     args_schema: Type[BaseModel] = StagehandToolSchema
-    
-    def __init__(self, config: StagehandToolConfig | None = None, **kwargs: Any) -> None:
+
+    def __init__(
+        self, config: StagehandToolConfig | None = None, **kwargs: Any
+    ) -> None:
         """Initialize the StagehandTool.
-        
+
         Args:
             config: Optional configuration for the tool. If not provided,
                    will attempt to use OPENAI_API_KEY from environment.
             **kwargs: Additional keyword arguments passed to the base class.
-        
+
         Raises:
             ImportError: If the stagehand package is not installed
             ValueError: If no API key is provided via config or environment
         """
         super().__init__(**kwargs)
-        
+
         if not STAGEHAND_AVAILABLE:
-            raise ImportError(
-                "The 'stagehand' package is required to use this tool. "
-                "Please install it with: pip install stagehand"
-            )
-        
+            import click
+
+            if click.confirm(
+                "You are missing the 'stagehand-sdk' package. Would you like to install it? (y/N)"
+            ):
+                import subprocess
+
+                subprocess.run(["uv", "add", "stagehand-sdk"], check=True)
+
         # Use config if provided, otherwise try environment variable
         if config is not None:
             self.config = config
@@ -168,24 +188,22 @@ class StagehandTool(BaseTool):
                     "Either provide config with api_key or set OPENAI_API_KEY environment variable"
                 )
             self.config = StagehandToolConfig(
-                api_key=api_key,
-                timeout=30,
-                retry_attempts=3
+                api_key=api_key, timeout=30, retry_attempts=3
             )
-    
+
     @lru_cache(maxsize=100)
     def _cached_run(self, api_method: str, instruction: str) -> Any:
         """Execute a cached Stagehand command.
-        
+
         This method is cached to improve performance for repeated operations.
-        
+
         Args:
             api_method: The Stagehand API to use ('act', 'extract', or 'observe')
             instruction: An atomic instruction for Stagehand to execute
-        
+
         Returns:
             The raw result from the Stagehand API call
-        
+
         Raises:
             ValueError: If an invalid api_method is provided
             Exception: If the Stagehand API call fails
         """
         logger.debug(
             "Cache operation - Method: %s, Instruction length: %d",
             api_method,
-            len(instruction)
+            len(instruction),
         )
-        
+
         # Initialize Stagehand with configuration
         logger.info(
             "Initializing Stagehand (timeout=%ds, retries=%d)",
             self.config.timeout,
-            self.config.retry_attempts
+            self.config.retry_attempts,
         )
         st = stagehand.Stagehand(
             api_key=self.config.api_key,
             timeout=self.config.timeout,
-            retry_attempts=self.config.retry_attempts
+            retry_attempts=self.config.retry_attempts,
         )
-        
+
         # Call the appropriate Stagehand API based on the method
-        logger.info("Executing %s operation with instruction: %s", api_method, instruction[:100])
+        logger.info(
+            "Executing %s operation with instruction: %s", api_method, instruction[:100]
+        )
         try:
             if api_method == "act":
                 result = st.act(instruction)
             elif api_method == "extract":
                 result = st.extract(instruction)
             elif api_method == "observe":
                 result = st.observe(instruction)
             else:
                 raise ValueError(f"Unknown api_method: {api_method}")
-            
-            
+
             logger.info("Successfully executed %s operation", api_method)
             return result
-            
+
         except Exception as e:
             logger.warning(
                 "Operation failed (method=%s, error=%s), will be retried on next attempt",
                 api_method,
-                str(e)
+                str(e),
             )
             raise
 
     def _run(self, api_method: str, instruction: str, **kwargs: Any) -> StagehandResult:
         """Execute a Stagehand command using the specified API method.
-        
+
         Args:
             api_method: The Stagehand API to use ('act', 'extract', or 'observe')
             instruction: An atomic instruction for Stagehand to execute
             **kwargs: Additional keyword arguments passed to the Stagehand API
-        
-        Returns: 
+
+        Returns:
             StagehandResult containing the operation result and status
         """
         try:
             logger.debug(
                 "Starting operation - Method: %s, Instruction length: %d, Args: %s",
                 api_method,
                 len(instruction),
-                kwargs
+                kwargs,
             )
-            
+
             # Use cached execution
             result = self._cached_run(api_method, instruction)
             logger.info("Operation completed successfully")
             return StagehandResult(success=True, data=result)
-            
+
         except stagehand.AuthenticationError as e:
             logger.error(
-                "Authentication failed - Method: %s, Error: %s",
-                api_method,
-                str(e)
+                "Authentication failed - Method: %s, Error: %s", api_method, str(e)
             )
             return StagehandResult(
-                success=False,
-                data={},
-                error=f"Authentication failed: {str(e)}"
+                success=False, data={}, error=f"Authentication failed: {str(e)}"
             )
         except stagehand.APIError as e:
-            logger.error(
-                "API error - Method: %s, Error: %s",
-                api_method,
-                str(e)
-            )
-            return StagehandResult(
-                success=False,
-                data={},
-                error=f"API error: {str(e)}"
-            )
+            logger.error("API error - Method: %s, Error: %s", api_method, str(e))
+            return StagehandResult(success=False, data={}, error=f"API error: {str(e)}")
         except stagehand.BrowserError as e:
-            logger.error(
-                "Browser error - Method: %s, Error: %s",
-                api_method,
-                str(e)
-            )
+            logger.error("Browser error - Method: %s, Error: %s", api_method, str(e))
             return StagehandResult(
-                success=False,
-                data={},
-                error=f"Browser error: {str(e)}"
+                success=False, data={}, error=f"Browser error: {str(e)}"
             )
         except Exception as e:
             logger.error(
                 "Unexpected error - Method: %s, Error type: %s, Message: %s",
                 api_method,
                 type(e).__name__,
-                str(e)
+                str(e),
             )
             return StagehandResult(
-                success=False,
-                data={},
-                error=f"Unexpected error: {str(e)}"
+                success=False, data={}, error=f"Unexpected error: {str(e)}"
             )
diff --git a/src/crewai_tools/tools/weaviate_tool/vector_search.py b/src/crewai_tools/tools/weaviate_tool/vector_search.py
index 14e10d7c5..879a950f6 100644
--- a/src/crewai_tools/tools/weaviate_tool/vector_search.py
+++ b/src/crewai_tools/tools/weaviate_tool/vector_search.py
@@ -68,12 +68,25 @@ class WeaviateVectorSearchTool(BaseTool):
                     model="gpt-4o",
                 )
             )
+        else:
+            import click
+
+            if click.confirm(
+                "You are missing the 'weaviate-client' package. Would you like to install it? (y/N)"
+            ):
+                import subprocess
+
+                subprocess.run(["uv", "pip", "install", "weaviate-client"], check=True)
+
+            else:
+                raise ImportError(
+                    "`weaviate-client` package not found, please run `uv add weaviate-client`"
+                )
 
     def _run(self, query: str) -> str:
         if not WEAVIATE_AVAILABLE:
             raise ImportError(
-                "The 'weaviate-client' package is required to use the WeaviateVectorSearchTool. "
-                "Please install it with: uv add weaviate-client"
+                "`weaviate-client` package not found, please run `uv add weaviate-client`"
             )
 
         if not self.weaviate_cluster_url or not self.weaviate_api_key:
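
Since every tool now shares this prompt-or-raise fallback, the non-interactive path can be exercised without hitting the network. A rough pytest-style sketch — the test name, the api_key keyword, and the assumption that the Scrapfly constructor accepts it are illustrative, not part of this diff:

import sys

import pytest

from crewai_tools.tools.scrapfly_scrape_website_tool.scrapfly_scrape_website_tool import (
    ScrapflyScrapeWebsiteTool,
)


def test_declining_install_raises_with_uv_hint(monkeypatch):
    # None in sys.modules makes `import scrapfly` raise ImportError, simulating a missing package.
    monkeypatch.setitem(sys.modules, "scrapfly", None)
    # Decline the interactive prompt instead of blocking on stdin.
    monkeypatch.setattr("click.confirm", lambda *args, **kwargs: False)
    with pytest.raises(ImportError, match="uv add scrapfly-sdk"):
        ScrapflyScrapeWebsiteTool(api_key="test-key")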