Mirror of https://github.com/crewAIInc/crewAI.git (synced 2026-01-10 16:48:30 +00:00)

Commit: Improvements on default values and description
@@ -26,6 +26,11 @@ class FirecrawlCrawlWebsiteTool(BaseTool):
         self.firecrawl = FirecrawlApp(api_key=api_key)

     def _run(self, url: str, crawler_options: Optional[Dict[str, Any]] = None, page_options: Optional[Dict[str, Any]] = None):
+        if (crawler_options is None):
+            crawler_options = {}
+        if (page_options is None):
+            page_options = {}
+
         options = {
             "crawlerOptions": crawler_options,
             "pageOptions": page_options
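The hunk above gives FirecrawlCrawlWebsiteTool._run sensible defaults: when no crawler_options or page_options are passed, empty dicts are substituted so the payload sent to Firecrawl always contains both keys. A minimal standalone sketch of that pattern (the helper name build_crawl_options is hypothetical, not part of the tool):

from typing import Any, Dict, Optional

# None defaults are replaced with empty dicts so the payload is always well-formed.
def build_crawl_options(crawler_options: Optional[Dict[str, Any]] = None,
                        page_options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    if crawler_options is None:
        crawler_options = {}
    if page_options is None:
        page_options = {}
    return {"crawlerOptions": crawler_options, "pageOptions": page_options}

# Called with no arguments, both keys are still present:
assert build_crawl_options() == {"crawlerOptions": {}, "pageOptions": {}}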
@@ -6,7 +6,7 @@ class FirecrawlScrapeWebsiteToolSchema(BaseModel):
     url: str = Field(description="Website URL")
     page_options: Optional[Dict[str, Any]] = Field(default=None, description="Options for page scraping")
     extractor_options: Optional[Dict[str, Any]] = Field(default=None, description="Options for data extraction")
-    timeout: Optional[int] = Field(default=None, description="Timeout for the scraping operation")
+    timeout: Optional[int] = Field(default=None, description="Timeout in milliseconds for the scraping operation. The default value is 30000.")

 class FirecrawlScrapeWebsiteTool(BaseTool):
     name: str = "Firecrawl web scrape tool"
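Only the timeout description changes here; the field default stays None, and the 30000 ms figure it mentions is the run-time fallback applied in _run (next hunk). A small runnable sketch of the schema as it reads after this change, assuming pydantic provides BaseModel and Field as this file's class definitions suggest:

from typing import Any, Dict, Optional
from pydantic import BaseModel, Field

class FirecrawlScrapeWebsiteToolSchema(BaseModel):
    url: str = Field(description="Website URL")
    page_options: Optional[Dict[str, Any]] = Field(default=None, description="Options for page scraping")
    extractor_options: Optional[Dict[str, Any]] = Field(default=None, description="Options for data extraction")
    timeout: Optional[int] = Field(default=None, description="Timeout in milliseconds for the scraping operation. The default value is 30000.")

# The schema itself does not apply the 30000 default:
print(FirecrawlScrapeWebsiteToolSchema(url="https://example.com").timeout)  # None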
@@ -27,6 +27,13 @@ class FirecrawlScrapeWebsiteTool(BaseTool):
         self.firecrawl = FirecrawlApp(api_key=api_key)

     def _run(self, url: str, page_options: Optional[Dict[str, Any]] = None, extractor_options: Optional[Dict[str, Any]] = None, timeout: Optional[int] = None):
+        if page_options is None:
+            page_options = {}
+        if extractor_options is None:
+            extractor_options = {}
+        if timeout is None:
+            timeout = 30000
+
         options = {
             "pageOptions": page_options,
             "extractorOptions": extractor_options,
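Besides the same empty-dict substitution as the crawl tool, a missing timeout now falls back to 30000 ms, in line with the updated schema description. A tiny sketch of just the timeout fallback (whether timeout travels inside the options dict or as a separate argument to FirecrawlApp is not visible in this hunk, and the helper name is hypothetical):

from typing import Optional

# Hypothetical helper mirroring the timeout fallback added to _run.
def resolve_timeout(timeout: Optional[int] = None) -> int:
    return 30000 if timeout is None else timeout  # milliseconds

assert resolve_timeout() == 30000
assert resolve_timeout(5000) == 5000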
@@ -26,6 +26,11 @@ class FirecrawlSearchTool(BaseTool):
         self.firecrawl = FirecrawlApp(api_key=api_key)

     def _run(self, query: str, page_options: Optional[Dict[str, Any]] = None, result_options: Optional[Dict[str, Any]] = None):
+        if (page_options is None):
+            page_options = {}
+        if (result_options is None):
+            result_options = {}
+
         options = {
             "pageOptions": page_options,
             "resultOptions": result_options
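With these defaults in place a caller can omit the option dicts entirely. A hypothetical usage sketch: the FirecrawlSearchTool import path and api_key argument are inferred from the class and constructor seen in this diff, and run() delegating to _run follows the usual crewai_tools BaseTool convention (an assumption, since only _run appears here):

from crewai_tools import FirecrawlSearchTool

# Hypothetical invocation; requires a valid Firecrawl API key.
tool = FirecrawlSearchTool(api_key="fc-...")

# No page_options/result_options supplied: _run now substitutes empty dicts,
# so the payload becomes {"pageOptions": {}, "resultOptions": {}}.
result = tool.run(query="crewAI framework")
print(result)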