mirror of https://github.com/crewAIInc/crewAI.git
update basetool dependencies to use root crewai repo
@@ -51,7 +51,7 @@ There are three ways to create tools for crewAI agents:
### Subclassing `BaseTool`

```python
-from crewai_tools import BaseTool
+from crewai.tools import BaseTool

class MyCustomTool(BaseTool):
    name: str = "Name of my tool"
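# A minimal, hypothetical sketch of the completed subclass from the hunk above,
# using the new `crewai.tools` import. The description text and the `_run` body
# are illustrative assumptions, not part of this commit.
from crewai.tools import BaseTool


class MyCustomTool(BaseTool):
    name: str = "Name of my tool"
    description: str = "What this tool does; the agent relies on this text to decide when to use it."

    def _run(self, question: str) -> str:
        # Replace with real tool logic; this stub simply echoes the input.
        return f"Processed: {question}"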
@@ -70,7 +70,7 @@ Define a new class inheriting from `BaseTool`, specifying `name`, `description`,
For a simpler approach, create a `Tool` object directly with the required attributes and a functional logic.

```python
-from crewai_tools import tool
+from crewai.tools import tool
@tool("Name of my tool")
def my_tool(question: str) -> str:
    """Clear description for what this tool is useful for, your agent will need this information to use it."""
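# A hypothetical, complete version of the decorator example above, assuming the `tool`
# decorator is exposed from `crewai.tools` after this change; the return value is an
# illustrative assumption. The docstring becomes the tool's description and the type
# annotations drive the generated argument schema.
from crewai.tools import tool


@tool("Name of my tool")
def my_tool(question: str) -> str:
    """Clear description for what this tool is useful for, your agent will need this information to use it."""
    return f"Answer to: {question}"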
@@ -43,4 +43,3 @@ from .tools import (
    YoutubeChannelSearchTool,
    YoutubeVideoSearchTool,
)
-from .tools.base_tool import BaseTool, Tool, tool
@@ -1,59 +0,0 @@
from typing import Any, Callable

from pydantic import BaseModel as PydanticBaseModel

from crewai.tools.base_tool import BaseTool
from crewai.tools.structured_tool import CrewStructuredTool


class Tool(BaseTool):
    func: Callable
    """The function that will be executed when the tool is called."""

    def _run(self, *args: Any, **kwargs: Any) -> Any:
        return self.func(*args, **kwargs)


def to_langchain(
    tools: list[BaseTool | CrewStructuredTool],
) -> list[CrewStructuredTool]:
    return [t.to_structured_tool() if isinstance(t, BaseTool) else t for t in tools]


def tool(*args):
    """
    Decorator to create a tool from a function.
    """

    def _make_with_name(tool_name: str) -> Callable:
        def _make_tool(f: Callable) -> BaseTool:
            if f.__doc__ is None:
                raise ValueError("Function must have a docstring")
            if f.__annotations__ is None:
                raise ValueError("Function must have type annotations")

            class_name = "".join(tool_name.split()).title()
            args_schema = type(
                class_name,
                (PydanticBaseModel,),
                {
                    "__annotations__": {
                        k: v for k, v in f.__annotations__.items() if k != "return"
                    },
                },
            )

            return Tool(
                name=tool_name,
                description=f.__doc__,
                func=f,
                args_schema=args_schema,
            )

        return _make_tool

    if len(args) == 1 and callable(args[0]):
        return _make_with_name(args[0].__name__)(args[0])
    if len(args) == 1 and isinstance(args[0], str):
        return _make_with_name(args[0])
    raise ValueError("Invalid arguments")
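The deleted module above is where the `tool` decorator used to live in `crewai_tools`: it turns a plain function into a `Tool`, using the docstring as the description and generating a pydantic `args_schema` from the type annotations (excluding the return annotation). A small hypothetical usage sketch, assuming the decorator is now imported from the root `crewai` package; the `multiply` function and the printed values are illustrative:

```python
from crewai.tools import tool  # assumption: the decorator now ships with the root crewai package


@tool("Multiplier")
def multiply(first_number: int, second_number: int) -> str:
    """Multiplies two integers and returns the product as text."""
    return str(first_number * second_number)


# The decorated name is a Tool instance rather than a plain function: its name comes
# from the decorator argument, its description from the docstring, and its args schema
# from the annotations (minus "return").
print(multiply.name)                  # "Multiplier"
print(multiply.args_schema.__name__)  # "Multiplier" - the generated pydantic model
print(multiply.run(first_number=6, second_number=7))  # "42"
```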
@@ -4,10 +4,9 @@ import time
from typing import Any, ClassVar, Optional, Type

import requests
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool


def _save_results_to_file(content: str) -> None:
    """Saves the search results to a file."""
@@ -1,9 +1,8 @@
from typing import Any, Optional, Type

+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool


class BrowserbaseLoadToolSchema(BaseModel):
    url: str = Field(description="Website URL")
@@ -3,10 +3,9 @@ import os
from typing import List, Optional, Type

import docker
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool


class CodeInterpreterSchema(BaseModel):
    """Input for CodeInterpreterTool."""
@@ -5,8 +5,7 @@ Composio tools wrapper.
import typing as t

import typing_extensions as te

-from crewai_tools.tools.base_tool import BaseTool
+from crewai.tools import BaseTool


class ComposioTool(BaseTool):
@@ -1,11 +1,10 @@
import json
from typing import Type

+from crewai.tools import BaseTool
from openai import OpenAI
from pydantic import BaseModel

-from crewai_tools.tools.base_tool import BaseTool


class ImagePromptSchema(BaseModel):
    """Input for Dall-E Tool."""
@@ -1,10 +1,9 @@
import os
from typing import Any, Optional, Type

+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from ..base_tool import BaseTool


class FixedDirectoryReadToolSchema(BaseModel):
    """Input for DirectoryReadTool."""
@@ -1,10 +1,8 @@
import os
from typing import Type

+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool


class EXABaseToolToolSchema(BaseModel):
    """Input for EXABaseTool."""
@@ -1,28 +1,30 @@
import os
-import requests
from typing import Any

+import requests
+
from .exa_base_tool import EXABaseTool


class EXASearchTool(EXABaseTool):
-    def _run(
-        self,
-        **kwargs: Any,
-    ) -> Any:
-        search_query = kwargs.get('search_query')
-        if search_query is None:
-            search_query = kwargs.get('query')
+    def _run(
+        self,
+        **kwargs: Any,
+    ) -> Any:
+        search_query = kwargs.get("search_query")
+        if search_query is None:
+            search_query = kwargs.get("query")

-        payload = {
-            "query": search_query,
-            "type": "magic",
-        }
+        payload = {
+            "query": search_query,
+            "type": "magic",
+        }

-        headers = self.headers.copy()
-        headers["x-api-key"] = os.environ['EXA_API_KEY']
+        headers = self.headers.copy()
+        headers["x-api-key"] = os.environ["EXA_API_KEY"]

-        response = requests.post(self.search_url, json=payload, headers=headers)
-        results = response.json()
-        if 'results' in results:
-            results = super()._parse_results(results['results'])
-        return results
+        response = requests.post(self.search_url, json=payload, headers=headers)
+        results = response.json()
+        if "results" in results:
+            results = super()._parse_results(results["results"])
+        return results
@@ -1,9 +1,8 @@
from typing import Any, Optional, Type

+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from ..base_tool import BaseTool


class FixedFileReadToolSchema(BaseModel):
    """Input for FileReadTool."""
@@ -1,16 +1,18 @@
import os
-from typing import Any, Optional, Type
-from pydantic import BaseModel
-from ..base_tool import BaseTool
from distutils.util import strtobool
+from typing import Any, Optional, Type
+
+from crewai.tools import BaseTool
+from pydantic import BaseModel


class FileWriterToolInput(BaseModel):
    filename: str
    directory: Optional[str] = "./"
    overwrite: str = "False"
    content: str


class FileWriterTool(BaseTool):
    name: str = "File Writer Tool"
    description: str = (
@@ -26,7 +28,7 @@ class FileWriterTool(BaseTool):

        # Construct the full path
        filepath = os.path.join(kwargs.get("directory") or "", kwargs["filename"])

        # Convert overwrite to boolean
        kwargs["overwrite"] = bool(strtobool(kwargs["overwrite"]))

@@ -46,4 +48,4 @@ class FileWriterTool(BaseTool):
        except KeyError as e:
            return f"An error occurred while accessing key: {str(e)}"
        except Exception as e:
-            return f"An error occurred while writing to the file: {str(e)}"
+            return f"An error occurred while writing to the file: {str(e)}"
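For context on the `strtobool` handling above, a hypothetical invocation of the file writer; the exported name, file name, directory, and content are assumptions for illustration:

```python
from crewai_tools import FileWriterTool  # assumption: exported from the crewai_tools package

writer = FileWriterTool()

# `overwrite` is passed as a string flag ("True"/"False"); _run converts it with
# bool(strtobool(...)) before deciding whether an existing file may be replaced.
result = writer.run(
    filename="report.txt",
    directory="./output",
    overwrite="True",
    content="Quarterly summary goes here.",
)
print(result)
```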
@@ -1,8 +1,7 @@
from typing import TYPE_CHECKING, Any, Dict, Optional, Type

-from pydantic import BaseModel, Field, ConfigDict
-
-from crewai_tools.tools.base_tool import BaseTool
+from crewai.tools import BaseTool
+from pydantic import BaseModel, ConfigDict, Field

# Type checking import
if TYPE_CHECKING:
@@ -21,9 +20,7 @@ class FirecrawlCrawlWebsiteToolSchema(BaseModel):

class FirecrawlCrawlWebsiteTool(BaseTool):
    model_config = ConfigDict(
-        arbitrary_types_allowed=True,
-        validate_assignment=True,
-        frozen=False
+        arbitrary_types_allowed=True, validate_assignment=True, frozen=False
    )
    name: str = "Firecrawl web crawl tool"
    description: str = "Crawl webpages using Firecrawl and return the contents"
@@ -59,10 +56,11 @@ class FirecrawlCrawlWebsiteTool(BaseTool):

try:
    from firecrawl import FirecrawlApp

+    # Must rebuild model after class is defined
+    FirecrawlCrawlWebsiteTool.model_rebuild()
except ImportError:
-    """
-    When this tool is not used, then exception can be ignored.
-    """
-    pass
+    pass
@@ -1,8 +1,7 @@
from typing import TYPE_CHECKING, Any, Dict, Optional, Type

-from pydantic import BaseModel, Field, ConfigDict
-
-from crewai_tools.tools.base_tool import BaseTool
+from crewai.tools import BaseTool
+from pydantic import BaseModel, ConfigDict, Field

# Type checking import
if TYPE_CHECKING:
@@ -25,9 +24,7 @@ class FirecrawlScrapeWebsiteToolSchema(BaseModel):

class FirecrawlScrapeWebsiteTool(BaseTool):
    model_config = ConfigDict(
-        arbitrary_types_allowed=True,
-        validate_assignment=True,
-        frozen=False
+        arbitrary_types_allowed=True, validate_assignment=True, frozen=False
    )
    name: str = "Firecrawl web scrape tool"
    description: str = "Scrape webpages url using Firecrawl and return the contents"
@@ -70,6 +67,7 @@ class FirecrawlScrapeWebsiteTool(BaseTool):

try:
    from firecrawl import FirecrawlApp

+    # Must rebuild model after class is defined
+    FirecrawlScrapeWebsiteTool.model_rebuild()
except ImportError:
@@ -1,9 +1,8 @@
from typing import TYPE_CHECKING, Any, Dict, Optional, Type

+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool

# Type checking import
if TYPE_CHECKING:
    from firecrawl import FirecrawlApp
@@ -1,17 +1,21 @@
+from typing import Optional, Type
+
import requests
-from typing import Type, Optional
-from ..base_tool import BaseTool
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field


class JinaScrapeWebsiteToolInput(BaseModel):
    """Input schema for JinaScrapeWebsiteTool."""

    website_url: str = Field(..., description="Mandatory website url to read the file")


class JinaScrapeWebsiteTool(BaseTool):
    name: str = "JinaScrapeWebsiteTool"
-    description: str = "A tool that can be used to read a website content using Jina.ai reader and return markdown content."
+    description: str = (
+        "A tool that can be used to read a website content using Jina.ai reader and return markdown content."
+    )
    args_schema: Type[BaseModel] = JinaScrapeWebsiteToolInput
    website_url: Optional[str] = None
    api_key: Optional[str] = None
@@ -22,31 +26,29 @@ class JinaScrapeWebsiteTool(BaseTool):
        website_url: Optional[str] = None,
        api_key: Optional[str] = None,
        custom_headers: Optional[dict] = None,
-        **kwargs
+        **kwargs,
    ):
        super().__init__(**kwargs)
        if website_url is not None:
            self.website_url = website_url
-            self.description = (
-                f"A tool that can be used to read {website_url}'s content and return markdown content."
-            )
+            self.description = f"A tool that can be used to read {website_url}'s content and return markdown content."
            self._generate_description()

        if custom_headers is not None:
            self.headers = custom_headers

        if api_key is not None:
            self.headers["Authorization"] = f"Bearer {api_key}"

    def _run(self, website_url: Optional[str] = None) -> str:
        url = website_url or self.website_url
        if not url:
-            raise ValueError("Website URL must be provided either during initialization or execution")
+            raise ValueError(
+                "Website URL must be provided either during initialization or execution"
+            )

        response = requests.get(
-            f"https://r.jina.ai/{url}",
-            headers=self.headers,
-            timeout=15
+            f"https://r.jina.ai/{url}", headers=self.headers, timeout=15
        )
        response.raise_for_status()
        return response.text
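A hypothetical usage sketch of the reformatted tool above: it simply issues a GET request to `https://r.jina.ai/<url>` and returns the markdown produced by the Jina reader; the exported name, example URL, and API key are placeholders:

```python
from crewai_tools import JinaScrapeWebsiteTool  # assumption: exported from the crewai_tools package

# The API key is optional; when provided it is sent as an "Authorization: Bearer" header.
scraper = JinaScrapeWebsiteTool(api_key="YOUR_JINA_API_KEY")
markdown = scraper.run(website_url="https://example.com")
print(markdown[:200])
```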
@@ -1,9 +1,8 @@
from typing import Any, Optional, Type, cast

+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool


class LlamaIndexTool(BaseTool):
    """Tool to wrap LlamaIndex tools/query engines."""
@@ -2,7 +2,7 @@

from typing import Any, Optional

-from crewai_tools.tools.base_tool import BaseTool
+from crewai.tools import BaseTool


class MultiOnTool(BaseTool):
@@ -1,11 +1,10 @@
-from typing import Any, Union
+from typing import Any, Type, Union

-from ..base_tool import BaseTool
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

-from typing import Type, Any

class NL2SQLToolInput(BaseModel):
    sql_query: str = Field(
@@ -13,6 +12,7 @@ class NL2SQLToolInput(BaseModel):
        description="The SQL query to execute.",
    )


class NL2SQLTool(BaseTool):
    name: str = "NL2SQLTool"
    description: str = "Converts natural language to SQL queries and executes them."
@@ -1,10 +1,9 @@
from abc import ABC, abstractmethod
from typing import Any

+from crewai.tools import BaseTool
from pydantic import BaseModel, Field, model_validator

-from crewai_tools.tools.base_tool import BaseTool


class Adapter(BaseModel, ABC):
    class Config:
@@ -3,10 +3,9 @@ from typing import Any, Optional, Type

import requests
from bs4 import BeautifulSoup
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from ..base_tool import BaseTool


class FixedScrapeElementFromWebsiteToolSchema(BaseModel):
    """Input for ScrapeElementFromWebsiteTool."""
@@ -4,10 +4,9 @@ from typing import Any, Optional, Type

import requests
from bs4 import BeautifulSoup
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from ..base_tool import BaseTool


class FixedScrapeWebsiteToolSchema(BaseModel):
    """Input for ScrapeWebsiteTool."""
@@ -69,6 +68,6 @@ class ScrapeWebsiteTool(BaseTool):
        parsed = BeautifulSoup(page.text, "html.parser")

        text = parsed.get_text(" ")
-        text = re.sub('[ \t]+', ' ', text)
-        text = re.sub('\\s+\n\\s+', '\n', text)
+        text = re.sub("[ \t]+", " ", text)
+        text = re.sub("\\s+\n\\s+", "\n", text)
        return text
@@ -1,10 +1,9 @@
import logging
from typing import Any, Dict, Literal, Optional, Type

+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool

logger = logging.getLogger(__file__)
@@ -1,14 +1,12 @@
import time
from typing import Any, Optional, Type

from bs4 import BeautifulSoup
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By

-from ..base_tool import BaseTool


class FixedSeleniumScrapingToolSchema(BaseModel):
    """Input for SeleniumScrapingTool."""
@@ -4,10 +4,9 @@ import os
from typing import Any, Optional, Type

import requests
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool


def _save_results_to_file(content: str) -> None:
    """Saves the search results to a file."""
@@ -3,10 +3,9 @@ from typing import Any, Optional, Type
from urllib.parse import urlencode

import requests
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool


class SerplyNewsSearchToolSchema(BaseModel):
    """Input for Serply News Search."""
@@ -3,10 +3,9 @@ from typing import Any, Optional, Type
from urllib.parse import urlencode

import requests
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool


class SerplyScholarSearchToolSchema(BaseModel):
    """Input for Serply Scholar Search."""
@@ -3,10 +3,9 @@ from typing import Any, Optional, Type
from urllib.parse import urlencode

import requests
+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool


class SerplyWebSearchToolSchema(BaseModel):
    """Input for Serply Web Search."""
@@ -1,9 +1,8 @@
from typing import Any, Dict, Literal, Optional, Type

+from crewai.tools import BaseTool
from pydantic import BaseModel, Field

-from crewai_tools.tools.base_tool import BaseTool


class SpiderToolSchema(BaseModel):
    url: str = Field(description="Website URL")
@@ -2,11 +2,10 @@ import base64
from typing import Type

import requests
+from crewai.tools import BaseTool
from openai import OpenAI
from pydantic import BaseModel

-from crewai_tools.tools.base_tool import BaseTool


class ImagePromptSchema(BaseModel):
    """Input for Vision Tool."""