adding serply job search tool
@@ -26,5 +26,6 @@ from .tools import (
    SerplyWebSearchTool,
    SerplyNewsSearchTool,
    SerplyScholarSearchTool,
    SerplyWebpageToMarkdownTool
    SerplyWebpageToMarkdownTool,
    SerplyJobSearchTool
)
@@ -24,4 +24,5 @@ from .youtube_video_search_tool.youtube_video_search_tool import YoutubeVideoSea
from .serply_api_tool.serply_web_search_tool import SerplyWebSearchTool
from .serply_api_tool.serply_news_search_tool import SerplyNewsSearchTool
from .serply_api_tool.serply_scholar_search_tool import SerplyScholarSearchTool
from .serply_api_tool.serply_web_to_markdown_tool import SerplyWebpageToMarkdownTool
from .serply_api_tool.serply_webpage_to_markdown_tool import SerplyWebpageToMarkdownTool
from .serply_api_tool.serply_job_search_tool import SerplyJobSearchTool
@@ -55,6 +55,17 @@ tool = SerplyScholarSearchTool()
tool = SerplyScholarSearchTool(proxy_location="GB")
```

## Job Search

The following example demonstrates how to initialize the tool and search for jobs in the USA:

```python
from crewai_tools import SerplyJobSearchTool

# Initialize the tool for job search capabilities
tool = SerplyJobSearchTool()
```
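For reference, a minimal sketch of invoking the tool directly, assuming the standard `run()` entry point that crewAI tools expose and the `search_query` argument defined by the tool's schema (shown in the new file below):

```python
from crewai_tools import SerplyJobSearchTool

tool = SerplyJobSearchTool()

# Returns a newline-formatted string of job postings
# (position, employer, location, link, highlights, remote/hybrid flags).
results = tool.run(search_query="machine learning engineer New York")
print(results)
```

Note that the constructor reads `SERPLY_API_KEY` from the environment, so the key must be exported before the tool is created.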

## Web Page To Markdown

The following example demonstrates how to initialize the tool, fetch a web page, and convert it to markdown:
@@ -64,7 +75,7 @@ from crewai_tools import SerplyWebpageToMarkdownTool
# Initialize the tool for internet searching capabilities
tool = SerplyWebpageToMarkdownTool()

# change country news (DE - Germany)
# change the country to make the request from (DE - Germany)
tool = SerplyWebpageToMarkdownTool(proxy_location="DE")
```
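A short follow-up sketch of actually fetching a page: this assumes the tool accepts a `url` argument through its schema (the exact field name is an assumption, not confirmed by this diff) and the standard `run()` entry point:

```python
from crewai_tools import SerplyWebpageToMarkdownTool

tool = SerplyWebpageToMarkdownTool(proxy_location="DE")

# "url" is an assumed schema field; the call returns the page content as markdown text.
markdown = tool.run(url="https://example.com")
print(markdown)
```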
@@ -0,0 +1,75 @@
import os
import requests
from urllib.parse import urlencode
from typing import Type, Any, Optional
from pydantic.v1 import BaseModel, Field
from crewai_tools.tools.rag.rag_tool import RagTool


class SerplyJobSearchToolSchema(BaseModel):
    """Input for Serply Job Search."""
    search_query: str = Field(..., description="Mandatory search query you want to use to fetch job postings.")


class SerplyJobSearchTool(RagTool):
    name: str = "Job Search"
    description: str = "A tool to perform a job search in the US with a search_query."
    args_schema: Type[BaseModel] = SerplyJobSearchToolSchema
    request_url: str = "https://api.serply.io/v1/job/search/"
    proxy_location: Optional[str] = "US"
    """
    proxy_location: (str): Country to route the request from, for country-specific results.
        - Currently only supports US
    """
    headers: Optional[dict] = {}

    def __init__(
        self,
        **kwargs
    ):
        super().__init__(**kwargs)
        # API key is read from the environment; requests carry the configured proxy location.
        self.headers = {
            "X-API-KEY": os.environ["SERPLY_API_KEY"],
            "User-Agent": "crew-tools",
            "X-Proxy-Location": self.proxy_location
        }

    def _run(
        self,
        **kwargs: Any,
    ) -> Any:
        query_payload = {}

        # accept either "query" or "search_query" as the keyword argument
        if "query" in kwargs:
            query_payload["q"] = kwargs["query"]
        elif "search_query" in kwargs:
            query_payload["q"] = kwargs["search_query"]

        # build the url
        url = f"{self.request_url}{urlencode(query_payload)}"

        response = requests.request("GET", url, headers=self.headers)

        jobs = response.json().get("jobs", "")

        if not jobs:
            return ""

        # format each job posting as a small block of labelled lines
        string = []
        for job in jobs:
            try:
                string.append('\n'.join([
                    f"Position: {job['position']}",
                    f"Employer: {job['employer']}",
                    f"Location: {job['location']}",
                    f"Link: {job['link']}",
                    f"""Highlights: {', '.join([h for h in job['highlights']])}""",
                    f"Is Remote: {job['is_remote']}",
                    f"Is Hybrid: {job['is_hybrid']}",
                    "---"
                ]))
            except KeyError:
                continue

        content = '\n'.join(string)
        return f"\nSearch results: {content}\n"
@@ -1,6 +1,5 @@
import os
import requests
from urllib.parse import urlencode
from typing import Type, Any, Optional
from pydantic.v1 import BaseModel, Field
from crewai_tools.tools.rag.rag_tool import RagTool