dealing with agent tools
@@ -0,0 +1,16 @@
+/*
+  Warnings:
+
+  - The values [PUBMED,PYTHON_REPL,STACK_EXCHANGE] on the enum `AgentTool` will be removed. If these variants are still used in the database, this will fail.
+
+*/
+-- AlterEnum
+BEGIN;
+CREATE TYPE "AgentTool_new" AS ENUM ('DUCK_DUCK_GO_SEARCH', 'SEMANTIC_SCHOLER', 'WIKIDATA', 'WIKIPEDIA', 'YAHOO_FINANCE', 'YUOUTUBE_SEARCH');
+ALTER TABLE "Agent" ALTER COLUMN "tools" DROP DEFAULT;
+ALTER TABLE "Agent" ALTER COLUMN "tools" TYPE "AgentTool_new"[] USING ("tools"::text::"AgentTool_new"[]);
+ALTER TYPE "AgentTool" RENAME TO "AgentTool_old";
+ALTER TYPE "AgentTool_new" RENAME TO "AgentTool";
+DROP TYPE "AgentTool_old";
+ALTER TABLE "Agent" ALTER COLUMN "tools" SET DEFAULT ARRAY[]::"AgentTool"[];
+COMMIT;
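The warning in this migration is load-bearing: the `USING ("tools"::text::"AgentTool_new"[])` cast aborts the transaction if any `Agent` row still holds one of the dropped variants. A minimal pre-flight check, sketched in Python with psycopg2 (psycopg2, the DATABASE_URL variable, and the count query are illustrative assumptions, not part of this commit):

# Hedged sketch: count Agent rows that still reference the tool variants this
# migration drops. psycopg2 and DATABASE_URL are assumptions, not part of the commit.
import os

import psycopg2

REMOVED = ["PUBMED", "PYTHON_REPL", "STACK_EXCHANGE"]

conn = psycopg2.connect(os.environ["DATABASE_URL"])
with conn, conn.cursor() as cur:
    # `&&` is the Postgres array-overlap operator; the cast targets the *old*
    # "AgentTool" type, so run this before applying the migration, not after.
    cur.execute(
        'SELECT count(*) FROM "Agent" WHERE "tools" && %s::"AgentTool"[]',
        (REMOVED,),
    )
    (still_used,) = cur.fetchone()

if still_used:
    print(f"{still_used} agent(s) still use removed tools; clean them up before migrating.")
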
@@ -12,10 +12,7 @@ datasource db {
 
 enum AgentTool {
   DUCK_DUCK_GO_SEARCH
-  PUBMED
-  PYTHON_REPL
   SEMANTIC_SCHOLER
-  STACK_EXCHANGE
   WIKIDATA
   WIKIPEDIA
   YAHOO_FINANCE
@@ -3,6 +3,13 @@ import os
 from textwrap import dedent
 from crewai import Agent, Task, Crew, Process
 from langchain_google_genai import ChatGoogleGenerativeAI
+from langchain_community.tools import DuckDuckGoSearchRun
+from langchain_community.tools.semanticscholar.tool import SemanticScholarQueryRun
+from langchain_community.tools.wikidata.tool import WikidataAPIWrapper, WikidataQueryRun
+from langchain_community.tools import WikipediaQueryRun
+from langchain_community.utilities import WikipediaAPIWrapper
+from langchain_community.tools.yahoo_finance_news import YahooFinanceNewsTool
+from langchain_community.tools import YouTubeSearchTool
 from dotenv import load_dotenv
 
 load_dotenv()
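The newly imported wrappers are ordinary LangChain tools, so each can be exercised on its own before being handed to an agent. A quick smoke test (the queries and printing are illustrative only, not part of this commit):

# Hedged sketch: run two of the newly imported LangChain tools standalone.
from langchain_community.tools import DuckDuckGoSearchRun, YouTubeSearchTool

search = DuckDuckGoSearchRun()
print(search.run("crewAI multi agent framework")[:300])  # plain-text search snippets

youtube = YouTubeSearchTool()
print(youtube.run("crewAI tutorial"))  # string of YouTube links for the query
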
@@ -12,46 +19,61 @@ process_type = {
     "HIERARTICAL": Process.hierarchical,
 }
 
+tool_dict = {
+    "DUCK_DUCK_GO_SEARCH": DuckDuckGoSearchRun(),
+    "SEMANTIC_SCHOLER": SemanticScholarQueryRun(),
+    "WIKIDATA": WikidataQueryRun(api_wrapper=WikidataAPIWrapper()),
+    "WIKIPEDIA": WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper()),
+    "YAHOO_FINANCE": YahooFinanceNewsTool(),
+    "YUOUTUBE_SEARCH": YouTubeSearchTool(),
+}
+
 
 def run_mission(mission):
-    llm = ChatGoogleGenerativeAI(
-        model="gemini-pro",
-        verbose=True,
-        temperature=0.5,
-        google_api_key=os.getenv("GEMINI_API_KEY"),
-    )
-
-    agents = [
-        Agent(
-            role=agent["role"],
-            goal=agent["goal"],
-            backstory=agent["backstory"],
-            allow_delegation=agent["allowDelegation"],
-            verbose=agent["verbose"],
-            llm=llm,
-        )
-        for agent in mission["crew"]
-    ]
-
-    tasks = [
-        Task(
-            description=dedent(task["description"]),
-            agent=(
-                [agent for agent in agents if agent.role == task["agent"]["role"]][0]
-                if task["agent"]
-                else None
-            ),
-        )
-        for task in mission["tasks"]
-    ]
-
-    crew = Crew(
-        agents=agents,
-        tasks=tasks,
-        verbose=mission["verbose"],
-        process=process_type[mission["process"]],
-        manager_llm=llm,
-    )
-
-    result = crew.kickoff()
-    return result
+    try:
+        llm = ChatGoogleGenerativeAI(
+            model="gemini-pro",
+            verbose=True,
+            temperature=0.5,
+            google_api_key=os.getenv("GEMINI_API_KEY"),
+        )
+
+        agents = [
+            Agent(
+                role=agent["role"],
+                goal=agent["goal"],
+                backstory=agent["backstory"],
+                allow_delegation=agent["allowDelegation"],
+                verbose=agent["verbose"],
+                tools=[tool_dict[tool] for tool in agent["tools"]],
+                llm=llm,
+            )
+            for agent in mission["crew"]
+        ]
+
+        tasks = [
+            Task(
+                description=dedent(task["description"]),
+                agent=(
+                    [agent for agent in agents if agent.role == task["agent"]["role"]][
+                        0
+                    ]
+                    if task["agent"]
+                    else None
+                ),
+            )
+            for task in mission["tasks"]
+        ]
+
+        crew = Crew(
+            agents=agents,
+            tasks=tasks,
+            verbose=mission["verbose"],
+            process=process_type[mission["process"]],
+            manager_llm=llm,
+        )
+
+        result = crew.kickoff()
+        return result
+    except Exception as e:
+        print(e)
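Read off the rewritten function, the mission payload needs `verbose`, `process`, a `crew` list (whose entries now also carry a `tools` list of enum names resolvable through `tool_dict`), and a `tasks` list. A minimal illustrative call, with invented roles and task text:

# Hedged sketch: the smallest payload shape run_mission reads; all values are
# invented for illustration and this assumes it runs in the same module as run_mission.
mission = {
    "verbose": True,
    "process": "HIERARTICAL",  # the only process_type key visible in this diff
    "crew": [
        {
            "role": "Researcher",
            "goal": "Collect background material on a topic",
            "backstory": "A meticulous analyst.",
            "allowDelegation": False,
            "verbose": True,
            "tools": ["DUCK_DUCK_GO_SEARCH", "WIKIPEDIA"],
        }
    ],
    "tasks": [
        {
            "description": "Summarise recent news about multi-agent frameworks.",
            "agent": {"role": "Researcher"},
        }
    ],
}

print(run_mission(mission))
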
@@ -2,18 +2,24 @@ aiohttp==3.9.3
aiosignal==1.3.1
annotated-types==0.6.0
anyio==4.3.0
appdirs==1.4.4
attrs==23.2.0
backoff==2.2.1
beautifulsoup4==4.12.3
cachetools==5.3.2
certifi==2024.2.2
cffi==1.16.0
charset-normalizer==3.3.2
click==8.1.7
colorama==0.4.6
crewai==0.11.2
curl_cffi==0.6.0b9
dataclasses-json==0.6.4
Deprecated==1.2.14
distro==1.9.0
docstring-parser==0.15
duckduckgo_search==4.4.3
frozendict==2.4.0
frozenlist==1.4.1
google-ai-generativelanguage==0.4.0
google-api-core==2.17.1
@@ -24,6 +30,7 @@ greenlet==3.0.3
grpcio==1.60.1
grpcio-status==1.60.1
h11==0.14.0
html5lib==1.1
httpcore==1.0.3
httpx==0.26.0
idna==3.6
@@ -34,14 +41,19 @@ jsonpointer==2.4
langchain==0.1.8
langchain-community==0.0.21
langchain-core==0.1.24
langchain-experimental==0.0.52
langchain-google-genai==0.0.9
langchain-openai==0.0.5
langsmith==0.1.3
lxml==5.1.0
markdown-it-py==3.0.0
marshmallow==3.20.2
mdurl==0.1.2
mediawikiapi==1.2
multidict==6.0.5
multitasking==0.0.11
mypy-extensions==1.0.0
nest-asyncio==1.6.0
numpy==1.26.4
openai==1.12.0
opentelemetry-api==1.22.0
@@ -51,27 +63,44 @@ opentelemetry-proto==1.22.0
opentelemetry-sdk==1.22.0
opentelemetry-semantic-conventions==0.43b0
packaging==23.2
pandas==2.2.0
peewee==3.17.1
proto-plus==1.23.0
protobuf==4.25.3
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycparser==2.21
pydantic==2.6.1
pydantic_core==2.16.2
Pygments==2.17.2
python-dateutil==2.8.2
python-dotenv==1.0.1
pytz==2024.1
PyYAML==6.0.1
regex==2023.12.25
requests==2.31.0
rich==13.7.0
rsa==4.9
semanticscholar==0.7.0
six==1.16.0
sniffio==1.3.0
soupsieve==2.5
SQLAlchemy==2.0.27
StackAPI==0.3.0
tenacity==8.2.3
tiktoken==0.5.2
tqdm==4.66.2
typer==0.9.0
typing-inspect==0.9.0
typing_extensions==4.9.0
tzdata==2024.1
urllib3==2.2.1
webencodings==0.5.1
wikibase-rest-api-client==0.1.3
wikipedia==1.4.0
wrapt==1.16.0
xmltodict==0.13.0
yarl==1.9.4
yfinance==0.2.36
youtube-search==2.1.2
zipp==3.17.0
@@ -1,10 +1,7 @@
 const typeDefs = `#graphql
   enum AgentTool {
     DUCK_DUCK_GO_SEARCH
-    PUBMED
-    PYTHON_REPL
     SEMANTIC_SCHOLER
-    STACK_EXCHANGE
     WIKIDATA
     WIKIPEDIA
     YAHOO_FINANCE
@@ -10,7 +10,7 @@ export const agents: Array<Agent> = [
       Your expertise in programming in python. and do your best to
       produce perfect code
     `,
-    tools: ["DUCK_DUCK_GO_SEARCH", "PYTHON_REPL", "STACK_EXCHANGE"],
+    tools: ["DUCK_DUCK_GO_SEARCH"],
     allowDelegation: false,
     verbose: true,
     image:
@@ -25,7 +25,7 @@ export const agents: Array<Agent> = [
       You check for missing imports, variable declarations, mismatched brackets and syntax errors.
       You also check for security vulnerabilities, and logic errors.
     `,
-    tools: ["DUCK_DUCK_GO_SEARCH", "PYTHON_REPL", "STACK_EXCHANGE"],
+    tools: ["DUCK_DUCK_GO_SEARCH"],
     allowDelegation: false,
     verbose: true,
     image:
@@ -48,10 +48,7 @@ export const agents: Array<Agent> = [
 
 export const tools = [
   { text: "DUCK_DUCK_GO_SEARCH", value: "DUCK_DUCK_GO_SEARCH" },
-  { text: "PUBMED", value: "PUBMED" },
-  { text: "PYTHON_REPL", value: "PYTHON_REPL" },
   { text: "SEMANTIC_SCHOLER", value: "SEMANTIC_SCHOLER" },
-  { text: "STACK_EXCHANGE", value: "STACK_EXCHANGE" },
   { text: "WIKIDATA", value: "WIKIDATA" },
   { text: "WIKIPEDIA", value: "WIKIPEDIA" },
   { text: "YAHOO_FINANCE", value: "YAHOO_FINANCE" },
@@ -1,9 +1,6 @@
 type Tool =
   | "DUCK_DUCK_GO_SEARCH"
-  | "PUBMED"
-  | "PYTHON_REPL"
   | "SEMANTIC_SCHOLER"
-  | "STACK_EXCHANGE"
   | "WIKIDATA"
   | "WIKIPEDIA"
   | "YAHOO_FINANCE"