Mirror of https://github.com/crewAIInc/crewAI.git (synced 2026-01-07 23:28:30 +00:00)

Commit: cr
@@ -23,4 +23,5 @@ from .tools import (
     XMLSearchTool,
     YoutubeChannelSearchTool,
     YoutubeVideoSearchTool,
+    LlamaIndexTool
 )
@@ -21,3 +21,4 @@ from .website_search.website_search_tool import WebsiteSearchTool
 from .xml_search_tool.xml_search_tool import XMLSearchTool
 from .youtube_channel_search_tool.youtube_channel_search_tool import YoutubeChannelSearchTool
 from .youtube_video_search_tool.youtube_video_search_tool import YoutubeVideoSearchTool
+from .llamaindex_tool.llamaindex_tool import LlamaIndexTool
src/crewai_tools/tools/llamaindex_tool/README.md (new file, 53 lines)

@@ -0,0 +1,53 @@
# LlamaIndexTool Documentation

## Description

This tool is a general wrapper around LlamaIndex tools and query engines, letting you plug LlamaIndex RAG and agentic pipelines into CrewAI agents as tools.
## Installation

To incorporate this tool into your project, follow the installation instructions below:

```shell
pip install 'crewai[tools]'
```
## Example

The following examples demonstrate how to initialize the tool from an existing LlamaIndex tool or from a LlamaIndex query engine:

```python
from crewai_tools import LlamaIndexTool

# Initialize the tool from a LlamaIndex Tool

## Example 1: Initialize from FunctionTool
from llama_index.core.tools import FunctionTool

def your_python_function(query: str) -> str:
    # Replace this placeholder with your own logic.
    return f"Results for: {query}"

og_tool = FunctionTool.from_defaults(
    your_python_function, name="<name>", description="<description>"
)
tool = LlamaIndexTool.from_tool(og_tool)

## Example 2: Initialize from LlamaHub Tools
from llama_index.tools.wolfram_alpha import WolframAlphaToolSpec

wolfram_spec = WolframAlphaToolSpec(app_id="<app_id>")
wolfram_tools = wolfram_spec.to_tool_list()
tools = [LlamaIndexTool.from_tool(t) for t in wolfram_tools]


# Initialize the tool from a LlamaIndex Query Engine

## NOTE: LlamaIndex has many query engines; use whichever fits your data.
## `index` here is any existing LlamaIndex index.
query_engine = index.as_query_engine()
query_tool = LlamaIndexTool.from_query_engine(
    query_engine,
    name="Uber 2019 10K Query Tool",
    description="Use this tool to lookup the 2019 Uber 10K Annual Report"
)
```
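Once wrapped, the tool can be handed to a CrewAI agent like any other tool. The sketch below assumes the `query_tool` built above; the role, goal, backstory, and task text are illustrative placeholders, not part of this package:

```python
from crewai import Agent, Task, Crew

# Minimal sketch: give the wrapped LlamaIndex query tool to a CrewAI agent.
analyst = Agent(
    role="Financial Analyst",
    goal="Answer questions about Uber's 2019 10K filing",
    backstory="You dig through annual reports to find key facts and figures.",
    tools=[query_tool],  # the LlamaIndexTool created above
)

task = Task(
    description="What were Uber's main risk factors in 2019?",
    expected_output="A short summary of the main risk factors.",
    agent=analyst,
)

crew = Crew(agents=[analyst], tasks=[task])
result = crew.kickoff()
print(result)
```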
## Steps to Get Started

To effectively use the `LlamaIndexTool`, follow these steps:

1. **Install CrewAI**: Confirm that the `crewai[tools]` package is installed in your Python environment.
2. **Install and use LlamaIndex**: Follow the LlamaIndex documentation (https://docs.llamaindex.ai/) to set up a RAG/agent pipeline, as in the sketch below.
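A minimal sketch of step 2, assuming your source documents live in a local `./data` directory and you use the standard LlamaIndex ingestion APIs; the path, tool name, and description are illustrative:

```python
from llama_index.core import SimpleDirectoryReader, VectorStoreIndex
from crewai_tools import LlamaIndexTool

# Build a simple vector index over local documents.
documents = SimpleDirectoryReader("./data").load_data()
index = VectorStoreIndex.from_documents(documents)
query_engine = index.as_query_engine()

# Wrap the query engine so CrewAI agents can call it as a tool.
doc_tool = LlamaIndexTool.from_query_engine(
    query_engine,
    name="Local Documents Query Tool",
    description="Use this tool to look up information in the local document set.",
)
```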
src/crewai_tools/tools/llamaindex_tool/llamaindex_tool.py (new file, 84 lines)

@@ -0,0 +1,84 @@
from typing import Type, Any, cast, Optional

from pydantic.v1 import BaseModel, Field

from crewai_tools.tools.base_tool import BaseTool


class LlamaIndexTool(BaseTool):
    """Tool to wrap LlamaIndex tools/query engines."""

    llama_index_tool: Any

    def _run(
        self,
        *args: Any,
        **kwargs: Any,
    ) -> Any:
        """Run the wrapped LlamaIndex tool with the given arguments."""
        from llama_index.core.tools import BaseTool as LlamaBaseTool

        tool = cast(LlamaBaseTool, self.llama_index_tool)
        return tool(*args, **kwargs)

    @classmethod
    def from_tool(
        cls,
        tool: Any,
        **kwargs: Any
    ) -> "LlamaIndexTool":
        """Create a LlamaIndexTool from an existing LlamaIndex tool (e.g. a FunctionTool)."""
        from llama_index.core.tools import BaseTool as LlamaBaseTool

        if not isinstance(tool, LlamaBaseTool):
            raise ValueError(f"Expected a LlamaBaseTool, got {type(tool)}")
        tool = cast(LlamaBaseTool, tool)

        if tool.metadata.fn_schema is None:
            raise ValueError("The LlamaIndex tool does not have an fn_schema specified.")
        args_schema = cast(Type[BaseModel], tool.metadata.fn_schema)

        return cls(
            name=tool.metadata.name,
            description=tool.metadata.description,
            args_schema=args_schema,
            llama_index_tool=tool,
            **kwargs
        )

    @classmethod
    def from_query_engine(
        cls,
        query_engine: Any,
        name: Optional[str] = None,
        description: Optional[str] = None,
        return_direct: bool = False,
        **kwargs: Any
    ) -> "LlamaIndexTool":
        """Create a LlamaIndexTool from a LlamaIndex query engine."""
        from llama_index.core.query_engine import BaseQueryEngine
        from llama_index.core.tools import QueryEngineTool

        if not isinstance(query_engine, BaseQueryEngine):
            raise ValueError(f"Expected a BaseQueryEngine, got {type(query_engine)}")

        # NOTE: by default the schema expects an `input` variable. However this
        # confuses crewAI, so we rename it to `query`.
        class QueryToolSchema(BaseModel):
            """Schema for query tool."""

            query: str = Field(..., description="Search query for the query tool.")

        # NOTE: setting `resolve_input_errors` to True is important because the
        # underlying schema expects `input` but we are using `query`.
        query_engine_tool = QueryEngineTool.from_defaults(
            query_engine,
            name=name,
            description=description,
            return_direct=return_direct,
            resolve_input_errors=True,
        )
        # HACK: replace the auto-generated schema with our custom schema.
        query_engine_tool.metadata.fn_schema = QueryToolSchema

        return cls.from_tool(
            query_engine_tool,
            **kwargs
        )
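For context on the `input` to `query` renaming above, here is a hypothetical sanity check. It assumes an existing LlamaIndex `index`; the tool name, description, and query string are illustrative, and `_run` is called directly only to show what agents invoke through the framework:

```python
from crewai_tools import LlamaIndexTool

# Assumes `index` is an existing LlamaIndex index (e.g. a VectorStoreIndex).
query_engine = index.as_query_engine()
tool = LlamaIndexTool.from_query_engine(
    query_engine,
    name="Docs Query Tool",
    description="Look up facts in the indexed documents.",
)

# The wrapped tool should expose a single required `query` argument,
# not QueryEngineTool's default `input`.
print(tool.args_schema.schema()["properties"])

# Calling the underlying implementation directly, for illustration only.
print(tool._run(query="What does the report say about revenue?"))
```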