Back to Llama Index

Agents

docs/examples/cookbooks/oreilly_course_cookbooks/Module-6/Agents.ipynb

0.14.2 · 14.1 KB
Original Source

Agents

Installation

python
!pip install llama-index

Setup LLM and Embedding Model

python
import nest_asyncio

# Allow nested event loops — required to `await` agent calls inside a
# Jupyter notebook, which already runs its own asyncio loop.
nest_asyncio.apply()

import os

# NOTE(review): placeholder credential — replace "sk-..." with a real
# OpenAI API key before running.
os.environ["OPENAI_API_KEY"] = "sk-..."

from llama_index.llms.openai import OpenAI
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.core import Settings

# Low temperature for more deterministic reasoning/tool selection.
llm = OpenAI(model="gpt-4", temperature=0.1)
embed_model = OpenAIEmbedding()  # default OpenAI embedding model

# Register as global defaults so later indexes and query engines pick
# them up without being passed explicitly.
Settings.llm = llm
Settings.embed_model = embed_model

Agents and Tools usage

python
from llama_index.core.tools import FunctionTool
from llama_index.core.agent.workflow import (
    FunctionAgent,
    ReActAgent,
)

from IPython.display import display, HTML
python
def multiply(a: int, b: int) -> int:
    """Return the product of the two integers a and b."""
    product = a * b
    return product


def add(a: int, b: int) -> int:
    """Return the sum of the two integers a and b."""
    total = a + b
    return total


def subtract(a: int, b: int) -> int:
    """Return the difference of the two integers, a minus b."""
    difference = a - b
    return difference


# Wrap each plain function as an agent tool. `from_defaults` derives the
# tool's name and description from the function's name, signature, and
# docstring, so the LLM can decide when to call it.
multiply_tool = FunctionTool.from_defaults(fn=multiply)
add_tool = FunctionTool.from_defaults(fn=add)
subtract_tool = FunctionTool.from_defaults(fn=subtract)

With ReAct Agent

python
# ReAct-style agent: the LLM alternates reasoning and tool invocations
# via prompted Thought/Action steps (works even with models that lack
# native function calling).
agent = ReActAgent(
    tools=[multiply_tool, add_tool, subtract_tool],
    llm=llm,
)
python
response = await agent.run("What is (26 * 2) + 2024?")
python
display(HTML(f'<p style="font-size:20px">{response.response}</p>'))

With Function Calling.

python
# Function-calling agent: relies on the model's native tool/function
# calling API instead of ReAct-style text prompting.
agent = FunctionAgent(
    tools=[multiply_tool, add_tool, subtract_tool],
    llm=llm,
)
python
response = await agent.run("What is (26 * 2) + 2024?")
python
display(HTML(f'<p style="font-size:20px">{response}</p>'))

Agent with RAG Query Engine Tools

Download Data

We will use Uber-2021 and Lyft-2021 10K SEC filings.

python
!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/examples/data/10k/uber_2021.pdf' -O './uber_2021.pdf'
!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/examples/data/10k/lyft_2021.pdf' -O './lyft_2021.pdf'

Load Data

python
from llama_index.core import SimpleDirectoryReader

# Parse each 10-K PDF (downloaded above) into a list of Document objects.
uber_docs = SimpleDirectoryReader(input_files=["./uber_2021.pdf"]).load_data()
lyft_docs = SimpleDirectoryReader(input_files=["./lyft_2021.pdf"]).load_data()

Build RAG on the Uber and Lyft docs

python
from llama_index.core import VectorStoreIndex

# Build an in-memory vector index per filing (uses the global
# Settings.embed_model configured earlier) and expose each as a query
# engine that retrieves the top-3 most similar chunks per question.
uber_index = VectorStoreIndex.from_documents(uber_docs)
uber_query_engine = uber_index.as_query_engine(similarity_top_k=3)

lyft_index = VectorStoreIndex.from_documents(lyft_docs)
lyft_query_engine = lyft_index.as_query_engine(similarity_top_k=3)
python
response = uber_query_engine.query("What are the investments of Uber in 2021?")
python
display(HTML(f'<p style="font-size:20px">{response.response}</p>'))
python
response = lyft_query_engine.query("What are lyft investments in 2021?")
python
display(HTML(f'<p style="font-size:20px">{response.response}</p>'))

FunctionAgent with RAG QueryEngineTools.

Here we use the Function Calling capabilities of the model.

python
from llama_index.core.tools import QueryEngineTool, ToolMetadata
from llama_index.core.agent.workflow import FunctionAgent

# Wrap each RAG query engine as a tool. The metadata name/description is
# what the LLM sees when deciding which company's filing to query.
query_engine_tools = [
    QueryEngineTool(
        query_engine=lyft_query_engine,
        metadata=ToolMetadata(
            name="lyft_10k",
            description="Provides information about Lyft financials for year 2021",
        ),
    ),
    QueryEngineTool(
        query_engine=uber_query_engine,
        metadata=ToolMetadata(
            name="uber_10k",
            description="Provides information about Uber financials for year 2021",
        ),
    ),
]

# Function-calling agent that routes questions to the appropriate
# 10-K query engine tool.
agent = FunctionAgent(
    tools=query_engine_tools,
    llm=llm,
)
python
response = await agent.run("What are the investments of Uber in 2021?")
python
display(HTML(f'<p style="font-size:20px">{response}</p>'))
python
response = await agent.run("What are lyft investments in 2021?")
python
display(HTML(f'<p style="font-size:20px">{response}</p>'))