docs/examples/query_transformations/SimpleIndexDemo-multistep.ipynb
<a href="https://colab.research.google.com/github/run-llama/llama_index/blob/main/docs/examples/query_transformations/SimpleIndexDemo-multistep.ipynb" target="_parent">Open In Colab</a>
We have a multi-step query engine that can decompose a complex query into sequential subquestions. This guide walks you through how to set it up!
If you're opening this notebook on Colab, you will probably need to install LlamaIndex 🦙.
%pip install llama-index-llms-openai
!pip install llama-index
!mkdir -p 'data/paul_graham/'
!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'
import os
os.environ["OPENAI_API_KEY"] = "sk-..."  # replace with your own key
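If you'd rather not hardcode the key in the notebook, you can prompt for it instead (optional; assumes an interactive session):
import getpass
os.environ["OPENAI_API_KEY"] = getpass.getpass("OpenAI API key: ")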
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.llms.openai import OpenAI
from IPython.display import Markdown, display
# LLM (gpt-3.5)
gpt35 = OpenAI(temperature=0, model="gpt-3.5-turbo")
# LLM (gpt-4)
gpt4 = OpenAI(temperature=0, model="gpt-4")
# load documents
documents = SimpleDirectoryReader("./data/paul_graham/").load_data()
index = VectorStoreIndex.from_documents(documents)
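Optionally, persist the index to disk so it doesn't have to be rebuilt on every run (the storage path here is arbitrary):
# save the index; it can be reloaded later via StorageContext and load_index_from_storage
index.storage_context.persist(persist_dir="./storage")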
from llama_index.core.indices.query.query_transform.base import (
StepDecomposeQueryTransform,
)
# gpt-4
step_decompose_transform = StepDecomposeQueryTransform(llm=gpt4, verbose=True)
# gpt-3.5
step_decompose_transform_gpt3 = StepDecomposeQueryTransform(
llm=gpt35, verbose=True
)
index_summary = "Used to answer questions about the author"
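The index summary gives the transform context about what the index can answer. If you want to see a single rewrite in isolation, you can call the transform directly; this is a minimal sketch and assumes run() accepts a metadata dict with index_summary (and optionally prev_reasoning) keys, which may vary across LlamaIndex versions:
# sketch only: inspect one rewritten subquestion (API details are an assumption)
transformed = step_decompose_transform.run(
    "Who was in the first batch of the accelerator program the author started?",
    metadata={"index_summary": index_summary, "prev_reasoning": ""},
)
print(transformed.query_str)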
# set Logging to DEBUG for more detailed outputs
from llama_index.core.query_engine import MultiStepQueryEngine
query_engine = index.as_query_engine(llm=gpt4)
query_engine = MultiStepQueryEngine(
query_engine=query_engine,
query_transform=step_decompose_transform,
index_summary=index_summary,
)
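The engine runs the decomposition loop for a bounded number of steps. At the time of writing, MultiStepQueryEngine accepts a num_steps argument (default 3) and an early_stopping flag that ends the loop once the transform decides no further subquestions are needed; a minimal sketch:
# sketch: cap the decomposition at a fixed number of steps
query_engine_capped = MultiStepQueryEngine(
    query_engine=index.as_query_engine(llm=gpt4),
    query_transform=step_decompose_transform,
    index_summary=index_summary,
    num_steps=3,  # maximum number of sequential subquestions
)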
response_gpt4 = query_engine.query(
"Who was in the first batch of the accelerator program the author"
" started?",
)
display(Markdown(f"<b>{response_gpt4}</b>"))
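Each intermediate (subquestion, answer) pair is recorded in the response metadata under the sub_qa key, so you can inspect exactly how the query was decomposed: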
sub_qa = response_gpt4.metadata["sub_qa"]
tuples = [(t[0], t[1].response) for t in sub_qa]
print(tuples)
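For readability, you can also print each pair on its own lines:
# optional: print each subquestion/answer pair on separate lines
for question, answer in tuples:
    print(f"Q: {question}\nA: {answer}\n")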
response_gpt4 = query_engine.query(
"In which city did the author found his first company, Viaweb?",
)
print(response_gpt4)
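The same pipeline can also be driven by gpt-3.5-turbo as the decomposition LLM, which is useful for comparing the quality of the generated subquestions across models: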
query_engine = index.as_query_engine(llm=gpt35)
query_engine = MultiStepQueryEngine(
query_engine=query_engine,
query_transform=step_decompose_transform_gpt3,
index_summary=index_summary,
)
response_gpt3 = query_engine.query(
"In which city did the author found his first company, Viaweb?",
)
print(response_gpt3)
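As before, the gpt-3.5 run records its subquestion trace in the response metadata, so you can compare the two models' decompositions directly:
# inspect the gpt-3.5 decomposition the same way as for gpt-4
sub_qa_gpt3 = response_gpt3.metadata["sub_qa"]
print([(t[0], t[1].response) for t in sub_qa_gpt3])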