Back to Llama Index

Langchain Output Parsing

docs/examples/output_parsing/LangchainOutputParserDemo.ipynb

0.14.2 · 12.3 KB
Original Source

<a href="https://colab.research.google.com/github/run-llama/llama_index/blob/main/docs/examples/output_parsing/LangchainOutputParserDemo.ipynb" target="_parent">Open in Colab</a>

Langchain Output Parsing

Download Data

python
# Install the llama-index OpenAI LLM integration required by this notebook.
%pip install llama-index-llms-openai
python
# Create the data directory and download the Paul Graham essay used by the
# examples below (shell magics; run inside a Jupyter/Colab environment).
!mkdir -p 'data/paul_graham/'
!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/examples/data/paul_graham/paul_graham_essay.txt' -O 'data/paul_graham/paul_graham_essay.txt'

Load documents, build the VectorStoreIndex

python
# Route INFO-level log output to stdout so llama-index's internal activity
# (retrieval, LLM calls) is visible in the notebook cells.
import logging
import sys

logging.basicConfig(stream=sys.stdout, level=logging.INFO)
# NOTE(review): basicConfig already attaches a stdout handler; this second
# handler will duplicate each log line — presumably intentional for the demo.
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))

from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from IPython.display import Markdown, display

import os

# Placeholder API key — replace "sk-..." with a real OpenAI key before running.
os.environ["OPENAI_API_KEY"] = "sk-..."
python
# Read the downloaded essay from disk into a list of Document objects.
reader = SimpleDirectoryReader("./data/paul_graham/")
documents = reader.load_data()
python
# Build a vector-store index over the documents, chunking text at size 512.
index = VectorStoreIndex.from_documents(documents, chunk_size=512)

Define Query + Langchain Output Parser

python
from llama_index.core.output_parsers import LangchainOutputParser
from langchain.output_parsers import StructuredOutputParser, ResponseSchema

Define custom QA and Refine Prompts

python
# The structured-output schema: the parser will instruct the LLM to return a
# JSON object with exactly these two named fields.
_field_specs = [
    ("Education", "Describes the author's educational experience/background."),
    ("Work", "Describes the author's work experience/background."),
]
response_schemas = [
    ResponseSchema(name=field_name, description=field_desc)
    for field_name, field_desc in _field_specs
]
python
# Build the Langchain structured parser from the schemas, then wrap it in
# llama-index's adapter so it can be attached to prompts and LLMs.
lc_output_parser = StructuredOutputParser.from_response_schemas(response_schemas)
output_parser = LangchainOutputParser(lc_output_parser)
python
from llama_index.core.prompts.default_prompts import (
    DEFAULT_TEXT_QA_PROMPT_TMPL,
)

# take a look at the new QA template!
# ``format`` injects the parser's output-format instructions (the JSON schema
# derived from response_schemas) into the default QA prompt template.
fmt_qa_tmpl = output_parser.format(DEFAULT_TEXT_QA_PROMPT_TMPL)
print(fmt_qa_tmpl)

Query Index

python
from llama_index.llms.openai import OpenAI

# Attach the output parser to the LLM itself so every completion is
# post-processed into the structured form defined by the schemas above.
llm = OpenAI(output_parser=output_parser)

query_engine = index.as_query_engine(llm=llm)

question = "What are a few things the author did growing up?"
response = query_engine.query(question)
python
# Display the structured (JSON-formatted) answer.
print(response)