docs/examples/llm/llm_predictor.ipynb
<a href="https://colab.research.google.com/github/run-llama/llama_index/blob/main/docs/examples/llm/llm_predictor.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>
If you're opening this Notebook on colab, you will probably need to install LlamaIndex 🦙.
%pip install llama-index-llms-openai
%pip install llama-index-llms-langchain
!pip install llama-index
from langchain.chat_models import ChatAnyscale, ChatOpenAI
from llama_index.llms.langchain import LangChainLLM
from llama_index.core import PromptTemplate
llm = LangChainLLM(ChatOpenAI())
stream = await llm.astream(PromptTemplate("Hi, write a short story"))
async for token in stream:
print(token, end="")
## Test with ChatAnyscale
# Same streaming exercise against Anyscale's hosted chat endpoint,
# this time using the synchronous stream() API.
llm = LangChainLLM(ChatAnyscale())
question = PromptTemplate("Hi, Which NFL team have most Super Bowl wins")
for chunk in llm.stream(question):
    print(chunk, end="")
from llama_index.llms.openai import OpenAI
llm = OpenAI()
stream = await llm.astream("Hi, write a short story")
for token in stream:
print(token, end="")