Back to Llama Index

Chat completion

llama-index-integrations/llms/llama-index-llms-reka/llama_index_reka_samplenotebook.ipynb

Version 0.14.2 · 12.5 KB
Original Source
python
pip install llama-index-llms-reka

To obtain an API key, please visit https://platform.reka.ai/

Chat completion

python
import os
from llama_index.llms.reka import RekaLLM
from llama_index.core.base.llms.types import ChatMessage, MessageRole

# Build the Reka client; the API key is read from the REKA_API_KEY
# environment variable (see https://platform.reka.ai/ to obtain one).
api_key = os.getenv("REKA_API_KEY")
reka_llm = RekaLLM(model="reka-flash", api_key=api_key)

# Synchronous chat completion: a system prompt plus one user turn.
chat_history = [
    ChatMessage(role=MessageRole.SYSTEM, content="You are a helpful assistant."),
    ChatMessage(role=MessageRole.USER, content="What is the capital of France?"),
]
chat_response = reka_llm.chat(chat_history)
print(chat_response.message.content)

# Synchronous text (prompt-style) completion.
completion_response = reka_llm.complete("The capital of France is")
print(completion_response.text)

Streaming example

python
# Streaming chat completion: tokens are printed as they arrive.
stream_history = [
    ChatMessage(role=MessageRole.SYSTEM, content="You are a helpful assistant."),
    ChatMessage(
        role=MessageRole.USER, content="List the first 5 planets in the solar system."
    ),
]
for token in reka_llm.stream_chat(stream_history):
    print(token.delta, end="", flush=True)

# Streaming text completion over a plain prompt string.
stream_prompt = "List the first 5 planets in the solar system:"
for token in reka_llm.stream_complete(stream_prompt):
    print(token.delta, end="", flush=True)

Async use cases (chat/completion)

python
async def main():
    # Async chat completion
    messages = [
        ChatMessage(role=MessageRole.SYSTEM, content="You are a helpful assistant."),
        ChatMessage(
            role=MessageRole.USER,
            content="What is the largest planet in our solar system?",
        ),
    ]
    response = await reka_llm.achat(messages)
    print(response.message.content)

    # Async text completion
    prompt = "The largest planet in our solar system is"
    response = await reka_llm.acomplete(prompt)
    print(response.text)

    # Async streaming chat completion
    messages = [
        ChatMessage(role=MessageRole.SYSTEM, content="You are a helpful assistant."),
        ChatMessage(
            role=MessageRole.USER,
            content="Name the first 5 elements in the periodic table.",
        ),
    ]
    async for chunk in await reka_llm.astream_chat(messages):
        print(chunk.delta, end="", flush=True)

    # Async streaming text completion
    prompt = "List the first 5 elements in the periodic table:"
    async for chunk in await reka_llm.astream_complete(prompt):
        print(chunk.delta, end="", flush=True)


await main()