cookbook/TogetherAI_liteLLM.ipynb
!pip install litellm
import os
from litellm import completion
os.environ["TOGETHERAI_API_KEY"] = "" #@param
user_message = "Hello, whats the weather in San Francisco??"
messages = [{ "content": user_message,"role": "user"}]
https://api.together.xyz/playground/chat?model=togethercomputer%2Fllama-2-70b-chat
# Non-streaming chat completion against Together AI's Llama-2-70B chat model.
model_name = "togethercomputer/llama-2-70b-chat"
response = completion(
    model=model_name,
    messages=messages,
    max_tokens=200,
)
print(response)
# Same request, routed to the CodeLlama-34B instruct model instead.
model_name = "togethercomputer/CodeLlama-34b-Instruct"
response = completion(
    model=model_name,
    messages=messages,
    max_tokens=200,
)
print(response)
# Replace the conversation with a longer-form prompt for the streaming demo.
user_message = "Write 1page essay on YC + liteLLM"
messages = [
    {"content": user_message, "role": "user"},
]
async def parse_stream(stream):
    """Consume an async stream, printing each chunk as it arrives."""
    async for chunk in stream:
        print(chunk)
# Streaming completion: with stream=True, litellm returns a stream of
# incremental chunks (iterated by parse_stream above) rather than one
# finished response object.
stream = completion(model="togethercomputer/llama-2-70b-chat", messages=messages, stream=True, max_tokens=800)
print(stream)
# Await the asynchronous function directly in the notebook cell
# NOTE(review): top-level `await` only works inside IPython/Jupyter
# (autoawait); in a plain .py script this line is a syntax error and
# would need asyncio.run(parse_stream(stream)) instead.
await parse_stream(stream)