cookbook/LiteLLM_Bedrock.ipynb
Important Note: Bedrock requests require `boto3>=1.28.57` — Bedrock support was first added to boto3 in version 1.28.57.
!pip install litellm
!pip install boto3>=1.28.57 # this version onwards has bedrock support
import os

# AWS credentials for Bedrock — fill these in before running the cells below.
os.environ.update({
    "AWS_ACCESS_KEY_ID": "",      # Access key
    "AWS_SECRET_ACCESS_KEY": "",  # Secret access key
    "AWS_REGION_NAME": "",        # e.g. "us-east-1"
})
from litellm import completion

# Call Anthropic Claude Instant v1 on Bedrock through LiteLLM's unified API.
prompt = [{"role": "user", "content": "Hello, how are you?"}]
response = completion(model="bedrock/anthropic.claude-instant-v1", messages=prompt)
print("Claude instant 1, response")
print(response)
# Same request against the larger Claude v2 model on Bedrock.
prompt = [{"role": "user", "content": "Hello, how are you?"}]
response = completion(model="bedrock/anthropic.claude-v2", messages=prompt)
print("Claude v2, response")
print(response)
from litellm import completion

# Streaming variant: with stream=True, completion() returns an iterable of
# incremental chunks instead of one final response object.
response = completion(
    model="bedrock/anthropic.claude-instant-v1",
    messages=[{"role": "user", "content": "Hello, how are you?"}],
    stream=True,
)
print("Claude instant 1, response")
for chunk in response:
    print(chunk)
# Streaming request against Claude v2 on Bedrock.
response = completion(
    model="bedrock/anthropic.claude-v2",
    messages=[{ "content": "Hello, how are you?","role": "user"}],
    stream=True
)
print("Claude v2, response")
# Fix: the original printed `response` itself before iterating, which only
# dumps the stream-wrapper object's repr — inconsistent with the Claude
# Instant streaming example. Iterate the chunks directly instead.
for chunk in response:
    print(chunk)
# AI21 Jurassic-2 Ultra on Bedrock — same unified completion() interface.
prompt = [{"role": "user", "content": "Hello, how are you?"}]
response = completion(model="bedrock/ai21.j2-ultra", messages=prompt)
print("J2 ultra response")
print(response)
# AI21 Jurassic-2 Mid on Bedrock — the smaller Jurassic-2 variant.
prompt = [{"role": "user", "content": "Hello, how are you?"}]
response = completion(model="bedrock/ai21.j2-mid", messages=prompt)
print("J2 mid response")
print(response)