cookbook/LiteLLM_Completion_Cost.ipynb
In this notebook we'll use `litellm.completion_cost` to calculate the dollar cost of each completion call
!pip install litellm==0.1.549 # use 0.1.549 or later
# Price a gpt-3.5-turbo chat completion with litellm.completion_cost.
from litellm import completion, completion_cost
import os

# NOTE: paste your OpenAI API key here before running this cell.
os.environ['OPENAI_API_KEY'] = ""

prompt_messages = [{"content": "Hello, how are you?", "role": "user"}]

resp = completion(
    model="gpt-3.5-turbo",
    messages=prompt_messages,
)
print(resp)

# completion_cost reads the token usage off the response object to price the call.
cost = completion_cost(completion_response=resp)
print(f"Cost for completion call: ${float(cost):.10f}")
# Price a TogetherAI llama-2-70b-chat completion with litellm.completion_cost.
from litellm import completion, completion_cost
import os

# NOTE: paste your TogetherAI API key here before running this cell.
os.environ['TOGETHERAI_API_KEY'] = ""

prompt_messages = [{"content": "Hello, how are you?", "role": "user"}]

resp = completion(
    model="togethercomputer/llama-2-70b-chat",
    messages=prompt_messages,
)
print(resp)

# completion_cost reads the token usage off the response object to price the call.
cost = completion_cost(completion_response=resp)
print(f"Cost for completion call: ${float(cost):.10f}")
# Price a Replicate llama-2-70b-chat completion with litellm.completion_cost.
from litellm import completion, completion_cost
import os

# NOTE: paste your Replicate API key here before running this cell.
os.environ['REPLICATE_API_KEY'] = ""

prompt_messages = [{"content": "Hello, how are you?", "role": "user"}]

resp = completion(
    # Replicate models are pinned by a version hash after the colon.
    model="replicate/llama-2-70b-chat:2796ee9483c3fd7aa2e171d38f4ca12251a30609463dcfd4cd76703f22e96cdf",
    messages=prompt_messages,
)
print(resp)

# completion_cost reads the token usage off the response object to price the call.
cost = completion_cost(completion_response=resp)
print(f"Cost for completion call: ${float(cost):.10f}")