# cookbook/logging_observability/LiteLLM_Langfuse.ipynb
# Docs: https://docs.litellm.ai/docs/observability/langfuse_integration
# Install dependencies (run in a notebook cell): !pip install litellm langfuse
# Setup: import litellm and configure credentials + the Langfuse logging callback.
import litellm
from litellm import completion
import os
# Langfuse project keys — create/copy them from https://cloud.langfuse.com/
os.environ["LANGFUSE_PUBLIC_KEY"] = ""
os.environ["LANGFUSE_SECRET_KEY"] = ""
# OpenAI and Cohere keys
# You can use any of the litellm supported providers: https://docs.litellm.ai/docs/providers
os.environ['OPENAI_API_KEY']=""
os.environ['COHERE_API_KEY']=""
# set langfuse as a callback, litellm will send the data to langfuse
# (every successful completion() call below is logged automatically)
litellm.success_callback = ["langfuse"]
# OpenAI call — the langfuse success callback configured above logs this
# request/response pair to Langfuse automatically.
openai_messages = [
    {"role": "user", "content": "Hi 👋 - i'm openai"},
]
response = completion(model="gpt-3.5-turbo", messages=openai_messages)
print(response)
# Cohere call — no extra setup needed: the langfuse callback registered in
# the earlier cell applies to every provider litellm routes to.
cohere_messages = [
    {"role": "user", "content": "Hi 👋 - i'm cohere"},
]
response = completion(model="command-nightly", messages=cohere_messages)
print(response)