# Source notebook: cookbooks/customer-support-chatbot.ipynb
import os
from datetime import datetime
from typing import Dict, List, Optional

import anthropic
from mem0 import Memory
# API keys: OpenAI is only used by Mem0's embedding model; Anthropic drives the chat LLM.
os.environ.update(
    {
        "OPENAI_API_KEY": "your_openai_api_key",  # needed for embedding model
        "ANTHROPIC_API_KEY": "your_anthropic_api_key",
    }
)
class SupportChatbot:
    """Customer-support agent backed by Anthropic's Claude with Mem0 long-term memory.

    Every exchange is stored per-customer in Mem0; on each new query the most
    relevant past memories are retrieved by semantic search and injected into
    the prompt so answers stay consistent across conversations.
    """

    # Single source of truth for the Claude model/sampling settings, shared by
    # Mem0's internal LLM calls and the direct message generation below
    # (previously duplicated in two places and easy to let drift apart).
    MODEL_NAME = "claude-3-5-sonnet-latest"
    TEMPERATURE = 0.1
    MAX_TOKENS = 2000

    def __init__(self):
        # Configure Mem0 to use Claude for its internal memory operations.
        self.config = {
            "llm": {
                "provider": "anthropic",
                "config": {
                    "model": self.MODEL_NAME,
                    "temperature": self.TEMPERATURE,
                    "max_tokens": self.MAX_TOKENS,
                },
            }
        }
        self.client = anthropic.Client(api_key=os.environ["ANTHROPIC_API_KEY"])
        self.memory = Memory.from_config(self.config)
        # Standing instructions prepended to every prompt.
        self.system_context = """
        You are a helpful customer support agent. Use the following guidelines:
        - Be polite and professional
        - Show empathy for customer issues
        - Reference past interactions when relevant
        - Maintain consistent information across conversations
        - If you're unsure about something, ask for clarification
        - Keep track of open issues and follow-ups
        """

    def store_customer_interaction(
        self, user_id: str, message: str, response: str, metadata: Optional[Dict] = None
    ):
        """Store one customer/assistant exchange in Mem0.

        Args:
            user_id: Key under which the memory is stored and later searched.
            message: The customer's message text.
            response: The assistant's reply text.
            metadata: Optional extra fields; a ``timestamp`` is always added.
        """
        if metadata is None:
            metadata = {}
        # Stamp every stored interaction so history can be ordered/audited.
        metadata["timestamp"] = datetime.now().isoformat()
        conversation = [
            {"role": "user", "content": message},
            {"role": "assistant", "content": response},
        ]
        self.memory.add(conversation, user_id=user_id, metadata=metadata)

    def get_relevant_history(self, user_id: str, query: str) -> List[Dict]:
        """Return up to 5 stored memories for *user_id* most relevant to *query*."""
        return self.memory.search(
            query=query,
            user_id=user_id,
            limit=5,  # Adjust based on needs
        )

    def handle_customer_query(self, user_id: str, query: str) -> str:
        """Answer a customer query, grounding the reply in relevant past memories.

        Retrieves matching history, builds a prompt with the support guidelines
        plus that context, calls Claude, stores the exchange, and returns the
        reply text.
        """
        relevant_history = self.get_relevant_history(user_id, query)

        # Each search hit is one stored memory; list it once.
        # (Bug fix: the original emitted every memory twice, labeled both
        # "Customer:" and "Support:" with the same text.)
        context = "Previous relevant interactions:\n"
        for memory in relevant_history:
            context += f"- {memory['memory']}\n"
            context += "---\n"

        prompt = f"""
        {self.system_context}
        {context}
        Current customer query: {query}
        Provide a helpful response that takes into account any relevant past interactions.
        """

        response = self.client.messages.create(
            model=self.MODEL_NAME,
            messages=[{"role": "user", "content": prompt}],
            max_tokens=self.MAX_TOKENS,
            temperature=self.TEMPERATURE,
        )
        answer = response.content[0].text

        # Persist the plain-text reply (bug fix: the original stored the raw
        # anthropic Message object instead of its text).
        self.store_customer_interaction(
            user_id=user_id,
            message=query,
            response=answer,
            metadata={"type": "support_query"},
        )
        return answer
chatbot = SupportChatbot()
user_id = "customer_bot"
print("Welcome to Customer Support! Type 'exit' to end the conversation.")

# Read one line per turn; iter(input, None) never hits its sentinel, so this
# loops until the customer explicitly types 'exit'.
for query in iter(input, None):
    print("Customer:", query)
    # 'exit' (any casing) ends the session.
    if query.lower() == "exit":
        print("Thank you for using our support service. Goodbye!")
        break
    # Answer with memory-augmented context and echo the reply.
    answer = chatbot.handle_customer_query(user_id, query)
    print("Support:", answer, "\n\n")