apps/docs/update-delete-memories/overview.mdx
Choose from direct updates, idempotent upserts, single deletions, and powerful bulk operations.
Update existing memories by their ID when you know the specific memory you want to modify.
"queued"."done". Use this when updating fields like accepted, version, or other filter metadata without changing the document content.import Supermemory from 'supermemory';
const client = new Supermemory({
apiKey: process.env.SUPERMEMORY_API_KEY!
});
// Update by memory ID
const updated = await client.documents.update('memory_id_123', {
content: 'Updated content here',
metadata: { version: 2, updated: true }
});
console.log(updated.status); // "queued" when content changed; "done" when metadata-only
console.log(updated.id); // "memory_id_123"
from supermemory import Supermemory
import os
client = Supermemory(api_key=os.environ.get("SUPERMEMORY_API_KEY"))
# Update by memory ID
updated = client.documents.update(
'memory_id_123',
content='Updated content here',
metadata={'version': 2, 'updated': True}
)
print(f"Status: {updated.status}") # "queued" when content changed; "done" when metadata-only
print(f"ID: {updated.id}") # "memory_id_123"
curl -X PATCH "https://api.supermemory.ai/v3/documents/memory_id_123" \
-H "Authorization: Bearer $SUPERMEMORY_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"content": "Updated content here",
"metadata": {"version": 2, "updated": true}
}'
Use customId for idempotent operations: calling add() again with the same customId updates the existing memory instead of creating a duplicate.
import Supermemory from 'supermemory';
const client = new Supermemory({
apiKey: process.env.SUPERMEMORY_API_KEY!
});
const customId = 'user-note-001';
// First call creates memory
const created = await client.add({
content: 'Initial content',
customId: customId,
metadata: { version: 1 }
});
console.log('Created memory:', created.id);
// Second call with same customId updates existing
const updated = await client.add({
content: 'Updated content',
customId: customId, // Same customId = upsert
metadata: { version: 2 }
});
from supermemory import Supermemory
import os
client = Supermemory(api_key=os.environ.get("SUPERMEMORY_API_KEY"))
custom_id = 'user-note-001'
# First call creates memory
created = client.add(
content='Initial content',
custom_id=custom_id,
metadata={'version': 1}
)
print(f'Created memory: {created.id}')
# Second call with same customId updates existing
updated = client.add(
content='Updated content',
custom_id=custom_id, # Same customId = upsert
metadata={'version': 2}
)
print(f'Updated memory: {updated.id}')
print(f'Same memory? {created.id == updated.id}') # True
# First call - creates memory
curl -X POST "https://api.supermemory.ai/v3/documents" \
-H "Authorization: Bearer $SUPERMEMORY_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"content": "Initial content",
"customId": "user-note-001",
"metadata": {"version": 1}
}'
# Response: {"id": "mem_abc123", "status": "queued", "customId": "user-note-001"}
# Second call - updates existing (same customId)
curl -X POST "https://api.supermemory.ai/v3/documents" \
-H "Authorization: Bearer $SUPERMEMORY_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"content": "Updated content",
"customId": "user-note-001",
"metadata": {"version": 2}
}'
# Response: {"id": "mem_abc123", "status": "queued", "customId": "user-note-001"}
# Note: Same ID returned - memory was updated, not created
The customId can have a maximum length of 100 characters.
Delete individual memories by their ID. This is a permanent hard delete with no recovery mechanism.
<CodeGroup>
// Hard delete - permanently removes memory
await client.documents.delete('memory_id_123');
console.log('Memory deleted successfully');
# Hard delete - permanently removes memory
client.documents.delete('memory_id_123')
print('Memory deleted successfully')
# Error handling for single delete
try:
client.documents.delete('memory_id_123')
print('Delete successful')
except NotFoundError:
print('Memory not found or already deleted')
except AuthenticationError:
print('Authentication failed')
except Exception as e:
print(f'Delete failed: {e}')
curl -X DELETE "https://api.supermemory.ai/v3/documents/memory_id_123" \
-H "Authorization: Bearer $SUPERMEMORY_API_KEY"
# Response: 204 No Content (success)
# Response: 404 Not Found (memory doesn't exist)
Delete multiple memories at once by providing an array of memory IDs. Maximum of 100 IDs per request.
<CodeGroup>
// Bulk delete by memory IDs
const result = await client.documents.deleteBulk({
ids: [
'memory_id_1',
'memory_id_2',
'memory_id_3',
'non_existent_id' // This will be reported in errors
]
});
console.log('Bulk delete result:', result);
// Output: {
// success: true,
// deletedCount: 3,
// errors: [
// { id: "non_existent_id", error: "Memory not found" }
// ]
// }
# Bulk delete by memory IDs
result = client.documents.delete_bulk(
ids=[
'memory_id_1',
'memory_id_2',
'memory_id_3',
'non_existent_id' # This will be reported in errors
]
)
print(f'Bulk delete result: {result}')
# Output: {
# 'success': True,
# 'deletedCount': 3,
# 'errors': [
# {'id': 'non_existent_id', 'error': 'Memory not found'}
# ]
# }
curl -X DELETE "https://api.supermemory.ai/v3/documents/bulk" \
-H "Authorization: Bearer $SUPERMEMORY_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"ids": [
"memory_id_1",
"memory_id_2",
"memory_id_3",
"non_existent_id"
]
}'
# Response: {
# "success": true,
# "deletedCount": 3,
# "errors": [
# {"id": "non_existent_id", "error": "Memory not found"}
# ]
# }
Delete all memories within specific container tags. This is useful for cleaning up entire projects or user data.
<CodeGroup>
// Delete all memories in specific container tags
const result = await client.documents.deleteBulk({
containerTags: ['user-123', 'project-old', 'archived-content']
});
console.log('Bulk delete by tags result:', result);
// Output: {
// success: true,
// deletedCount: 45,
// containerTags: ["user-123", "project-old", "archived-content"]
// }
# Delete all memories in specific container tags
result = client.documents.delete_bulk(
container_tags=['user-123', 'project-old', 'archived-content']
)
print(f'Bulk delete by tags result: {result}')
# Output: {
# 'success': True,
# 'deletedCount': 45,
# 'containerTags': ['user-123', 'project-old', 'archived-content']
# }
curl -X DELETE "https://api.supermemory.ai/v3/documents/bulk" \
-H "Authorization: Bearer $SUPERMEMORY_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"containerTags": ["user-123", "project-old", "archived-content"]
}'
# Response: {
# "success": true,
# "deletedCount": 45,
# "containerTags": ["user-123", "project-old", "archived-content"]
# }
For applications requiring audit trails or recovery mechanisms, implement soft delete patterns using metadata:
<CodeGroup>
// Soft delete pattern using metadata
await client.documents.update('memory_id', {
metadata: {
deleted: true,
deletedAt: new Date().toISOString(),
deletedBy: 'user_123'
}
});
// Filter out deleted memories in searches
const activeMemories = await client.documents.list({
filters: {
AND: [
{ key: "deleted", value: "true", negate: true }
]
}
});
console.log('Active memories:', activeMemories.memories.length);
from datetime import datetime
# Soft delete pattern using metadata
client.documents.update(
'memory_id',
metadata={
'deleted': True,
'deletedAt': datetime.now().isoformat(),
'deletedBy': 'user_123'
}
)
# Filter out deleted memories
active_memories = client.documents.list(
filters={
"AND": [
{"key": "deleted", "value": "true", "negate": True}
]
}
)
print(f'Active memories: {len(active_memories.memories)}')
# Soft delete using metadata
curl -X PATCH "https://api.supermemory.ai/v3/documents/memory_id" \
-H "Authorization: Bearer $SUPERMEMORY_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"metadata": {
"deleted": true,
"deletedAt": "2024-01-15T10:30:00Z",
"deletedBy": "user_123"
}
}'
# Response: {"id": "memory_id", "status": "queued"}
// Delete a large set of memories in rate-limit-friendly chunks.
// memoryIds: every ID to remove; batchSize: max IDs per deleteBulk call (API limit is 100).
// Returns the aggregate deleted count plus the raw per-batch results for auditing.
async function batchDeleteMemories(memoryIds: string[], batchSize = 100) {
  const batchResults = [];
  for (let start = 0; start < memoryIds.length; start += batchSize) {
    const chunk = memoryIds.slice(start, start + batchSize);
    const batchNumber = Math.floor(start / batchSize) + 1;
    console.log(`Processing batch ${batchNumber} of ${Math.ceil(memoryIds.length / batchSize)}`);
    try {
      const response = await client.documents.deleteBulk({ ids: chunk });
      batchResults.push(response);
      // Pause between batches (skipped after the final one) to avoid rate limiting.
      const moreRemaining = start + batchSize < memoryIds.length;
      if (moreRemaining) {
        await new Promise(resolve => setTimeout(resolve, 1000));
      }
    } catch (error) {
      console.error(`Batch ${batchNumber} failed:`, error);
      batchResults.push({ success: false, error: error.message, batch: chunk });
    }
  }
  // Sum deletedCount across the batches that reported success.
  let totalDeleted = 0;
  for (const entry of batchResults) {
    if (entry.success) {
      totalDeleted += entry.deletedCount || 0;
    }
  }
  console.log(`Total deleted: ${totalDeleted} out of ${memoryIds.length}`);
  return { totalDeleted, results: batchResults };
}
import time
import math
def batch_delete_memories(memory_ids, batch_size=100):
    """Delete a large set of memories in rate-limit-friendly chunks.

    Splits memory_ids into slices of at most batch_size, issues one bulk
    delete call per slice, and aggregates the per-batch outcomes.
    Returns a dict with the total deleted count and the raw batch results.
    """
    batch_results = []
    total_batches = math.ceil(len(memory_ids) / batch_size)
    for start in range(0, len(memory_ids), batch_size):
        chunk = memory_ids[start:start + batch_size]
        batch_number = start // batch_size + 1
        print(f'Processing batch {batch_number} of {total_batches}')
        try:
            response = client.documents.delete_bulk(ids=chunk)
            batch_results.append(response)
            # Pause between batches (skipped after the final one) to avoid rate limiting
            if start + batch_size < len(memory_ids):
                time.sleep(1)
        except Exception as error:
            print(f'Batch {batch_number} failed: {error}')
            batch_results.append({'success': False, 'error': str(error), 'batch': chunk})
    # Sum deletedCount across the batches that reported success
    total_deleted = 0
    for entry in batch_results:
        if entry.get('success'):
            total_deleted += entry.get('deletedCount', 0)
    print(f'Total deleted: {total_deleted} out of {len(memory_ids)}')
    return {'totalDeleted': total_deleted, 'results': batch_results}
#!/bin/bash
# Batch processing script example: delete memories in chunks of BATCH_SIZE
# via the bulk delete endpoint, summing the reported deletedCount values.
# NOTE: the shebang must be the very first line of the script to take effect.
MEMORY_IDS=("id1" "id2" "id3") # Your memory IDs array
BATCH_SIZE=100
TOTAL_DELETED=0
# Process in batches
for ((i=0; i<${#MEMORY_IDS[@]}; i+=BATCH_SIZE)); do
  batch=("${MEMORY_IDS[@]:i:BATCH_SIZE}")
  # Build a JSON array of IDs for this batch (jq -R reads raw lines, -s collects them)
  batch_json=$(printf '%s\n' "${batch[@]}" | jq -R . | jq -s .)
  echo "Processing batch $((i/BATCH_SIZE + 1))"
  response=$(curl -s -X DELETE \
    "https://api.supermemory.ai/v3/documents/bulk" \
    -H "Authorization: Bearer $SUPERMEMORY_API_KEY" \
    -H "Content-Type: application/json" \
    -d "{\"ids\": $batch_json}")
  # Default to 0 if deletedCount is missing from the response
  deleted_count=$(echo "$response" | jq -r '.deletedCount // 0')
  TOTAL_DELETED=$((TOTAL_DELETED + deleted_count))
  echo "Batch deleted: $deleted_count memories"
  sleep 1 # Rate limiting protection
done
echo "Total deleted: $TOTAL_DELETED memories"