packages/docs/examples-gallery/vercel.mdx
Deploy AI agents to Vercel's global edge network.
cd examples/vercel
vercel --prod # Deploy to production (bare `vercel` creates a preview deployment)
| Language | Directory | Runtime |
|---|---|---|
| TypeScript | examples/vercel/api/ | Edge Runtime |
| Python | examples/vercel/api/python/ | Python |
// api/chat.ts
import { AgentRuntime, ModelType } from "@elizaos/core";
import { openaiPlugin } from "@elizaos/plugin-openai";

// Run on Vercel's Edge Runtime (V8 isolates; Node.js APIs are unavailable).
export const config = {
  runtime: "edge",
};

// Module-scope cache: the runtime is initialized once per isolate and
// reused across warm invocations instead of being rebuilt on every request.
let runtime: AgentRuntime | null = null;

/**
 * POST /api/chat — send a user message to the agent and return its reply.
 *
 * Expects a JSON body of the shape `{ "message": string }` and responds
 * with `{ "response": string }`. Malformed or invalid bodies get a 400.
 */
export default async function handler(request: Request) {
  // Lazily create and initialize the agent on the first request.
  if (!runtime) {
    runtime = new AgentRuntime({
      character: { name: "Eliza", bio: "A helpful AI." },
      plugins: [openaiPlugin],
    });
    await runtime.initialize();
  }

  // request.json() throws on malformed JSON — surface that as a 400
  // instead of an unhandled 500.
  let message: unknown;
  try {
    ({ message } = await request.json());
  } catch {
    return new Response(JSON.stringify({ error: "Invalid JSON body" }), {
      status: 400,
      headers: { "Content-Type": "application/json" },
    });
  }

  // Validate before calling the model: without this, a missing field would
  // send the string "undefined" (or worse) as the prompt.
  if (typeof message !== "string" || message.length === 0) {
    return new Response(
      JSON.stringify({ error: "'message' must be a non-empty string" }),
      {
        status: 400,
        headers: { "Content-Type": "application/json" },
      },
    );
  }

  const response = await runtime.useModel(ModelType.TEXT_LARGE, {
    prompt: message,
  });
  return new Response(JSON.stringify({ response: String(response) }), {
    headers: { "Content-Type": "application/json" },
  });
}
cd examples/vercel
vercel dev
curl -X POST http://localhost:3000/api/chat \
-H "Content-Type: application/json" \
-d '{"message": "Hello!"}'
Set the required environment variables in the Vercel dashboard (Project Settings → Environment Variables) or in a local .env.local file:
OPENAI_API_KEY=your-key
/**
 * POST /api/chat — stream the agent's reply to the client as plain text.
 *
 * Expects a JSON body `{ "message": string }` and writes each generated
 * text chunk to the response stream as it arrives.
 */
export default async function handler(request: Request) {
  const { message } = await request.json();
  const runtime = await getRuntime();
  // Hoisted: one encoder for all chunks instead of a new one per callback.
  const encoder = new TextEncoder();
  const stream = new ReadableStream({
    async start(controller) {
      try {
        await runtime.messageService?.handleMessage(
          runtime,
          createMessage(message),
          async (content) => {
            // Forward each piece of generated text to the client immediately.
            if (content?.text) {
              controller.enqueue(encoder.encode(content.text));
            }
            return [];
          },
        );
        controller.close();
      } catch (err) {
        // Without this, a failure in handleMessage would leave the stream
        // open forever and the client hanging; propagate it to the reader.
        controller.error(err);
      }
    },
  });
  return new Response(stream, {
    headers: { "Content-Type": "text/plain; charset=utf-8" },
  });
}