Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions examples/game-engine/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Vercel AI Gateway
# Automatically authenticated when deployed on Vercel
# For local development, get your key from https://vercel.com/ai-gateway
AI_GATEWAY_API_KEY=

# AI Model Configuration
# Default: anthropic/claude-sonnet-4-6
AI_GATEWAY_MODEL=anthropic/claude-sonnet-4-6

# ElevenLabs Text-to-Speech (optional, for NPC dialogue)
ELEVENLABS_API_KEY=

# Upstash Redis for rate limiting (optional, no-op if not set)
KV_REST_API_URL=
KV_REST_API_TOKEN=
RATE_LIMIT_PER_MINUTE=10
RATE_LIMIT_PER_DAY=100
48 changes: 48 additions & 0 deletions examples/game-engine/app/api/ai-game/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import { streamText } from "ai";
import { gateway } from "@ai-sdk/gateway";
import { headers } from "next/headers";
import { generateGameAIPrompt } from "@/lib/ai-game-prompt";
import { minuteRateLimit, dailyRateLimit } from "@/lib/rate-limit";

/**
 * POST /api/ai-game — stream AI-generated game logic for a prompt.
 * Body: { prompt: string, objects?: unknown[], previousPrompts?: string[] }.
 * Returns a plain text stream, 429 when rate-limited, 400 on bad input.
 */
export async function POST(req: Request) {
  // Rate limit by client IP: first hop of x-forwarded-for, "anonymous" when absent.
  const headersList = await headers();
  const ip = headersList.get("x-forwarded-for")?.split(",")[0] ?? "anonymous";

  // Check both windows in parallel; either one failing blocks the request.
  const [minuteResult, dailyResult] = await Promise.all([
    minuteRateLimit.limit(ip),
    dailyRateLimit.limit(ip),
  ]);

  if (!minuteResult.success || !dailyResult.success) {
    const isMinuteLimit = !minuteResult.success;
    return new Response(
      JSON.stringify({
        error: "Rate limit exceeded",
        message: isMinuteLimit
          ? "Too many requests. Please wait a moment before trying again."
          : "Daily limit reached. Please try again tomorrow.",
      }),
      {
        status: 429,
        headers: { "Content-Type": "application/json" },
      },
    );
  }

  // Guard the body parse: malformed JSON should be a 400, not an unhandled 500.
  let parsed;
  try {
    parsed = await req.json();
  } catch {
    return Response.json({ error: "Invalid JSON body" }, { status: 400 });
  }
  const { prompt, objects, previousPrompts } = parsed;

  // Require a non-empty string prompt (the old truthiness check let
  // non-string truthy values through to the prompt builder).
  if (typeof prompt !== "string" || prompt.length === 0) {
    return Response.json({ error: "Prompt is required" }, { status: 400 });
  }

  // Tolerate a missing/malformed objects field.
  const sceneObjects = Array.isArray(objects) ? objects : [];

  const result = streamText({
    model: gateway(
      process.env.AI_GATEWAY_MODEL || "anthropic/claude-sonnet-4-6",
    ),
    prompt: generateGameAIPrompt(prompt, sceneObjects, previousPrompts || []),
  });

  return result.toTextStreamResponse();
}
43 changes: 43 additions & 0 deletions examples/game-engine/app/api/ai/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
import { streamText } from "ai";
import { gateway } from "@ai-sdk/gateway";
import { headers } from "next/headers";
import { generateSystemPrompt, generateUserPrompt } from "@/lib/ai-prompt";
import { minuteRateLimit, dailyRateLimit } from "@/lib/rate-limit";

/**
 * POST /api/ai — stream an AI completion for a prompt against a spec.
 * Body: { prompt: string, spec?: unknown, previousPrompts?: string[] }.
 * Returns a plain text stream, 429 when rate-limited, 400 on bad input.
 */
export async function POST(req: Request) {
  // Rate limit by client IP: first hop of x-forwarded-for, "anonymous" when absent.
  const headersList = await headers();
  const ip = headersList.get("x-forwarded-for")?.split(",")[0] ?? "anonymous";

  // Check both windows in parallel; either one failing blocks the request.
  const [minuteResult, dailyResult] = await Promise.all([
    minuteRateLimit.limit(ip),
    dailyRateLimit.limit(ip),
  ]);

  if (!minuteResult.success || !dailyResult.success) {
    const isMinuteLimit = !minuteResult.success;
    return new Response(
      JSON.stringify({
        error: "Rate limit exceeded",
        message: isMinuteLimit
          ? "Too many requests. Please wait a moment before trying again."
          : "Daily limit reached. Please try again tomorrow.",
      }),
      {
        status: 429,
        headers: { "Content-Type": "application/json" },
      },
    );
  }

  // Guard the body parse: malformed JSON should be a 400, not an unhandled 500.
  let parsed;
  try {
    parsed = await req.json();
  } catch {
    return Response.json({ error: "Invalid JSON body" }, { status: 400 });
  }
  const { prompt, spec, previousPrompts } = parsed;

  // Validate the prompt the same way the sibling ai-game route does; this
  // route previously forwarded an absent prompt straight to the model.
  if (typeof prompt !== "string" || prompt.length === 0) {
    return Response.json({ error: "Prompt is required" }, { status: 400 });
  }

  const result = streamText({
    model: gateway(
      process.env.AI_GATEWAY_MODEL || "anthropic/claude-sonnet-4-6",
    ),
    system: generateSystemPrompt(),
    prompt: generateUserPrompt(prompt, spec, previousPrompts),
  });

  return result.toTextStreamResponse();
}
125 changes: 125 additions & 0 deletions examples/game-engine/app/api/character-responses/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
import { streamText } from "ai";
import { gateway } from "@ai-sdk/gateway";
import { headers } from "next/headers";
import speechQueue from "@/lib/speech-queue";
import { minuteRateLimit, dailyRateLimit } from "@/lib/rate-limit";

// One line of NPC dialogue returned to the client.
interface DialogMessage {
  // The spoken text (the generation prompt asks for < 100 characters).
  text: string;
  // Present only when ElevenLabs TTS produced audio for this line.
  audioUrl?: string;
}

// ElevenLabs voice IDs keyed by role keyword. Several keywords intentionally
// share a voice (e.g. elder/old/wise). Insertion order matters for the
// substring fallback scan below.
const VOICE_MAP: Record<string, string> = {
  elder: "pNInz6obpgDQGcFmaJgB",
  old: "pNInz6obpgDQGcFmaJgB",
  wise: "pNInz6obpgDQGcFmaJgB",
  warrior: "AZnzlk1XvdvUeBnXmlld",
  soldier: "AZnzlk1XvdvUeBnXmlld",
  guard: "AZnzlk1XvdvUeBnXmlld",
  child: "MF3mGyEYCl7XYWbV9V6O",
  young: "MF3mGyEYCl7XYWbV9V6O",
  merchant: "jBpfuIE2acCO8z3wKNLl",
  trader: "jBpfuIE2acCO8z3wKNLl",
  wizard: "IKne3meq5aSn9XLyUdCD",
  mage: "IKne3meq5aSn9XLyUdCD",
  magic: "IKne3meq5aSn9XLyUdCD",
};

/**
 * Pick an ElevenLabs voice ID for a character role.
 *
 * Exact keyword match is tried first, then a substring scan, then a default
 * voice. The exact pass fixes a shadowing bug: "soldier" contains the
 * substring "old", so the scan alone mapped role "soldier" to the elder
 * voice instead of its own explicit entry.
 */
function getVoiceIdForRole(role: string): string {
  const lower = role.toLowerCase();
  const exact = VOICE_MAP[lower];
  if (exact) return exact;
  for (const [keyword, voiceId] of Object.entries(VOICE_MAP)) {
    if (lower.includes(keyword)) return voiceId;
  }
  // Default voice when no keyword matches.
  return "ThT5KcBeYPX3keUQqHPh";
}

/**
 * POST /api/character-responses — generate 2-3 short dialogue lines for an
 * NPC role, optionally with TTS audio attached.
 * Body: { role?: string }. Always responds 200 with { messages } (canned
 * fallback lines on any generation failure), 429 when rate-limited, 400 on
 * a malformed body.
 */
export async function POST(req: Request) {
  // Rate limit by client IP: first hop of x-forwarded-for, "anonymous" when absent.
  const headersList = await headers();
  const ip = headersList.get("x-forwarded-for")?.split(",")[0] ?? "anonymous";

  // Check both windows in parallel; either one failing blocks the request.
  const [minuteResult, dailyResult] = await Promise.all([
    minuteRateLimit.limit(ip),
    dailyRateLimit.limit(ip),
  ]);

  if (!minuteResult.success || !dailyResult.success) {
    const isMinuteLimit = !minuteResult.success;
    return new Response(
      JSON.stringify({
        error: "Rate limit exceeded",
        message: isMinuteLimit
          ? "Too many requests. Please wait a moment before trying again."
          : "Daily limit reached. Please try again tomorrow.",
      }),
      {
        status: 429,
        headers: { "Content-Type": "application/json" },
      },
    );
  }

  // Guard the body parse: it previously sat outside the try block, so a
  // malformed JSON body produced an unhandled 500.
  let parsed;
  try {
    parsed = await req.json();
  } catch {
    return Response.json({ error: "Invalid JSON body" }, { status: 400 });
  }
  const { role } = parsed;
  // Normalize to a string so template interpolation and the voice lookup
  // never see a non-string role.
  const roleText = typeof role === "string" ? role : "";

  // Single source of truth for the canned lines used on any failure path
  // (previously duplicated in three places).
  const fallbackMessages: DialogMessage[] = [
    { text: "Hello there! How can I help you?" },
    { text: "It's a beautiful day, isn't it?" },
  ];

  const prompt = `You are a character with the following role: "${roleText || "villager"}".
Generate 2-3 short messages that this character would say when interacted with.
Keep each message under 100 characters.
Return ONLY a JSON array of objects with a "text" field for each message.
Example: [{"text":"Hello traveler! Welcome to our village."}, {"text":"Can I help you with something?"}]`;

  try {
    // streamText returns its result synchronously; drain the text stream to
    // collect the full reply before parsing it.
    const result = streamText({
      model: gateway(
        process.env.AI_GATEWAY_MODEL || "anthropic/claude-sonnet-4-6",
      ),
      prompt,
    });

    let text = "";
    for await (const chunk of result.textStream) {
      text += chunk;
    }

    // The model is asked for a bare JSON array; extract the widest [...] span
    // in case the reply wraps it in prose or code fences.
    let messages: DialogMessage[] = [];
    try {
      const jsonMatch = text.match(/\[.*\]/s);
      messages = JSON.parse(jsonMatch ? jsonMatch[0] : "[]");
    } catch {
      messages = fallbackMessages;
    }

    if (messages.length === 0) {
      messages = fallbackMessages;
    }

    // TTS is optional; each line degrades to text-only if synthesis fails.
    if (process.env.ELEVENLABS_API_KEY) {
      const voiceId = getVoiceIdForRole(roleText);
      const withAudio: DialogMessage[] = [];
      for (const msg of messages) {
        try {
          const audioUrl = await speechQueue.add(msg.text, voiceId);
          withAudio.push({ ...msg, audioUrl });
        } catch {
          withAudio.push(msg);
        }
      }
      return Response.json({ messages: withAudio });
    }

    return Response.json({ messages });
  } catch {
    // Any upstream failure still yields usable dialogue.
    return Response.json({ messages: fallbackMessages });
  }
}
21 changes: 21 additions & 0 deletions examples/game-engine/app/api/environments/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import { NextResponse } from "next/server";
import { list } from "@vercel/blob";

/**
 * GET /api/environments — list environment assets stored in Vercel Blob.
 * Responds with { environments: [{ name, url, size, uploadedAt }] }.
 */
export async function GET() {
  try {
    const listing = await list({ prefix: "game-engine/environments/" });
    const environments = listing.blobs.map((blob) => {
      // Display name is the last path segment of the blob key.
      const segments = blob.pathname.split("/");
      return {
        name: segments[segments.length - 1] || "Unknown",
        url: blob.url,
        size: blob.size,
        uploadedAt: blob.uploadedAt,
      };
    });
    return NextResponse.json({ environments });
  } catch (error) {
    // Log the cause server-side; the client only sees a generic 500.
    console.error("Failed to list environments:", error);
    return NextResponse.json(
      { error: "Failed to list environments" },
      { status: 500 },
    );
  }
}
21 changes: 21 additions & 0 deletions examples/game-engine/app/api/models/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import { NextResponse } from "next/server";
import { list } from "@vercel/blob";

/**
 * GET /api/models — list uploaded 3D model assets stored in Vercel Blob.
 * Responds with { models: [{ name, url, size, uploadedAt }] }.
 */
export async function GET() {
  try {
    const { blobs } = await list({ prefix: "game-engine/models/" });
    const models = blobs.map((blob) => {
      // Display name is the final segment of the blob's path.
      const name = blob.pathname.split("/").pop() || "Unknown";
      const { url, size, uploadedAt } = blob;
      return { name, url, size, uploadedAt };
    });
    return NextResponse.json({ models });
  } catch (error) {
    // Log the cause server-side; the client only sees a generic 500.
    console.error("Failed to list models:", error);
    return NextResponse.json(
      { error: "Failed to list models" },
      { status: 500 },
    );
  }
}
50 changes: 50 additions & 0 deletions examples/game-engine/app/api/text-to-speech/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
/**
 * POST /api/text-to-speech — synthesize speech for `text` via ElevenLabs.
 * Body: { text: string, voiceId?: string }. Responds with audio/mpeg bytes
 * (cacheable for an hour), 400 on bad input, 500 on missing key or TTS error.
 */
export async function POST(req: Request) {
  // Guard the body parse: malformed JSON should be a 400, not an unhandled 500.
  let parsed;
  try {
    parsed = await req.json();
  } catch {
    return new Response(JSON.stringify({ error: "Invalid JSON body" }), {
      status: 400,
      headers: { "Content-Type": "application/json" },
    });
  }
  const { text, voiceId } = parsed;

  // Reject missing/empty text up front instead of sending it to ElevenLabs.
  if (typeof text !== "string" || text.trim().length === 0) {
    return new Response(JSON.stringify({ error: "text is required" }), {
      status: 400,
      headers: { "Content-Type": "application/json" },
    });
  }

  const apiKey = process.env.ELEVENLABS_API_KEY;
  if (!apiKey) {
    return new Response(
      JSON.stringify({ error: "ELEVENLABS_API_KEY not set" }),
      {
        status: 500,
        headers: { "Content-Type": "application/json" },
      },
    );
  }

  // Fall back to a default voice when the caller does not specify one.
  const voice =
    typeof voiceId === "string" && voiceId ? voiceId : "21m00Tcm4TlvDq8ikWAM";

  const response = await fetch(
    // Encode the voice ID so an untrusted value cannot alter the URL path.
    `https://api.elevenlabs.io/v1/text-to-speech/${encodeURIComponent(voice)}`,
    {
      method: "POST",
      headers: {
        "xi-api-key": apiKey,
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        text,
        model_id: "eleven_monolingual_v1",
        voice_settings: {
          stability: 0.5,
          similarity_boost: 0.75,
        },
      }),
    },
  );

  if (!response.ok) {
    // Keep the upstream status in server logs; the client gets a generic 500.
    console.error("ElevenLabs TTS request failed with status", response.status);
    return new Response(JSON.stringify({ error: "TTS generation failed" }), {
      status: 500,
      headers: { "Content-Type": "application/json" },
    });
  }

  const audioBuffer = await response.arrayBuffer();
  return new Response(audioBuffer, {
    headers: {
      "Content-Type": "audio/mpeg",
      "Cache-Control": "public, max-age=3600",
    },
  });
}
29 changes: 29 additions & 0 deletions examples/game-engine/app/api/upload-environment/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import { NextResponse } from "next/server";
import { put } from "@vercel/blob";

/**
 * POST /api/upload-environment — store an uploaded file in Vercel Blob under
 * game-engine/environments/. Multipart body: `file` plus optional `filename`
 * override. Responds with { url, name }, 400 on bad input, 500 on failure.
 */
export async function POST(request: Request) {
  try {
    const formData = await request.formData();
    const file = formData.get("file");

    // formData.get can return a plain string field; only accept real file
    // parts (the old `as File` cast let strings through the null check).
    if (!(file instanceof File)) {
      return NextResponse.json({ error: "No file provided" }, { status: 400 });
    }

    const filenameFromForm = formData.get("filename");
    const filename = filenameFromForm ? String(filenameFromForm) : file.name;
    // Lowercase and replace anything outside [a-z0-9.], then strip leading
    // dots so names like ".." cannot escape the environments/ prefix.
    const cleanFilename = filename
      .toLowerCase()
      .replace(/[^a-z0-9.]/g, "-")
      .replace(/^\.+/, "");

    if (!cleanFilename) {
      return NextResponse.json({ error: "Invalid filename" }, { status: 400 });
    }

    const blob = await put(`game-engine/environments/${cleanFilename}`, file, {
      access: "public",
    });

    return NextResponse.json({ url: blob.url, name: cleanFilename });
  } catch (error) {
    // Log the cause server-side; the client only sees a generic 500.
    console.error("Failed to upload environment:", error);
    return NextResponse.json(
      { error: "Failed to upload file" },
      { status: 500 },
    );
  }
}
29 changes: 29 additions & 0 deletions examples/game-engine/app/api/upload-model/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
import { NextResponse } from "next/server";
import { put } from "@vercel/blob";

/**
 * POST /api/upload-model — store an uploaded file in Vercel Blob under
 * game-engine/models/. Multipart body: `file` plus optional `filename`
 * override. Responds with { url, name }, 400 on bad input, 500 on failure.
 */
export async function POST(request: Request) {
  try {
    const formData = await request.formData();
    const file = formData.get("file");

    // formData.get can return a plain string field; only accept real file
    // parts (the old `as File` cast let strings through the null check).
    if (!(file instanceof File)) {
      return NextResponse.json({ error: "No file provided" }, { status: 400 });
    }

    const filenameFromForm = formData.get("filename");
    const filename = filenameFromForm ? String(filenameFromForm) : file.name;
    // Lowercase and replace anything outside [a-z0-9.], then strip leading
    // dots so names like ".." cannot escape the models/ prefix.
    const cleanFilename = filename
      .toLowerCase()
      .replace(/[^a-z0-9.]/g, "-")
      .replace(/^\.+/, "");

    if (!cleanFilename) {
      return NextResponse.json({ error: "Invalid filename" }, { status: 400 });
    }

    const blob = await put(`game-engine/models/${cleanFilename}`, file, {
      access: "public",
    });

    return NextResponse.json({ url: blob.url, name: cleanFilename });
  } catch (error) {
    // Log the cause server-side; the client only sees a generic 500.
    console.error("Failed to upload model:", error);
    return NextResponse.json(
      { error: "Failed to upload file" },
      { status: 500 },
    );
  }
}
Loading
Loading