beta/app/w/[workspaceID]/p/gen/route.ts
Christopher Arraya bcf3f113ad i'm done
2023-11-05 07:34:37 -05:00

71 lines
1.9 KiB
TypeScript

import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs";
import { cookies } from "next/headers";
import { NextResponse, NextRequest } from "next/server";
import { Database } from "@/types/supabase";
import { prisma } from "@/lib/prisma";
import { completion } from "zod-gpt";
import * as z from "zod";
import { OpenAIChatApi } from "llm-api";
import { generateProjectQuestions } from "@/lib/prompts";
// OpenAI chat model used for question generation (16k-context variant per its name).
const OPENAI_MODEL = "gpt-3.5-turbo-16k";
// Bounds on how many questions the model may return; enforced by the zod
// schema passed to `completion` in the POST handler below.
const MIN_QUESTIONS = 1;
const MAX_QUESTIONS = 3;
// Module-level client shared across all requests handled by this route.
// NOTE(review): the `!` assumes OPENAI_API_KEY is always set in the environment;
// if it is missing the client is constructed with `undefined` and fails only at
// request time — confirm deployment configuration.
const openai = new OpenAIChatApi(
{
apiKey: process.env.OPENAI_API_KEY!,
},
{
model: OPENAI_MODEL,
}
);
/**
 * Minimal structural view of the Supabase client as used by this route:
 * only `auth.getSession()` is needed. The real `SupabaseClient` satisfies
 * this shape, so callers are unaffected.
 */
type SessionProvider = {
  auth: {
    getSession(): Promise<{ data: { session: unknown } }>;
  };
};

/**
 * Resolve the current auth session from the Supabase client.
 *
 * @param supabase - client exposing `auth.getSession()`. Previously typed
 *   `any`, which disabled all type checking on this call chain.
 * @returns the session value from `data.session` — `null`/`undefined` when
 *   the request is unauthenticated.
 */
async function getSession(supabase: SessionProvider) {
  const {
    data: { session },
  } = await supabase.auth.getSession();
  return session;
}
/**
 * POST /w/[workspaceID]/p/gen
 *
 * Generates MIN_QUESTIONS–MAX_QUESTIONS clarifying questions about a new
 * project: builds a prompt from the submitted name/description/stack, sends
 * it to OpenAI via `completion`, and validates the reply against a zod schema.
 *
 * @param req - request whose JSON body carries `name`, `description`, `stack`.
 * @param params.workspaceID - route segment; currently unused by this handler.
 * @returns 200 with `{ questions: string[] }`, a redirect to /auth when
 *   unauthenticated, 400 on a malformed body, 500 on unexpected failure.
 */
export async function POST(
  req: NextRequest,
  { params: { workspaceID } }: { params: { workspaceID: string } }
) {
  try {
    const supabase = createRouteHandlerClient<Database>({ cookies });
    const session = await getSession(supabase);
    // NextResponse.redirect requires an absolute URL — a bare "/auth" throws
    // at runtime — so resolve it against the incoming request URL.
    if (!session) return NextResponse.redirect(new URL("/auth", req.url));

    const formData = await req.json();
    // Reject missing fields up front: String(undefined) would otherwise
    // silently inject the literal string "undefined" into the prompt.
    if (formData?.name == null || formData?.description == null) {
      return NextResponse.json(
        { error: "name and description are required" },
        { status: 400 }
      );
    }
    const name = String(formData.name);
    const description = String(formData.description);
    const stack = formData.stack;

    const questionPrompt = generateProjectQuestions(name, description, stack);
    console.log("questionPrompt ", questionPrompt);

    // Ask the model for questions; the schema constrains the result to an
    // array of MIN_QUESTIONS..MAX_QUESTIONS strings.
    const res = await completion(openai, questionPrompt, {
      schema: z.object({
        questions: z
          .array(
            z.string().describe("A question to ask the user about the project")
          )
          .min(MIN_QUESTIONS)
          .max(MAX_QUESTIONS),
      }),
    });
    console.log("questions ", res.data.questions);

    return NextResponse.json(
      { questions: res.data.questions },
      { status: 200 }
    );
  } catch (err) {
    console.error(err);
    // A raw Error passed to NextResponse.json serializes as `{}` and may leak
    // internals; return only a safe message.
    const message = err instanceof Error ? err.message : "Internal server error";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}