Here is the backend code for an API route in Next.js that can produce two kinds of responses: NextResponse and StreamingTextResponse.
The StreamingTextResponse comes from the Vercel AI SDK.
import { checkFreeApiLimit, incrementApiLimit } from "@/lib/api-limits";
import { checkSubscription } from "@/lib/subscription";
import { auth } from "@clerk/nextjs";
import { NextResponse } from "next/server";
import OpenAI from "openai";
import { OpenAIStream, StreamingTextResponse } from "ai";
// Module-level OpenAI client shared by all requests to this route.
// The key may be undefined here; POST below returns a 500 when it is missing.
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
/**
 * POST /api/conversation
 *
 * Streams a chat completion from OpenAI back to the client.
 *
 * Error responses (plain NextResponse bodies, not streams):
 * - 401 when there is no authenticated Clerk user.
 * - 500 when the OpenAI API key is not configured.
 * - 400 when the request body has no non-empty `messages` array.
 * - 403 when the free trial is exhausted and the user is not subscribed.
 * - 500 for any unexpected failure.
 *
 * On success, returns a StreamingTextResponse wrapping the OpenAI stream.
 */
export async function POST(request: Request) {
  try {
    const { userId } = auth();
    const body = await request.json();
    const { messages } = body;

    if (!userId) {
      return new NextResponse("Unauthorized User", { status: 401 });
    }

    if (!openai.apiKey) {
      return new NextResponse("OpenAI API Key not configured.", {
        status: 500,
      });
    }

    // Validate shape, not just presence: a non-array or empty payload would
    // otherwise reach the OpenAI call and surface as an opaque 500.
    if (!messages || !Array.isArray(messages) || messages.length === 0) {
      return new NextResponse("Messages are required.", { status: 400 });
    }

    const freeTrial = await checkFreeApiLimit();
    const isPro = await checkSubscription();

    if (!freeTrial && !isPro) {
      return new NextResponse("Free trial is expired.", { status: 403 });
    }

    const response = await openai.chat.completions.create({
      model: "gpt-3.5-turbo",
      stream: true,
      messages,
    });

    // Only meter usage for non-subscribers.
    if (!isPro) {
      await incrementApiLimit();
    }

    const stream = OpenAIStream(response);
    return new StreamingTextResponse(stream);
  } catch (error) {
    // Use the error channel so this is visible in production log filters.
    console.error("CONVERSATION_ERROR", error);
    return new NextResponse("Internal Error", { status: 500 });
  }
}
And this is the Client Component code in which the API is called using the useChat
hook imported from the Vercel `ai/react` package.
But the client only ever receives the StreamingTextResponse; the NextResponse error bodies never reach it.
I want to handle both kinds of responses from the API.
"use client";
import Heading from "@/components/Heading";
import { useChat } from "ai/react";
import { MessageSquare, SendHorizonal } from "lucide-react";
import { Input } from "@/components/ui/input";
import { Button } from "@/components/ui/button";
import { FormEvent, useState } from "react";
import { useRouter } from "next/navigation";
import { Empty } from "@/components/Empty";
import { cn } from "@/lib/utils";
import { UserAvatar } from "@/components/UserAvatar";
import { BotAvatar } from "@/components/BotAvatar";
import { useglobalProModal } from "@/hooks/ProModalProvider";
import { BeatLoader } from "react-spinners";
/**
 * Conversation page: a streaming chat UI backed by POST /api/conversation.
 *
 * `useChat` manages the message list, input state, and the streaming fetch.
 * Non-streaming error responses from the route (401/400/403/500) surface via
 * the `error` value returned by the hook, not as messages.
 */
const ConversationPage = () => {
  const router = useRouter();
  const proModal = useglobalProModal();
  const {
    messages, input, isLoading, error, handleInputChange, handleSubmit, setInput,
  } = useChat({
    api: "/api/conversation",
    onFinish() {
      setInput("");
      router.refresh();
    },
  });

  // NOTE(review): a 403 ("Free trial is expired.") lands in `error`; it likely
  // should trigger proModal.open() here — confirm the intended UX.
  if (error) {
    console.error(error);
  }

  const onSubmit = (e: FormEvent<HTMLFormElement>) => {
    // useChat appends the user message itself; just forward the event.
    e.preventDefault();
    handleSubmit(e);
  };

  return (
    <div>
      <Heading
        title="Conversation"
        description="The most advanced AI chat model."
        icon={MessageSquare}
        iconColor="text-violet-500"
        iconBgColor="bg-violet-500/10"
      />
      <div className="px-4 lg:px-8">
        <div>
          <form
            onSubmit={onSubmit}
            className="border rounded-md w-full p-4 px-3 md:px-6 focus-within:shadow-sm flex gap-x-2"
          >
            <Input
              className="border-0 outline-0 focus-visible:ring-0 focus-visible:ring-transparent caret-indigo-600"
              disabled={isLoading}
              value={input}
              placeholder="Prompt..."
              onChange={handleInputChange}
            />
            <Button disabled={isLoading}>
              <SendHorizonal />
            </Button>
          </form>
        </div>
        <div className="space-y-4 mt-4">
          {messages.length === 0 && !isLoading && (
            <Empty label="let's start the conversation." />
          )}
          <div className="flex flex-col-reverse gap-y-4">
            {messages.map((message) => (
              <div
                // Stable id from useChat — content is NOT unique (duplicates
                // and in-progress streams would collide as React keys).
                key={message.id}
                className={cn(
                  "p-8 w-full flex items-start gap-x-8 rounded-md",
                  message.role === "user"
                    ? "bg-white border border-black/10"
                    : "bg-muted"
                )}
              >
                {message.role === "user" ? <UserAvatar /> : <BotAvatar />}
                <p className="text-sm">
                  {/* Spinner only while the assistant reply has no content
                      yet; once tokens stream in, show them. The original
                      replaced EVERY assistant message with a spinner during
                      loading, hiding prior replies and the live stream. */}
                  {isLoading && message.role !== "user" && !message.content ? (
                    <BeatLoader size={5} className=" text-primary" />
                  ) : (
                    message.content
                  )}
                </p>
              </div>
            ))}
          </div>
        </div>
      </div>
    </div>
  );
};
export default ConversationPage;