It works fine locally and the streaming is perfect. I have deployed it to an AWS server, where it works but the data arrives too slowly: streaming does not behave properly and there is a delay of 1 to 2 seconds. I am also using the Edge runtime in my code. Please let me know why this is happening and why the data is streaming so slowly. I am calling the API directly, not using an SDK or NPM module for OpenAI.
Note: Next.js version 13. There are two files: the API route under /pages/api/streaming.ts and a component Home.tsx where I have an action function.
I have the code below:
// pages/api/streaming.ts
export const runtime = "edge";

// The OpenAI API key; assuming it comes from an environment variable
// (OPENAI_API_KEY is a placeholder for the variable name actually used).
const key = process.env.OPENAI_API_KEY;

export default async function handler(req: Request) {
  const { prompt } = await req.json();

  // Call the OpenAI chat completions endpoint directly with stream: true
  const res = await fetch("https://api.openai.com/v1/chat/completions", {
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${key}`,
    },
    method: "POST",
    body: JSON.stringify({
      model: "gpt-3.5-turbo",
      messages: [{ role: "user", content: prompt }],
      temperature: 0.7,
      top_p: 1,
      frequency_penalty: 0,
      presence_penalty: 0,
      max_tokens: 1000,
      stream: true,
      n: 1,
    }),
  });

  // Forward the upstream SSE body to the client unchanged
  return new Response(res.body, {
    status: 200,
    headers: {
      "Content-Type": "text/event-stream",
      "X-Content-Type-Options": "nosniff",
    },
  });
}
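The client then reads that stream on the page. For context, each SSE data event the handler forwards carries a JSON chunk, and the component only looks at choices[].delta.content; roughly this shape (a sketch limited to the fields the client code below reads):

// Sketch of the JSON payload inside each streamed "data:" event,
// limited to the fields the client-side parser reads.
type StreamChunk = {
  choices: {
    delta: {
      content?: string;
    };
  }[];
};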
Here is the action function in Home.tsx:

// Home.tsx — ParsedEvent / ReconnectInterval and createParser come from eventsource-parser
import { createParser, ParsedEvent, ReconnectInterval } from "eventsource-parser";

const handleGenerateResponse = async () => {
  setShowResponse(true);
  setResponse("");

  const response = await fetch(`${baseUrl}api/streaming`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      prompt,
    }),
  });

  if (!response.body) {
    throw new Error("Response body is null or undefined from ChatGpt API.");
  }

  const reader = response.body.getReader();
  console.log(reader, "reader");
  const decoder = new TextDecoder();

  // SSE callback: append each delta.content fragment to the response state as it arrives
  function onParse(event: ParsedEvent | ReconnectInterval) {
    console.log(event, "event");
    if (event.type === "event") {
      try {
        if (event.data === "[DONE]") {
          return;
        }
        const data = JSON.parse(event.data);
        data.choices
          .filter(
            ({ delta }: { delta: { content?: string } }) => !!delta?.content
          )
          .forEach(({ delta }: { delta: { content?: string } }) => {
            setResponse((prev: string) => `${prev}${delta.content}`);
          });
      } catch (error) {
        console.log(error);
      }
    }
  }

  // Read the response body chunk by chunk and feed it to the SSE parser
  const parser = createParser(onParse);
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    parser.feed(decoder.decode(value));
  }
};
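In case it helps, this is roughly how the action function is wired up inside Home.tsx (a minimal sketch; the hook names and the baseUrl value are placeholders, not my exact code):

// Home.tsx — simplified sketch of the surrounding component (illustrative only)
import { useState } from "react";

export default function Home() {
  const [prompt, setPrompt] = useState("");
  const [response, setResponse] = useState("");
  const [showResponse, setShowResponse] = useState(false);
  const baseUrl = "/"; // placeholder for the deployed host

  const handleGenerateResponse = async () => {
    // ...the action function shown above goes here...
  };

  return (
    <div>
      <textarea value={prompt} onChange={(e) => setPrompt(e.target.value)} />
      <button onClick={handleGenerateResponse}>Generate</button>
      {showResponse && <pre>{response}</pre>}
    </div>
  );
}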