Currently, I am consuming a streaming API(transfer-encoding: 'chunked') and using the data in each chunk to render the UI as and when new chunk is received. When I check the timings tab under network call in my browser TTFB is around 300 ms but when I check the time to receive my first chunk, it comes more than 3000 ms though time to receive subsequent chunks is quite less (~2-3 ms) which seems like all the chunks are waiting in the internal queue. What could be the reason for this high latency for first chunk or what's wrong in the implementation below?
let requestOptions = {
method: 'GET',
headers,
};
fetch(`api url`, requestOptions)
.then(processChunkedResponse)
.then(processStream)
.then(onChunkedResponseComplete)
.catch(onChunkedResponseError)
;
function onChunkedResponseComplete(result) {
console.log("streaming completed")
}
function onChunkedResponseError(err) {
// handle error
}
function processChunkedResponse(response) {
let reader = response.body.getReader();
return new ReadableStream({
start(controller) {
function push() {
reader.read().then(({ done, value }) => {
if (done) {
console.log('done', done);
controller.close();
return;
}
controller.enqueue(value);
console.log(done, value);
push();
})
}
push();
}
});
}
function processStream(stream) {
let reader = stream.getReader();
return readChunk();
function readChunk() {
return reader.read().then(appendChunks);
}
function appendChunks(result) {
// process chunk as Uint8Array and update the UI
if (result.done) {
console.log('returning')
return result;
} else {
console.log('recursing')
return readChunk();
}
}
}