1

Currently, I am consuming a streaming API (transfer-encoding: 'chunked') and using the data in each chunk to render the UI as each new chunk is received. When I check the Timing tab for the network call in my browser, the TTFB is around 300 ms, but the time to receive my first chunk is more than 3000 ms, even though the time to receive subsequent chunks is quite low (~2-3 ms). It seems like all the chunks are waiting in an internal queue. What could be the reason for this high latency on the first chunk, or what is wrong in the implementation below?

// Request configuration for the chunked-transfer endpoint.
let requestOptions = {
    method: 'GET',
    headers,
};

// Kick off the request and pump the response through the chunk pipeline:
// wrap the body in a ReadableStream, drain it chunk by chunk, then report
// completion; any rejection anywhere in the chain lands in the error handler.
fetch(`api url`, requestOptions)
    .then((response) => processChunkedResponse(response))
    .then((stream) => processStream(stream))
    .then((result) => onChunkedResponseComplete(result))
    .catch((err) => onChunkedResponseError(err));

/**
 * Final-stage handler: the entire stream has been consumed.
 * @param {{done: boolean, value: undefined}} result - The terminal read() result.
 */
function onChunkedResponseComplete(result) {
    const message = "streaming completed";
    console.log(message);
}

/**
 * Terminal error handler for the streaming pipeline.
 *
 * The original left this empty ("// handle error"), which silently swallows
 * every failure — a fetch/network/stream error would vanish without a trace,
 * making problems like the first-chunk latency much harder to diagnose.
 * At minimum, surface the error.
 *
 * @param {Error} err - The rejection reason propagated by the fetch chain.
 */
function onChunkedResponseError(err) {
    console.error('streaming failed:', err);
}

/**
 * Wraps the response body in a new ReadableStream that re-emits each chunk.
 *
 * Fix: the original `reader.read().then(...)` had no rejection handler, so a
 * read error was silently dropped — `controller.error()` was never called,
 * the returned stream never errored, and the outer `.catch()` never fired
 * (the consumer would simply hang). Errors are now propagated into the
 * stream via `controller.error(err)`.
 *
 * NOTE(review): this wrapper adds nothing over returning `response.body`
 * directly; it is also not the cause of a 3000 ms first-chunk delay — that
 * kind of buffering typically happens upstream (e.g. response compression
 * or a proxy) — TODO confirm against the server/proxy configuration.
 *
 * @param {Response} response - Fetch response with a readable `body`.
 * @returns {ReadableStream<Uint8Array>} A stream of the body's chunks.
 */
function processChunkedResponse(response) {
    let reader = response.body.getReader();
    return new ReadableStream({
        start(controller) {
            function push() {
                reader.read().then(({ done, value }) => {
                    if (done) {
                        console.log('done', done);
                        controller.close();
                        return;
                    }
                    controller.enqueue(value);
                    console.log(done, value);
                    push();
                }).catch((err) => {
                    // Propagate read failures to the consumer; previously
                    // they were swallowed and the stream hung forever.
                    controller.error(err);
                });
            }
            push();
        }
    });
}

/**
 * Drains the stream chunk by chunk, processing each one for the UI, and
 * resolves with the terminal read() result once the stream is exhausted.
 *
 * @param {ReadableStream<Uint8Array>} stream - Stream of body chunks.
 * @returns {Promise<{done: boolean, value: undefined}>} The final read result.
 */
async function processStream(stream) {
    const reader = stream.getReader();
    let result = await reader.read();

    while (!result.done) {
        // process chunk as Uint8Array and update the UI
        console.log('recursing');
        result = await reader.read();
    }

    console.log('returning');
    return result;
}
nole
  • 563
  • 1
  • 5
  • 16
  • seems like an issue on the server rather than the client – Bravo Feb 28 '22 at 08:49
  • Not an issue on the server(written in Go) as I ran the test on the API layer and latency is fine there. It's the reader on Client(JS) which is delaying the first chunk. – nole Feb 28 '22 at 08:56
  • I take it you've tried multiple browsers to determine it's not an issue with whatever particular browser you use – Bravo Feb 28 '22 at 09:14
  • Yes, Chrome, Mozilla & Safari I have tried – nole Feb 28 '22 at 09:37

0 Answers