2

I am using AudioWorkletProcessor, I need to store all the audio stream data into a single file and play it at the end.

Below is my AudioWorkletProcessor code:

class RecorderProcessor extends AudioWorkletProcessor {
  // 0. Determine the buffer size (this is the same as the 1st argument of ScriptProcessor)
  //bufferSize = 4096
  bufferSize = 256
  // 1. Track the current buffer fill level
  _bytesWritten = 0

  // 2. Create a buffer of fixed size
  _buffer = new Float32Array(this.bufferSize)
  
    constructor() {
          super(); // exception thrown here when not called

    this.initBuffer()
  }

  initBuffer() {
    this._bytesWritten = 0
  }

  isBufferEmpty() {
    return this._bytesWritten === 0
  }

  isBufferFull() {
    return this._bytesWritten === this.bufferSize
  }
  
    /**
   * @param {Float32Array[][]} inputs
   * @returns {boolean}
   */
  process(inputs) {
    
    // Grabbing the 1st channel similar to ScriptProcessorNode
    this.append(inputs[0][0])
  //  this.append(outputs[0][0])

    return true
  }

  /**
   *
   * @param {Float32Array} channelData
   */
  append(channelData) {
    
    
    
    if (this.isBufferFull()) {
      this.flush()
    }

    if (!channelData) return


    

    for (let i = 0; i < channelData.length; i++) {
      this._buffer[this._bytesWritten++] = channelData[i]
    }
  }
  
  flush() {
    // trim the buffer if ended prematurely
    this.port.postMessage(
      this._bytesWritten < this.bufferSize
        ? this._buffer.slice(0, this._bytesWritten)
        : this._buffer
    )
    this.initBuffer()
  }

}

registerProcessor("recorderWorkletProcessor", RecorderProcessor)

which returning 32 bit Float array Audio Data.

Below is my javascript code :

var recordingNode; //audio worklet node
var micSourceNode; //mic node
const chunks = []; // storing all stream audio chunks
try {
  navigator.getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia;
  microphone = navigator.getUserMedia({
    audio: true,
    video: false
  }, onMicrophoneGranted, onMicrophoneDenied);
} catch (e) {
  alert(e)
}

function onMicrophoneDenied() {
  console.log('denied')
}

async function onMicrophoneGranted(stream) {
  context = new AudioContext({
    sampleRate: 48000
  });
  micSourceNode = context.createMediaStreamSource(stream);
  await context.audioWorklet.addModule('/app_resources/recorderWorkletProcessor.js');
  recordingNode = new AudioWorkletNode(context, "recorderWorkletProcessor")
  recordingNode.port.onmessage = function(e) {

    chunks.push(e.data); //storing chunks in arrau
  }
  micSourceNode
    .connect(recordingNode)
    .connect(context.destination);

}

function stopstream() {
  if (micSourceNode)
    micSourceNode.disconnect(recordingNode);
  var blob = new Blob(chunks, {
    type: "audio/webm;codecs=opus"
  });
  console.log(blob.size)
  const audioUrl = URL.createObjectURL(blob);
  document.getElementById('song').innerHTML = '<audio id="audio-player" controls="controls" src="' + audioUrl + '" type="audio/mpeg">';

}

I am unable to convert the float 32 bit array into audio file. i can see the size in the blob but unable to play audio. Please help me understand what can i do here to make it work.

Feroz Siddiqui
  • 3,840
  • 6
  • 34
  • 69

0 Answers0