Javascript Audio Streaming via Audio Worklet Float 32 Array convert to audio file

481 views Asked by At

I am using AudioWorkletProcessor, I need to store all the audio stream data into a single file and play it at the end.

Below is my AudioWorkletProcessor code:

class RecorderProcessor extends AudioWorkletProcessor {
  // 0. Determine the buffer size (this is the same as the 1st argument of ScriptProcessor)
  //bufferSize = 4096
  bufferSize = 256
  // 1. Track the current buffer fill level
  _bytesWritten = 0

  // 2. Create a buffer of fixed size
  _buffer = new Float32Array(this.bufferSize)
  
    constructor() {
          super(); // exception thrown here when not called

    this.initBuffer()
  }

  initBuffer() {
    this._bytesWritten = 0
  }

  isBufferEmpty() {
    return this._bytesWritten === 0
  }

  isBufferFull() {
    return this._bytesWritten === this.bufferSize
  }
  
    /**
   * @param {Float32Array[][]} inputs
   * @returns {boolean}
   */
  process(inputs) {
    
    // Grabbing the 1st channel similar to ScriptProcessorNode
    this.append(inputs[0][0])
  //  this.append(outputs[0][0])

    return true
  }

  /**
   *
   * @param {Float32Array} channelData
   */
  append(channelData) {
    
    
    
    if (this.isBufferFull()) {
      this.flush()
    }

    if (!channelData) return


    

    for (let i = 0; i < channelData.length; i++) {
      this._buffer[this._bytesWritten++] = channelData[i]
    }
  }
  
  flush() {
    // trim the buffer if ended prematurely
    this.port.postMessage(
      this._bytesWritten < this.bufferSize
        ? this._buffer.slice(0, this._bytesWritten)
        : this._buffer
    )
    this.initBuffer()
  }

}

registerProcessor("recorderWorkletProcessor", RecorderProcessor)

which returning 32 bit Float array Audio Data.

Below is my javascript code :

var recordingNode; //audio worklet node
var micSourceNode; //mic node
const chunks = []; // storing all stream audio chunks
try {
  navigator.getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia;
  microphone = navigator.getUserMedia({
    audio: true,
    video: false
  }, onMicrophoneGranted, onMicrophoneDenied);
} catch (e) {
  alert(e)
}

function onMicrophoneDenied() {
  console.log('denied')
}

async function onMicrophoneGranted(stream) {
  context = new AudioContext({
    sampleRate: 48000
  });
  micSourceNode = context.createMediaStreamSource(stream);
  await context.audioWorklet.addModule('/app_resources/recorderWorkletProcessor.js');
  recordingNode = new AudioWorkletNode(context, "recorderWorkletProcessor")
  recordingNode.port.onmessage = function(e) {

    chunks.push(e.data); //storing chunks in arrau
  }
  micSourceNode
    .connect(recordingNode)
    .connect(context.destination);

}

function stopstream() {
  if (micSourceNode)
    micSourceNode.disconnect(recordingNode);
  var blob = new Blob(chunks, {
    type: "audio/webm;codecs=opus"
  });
  console.log(blob.size)
  const audioUrl = URL.createObjectURL(blob);
  document.getElementById('song').innerHTML = '<audio id="audio-player" controls="controls" src="' + audioUrl + '" type="audio/mpeg">';

}

I am unable to convert the float 32 bit array into audio file. i can see the size in the blob but unable to play audio. Please help me understand what can i do here to make it work.

1

There are 1 answers

0
Tung Le On

To store all the audio stream data into a single file and play it at the end as below steps:

Step 1: Add the function to take raw audio data in the form of a buffer and converts it into a WAV file, including the necessary WAV header information:

function convertToWav(buffer, sampleRate) {
  const numberOfChannels = 1; // Assuming mono audio
  const bytesPerSample = 2; // 16-bit PCM
  const dataSize = buffer.length * bytesPerSample;

  const bufferLength = buffer.length;
  const newBuffer = new ArrayBuffer(44 + dataSize);
  const view = new DataView(newBuffer);

  // WAV header
  writeString(view, 0, "RIFF");
  view.setUint32(4, 36 + dataSize, true);
  writeString(view, 8, "WAVE");
  writeString(view, 12, "fmt ");
  view.setUint32(16, 16, true);
  view.setUint16(20, 1, true); // AudioFormat: 1 (PCM)
  view.setUint16(22, numberOfChannels, true);
  view.setUint32(24, sampleRate, true);
  view.setUint32(28, sampleRate * numberOfChannels * bytesPerSample, true);
  view.setUint16(32, numberOfChannels * bytesPerSample, true);
  view.setUint16(34, bytesPerSample * 8, true);
  writeString(view, 36, "data");
  view.setUint32(40, dataSize, true);

  // PCM data
  const data = new Int16Array(newBuffer, 44);
  for (let i = 0; i < bufferLength; i++) {
    const val = Math.max(-1, Math.min(1, buffer[i]));
    data[i] = val < 0 ? val * 0x8000 : val * 0x7FFF;
  }

  return new Blob([view], { type: "audio/wav" });
}

function writeString(view, offset, string) {
  for (let i = 0; i < string.length; i++) {
    view.setUint8(offset + i, string.charCodeAt(i));
  }
}

Step 2: Update your stopstream function:

  function stopstream() {
    const wavBlob = convertToWav(chunks, 48000);
    const url = URL.createObjectURL(wavBlob);

    const audio = new Audio(url);
    audio.play();

    if (micSourceNode) micSourceNode.disconnect(recordingNode);
  }

The audio will be played after stopstream is called.