
For a Vue.js messaging project, I'm using the wavesurfer.js library to record voice messages. However, Google Chrome gives me an audio/webm blob while Safari gives me an audio/mp4 blob.
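
For what it's worth, the container difference is easy to confirm with a quick MediaRecorder check, independent of wavesurfer's internals:

// Quick check of which recording containers the current browser supports
// (Chrome typically reports audio/webm, Safari audio/mp4)
['audio/webm', 'audio/mp4'].forEach((type) => {
  console.log(type, MediaRecorder.isTypeSupported(type));
});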

I'm trying to find a way to transcode the blob to audio/mp3. I've tried several approaches, including ffmpeg.wasm. However, ffmpeg gives me an error when compiling with "npm run dev": "Can't resolve '/node_modules/@ffmpeg/core/dist/ffmpeg-core.js'".

"@ffmpeg/core": "^0.11.0",
"@ffmpeg/ffmpeg": "^0.11.6"

I tried downgrading the ffmpeg packages:

"@ffmpeg/core": "^0.9.0",
"@ffmpeg/ffmpeg": "^0.9.8"

I no longer get the error when compiling, but when I try to convert my audio stream, the console reports a SharedArrayBuffer problem: "Uncaught (in promise) ReferenceError: SharedArrayBuffer is not defined".

Here's my complete code below. Is there a reliable way to transcode the audio stream to mp3?

Can you give me an example?

Thanks

<template>
  <div class="left-panel">
    <header class="radial-blue">
      <div class="container">
        <h1 class="mb-30">Ask your first question to our therapists</h1>
        <p><b>Please note</b>: you only have 2 messages. Make sure to use them wisely!</p>
        <div class="available-messages">
          <div class="item disabled">
            <span>Message 1</span>
          </div>
          <div class="item">
            <span>Message 2</span>
          </div>
        </div>
      </div>
    </header>
  </div>
  <div class="right-panel">
    <div class="messagerie bg-light">
      <messaging ref="messagingComponent" :access="access"></messaging>
      <footer>
        <button type="button" v-show="!isRecording && !isMicrophoneLoading && !isSubmitLoading" @click="startRecording"><img src="/assets/backoffice/images/record-start.svg"></button>
        <div v-show="isMicrophoneLoading || isSubmitLoading" class="loading-animation">
          <img src="/assets/backoffice/images/record-loading.svg">
        </div>
        <button type="button" v-show="isRecording && !isSubmitLoading" @click="stopRecording"><img src="/assets/backoffice/images/record-stop.svg"></button>
        <div v-show="!isRecording && !isMicrophoneLoading && !isSubmitLoading" class="textarea gradient text-dark">
          <textarea id="messageTextarea" placeholder="Ask your question" v-model="messageText"></textarea>
        </div>
        <div v-show="isMicrophoneLoading" class="loading-text">Chargement de votre microphone en cours...</div>
        <div v-show="isSubmitLoading" class="loading-text">Envoi de votre message en cours...</div>
        <div v-show="isRecording" id="visualizer" ref="visualizer"></div>
        <button v-show="!isRecording && !isMicrophoneLoading && !isSubmitLoading" type="button" @click="sendMessage"><img src="/assets/backoffice/images/send.svg"></button>
        <div v-show="isRecording" :class="timer >= (redTimer * 60) ? 'timer red' : (timer >= (orangeTimer * 60) ? 'timer orange' : 'timer')">
          {{ formatTimer() }}
        </div>
      </footer>
    </div>
  </div>
</template>

<script>
import Messaging from "./Messaging.vue";
import { createFFmpeg, fetchFile } from '@ffmpeg/ffmpeg';

export default {
  data() {
    return {
      isMicrophoneLoading: false,
      isSubmitLoading: false,
      isMobile: false,
      isMessagerie: false,
      isRecording: false,
      audioUrl: '',
      messageText: '',
      message:null,
      wavesurfer: null,
      access: 'granted', // isMobile is not yet defined when data() runs, so a ternary on this.isMobile would always yield 'granted'
      maxMinutes: 5,
      orangeTimer: 3,
      redTimer: 4,
      timer: 0,
      timerInterval: null,
      ffmpeg: null,
    };
  },
  components: {
    Messaging,
  },
  mounted() {
    this.checkScreenSize();
    window.addEventListener('resize', this.checkScreenSize);

    if(!this.isMobile)
    {
      this.$moment.locale('fr');
      window.addEventListener('beforeunload', (event) => {
        if (this.isMessagerie) {
          event.preventDefault();
          event.returnValue = '';
        }
      });

      this.initializeWaveSurfer();
    }
  },
  beforeUnmount() {
    window.removeEventListener('resize', this.checkScreenSize);
  },
  methods: {
    checkScreenSize() {
      this.isMobile = window.innerWidth < 1200;

      const windowHeight = window.innerHeight;
      const navbarHeight = this.$navbarHeight;
      const padding = parseInt(navbarHeight, 10) + 181;

      const messageListHeight = windowHeight - padding;
      this.$refs.messagingComponent.$refs.messageList.style.height = messageListHeight + 'px';
    },
    showMessagerie() {
      this.isMessagerie = true;
      this.$refs.messagingComponent.scrollToBottom();
    },
    checkMicrophoneAccess() {
      if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {

        return navigator.mediaDevices.getUserMedia({audio: true})
            .then(function (stream) {
              stream.getTracks().forEach(function (track) {
                track.stop();
              });
              return true;
            })
            .catch(function (error) {
              console.error('Error requesting microphone access:', error);
              return false;
            });
      } else {
        console.error('getUserMedia is not supported by your browser.');
        return false;
      }
    },
    initializeWaveSurfer() {
      this.wavesurfer = this.$wavesurfer.create({
        container: '#visualizer',
        barWidth: 3,
        barHeight: 1.5,
        height: 46,
        responsive: true,
        waveColor: 'rgba(108,115,202,0.3)',
        progressColor: 'rgba(108,115,202,1)',
        cursorColor: 'transparent'
      });

      this.record = this.wavesurfer.registerPlugin(this.$recordPlugin.create());
    },
    startRecording() {
      const _this = this;
      this.isMicrophoneLoading = true;

      setTimeout(() =>
      {
        _this.checkMicrophoneAccess().then(function (accessible)
        {
          if (accessible) {
            _this.record.startRecording();

            _this.record.once('startRecording', () => {
              _this.isMicrophoneLoading = false;
              _this.isRecording = true;
              _this.updateChildMessage('server', 'Go ahead! You can record your audio message now. The maximum length allowed for your recording is 5 minutes.', 'text', '', 'Automatic message');
              _this.startTimer();
            });
          } else {
            _this.isRecording = false;
            _this.isMicrophoneLoading = false;
            _this.$swal.fire({
              title: 'Microphone not detected',
              html: '<p>Your device\'s microphone is inaccessible or access was denied.</p><p>Please check your browser settings to verify your microphone permissions.</p>',
              footer: '<a href="/contact">Need help?</a>',
            });
          }
        });
      }, 100);
    },
    stopRecording() {
      this.stopTimer();
      this.isRecording = false;
      this.isSubmitLoading = true;
      this.record.stopRecording();

      this.record.once('stopRecording', () => {
        const blobUrl = this.record.getRecordedUrl();
        fetch(blobUrl).then(response => response.blob()).then(blob => {
          this.uploadAudio(blob);
        });
      });
    },
    startTimer() {
      this.timerInterval = setInterval(() => {
        this.timer++;
        if (this.timer === this.maxMinutes * 60) {
          this.stopRecording();
        }
      }, 1000);
    },
    stopTimer() {
      clearInterval(this.timerInterval);
      this.timer = 0;
    },
    formatTimer() {
      const minutes = Math.floor(this.timer / 60);
      const seconds = this.timer % 60;
      const formattedMinutes = minutes < 10 ? `0${minutes}` : minutes;
      const formattedSeconds = seconds < 10 ? `0${seconds}` : seconds;
      return `${formattedMinutes}:${formattedSeconds}`;
    },
    async uploadAudio(blob)
    {
      const format = blob.type.includes('webm') ? 'webm' : 'mp4'; // Chrome may report a type like 'audio/webm;codecs=opus', so match on the substring

      // Convert the blob to MP3
      const mp3Blob = await this.convertToMp3(blob, format);

      const s3 = new this.$AWS.S3({
        accessKeyId: 'xxx',
        secretAccessKey: 'xxx',
        region: 'eu-west-1'
      });

      const currentDate = new Date();
      const filename = currentDate.getDate() + '-' + (currentDate.getMonth() + 1) + '-' + currentDate.getFullYear() + '--' + currentDate.getHours() + '-' + currentDate.getMinutes() + '.mp3'; // getMonth() is zero-based; use .mp3 to match the uploaded payload

      const params = {
        Bucket: 'xxx/audio',
        Key: filename,
        Body: mp3Blob,
        ACL: 'public-read',
        ContentType: 'audio/mpeg' // standard MIME type for MP3; 'audio/mp3' is non-standard
      }

      s3.upload(params, (err, data) => {
        if (err) {
          console.error('Error uploading audio:', err)
        } else {
          const currentDate = this.$moment();
          const timestamp = currentDate.format('dddd DD MMMM YYYY HH:mm');

          this.updateChildMessage( 'client', '', 'audio', mp3Blob, timestamp);
          this.isSubmitLoading = false;
        }
      });
    },
    async convertToMp3(blob, format) {
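      // Note: a fresh ffmpeg instance is created and loaded on every call; the ffmpeg property declared in data() is never used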
      const ffmpeg = createFFmpeg({ log: true });
      await ffmpeg.load();

      const inputPath = 'input.' + format;
      const outputPath = 'output.mp3';

      ffmpeg.FS('writeFile', inputPath, await fetchFile(blob));

      await ffmpeg.run('-i', inputPath, '-acodec', 'libmp3lame', outputPath);

      const mp3Data = ffmpeg.FS('readFile', outputPath);
      const mp3Blob = new Blob([mp3Data.buffer], { type: 'audio/mpeg' });

      ffmpeg.FS('unlink', inputPath);
      ffmpeg.FS('unlink', outputPath);

      return mp3Blob;
    },
    sendMessage() {
      this.isSubmitLoading = true;
      if (this.messageText.trim() !== '') {
        const emmet = 'client';
        const text = this.escapeHTML(this.messageText)
            .replace(/\n/g, '<br>');

        const currentDate = this.$moment();
        const timestamp = currentDate.format('dddd DD MMMM YYYY HH:mm');

        this.$nextTick(() => {
          this.messageText = '';

          const textarea = document.getElementById('messageTextarea');
          if (textarea) {
            textarea.scrollTop = 0;
            textarea.scrollLeft = 0;
          }
        });

        this.updateChildMessage(emmet, text, 'text', '', timestamp);
        this.isSubmitLoading = false;
      }
    },
    escapeHTML(text) {
      const map = {
        '&': '&amp;',
        '<': '&lt;',
        '>': '&gt;',
        '"': '&quot;',
        "'": '&#039;',
        "`": '&#x60;',
        "/": '&#x2F;'
      };
      return text.replace(/[&<>"'`/]/g, (match) => map[match]);
    },
    updateChildMessage(emmet, text, type, blob, timestamp) {
      const newMessage = {
        id: this.$refs.messagingComponent.lastMessageId + 1,
        emmet: emmet,
        text: text,
        type: type,
        blob: blob,
        timestamp: timestamp
      };

      this.$refs.messagingComponent.updateMessages(newMessage);
    }
  },
};
</script>
  • It mentions in the [installation instructions](https://github.com/ffmpegwasm/ffmpeg.wasm#installation) "SharedArrayBuffer is only available to pages that are cross-origin isolated. So you need to host your own server with `Cross-Origin-Embedder-Policy: require-corp` and `Cross-Origin-Opener-Policy: same-origin` headers to use ffmpeg.wasm." – yoduh Jul 21 '23 at 12:50
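
For reference, a minimal sketch of those two headers on a Vite dev server (assuming the project is served by Vite; webpack's devServer.headers accepts the same pair):

// vite.config.js: serve the app cross-origin isolated so that
// SharedArrayBuffer (and ffmpeg.wasm's multithreaded core) is available
export default {
  server: {
    headers: {
      'Cross-Origin-Embedder-Policy': 'require-corp',
      'Cross-Origin-Opener-Policy': 'same-origin',
    },
  },
};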
