
I've been trying to use the microphone with AudioKit.

The code compiles and runs, and it also requests permission to access the microphone, but the frequency and amplitude readings are all 0.

Here is the class I wrote to dispatch microphone readings as events.

I've seen other questions that looked relevant and tried everything they suggested, such as changing the input device, but no luck.

My guess is that either I'm not understanding the AudioKit lifecycle correctly, or the @objc tags change the behaviour.

This code used to work on older iOS versions, but on 13.3 something seems to have changed. (The reason for the @objc tags is that I need to use this class with the React Native bridge.)

I thought this could be related to Info.plist, but I have already configured the microphone privacy key (NSMicrophoneUsageDescription) with a usage string, and still no luck.
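To double-check at runtime that the permission really was granted (this uses plain AVFoundation rather than AudioKit; the helper name is just for illustration):

import AVFoundation

// Diagnostic only: log the current microphone permission state.
// If this prints "denied" or "undetermined", the zero readings are a
// permissions problem rather than an AudioKit one.
func checkMicPermission() {
  switch AVAudioSession.sharedInstance().recordPermission {
  case .granted:
    print("microphone permission: granted")
  case .denied:
    print("microphone permission: denied")
  case .undetermined:
    AVAudioSession.sharedInstance().requestRecordPermission { granted in
      print("microphone permission requested, granted:", granted)
    }
  @unknown default:
    print("microphone permission: unknown state")
  }
}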

Am I missing something?

Thanks. Here is the code:

import Foundation
import AudioKit


@objc(AudioKitWrapper)
class AudioKitWrapper: NSObject {

  @objc  var mic: AKMicrophone!
  @objc  var timer: Timer!
  @objc  var tracker: AKFrequencyTracker!

  override init(){

    super.init()

    do {
        try AKSettings.setSession(category: .playAndRecord)
    } catch {
        AKLog("Could not set session category.")
    }

    AKSettings.defaultToSpeaker = true
    if let inputs = AudioKit.inputDevices {
      do {
        print(inputs)
        try AudioKit.setInputDevice(inputs[0])
        AKSettings.audioInputEnabled = true
        mic = AKMicrophone()
        try mic?.setDevice(inputs[0])
      }
      catch {
        print("microphone not supported")
      }
    }

    try? AudioKit.start()
    mic?.stop()

    self.tracker = AKFrequencyTracker.init(mic, hopSize: 4_096, peakCount: 20)
    timer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(self.loop), userInfo: nil, repeats: true)

  }


  @objc func start()  {
    if (mic?.isStarted == true) {
      print("stop")
      mic?.stop()
      try! AudioKit.stop()
    }
    else {
      print("start")
      mic?.start()
//      try! AudioKit.start()
      var silence = AKBooster(tracker, gain: 0)
      AudioKit.output = silence

    }

  }

  @objc static func requiresMainQueueSetup() -> Bool {
      return false
  }

  @objc func loop() {
    if (mic?.isStarted == true){
      print(self.tracker.amplitude, self.tracker.frequency)
      EventEmitter.sharedInstance.dispatch(name: "onChange",
                                           body: ["amplitude": tracker.amplitude, "frequency":tracker.frequency])

    }


  }



}

1 Answer


I watched the videos on going from the sandbox to production and changed a few things.

I realised the main issue with my code was how AKBooster and the rest of the signal chain were declared: the nodes need to be kept as properties (so they are not deallocated), and AudioKit.output needs to be assigned to the silenced tracker before AudioKit.start() is called. I changed the declarations to match what the videos showed, and it started to work.
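The essential wiring, condensed from the full class below (same property names), looks like this:

// Keep every node in the chain as a property so it is not deallocated.
mic = AKMicrophone()
micCopy1 = AKBooster(mic)                         // boosted copy of the mic that feeds the tracker
tracker = AKFrequencyTracker(micCopy1, hopSize: 4_096, peakCount: 20)
silence = AKBooster(tracker, gain: 0)             // gain 0: tracker stays in the render chain, nothing audible
AudioKit.output = silence                         // assign the output before starting the engine
try AudioKit.start()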

Here is the full Swift class that works:

import Foundation
import AudioKit


@objc(AudioKitWrapper)
class AudioKitWrapper: NSObject {

  @objc var mic: AKMicrophone!
  @objc var timer: Timer!
  @objc var tracker: AKFrequencyTracker!
  @objc var silence: AKBooster!      // zero-gain booster used as the engine output
  @objc var micCopy1: AKBooster!     // boosted copy of the mic that feeds the tracker


  @objc override init() {
    super.init()

    // Build the input side of the chain and keep references as properties.
    mic = AKMicrophone()
    micCopy1 = AKBooster(mic)

    do {
      try AKSettings.setSession(category: .playAndRecord)
      AKSettings.defaultToSpeaker = true
    } catch {
      print("Could not set session category.")
      return
    }
    if let inputs = AudioKit.inputDevices {
      do {
        try AudioKit.setInputDevice(inputs[0])
        try mic.setDevice(inputs[0])
      }
      catch {
        print("microphone not supported")
        return
      }
    }
    do {
      // Route the tracker through a zero-gain booster and make it the engine
      // output before starting, so the tracker stays in the render chain
      // without producing audible output.
      tracker = AKFrequencyTracker(micCopy1, hopSize: 4_096, peakCount: 20)
      silence = AKBooster(tracker, gain: 0)
      AudioKit.output = silence
      try AudioKit.start()
      mic.stop()   // keep the mic off until start() is called from the JS side
    } catch {
      print("AudioKit did not start")
    }
    timer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(loop), userInfo: nil, repeats: true)
  }

  // Toggle the microphone on/off; exposed to React Native via RCT_EXTERN_METHOD(start).
  @objc func start() {

    print("started?", mic?.isStarted == true)
    if (mic?.isStarted == true) {
      print("stop")
      mic?.stop()
    }
    else {
      print("start")
      mic.start()
    }

  }

  // Return true so init runs on the main queue: Timer.scheduledTimer needs a
  // running run loop, which a background queue would not provide.
  @objc static func requiresMainQueueSetup() -> Bool {
    return true
  }

  // Called every 0.1 s by the timer; forwards the current readings to the JS side.
  @objc func loop() {
    if mic.isStarted {
      print("dispatch", tracker.frequency, tracker.amplitude)
      EventEmitter.sharedInstance.dispatch(name: "onChange",
                                           body: ["amplitude": self.tracker.amplitude, "frequency": self.tracker.frequency])
    }
  }



}

Also, for anyone who wants to make AudioKit work with React Native, here is the Objective-C bridge code:

AudioKitBridge.m

#import <Foundation/Foundation.h>
#import "React/RCTBridgeModule.h"

@interface RCT_EXTERN_MODULE(AudioKitWrapper, NSObject)

    RCT_EXTERN_METHOD(start)
@end

AudioKitBridge.h

#ifndef AudioKitBridge_h
#define AudioKitBridge_h

#import <React/RCTBridgeModule.h>

@interface AudioKitBridge: NSObject

@end

#endif /* AudioKitBridge_h */