6

I want to list all available audio devices in Swift to provide a selection for input and output. My application should listen on one audio channel and "write" to another. I do not want the system default!

// AVCaptureDevice.devices(for:) is deprecated (iOS 11+ / macOS 10.15+);
// enumerate through a DiscoverySession instead.
// NOTE: AVCaptureDevice only lists *capture* (input) devices — audio
// output devices will never appear here, which is why the original
// code can report 0 devices on a machine with only outputs attached.
let discoverySession = AVCaptureDevice.DiscoverySession(
    deviceTypes: [.builtInMicrophone, .externalUnknown],
    mediaType: .audio,
    position: .unspecified
)
let devices = discoverySession.devices

print(devices.count)

for device in devices {
    print(device.localizedName)
}

The code lists 0 devices, but I expect it to find at least the internal output device.

Some links to CoreAudio, AudioToolbox and AVFoundation that explain the audio source selection would be nice.

pkamb
  • 33,281
  • 23
  • 160
  • 191
Peter Shaw
  • 1,867
  • 1
  • 19
  • 32

4 Answers

17

Here's some Swift 5 code that will enumerate all the audio devices.

You can use the uid with AVAudioPlayer's currentDevice property to output to a specific device.

import Cocoa
import AVFoundation

/// Wraps a CoreAudio `AudioDeviceID` and exposes a few commonly needed
/// device properties (output capability, UID, human-readable name).
class AudioDevice {
    var audioDeviceID: AudioDeviceID

    init(deviceID: AudioDeviceID) {
        self.audioDeviceID = deviceID
    }

    /// True when the device reports at least one output channel in its
    /// stream configuration.
    var hasOutput: Bool {
        var address = AudioObjectPropertyAddress(
            mSelector: AudioObjectPropertySelector(kAudioDevicePropertyStreamConfiguration),
            mScope: AudioObjectPropertyScope(kAudioDevicePropertyScopeOutput),
            mElement: 0)

        // Ask CoreAudio how many bytes the stream configuration occupies.
        // (The original seeded this with sizeof(CFString?), which was
        // meaningless — GetPropertyDataSize overwrites it anyway.)
        var propsize: UInt32 = 0
        var result = AudioObjectGetPropertyDataSize(audioDeviceID, &address, 0, nil, &propsize)
        guard result == noErr else { return false }

        // Allocate exactly `propsize` BYTES for the variable-length
        // AudioBufferList. The original allocated `propsize` *elements*
        // (over-allocating by sizeof(AudioBufferList)×) and never freed
        // the buffer; `defer` guarantees deallocation on every exit path.
        let rawBuffer = UnsafeMutableRawPointer.allocate(
            byteCount: Int(propsize),
            alignment: MemoryLayout<AudioBufferList>.alignment)
        defer { rawBuffer.deallocate() }

        let bufferList = rawBuffer.bindMemory(to: AudioBufferList.self, capacity: 1)
        result = AudioObjectGetPropertyData(audioDeviceID, &address, 0, nil, &propsize, bufferList)
        guard result == noErr else { return false }

        // Output-capable iff any buffer carries at least one channel.
        return UnsafeMutableAudioBufferListPointer(bufferList)
            .contains { $0.mNumberChannels > 0 }
    }

    /// Device unique identifier; usable e.g. with
    /// `AVPlayer.audioOutputDeviceUniqueID` to target this device.
    var uid: String? {
        stringProperty(AudioObjectPropertySelector(kAudioDevicePropertyDeviceUID))
    }

    /// Human-readable device name.
    var name: String? {
        stringProperty(AudioObjectPropertySelector(kAudioDevicePropertyDeviceNameCFString))
    }

    /// Shared helper for the two CFString-valued global properties above
    /// (the originals were copy-paste duplicates). Returns nil on any
    /// CoreAudio error.
    private func stringProperty(_ selector: AudioObjectPropertySelector) -> String? {
        var address = AudioObjectPropertyAddress(
            mSelector: selector,
            mScope: AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
            mElement: AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))

        var value: CFString? = nil
        var propsize = UInt32(MemoryLayout<CFString?>.size)
        let result = AudioObjectGetPropertyData(audioDeviceID, &address, 0, nil, &propsize, &value)
        guard result == noErr else { return nil }
        return value as String?
    }
}


/// Enumerates every audio device known to CoreAudio and prints the
/// output-capable ones (name + UID). Drop the `hasOutput` check, or
/// negate it, to list input devices instead.
class AudioDeviceFinder {
    static func findDevices() {
        var address = AudioObjectPropertyAddress(
            mSelector: AudioObjectPropertySelector(kAudioHardwarePropertyDevices),
            mScope: AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
            mElement: AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))

        // Size query. The qualifier is unused, so pass 0/nil — the
        // original passed sizeof(AudioObjectPropertyAddress) alongside a
        // nil qualifier pointer, which was inconsistent.
        var propsize: UInt32 = 0
        var result = AudioObjectGetPropertyDataSize(AudioObjectID(kAudioObjectSystemObject), &address, 0, nil, &propsize)
        guard result == noErr else {
            print("Error \(result) from AudioObjectGetPropertyDataSize")
            return
        }

        // The property value is a packed array of AudioDeviceIDs.
        let numDevices = Int(propsize) / MemoryLayout<AudioDeviceID>.size
        var devids = [AudioDeviceID](repeating: AudioDeviceID(), count: numDevices)

        result = AudioObjectGetPropertyData(AudioObjectID(kAudioObjectSystemObject), &address, 0, nil, &propsize, &devids)
        guard result == noErr else {
            print("Error \(result) from AudioObjectGetPropertyData")
            return
        }

        for devid in devids {
            let audioDevice = AudioDevice(deviceID: devid)
            if audioDevice.hasOutput, let name = audioDevice.name, let uid = audioDevice.uid {
                print("Found device \"\(name)\", uid=\(uid)")
            }
        }
    }
}
stevex
  • 5,589
  • 37
  • 52
  • 1
    Aren't you also supposed to deallocate the memory you allocated (using .allocate)? – akuz Mar 20 '20 at 14:03
  • I came here looking for modern code that would help me supplant my deprecated dependency on CoreAudio: 'AudioHardwareGetPropertyInfo' is deprecated: first deprecated in macOS 10.6. Sigh. Still hoping. – zzyzy Dec 23 '20 at 18:16
  • @stevex This is not working for USB audio Devices. USB audio devices are listed but if we are speaking voice is going via system speaker only. Do we need to tweak some property? – Shivam Tripathi Aug 17 '21 at 14:01
5

The code you posted works perfectly fine for audio input devices when I paste it into an Xcode Playground.

Note, however, that the AVCaptureDevice API does not list audio output devices, as they are not capture devices but playback devices. If a device supports both input and output, you can still use the device's uniqueID in an output context, for example with AVPlayer's audioOutputDeviceUniqueID.

(Also note, that if you want your code to work on iOS as well, devices(for:) is marked as deprecated since iOS 11 and you should move to AVCaptureDevice.DiscoverySession instead.)

Regarding your request for additional info on Core Audio and AudioToolbox, this SO question has some pretty comprehensive answers on the matter. The question asks for input devices but the answers provide enough context to let you understand handling of the output side as well. There's even an answer with some (dated) Swift code. On a personal note I have to say calling Core Audio API from Swift is oftentimes more pain than gain. Because of that it might be faster, although a bit unsafer, wrapping those portions of code into Objective-C or plain C and exposing them via the Swift bridging header, if your project allows it.

bfx
  • 897
  • 10
  • 16
5

If you want something like an action sheet that lets the user switch between audio devices seamlessly, use this code.

Code

import Foundation
import AVFoundation
import UIKit

/// Presents an action-sheet picker of the current AVAudioSession audio
/// routes (Bluetooth, built-in device, headphones, CarPlay, speaker) and
/// switches the session's preferred input / output override accordingly.
/// Exposed to Objective-C via `@objc`.
@objc class AudioDeviceHandler: NSObject {
    
    // Shared singleton instance used by callers (e.g. view controllers).
    @objc static let shared = AudioDeviceHandler()
    
    /// Present the audio device selection alert.
    /// - Parameters:
    ///   - presenterViewController: view controller that presents the alert
    ///   - sourceView: popover anchor view (required on iPad)
    @objc func presentAudioOutput(_ presenterViewController : UIViewController, _ sourceView: UIView) {
        let speakerTitle = "Speaker"
        let headphoneTitle = "Headphones"
        let deviceTitle = (UIDevice.current.userInterfaceIdiom == .pad) ? "iPad" : "iPhone"
        let cancelTitle = "Cancel"
        
        // Placeholder action; replaced when a built-in mic/receiver port is
        // found in the loop below, and only added if no headphones exist.
        var deviceAction = UIAlertAction()
        var headphonesExist = false
        let optionMenu = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)
        
        // Route options are derived from the session's *input* ports.
        guard let availableInputs = AVAudioSession.sharedInstance().availableInputs else {
            print("No inputs available ")
            return
        }
        
        // Build one action per recognized port type.
        for audioPort in availableInputs {
            switch audioPort.portType {
            case .bluetoothA2DP, .bluetoothHFP, .bluetoothLE :
                let bluetoothAction = UIAlertAction(title: audioPort.portName, style: .default) { _ in
                    self.setPreferredInput(port: audioPort)
                }
                
                // NOTE(review): "checked" is a private UIAlertAction KVC key
                // used to show a checkmark; not public API — may break in a
                // future iOS release or trigger App Review scrutiny.
                if isCurrentOutput(portType: audioPort.portType) {
                    bluetoothAction.setValue(true, forKey: "checked")
                }
                
                optionMenu.addAction(bluetoothAction)
                
            case .builtInMic, .builtInReceiver:
                
                // Built-in device route; action is added later, outside the
                // loop, and only when no headphones are connected.
                deviceAction = UIAlertAction(title: deviceTitle, style: .default, handler: { _ in
                    self.setToDevice(port: audioPort)
                })
                
            case .headphones, .headsetMic:
                headphonesExist = true
                
                let headphoneAction = UIAlertAction(title: headphoneTitle, style: .default) { _ in
                    self.setPreferredInput(port: audioPort)
                }
                
                if isCurrentOutput(portType: .headphones) || isCurrentOutput(portType: .headsetMic) {
                    headphoneAction.setValue(true, forKey: "checked")
                }
                
                optionMenu.addAction(headphoneAction)
                
            case .carAudio:
                let carAction = UIAlertAction(title: audioPort.portName, style: .default) { _ in
                    self.setPreferredInput(port: audioPort)
                }
                
                if isCurrentOutput(portType: audioPort.portType) {
                    carAction.setValue(true, forKey: "checked")
                }
                optionMenu.addAction(carAction)
                
            default:
                break
            }
        }
        
        // device actions only required if no headphone available
        if !headphonesExist {
            if (isCurrentOutput(portType: .builtInReceiver) ||
                isCurrentOutput(portType: .builtInMic)) {
                deviceAction.setValue(true, forKey: "checked")
            }
            optionMenu.addAction(deviceAction)
        }
        
        // configure speaker action
        let speakerAction = UIAlertAction(title: speakerTitle, style: .default) { _ in
            self.setOutputToSpeaker()
        }
        if isCurrentOutput(portType: .builtInSpeaker) {
            speakerAction.setValue(true, forKey: "checked")
        }
        optionMenu.addAction(speakerAction)
        
        // configure cancel action
        let cancelAction = UIAlertAction(title: cancelTitle, style: .cancel)
        optionMenu.addAction(cancelAction)
        
        // Present as a popover on iPad, anchored to the supplied view.
        optionMenu.modalPresentationStyle = .popover
        if let presenter = optionMenu.popoverPresentationController {
            presenter.sourceView = sourceView
            presenter.sourceRect = sourceView.bounds
        }
        
        presenterViewController.present(optionMenu, animated: true, completion: nil)
        
        // auto dismiss after 5 seconds
        DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) {
            optionMenu.dismiss(animated: true, completion: nil)
        }
    }
    
    /// Force playback through the built-in speaker via the session's
    /// output override; logs (but does not surface) any error.
    @objc func setOutputToSpeaker() {
        do {
            try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
        } catch let error as NSError {
            print("audioSession error turning on speaker: \(error.localizedDescription)")
        }
    }
    
    /// Route audio to `port` by setting it as the session's preferred
    /// input; logs (but does not surface) any error.
    fileprivate func setPreferredInput(port: AVAudioSessionPortDescription) {
        do {
            try AVAudioSession.sharedInstance().setPreferredInput(port)
        } catch let error as NSError {
            print("audioSession error change to input: \(port.portName) with error: \(error.localizedDescription)")
        }
    }
    
    /// Route to the built-in device: first clear any speaker override,
    /// then set `port` as the preferred input. Order matters — the
    /// override would otherwise keep playback on the speaker.
    fileprivate func setToDevice(port: AVAudioSessionPortDescription) {
        do {
            // remove speaker if needed
            try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.none)
            // set new input
            try AVAudioSession.sharedInstance().setPreferredInput(port)
        } catch let error as NSError {
            print("audioSession error change to input: \(AVAudioSession.PortOverride.none.rawValue) with error: \(error.localizedDescription)")
        }
    }
    
    /// True when the session's current route has an output of `portType`.
    @objc func isCurrentOutput(portType: AVAudioSession.Port) -> Bool {
        AVAudioSession.sharedInstance().currentRoute.outputs.contains(where: { $0.portType == portType })
    }
    
}

How to use

class ViewController: UIViewController {

    /// Button the route picker is anchored to (popover source on iPad).
    @IBOutlet weak var audioButton: UIButton!

    /// Presents the audio device selection action sheet.
    @IBAction func selectAudio(_ sender: Any) {
        AudioDeviceHandler.shared.presentAudioOutput(self, audioButton)
    }
}

Result

enter image description here

Sreekuttan
  • 1,579
  • 13
  • 19
2

It is possible to list input and output devices. This is a simplification of stevex's answer.

For output devices:

    // Keep only playback-capable devices: hasOutput is true when the
    // device's output stream configuration has at least one channel.
    if (audioDevice.hasOutput) {
        if let name = audioDevice.name,
            let uid = audioDevice.uid {
            print("Found device \"\(name)\", uid=\(uid)")
        }
    }

For input devices:

    // Inverted check: devices with no output channels are treated as
    // input (capture) devices.
    if (!audioDevice.hasOutput) {
        if let name = audioDevice.name,
            let uid = audioDevice.uid {
            print("Found device \"\(name)\", uid=\(uid)")
        }
    }

(Notice the ! before audioDevice.hasOutput.)

Cody Gray - on strike
  • 239,200
  • 50
  • 490
  • 574
user
  • 21
  • 1