I am trying to play back an audio file using AudioToolbox. I wrote this in Swift, based on an old Objective-C example on the Apple website. It compiles and runs; however, the callback function never gets triggered once CFRunLoop starts. (It does get called during setup, but only because I call it manually, so that doesn't count.)
My understanding of how this is supposed to work is that when this line is called:
status = AudioQueueNewOutput(&dataFormat, callback, &aqData, CFRunLoopGetCurrent(), commonModes, 0, &queue)
it is supposed to create an AudioQueue object, attach it to CFRunLoop, register the callback function named "callback", and then hand me back a reference to the queue object. Whether the callback lives inside the AudioQueue (which in turn lives inside CFRunLoop) or is registered with CFRunLoop directly, I'm not sure.
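For reference, this is the minimal shape of the wiring as I understand it (just a sketch of my mental model, not the actual Apple sample; the names here are mine):

import AudioToolbox

// The system should invoke this on the run loop passed to AudioQueueNewOutput,
// each time a buffer finishes playing and needs to be refilled.
let outputCallback: AudioQueueOutputCallback = { userData, inAQ, inBuffer in
    // refill inBuffer from the file and re-enqueue it here
}

var format = AudioStreamBasicDescription()  // filled in from the audio file in the real code
var queue: AudioQueueRef?
let status = AudioQueueNewOutput(&format,
                                 outputCallback,
                                 nil,                                 // userData handed back to the callback
                                 CFRunLoopGetCurrent(),               // run loop the callback should fire on
                                 CFRunLoopMode.commonModes.rawValue,
                                 0,
                                 &queue)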
When I'm done setting up, I call status = AudioQueueStart(aqData.mQueue!, nil), which "starts" the queue.
Then I call:
repeat {
    CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 0.1, false)
}
My understanding is that this is supposed to drive the audio queue, which in turn calls my callback function. However, from this point on, the callback function never gets hit. I am thinking there might be a way to inspect the audio queue, or perhaps inspect CFRunLoop; I might have made a mistake on one of the pointers somewhere.
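In case it matters, this is roughly the kind of inspection I had in mind (untested sketch; I'm assuming kAudioQueueProperty_IsRunning is the right property to poll):

import AudioToolbox

// Untested: ask the queue whether Core Audio considers it running,
// and dump the modes the current run loop actually contains.
func dumpPlaybackState(_ queue: AudioQueueRef) {
    var isRunning: UInt32 = 0
    var size = UInt32(MemoryLayout<UInt32>.size)
    let err = AudioQueueGetProperty(queue, kAudioQueueProperty_IsRunning, &isRunning, &size)
    print("kAudioQueueProperty_IsRunning:", err, isRunning)

    if let modes = CFRunLoopCopyAllModes(CFRunLoopGetCurrent()) {
        print("Current run loop modes:", modes)
    }
}

But I don't know what a healthy queue is supposed to report, so maybe that's a dead end.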
The result is that the app plays nothing but silence.
let kNumberBuffers = 3
var aqData = AQPlayerState()
var bufferLength: Float64 = 0.1
func playAudioFileWithToolbox(){
    let bundle = Bundle.main
    let permissions: AudioFilePermissions = .readPermission
    let filePath = bundle.path(forResource: "dreams", ofType: "wav")!
    var filePathArray = Array(filePath.utf8)
    let filePathSize = filePath.count
    let audioFileUrl = CFURLCreateFromFileSystemRepresentation(nil, &filePathArray, filePathSize, false)
    var status = AudioFileOpenURL(audioFileUrl!, permissions, kAudioFileWAVEType, &aqData.mAudioFile)
    if status != noErr{
        print("ErrorOpeningAudioFileUrl")
    }

    var dataFormatSize: UInt32 = UInt32(MemoryLayout<AudioStreamBasicDescription>.size)
    status = AudioFileGetProperty(aqData.mAudioFile!, kAudioFilePropertyDataFormat, &dataFormatSize, &aqData.mDataFormat)
    if status != noErr{
        print("Error getting AudioStreamBasicDescription")
    }

    var queue: AudioQueueRef? = aqData.mQueue
    var dataFormat = aqData.mDataFormat
    let commonModes = CFRunLoopMode.commonModes.rawValue
    status = AudioQueueNewOutput(&dataFormat, callback, &aqData, CFRunLoopGetCurrent(), commonModes, 0, &queue)
    if status == noErr{
        aqData.mQueue = queue
    } else {
        print("TroubleSettingUpOutputQueue")
    }

    var maxPacketSize: UInt32 = 0
    var propertySize: UInt32 = UInt32(MemoryLayout.size(ofValue: maxPacketSize))
    AudioFileGetProperty(aqData.mAudioFile!, kAudioFilePropertyPacketSizeUpperBound, &propertySize, &maxPacketSize)

    var bufferByteSize = aqData.bufferByteSize
    DeriveBufferSize(ASBDesc: &dataFormat, maxPacketSize: maxPacketSize, seconds: bufferLength, outBufferSize: &bufferByteSize, outNumPacketsToRead: &aqData.mNumPacketsToRead)
    aqData.bufferByteSize = bufferByteSize

    let isFormatVBR = aqData.mDataFormat.mBytesPerPacket == 0 || aqData.mDataFormat.mFramesPerPacket == 0
    if isFormatVBR{
        aqData.mPacketDescs = UnsafeMutablePointer<AudioStreamPacketDescription>.allocate(capacity: Int(aqData.mNumPacketsToRead))
    } else {
        aqData.mPacketDescs = nil
    }

    var cookieSize = UInt32(MemoryLayout.size(ofValue: UInt32.self))
    let couldNotGetProperty = AudioFileGetPropertyInfo(aqData.mAudioFile!, kAudioFilePropertyMagicCookieData, &cookieSize, nil)
    if couldNotGetProperty == 0 && cookieSize > 0{
        var magicCookie = UnsafeMutableRawPointer.allocate(byteCount: Int(cookieSize), alignment: MemoryLayout<UInt32>.alignment)
        status = AudioFileGetProperty(aqData.mAudioFile!, kAudioFilePropertyMagicCookieData, &cookieSize, &magicCookie)
        if status != noErr{
            print("Error:Failed to get magic cookie.")
        }
        AudioQueueSetProperty(aqData.mQueue!, kAudioQueueProperty_MagicCookie, magicCookie, cookieSize)
        magicCookie.deallocate()
    }

    aqData.mCurrentPacket = 0
    for i in 0..<kNumberBuffers{
        var pointer = aqData.mBuffers?.advanced(by: i)
        status = AudioQueueAllocateBuffer(aqData.mQueue!, aqData.bufferByteSize, &pointer)
        if status != noErr{
            print("Error allocating audio buffer.")
            continue
        }
        var buffer = aqData.mBuffers![i]
        callback(&aqData, aqData.mQueue!, &buffer) // I can't imagine how this does anything when it is not running
    }

    // Set Volume
    AudioQueueSetParameter(aqData.mQueue!, kAudioQueueParam_Volume, 0.5) // I have way bigger problems

    // Start Playing
    aqData.mIsRunning = true
    status = AudioQueueStart(aqData.mQueue!, nil)
    if status != noErr{
        print("Error:Failed to start audio queue.")
    }

    repeat {
        CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 0.1, false)
    } while aqData.mIsRunning

    CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 1, false)
}
private let callback: AudioQueueOutputCallback = { userData, inAQ, inBuffer in
    var aqData = userData!.load(as: AQPlayerState.self) // 255
    if !aqData.mIsRunning { return } // 2

    var numBytesReadFromFile: UInt32 = 0
    var numPackets = aqData.mNumPacketsToRead
    AudioFileReadPacketData(aqData.mAudioFile!, false, &numBytesReadFromFile, aqData.mPacketDescs, aqData.mCurrentPacket, &numPackets, inBuffer)
    if numPackets > 0 {
        inBuffer.pointee.mAudioDataByteSize = numBytesReadFromFile
        let packetCount = aqData.mPacketDescs!.pointee.mVariableFramesInPacket
        AudioQueueEnqueueBuffer(
            aqData.mQueue!,
            inBuffer,
            packetCount,
            aqData.mPacketDescs
        )
        aqData.mCurrentPacket += Int64(numPackets)
    } else {
        AudioQueueStop(aqData.mQueue!, false)
        aqData.mIsRunning = false
    }
}
func DeriveBufferSize(ASBDesc: inout AudioStreamBasicDescription, maxPacketSize: UInt32, seconds: Float64, outBufferSize: inout UInt32, outNumPacketsToRead: inout UInt32) {
    let maxBufferSize: UInt32 = 0x50000
    let minBufferSize: UInt32 = 0x4000

    if ASBDesc.mFramesPerPacket != 0 {
        let numPacketsForTime = ASBDesc.mSampleRate / Float64(ASBDesc.mFramesPerPacket) * seconds
        outBufferSize = UInt32(numPacketsForTime) * maxPacketSize
    } else { // 9
        outBufferSize = max(UInt32(maxBufferSize), maxPacketSize)
    }

    if outBufferSize > maxBufferSize && outBufferSize > maxPacketSize { // 10
        outBufferSize = UInt32(maxBufferSize)
    } else if outBufferSize < minBufferSize {
        outBufferSize = UInt32(minBufferSize)
    }

    outNumPacketsToRead = outBufferSize / UInt32(maxPacketSize) // 12
}
}
struct AQPlayerState {
    var mDataFormat: AudioStreamBasicDescription = AudioStreamBasicDescription()
    var mQueue: AudioQueueRef?
    var mBuffers: AudioQueueBufferRef? = UnsafeMutablePointer<AudioQueueBuffer>.allocate(capacity: 3)
    var mAudioFile: AudioFileID?
    var bufferByteSize: UInt32 = 0
    var mCurrentPacket: Int64 = 0
    var mNumPacketsToRead: UInt32 = 0
    var mPacketDescs: UnsafeMutablePointer<AudioStreamPacketDescription>?
    var mIsRunning: Bool = false

    init() {
    }
}