I am recording on an iPad and want to get at the raw sample data. When I import the resulting m4a file into Audacity, the displayed waveform is exactly what I want, but when I try to do something similar in Swift, I get the wrong numbers.
Here is my code. The recording part:
// Configure the shared audio session for simultaneous playback and recording.
let recordingSession = AVAudioSession.sharedInstance()
try! recordingSession.setCategory(.playAndRecord, mode: .default, options: [.allowBluetooth, .defaultToSpeaker])
try! recordingSession.overrideOutputAudioPort(.speaker)
try! recordingSession.setActive(true)

// Record a mono, 44.1 kHz AAC file into the app's Documents directory.
let filename = "1.m4a"
let fileUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    .appendingPathComponent(filename)
let settings: [String: Any] = [
    AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
    AVSampleRateKey: 44100,
    AVNumberOfChannelsKey: 1,
    AVEncoderAudioQualityKey: AVAudioQuality.max.rawValue
]
do {
    audioRecorder = try AVAudioRecorder(url: fileUrl, settings: settings)
    audioRecorder.delegate = self
    audioRecorder.prepareToRecord()
    audioRecorder.record()
} catch {
    finishRecording(success: false)
}
And here is the parsing part:
// Load the m4a from the app bundle.
if let url = Bundle.main.url(forResource: "1", withExtension: "m4a") {
    do {
        let file = try AVAudioFile(forReading: url)
        // Non-interleaved Float32 format matching the file, and a buffer large enough for the whole file.
        if let format = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                      sampleRate: file.fileFormat.sampleRate,
                                      channels: file.fileFormat.channelCount,
                                      interleaved: false),
           let buf = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(file.length)) {
            try file.read(into: buf)
            let buffer = buf.audioBufferList[0].mBuffers
            // UnsafeMutableRawBufferPointer is a collection of raw bytes (UInt8),
            // so this array ends up holding bytes rather than Float samples.
            let floatArray = Array(UnsafeMutableRawBufferPointer(start: buffer.mData, count: Int(buffer.mDataByteSize)))
            print(floatArray)
        }
    } catch {
        print("Audio Error: \(error)")
    }
}
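What I want to end up with is one Float per sample, like the waveform Audacity shows. Below is a sketch of how I think the samples could be pulled out via floatChannelData instead of the raw byte pointer; I am not sure this is the right approach, which is part of what I am asking:

// Sketch, assuming `buf` has been filled by file.read(into:) as above.
if let channelData = buf.floatChannelData {
    // Non-interleaved buffer: channelData[0] points at channel 0's Float32 samples.
    let frameCount = Int(buf.frameLength)
    let samples = Array(UnsafeBufferPointer(start: channelData[0], count: frameCount))
    print(samples.count, samples.prefix(10))
}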
Even the length of the array is much lower than the number of samples I expect. Is there a step I have misunderstood? Sorry for any English grammar mistakes, and thank you.