I had a similar question and tried several ways of making a versatile polyphonic sampler.
It's true that the AppleSampler
and the DunneSampler
support polyphony; however, I needed a sampler that I could control with more precision on a note-by-note basis; i.e., playing each "voice" with unique playback parameters like playspeed, etc.
I found that building a sampler based on the AudioPlayer
was the right path for me; and there, I created a member variable inside my sampler "voice" that kept track of when that voice was "busy"; when a "voice" is assigned a note to play, it marks itself as "busy", and when it's done, the callback from the AudioPlayer
executes a function that sets the voice's "busy" variable to "false".
I then use a "conductor" to find the first available voice that is not "busy" to play a sound.
Here is a snippet:
import AudioKit
import AudioKitUI
import AVFoundation
import Keyboard
import Combine
import SwiftUI
import DunneAudioKit
/// One polyphonic sampler "voice": its own AudioEngine driving a buffered
/// AudioPlayer through a VariSpeed node. The `busy` flag marks the voice as
/// in-use between `play(buffer:)` and the player's completion callback, so a
/// conductor can pick a free voice for each incoming note.
class AudioPlayerVoice: ObservableObject, HasAudioEngine {
// For audio playback
let engine = AudioEngine()
let player = AudioPlayer()
// Per-voice playback-rate control (unique playspeed per note).
let variSpeed: VariSpeed
// Index assigned by the conductor; 0 until initializeSamplerVoices() sets it.
var voiceNumber = 0
// True while this voice is rendering a buffer; cleared in donePlaying().
var busy : Bool
init() {
// Signal chain: player -> variSpeed -> engine output.
variSpeed = VariSpeed(player)
engine.output = variSpeed
do {
try engine.start()
} catch {
Log("AudioKit did not start!")
}
busy = false
variSpeed.rate = 1.0
// Buffered playback so the player renders from in-memory AVAudioPCMBuffers;
// the completion handler frees the voice when playback finishes.
player.isBuffered = true
player.completionHandler = donePlaying
}
/// Plays a single AVAudioPCMBuffer on this voice, marking it busy until the
/// completion callback fires.
func play(buffer: AVAudioPCMBuffer) {
// Set this voice to busy so that new incoming notes are not played here
busy = true
// Load buffer into player
player.load(buffer: buffer)
// Compare buffer and audioplayer formats
// print("Player format 1: ")
// print(player.outputFormat)
// print("Buffer format: ")
// print(buffer.format)
// Reconnect the player node using the buffer's format so buffers whose
// sample rate / channel count differ from the player's current format
// still play correctly.
player.playerNode.engine?.connect( player.playerNode, to: player.mixerNode, format: buffer.format)
// Compare buffer and audioplayer formats again to see if the above line changed anything
// print("Player format 2: ")
// print(player.outputFormat)
// Play sound with a completion callback
player.play(completionCallbackType: .dataPlayedBack)
}
/// Completion callback: frees this voice so the conductor can reuse it.
func donePlaying() {
print("done!")
busy = false
}
}
/// Owns a fixed pool of AudioPlayerVoice instances plus the decoded sample
/// buffers, and routes each incoming note to the first idle voice.
class AudioPlayerConductor: ObservableObject {
    // Mark Published so View updates label on changes
    @Published private(set) var lastPlayed: String = "None"

    /// Size of the voice pool; more voices = more simultaneous notes.
    let voiceCount = 16
    var soundFileList: [String] = []
    var buffers: [AVAudioPCMBuffer] = []
    var players: [AudioPlayerVoice] = []
    var sampleDict: [String: AVAudioPCMBuffer] = [:]

    /// Decodes every .wav file in the bundled "Samples" directory into an
    /// AVAudioPCMBuffer keyed by file name (without extension).
    ///
    /// Unlike the crash-prone `try!`/`!` version, unreadable or missing files
    /// are skipped with a log message, and a name is only added to
    /// `soundFileList` after its buffer loaded successfully — so `playXY`
    /// can never pick a name with no buffer behind it.
    func loadAudioFiles() {
        let fileNameExtension = ".wav"
        guard let files = try? FileManager.default.contentsOfDirectory(atPath: Bundle.main.bundlePath + "/Samples") else {
            print("No Samples directory found in bundle")
            return
        }
        for file in files where file.hasSuffix(fileNameExtension) {
            // File name without the ".wav" extension.
            let name = String(file.dropLast(fileNameExtension.count))
            guard let url = Bundle.main.url(forResource: name, withExtension: "wav", subdirectory: "Samples") else {
                print("Missing URL for sample: \(name)")
                continue
            }
            do {
                let audioFile = try AVAudioFile(forReading: url)
                let audioFormat = audioFile.processingFormat
                let audioFrameCount = AVAudioFrameCount(audioFile.length)
                guard let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount) else {
                    print("Could not allocate buffer for sample: \(name)")
                    continue
                }
                try audioFile.read(into: audioFileBuffer)
                // Register the sample only once it is fully decoded.
                soundFileList.append(name)
                sampleDict[name] = audioFileBuffer
            } catch {
                print("Failed to load sample \(name): \(error)")
            }
        }
        print("Loaded Samples:")
        print(soundFileList)
    }

    /// Creates the fixed pool of voices, numbered 1...voiceCount.
    func initializeSamplerVoices() {
        for i in 1...voiceCount {
            let newAudioPlayerVoice = AudioPlayerVoice()
            newAudioPlayerVoice.voiceNumber = i
            players.append(newAudioPlayerVoice)
        }
    }

    /// Plays `bufferToPlay` at `playspeed` on the first idle voice.
    /// If every voice is busy the note is dropped (same as the original loop).
    func playWithAvailableVoice(bufferToPlay: AVAudioPCMBuffer, playspeed: Float) {
        guard let voice = players.first(where: { !$0.busy }) else { return }
        voice.variSpeed.rate = playspeed
        voice.play(buffer: bufferToPlay)
    }

    /// Maps a screen tap to a playback rate (x axis) and a sample index
    /// (y axis), then triggers the sound on an available voice.
    func playXY(x: Double, y: Double) {
        guard !soundFileList.isEmpty else { return }
        let playspeed = Float(AliSwift.scale(x, 0.0, UIScreen.screenWidth, 0.1, 3.0))
        let rawIndex = Int(AliSwift.scale(y, 0.0, UIScreen.screenHeight, 0, Double(soundFileList.count - 1)))
        // Clamp in case the tap location falls outside the expected range.
        let soundNumber = min(max(rawIndex, 0), soundFileList.count - 1)
        guard let soundBuffer = sampleDict[soundFileList[soundNumber]] else { return }
        playWithAvailableVoice(bufferToPlay: soundBuffer, playspeed: playspeed)
    }

    init() {
        loadAudioFiles()
        initializeSamplerVoices()
    }
}
/// Full-screen tap surface: tap position selects the sample (y) and the
/// playback rate (x) via the conductor.
struct ContentViewAudioPlayer: View {
    @StateObject var conductor = AudioPlayerConductor()

    var body: some View {
        ZStack {
            VStack {
                Rectangle()
                    .fill(.red)
                    .frame(maxWidth: .infinity)
                    .frame(maxHeight: .infinity)
                    .onTapGesture { location in
                        print("Tapped at \(location)")
                        // The sound is chosen from the tap position inside
                        // playXY — the old force-unwrapped randomElement()
                        // call was unused (and crashed when no samples
                        // loaded), so it is gone.
                        conductor.playXY(x: location.x, y: location.y)
                    }
            }
        }
    }
}
struct ContentViewAudioPlayer_Previews: PreviewProvider {
    static var previews: some View {
        // Preview the view defined in this file (was ContentView, which is a
        // different view and made this preview show the wrong screen).
        ContentViewAudioPlayer()
    }
}