I use an audio unit with VoiceProcessingIO type to receive voice without echo. In RenderCallback I get sound samples and then set all buffer values to zero so that there is no playback.
Now I need to change its sample rate from 48000 to 16000 after receiving the sound and then pass the resulting sound through a lowpass filter.
I can't figure out how to connect several audio units to each other so that the audio data flows through them in a chain.
I know that I have to use kAudioUnitSubType_AUConverter for the converter and kAudioUnitSubType_LowPassFilter for the filter.
At this point I am desperate for help of any kind.
P.S. I found this blog post, which describes a similar problem, but the author's question was never answered. I don't understand why the author uses two converters. I'm also worried that he uses the RemoteIO type, and I don't understand why he connects those buses, or why in that order.
/// <summary>
/// Target audio format for the capture pipeline:
/// 16 kHz, mono, 16-bit signed PCM, one frame per packet.
/// </summary>
public static class SoundSettings
{
// Desired capture sample rate in Hz (the hardware typically runs at 48000).
public static readonly int SampleRate = 16000;
// Mono capture.
public static readonly int Channels = 1;
// 16-bit samples.
public static readonly int BytesPerSample = 2;
// Uncompressed LPCM: exactly one frame per packet.
public static readonly int FramesPerPacket = 1;
}
/// <summary>
/// Configures the shared iOS audio session for simultaneous recording and
/// playback, which the VoiceProcessingIO unit requires.
/// </summary>
private void SetupAudioSession()
{
// Must be called before any other AudioSession member is touched.
AudioSession.Initialize();
// PlayAndRecord enables microphone input and speaker output at the same time.
AudioSession.Category = AudioSessionCategory.PlayAndRecord;
// GameChat tunes the session for two-way voice chat.
AudioSession.Mode = AudioSessionMode.GameChat;
// Request an ~80 ms hardware I/O buffer; the OS may pick a nearby value.
AudioSession.PreferredHardwareIOBufferDuration = 0.08f;
}
/// <summary>
/// Creates and configures the VoiceProcessingIO audio unit.
/// The client-side stream format is set to the target format
/// (16 kHz, mono, 16-bit signed PCM). Because this differs from the
/// hardware rate, the VoiceProcessingIO unit performs the
/// 48 kHz -> 16 kHz sample-rate conversion internally, so no separate
/// AUConverter unit (and no AUGraph chaining) is needed for resampling.
/// </summary>
private void PrepareAudioUnit()
{
    _srcFormat = new AudioStreamBasicDescription
    {
        Format = AudioFormatType.LinearPCM,
        FormatFlags = AudioFormatFlags.LinearPCMIsSignedInteger |
                      AudioFormatFlags.LinearPCMIsPacked,
        // FIX: was AudioSession.CurrentHardwareSampleRate (typically 48000),
        // which left SoundSettings.SampleRate unused and delivered 48 kHz
        // samples. Using the desired client rate here makes the I/O unit
        // resample to 16 kHz before the render callback sees the data.
        SampleRate = SoundSettings.SampleRate,
        FramesPerPacket = SoundSettings.FramesPerPacket,
        BytesPerFrame = SoundSettings.BytesPerSample * SoundSettings.Channels,
        BytesPerPacket = SoundSettings.FramesPerPacket *
                         SoundSettings.BytesPerSample *
                         SoundSettings.Channels,
        BitsPerChannel = SoundSettings.BytesPerSample * 8,
        ChannelsPerFrame = SoundSettings.Channels,
        Reserved = 0
    };

    var audioComponent = AudioComponent.FindComponent(AudioTypeOutput.VoiceProcessingIO);
    _audioUnit = new AudioUnit.AudioUnit(audioComponent);

    // Element 1 = microphone (input), element 0 = speaker (output).
    _audioUnit.SetEnableIO(true, AudioUnitScopeType.Input, 1);
    _audioUnit.SetEnableIO(true, AudioUnitScopeType.Output, 0);

    // Client format on both app-facing scopes:
    // input scope of the output element (what we feed the speaker) and
    // output scope of the input element (what we read from the mic).
    _audioUnit.SetFormat(_srcFormat, AudioUnitScopeType.Input, 0);
    _audioUnit.SetFormat(_srcFormat, AudioUnitScopeType.Output, 1);

    // Our callback supplies the speaker data for element 0 and is where
    // the captured samples are pulled via AudioUnit.Render.
    _audioUnit.SetRenderCallback(this.RenderCallback, AudioUnitScopeType.Input, 0);
}
/// <summary>
/// Render callback attached to the VoiceProcessingIO output element.
/// Pulls the captured (echo-cancelled) microphone samples from the input
/// element, forwards them as a message, then zeroes the buffer so that
/// nothing is played back through the speaker.
/// </summary>
/// <returns><see cref="AudioUnitStatus.NoError"/> on success, or the
/// status returned by <c>AudioUnit.Render</c> on failure.</returns>
private AudioUnitStatus RenderCallback(
    AudioUnitRenderActionFlags actionFlags,
    AudioTimeStamp timeStamp,
    uint busNumber,
    uint numberFrames,
    AudioBuffers data)
{
    // Pull the recorded samples from the input element (bus 1) into `data`.
    var status = _audioUnit.Render(ref actionFlags, timeStamp, 1, numberFrames, data);
    if (status != AudioUnitStatus.OK)
    {
        return status;
    }

    // FIX: `dataByteSize` was used but never defined in this method;
    // derive it from the buffer the unit just filled.
    int dataByteSize = data[0].DataByteSize;

    // Copy the samples out and hand them to the messaging layer.
    var msgArray = new byte[dataByteSize];
    Marshal.Copy(data[0].Data, msgArray, 0, dataByteSize);
    var msg = _msgFactory.CreateAudioMsg(msgArray, msgArray.Length, (++_lastIndex));
    this.OnMsgReady(msg);

    // Disable playback: overwrite the hardware buffer with silence.
    // (msgArray is not reused here because the factory may retain it.)
    var silence = new byte[dataByteSize];
    Marshal.Copy(silence, 0, data[0].Data, dataByteSize);

    return AudioUnitStatus.NoError;
}