0

I'm trying to get the following example working in a Xamarin.Mac project. It's an AUGraph that connects a mixer to the default output. The mixer has one input which is a render callback that generates a sine wave.

        // Build the processing graph: [render callback] -> mixer -> default output.
        var graph = new AUGraph();

        // Node for the default hardware output device (macOS equivalent of iOS's Remote IO).
        var output = graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Default));

        // Multichannel mixer node that will sit between the callback and the output.
        var mixer = graph.AddNode(AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel));

        // Open the graph (instantiates the underlying AudioUnits); 0 means success.
        if (graph.TryOpen() != 0)
        {
            throw new Exception();
        }

        // Resolve the mixer node to its AudioUnit so it can be configured directly.
        var mixNode = graph.GetNodeInfo(mixer);

        // configure mixer
        // The mixer has a single input bus (bus 0), fed by the render callback below.
        if (mixNode.SetElementCount(AudioUnitScopeType.Input, 1) != AudioUnitStatus.OK)
        {
            throw new Exception();
        }

        // Attach the sine-wave render callback to mixer input bus 0.
        if (mixNode.SetRenderCallback(HandleRenderDelegate, AudioUnitScopeType.Global, 0) != AudioUnitStatus.OK)
        {
            throw new Exception();
        }

        var outNode = graph.GetNodeInfo(output);

        // define stream description
        // Non-interleaved 32-bit native float, stereo, 44.1 kHz — matches
        // AudioFormatFlagsAudioUnitNativeFloat, hence 4 bytes per frame/packet per channel.
        var desc = new AudioStreamBasicDescription();
        desc.BitsPerChannel = 32;
        desc.BytesPerFrame = 4;
        desc.BytesPerPacket = 4;
        desc.Format = AudioFormatType.LinearPCM;
        desc.FormatFlags = AudioStreamBasicDescription.AudioFormatFlagsAudioUnitNativeFloat;
        desc.FramesPerPacket = 1;
        desc.ChannelsPerFrame = 2;
        desc.SampleRate = 44100;

        // set mixer input format
        if (mixNode.SetFormat(desc, AudioUnitScopeType.Input, 0) != AudioUnitStatus.OK)
        {
            throw new Exception();
        }

        // connect mixer's output to the output
        // NOTE: "ConnnectNodeInput" (triple n) is the actual Xamarin API spelling.
        if (graph.ConnnectNodeInput(mixer, 0, output, 0) != AUGraphError.OK)
        {
            throw new Exception();
        }

        // set format of mixer's output
        // Read back the mixer's output format and pin its sample rate before
        // applying it to both sides of the mixer -> output connection.
        desc = mixNode.GetAudioFormat(AudioUnitScopeType.Output);

        desc.SampleRate = 44100;

        if (outNode.SetFormat(desc, AudioUnitScopeType.Input,0) != AudioUnitStatus.OK)
        {
            throw new Exception();
        }

        if (mixNode.SetFormat(desc, AudioUnitScopeType.Output) != AudioUnitStatus.OK)
        {
            throw new Exception();
        }

        // NOTE(review): per the accepted answer below, the multichannel mixer's
        // output volume defaults to 0 on macOS, so without explicitly setting it
        // (e.g. via SetParameter on the mixer's output scope) the graph runs
        // silently even though the callback fires — confirm before shipping.

        // Initialize must follow all format/connection configuration.
        if (graph.Initialize() != AUGraphError.OK)
        {
            throw new Exception();
        }

        if (graph.Start() != AUGraphError.OK)
        {
            throw new Exception();
        }

The callback:

// Running sample index used to derive the sine phase. Wrapped inside the
// callback so it never grows without bound (see below).
int sample = 0;

/// <summary>
/// Render callback attached to mixer input bus 0: fills both channels of the
/// supplied (non-interleaved float) buffers with a 440 Hz sine at 44.1 kHz.
/// </summary>
/// <param name="actionFlags">Render flags supplied by the audio unit (unused).</param>
/// <param name="timeStamp">Timestamp for this render cycle (unused).</param>
/// <param name="busNumber">Input bus being rendered (always 0 here).</param>
/// <param name="numberFrames">Number of frames to produce.</param>
/// <param name="data">Output buffers; index 0 = left channel, 1 = right channel.</param>
/// <returns><see cref="AudioUnitStatus.OK"/> on success.</returns>
unsafe AudioUnitStatus HandleRenderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
{
    var left = (float*)data[0].Data;
    var right = (float*)data[1].Data;

    for (var i = 0; i < numberFrames; i++)
    {
        float sampleValue = (float)Math.Sin(sample * 2 * Math.PI * 440 / 44100);
        left[i] = right[i] = sampleValue;

        // 440 Hz completes a whole number of cycles every 2205 samples
        // (44100 / gcd(440, 44100)), so wrapping there yields the exact same
        // phase sequence while keeping the argument to Math.Sin small.
        // The original unbounded counter loses sine precision as it grows and
        // would eventually overflow int.
        sample = (sample + 1) % 2205;
    }
    return AudioUnitStatus.OK;
}

The callback is running and the buffer is being filled but no sound is generated. The example works in iOS with AudioTypeOutput.Remote being used in place of AudioTypeOutput.Default but for some reason macOS is not playing the sound. Any ideas?

Aaron Allen
  • 165
  • 6

1 Answer

0

The output volume of the mixer had to be set manually because it defaults to 0.

Aaron Allen
  • 165
  • 6