Somehow I cannot change or mute the output from the render callback when using Core Audio.

Here is my I/O initialization function:

- (void)setupIOUnit
{
    // Create a new instance of AURemoteIO

    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    AudioComponentInstanceNew(comp, &rioUnit);


    //  Enable input and output on AURemoteIO
    //  Input is enabled on the input scope of the input element
    //  Output is enabled on the output scope of the output element

    UInt32 one = 1;
    AudioUnitSetProperty(rioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof(one));
    AudioUnitSetProperty(rioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, 0, &one, sizeof(one));

    AudioStreamBasicDescription audioFormat;
    audioFormat.mSampleRate         = 44100.00;
    audioFormat.mFormatID           = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags        = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket    = 1;
    audioFormat.mChannelsPerFrame   = 1;
    audioFormat.mBitsPerChannel     = 16;
    audioFormat.mBytesPerPacket     = 2;
    audioFormat.mBytesPerFrame      = 2;

    AudioUnitSetProperty(rioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &audioFormat, sizeof(audioFormat));
    AudioUnitSetProperty(rioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &audioFormat, sizeof(audioFormat));

    // Set the MaximumFramesPerSlice property. This property is used to describe to an audio unit the maximum number
    // of samples it will be asked to produce on any single given call to AudioUnitRender
    UInt32 maxFramesPerSlice = 4096;
    AudioUnitSetProperty(rioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFramesPerSlice, sizeof(UInt32));

    // Get the property value back from AURemoteIO. We are going to use this value to allocate buffers accordingly
    UInt32 propSize = sizeof(UInt32);
    AudioUnitGetProperty(rioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFramesPerSlice, &propSize);

    // Set the render callback on AURemoteIO
    AURenderCallbackStruct renderCallback;
    renderCallback.inputProc = performRender;
    renderCallback.inputProcRefCon = NULL;
    AudioUnitSetProperty(rioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &renderCallback, sizeof(renderCallback));
    NSLog(@"render set now");
    // Initialize the AURemoteIO instance
    AudioUnitInitialize(rioUnit);
    [self startIOUnit];
    return;
}
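
As a side note, none of the AudioUnitSetProperty calls above check their return values, so a failing call (for example, the render callback never actually being installed) would fail silently. Below is a minimal sketch of how that could be surfaced, assuming the same rioUnit variable; CheckStatus is just a hypothetical helper, not something already in my app:

#import <AudioToolbox/AudioToolbox.h>
#import <Foundation/Foundation.h>

// Hypothetical helper: log any non-zero OSStatus so a failed property call
// is visible at setup time instead of failing silently.
static void CheckStatus(OSStatus status, NSString *operation)
{
    if (status != noErr) {
        NSLog(@"%@ failed with OSStatus %d", operation, (int)status);
    }
}

// Example usage inside setupIOUnit:
// CheckStatus(AudioUnitSetProperty(rioUnit, kAudioUnitProperty_SetRenderCallback,
//                                  kAudioUnitScope_Input, 0,
//                                  &renderCallback, sizeof(renderCallback)),
//             @"set render callback");
// CheckStatus(AudioUnitInitialize(rioUnit), @"AudioUnitInitialize");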

Here is my render callback:

// Render callback function
static OSStatus performRender (void                         *inRefCon,
                               AudioUnitRenderActionFlags   *ioActionFlags,
                               const AudioTimeStamp         *inTimeStamp,
                               UInt32                       inBusNumber,
                               UInt32                       inNumberFrames,
                               AudioBufferList              *ioData)
{
    OSStatus err = noErr;
    // Pull this slice's input (microphone) samples from element 1 into ioData
    err = AudioUnitRender(rioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, ioData);
    if (ioData->mBuffers[0].mDataByteSize >= 12) {
        NSData *myAudioData = [NSData dataWithBytes: ioData->mBuffers[0].mData length:12];
        NSLog(@"aa playback's first 12 bytes: %@", myAudioData);
    }

    // Attempt to mute: overwrite the rendered samples with zeros before they
    // reach the speaker
    for (UInt32 i = 0; i < ioData->mNumberBuffers; ++i) {
        memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
    }

    return err;
}
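
For reference, the silencing could also be factored into a small helper that additionally sets kAudioUnitRenderAction_OutputIsSilence. This is only a sketch; the flag is a hint to downstream processing and should not change whether the buffers are actually muted:

#import <AudioToolbox/AudioToolbox.h>
#include <string.h>

// Sketch of a helper that silences whatever AudioUnitRender produced.
// Zeroing the buffers is what mutes; OutputIsSilence just tells downstream
// processing that the data is all zeros.
static void SilenceBuffers(AudioUnitRenderActionFlags *ioActionFlags,
                           AudioBufferList            *ioData)
{
    for (UInt32 i = 0; i < ioData->mNumberBuffers; ++i) {
        memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
    }
    *ioActionFlags |= kAudioUnitRenderAction_OutputIsSilence;
}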

This does not mute the output; I can still hear things from my streaming App. In what scenarios could this happen? Why is my sound not muted?

Any insight would be helpful.

1 Answer

Could you clarify what you mean when you say "I can still hear things from my streaming App"? Are you referring to sound coming in through the microphone, or do you have something else going on in the app?

FWIW: you're clearly adapting Apple's aurioTouch sample, and if I drop your setupIOUnit method and render callback into that project and run it, muting works fine; in other words, you don't appear to be breaking anything with the code you've posted here. That suggests the problem lies somewhere else in your code.
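
One quick way to test that theory: temporarily stop the I/O unit and listen. If the streaming audio keeps playing, it isn't being rendered through this unit at all, and zeroing ioData in performRender can never mute it. A rough sketch, assuming the same rioUnit used in your setup code (the method name is just for illustration):

// Diagnostic sketch: stop the RemoteIO unit and listen. If the sound
// continues, it is not flowing through this unit's render callback.
- (void)checkWhetherAudioGoesThroughRemoteIO
{
    OSStatus status = AudioOutputUnitStop(rioUnit);
    if (status != noErr) {
        NSLog(@"AudioOutputUnitStop failed with OSStatus %d", (int)status);
    }
    // ...listen for a few seconds, then resume with AudioOutputUnitStart(rioUnit)
}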