1

I'm trying to implement iOS audio recording using RoboVM using the Apple's AudioQueue guide and their sample SpeakHere project and am running into this error:

No @Marshaler found for parameter 1 of @Callback method <AQRecorder: void HandleInputBuffer(AQRecorder,org.robovm.apple.audiotoolbox.AudioQueue,org.robovm.apple.audiotoolbox.AudioQueueBuffer,org.robovm.apple.coreaudio.AudioTimeStamp,int,org.robovm.apple.coreaudio.AudioStreamPacketDescription)>

Any ideas? Here's the code I'm using:

Main.java:

import org.robovm.apple.coregraphics.CGRect;
import org.robovm.apple.foundation.NSAutoreleasePool;
import org.robovm.apple.uikit.UIApplication;
import org.robovm.apple.uikit.UIApplicationDelegateAdapter;
import org.robovm.apple.uikit.UIApplicationLaunchOptions;
import org.robovm.apple.uikit.UIButton;
import org.robovm.apple.uikit.UIButtonType;
import org.robovm.apple.uikit.UIColor;
import org.robovm.apple.uikit.UIControl;
import org.robovm.apple.uikit.UIControlState;
import org.robovm.apple.uikit.UIEvent;
import org.robovm.apple.uikit.UIScreen;
import org.robovm.apple.uikit.UIWindow;

public class IOSDemo extends UIApplicationDelegateAdapter {

    private UIWindow window = null;

    @Override
    public boolean didFinishLaunching(UIApplication application, 
            UIApplicationLaunchOptions launchOptions) {

        final AQRecorder aqRecorder = new AQRecorder();

        final UIButton button = UIButton.create(UIButtonType.RoundedRect);
        button.setFrame(new CGRect(115.0f, 121.0f, 91.0f, 37.0f));
        button.setTitle("Start", UIControlState.Normal);

        button.addOnTouchUpInsideListener(new UIControl.OnTouchUpInsideListener() {
            @Override
            public void onTouchUpInside(UIControl control, UIEvent event) {
                if(button.getTitle(UIControlState.Normal) == "Stop"){
                    aqRecorder.stopRecord();
                    button.setTitle("Start", UIControlState.Normal);
                }
                else{
                    aqRecorder.startRecord();
                    button.setTitle("Stop", UIControlState.Normal);
                }
            }
        });

        window = new UIWindow(UIScreen.getMainScreen().getBounds());
        window.setBackgroundColor(UIColor.lightGray());
        window.addSubview(button);
        window.makeKeyAndVisible();

        try {
            aqRecorder.setUpAudioFormat();
        } catch (NoSuchMethodException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        return true;
    }

    public static void main(String[] args) {
        try (NSAutoreleasePool pool = new NSAutoreleasePool()) {
            UIApplication.main(args, null, IOSDemo.class);
        }
    }
}

AQRecorder.java:


import org.robovm.apple.audiotoolbox.AudioFile;
import org.robovm.apple.audiotoolbox.AudioQueue;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer;
import org.robovm.apple.audiotoolbox.AudioQueue.AudioQueuePtr;
import org.robovm.apple.coreaudio.AudioFormat;
import org.robovm.apple.coreaudio.AudioStreamBasicDescription;
import org.robovm.apple.coreaudio.AudioStreamPacketDescription;
import org.robovm.apple.coreaudio.AudioTimeStamp;
import org.robovm.rt.bro.annotation.Callback;
import org.robovm.rt.bro.ptr.FunctionPtr;
import org.robovm.rt.bro.ptr.VoidPtr;

public class AQRecorder {
        AudioStreamBasicDescription  mDataFormat;                   // 2
        AudioQueue                mQueue;                        // 3
        //AudioQueueBufferRef          mBuffers[kNumberBuffers];      // 4
        AudioFile                  mAudioFile;                    // 5
        int                      bufferByteSize;                // 6
        int                       mCurrentPacket;                // 7
        boolean                         mIsRunning;                    // 8

        public void startRecord(){
            mQueue.start(null);
        }

        public void stopRecord(){
            mQueue.stop(true);
        }

        @Callback
        static void HandleInputBuffer(
                AQRecorder                           aqData,
                AudioQueue                        inAQ,
                AudioQueueBuffer                  inBuffer,
                AudioTimeStamp                 inStartTime,
                int                               inNumPackets,
                AudioStreamPacketDescription   inPacketDesc
            ) {
                AQRecorder pAqData = aqData;               // 1

                if (inNumPackets == 0 && pAqData.mDataFormat.mBytesPerPacket() != 0)
                   inNumPackets = inBuffer.mAudioDataByteSize() / pAqData.mDataFormat.mBytesPerPacket();

               if (!aqData.mIsRunning)                                         // 5
                  return;

               System.out.println(inBuffer.mAudioData());
            }

        void setUpAudioFormat() throws NoSuchMethodException{
            mDataFormat = new AudioStreamBasicDescription(
                        16000, // mSampleRate
                        AudioFormat.LinearPCM, // mFormatID
                        (1 << 2), // mFormatFlags
                        512, // mBytesPerPacket
                        1, // mFramesPerPacket
                        512, // mBytesPerFrame
                        1, // mChanneslPerFrame
                        16, // mBitsPerChannel
                        0 // mReserved
                    );
            AudioQueuePtr mQueuePtr = new AudioQueuePtr();
            mQueuePtr.set(mQueue);
            VoidPtr self = new VoidPtr();

            @SuppressWarnings("rawtypes")
            Class[] cArg = new Class[6];
            cArg[0] = AQRecorder.class;
            cArg[1] = AudioQueue.class;
            cArg[2] = AudioQueueBuffer.class;
            cArg[3] = AudioTimeStamp.class;
            cArg[4] = int.class;
            cArg[5] = AudioStreamPacketDescription.class;

            FunctionPtr handleInputBuffer = new FunctionPtr((AQRecorder.class).getDeclaredMethod("HandleInputBuffer", cArg));
            AudioQueue.newInput(mDataFormat, handleInputBuffer, self, null, "", 0, mQueuePtr);
        }
    };
schmoofer
  • 11
  • 3

1 Answers1

1

With RoboVM 1.0.0-beta-3 I was finally able to get audio record and playback working. Not sure why the recording audio queue takes up to 20 seconds to stop, but here is some sample code that works in the simulator and on my iPhone 4:

Main Class:

import java.util.Vector;

import org.robovm.apple.coregraphics.*;
import org.robovm.apple.foundation.*;
import org.robovm.apple.uikit.*;

public class TestAudioQueueCrash extends UIApplicationDelegateAdapter
{
       private UIWindow window = null;
        private int clickCount = 0;

        @Override
        public boolean didFinishLaunching(UIApplication application, UIApplicationLaunchOptions launchOptions) 
        {

            final UIButton button = UIButton.create(UIButtonType.RoundedRect);
            button.setFrame(new CGRect(15.0f, 121.0f, 291.0f, 37.0f));
            button.setTitle("Click me!", UIControlState.Normal);

            button.addOnTouchUpInsideListener(new UIControl.OnTouchUpInsideListener() 
            {
                @Override
                public void onTouchUpInside(UIControl control, UIEvent event) 
                {
                    if (clickCount == 0)
                    {
                        button.setTitle("Recording for 5 seconds... (SPEAK!)", UIControlState.Normal);

                        Runnable r = new Runnable() 
                        {
                            public void run() 
                            {
                                try
                                {
                                    clickCount = 1;

                                    AudioRecord record = new AudioRecord();
                                    record.startRecording();

                                    long when = System.currentTimeMillis() + 5000;
                                    final Vector<byte[]> v = new Vector();
                                    byte[] ba = new byte[3072];
                                    while (System.currentTimeMillis() < when)
                                    {
                                        int n = 0;
                                        while (n<3072)
                                        {
                                            int i = record.read(ba, n, 3072-n);
                                            if (i==-1 || i == 0) break;
                                            n += i;
                                        }

                                        if (n>0)
                                        {
                                            byte[] ba2 = new byte[n];
                                            System.arraycopy(ba, 0, ba2, 0, n);
                                            v.addElement(ba2);
                                        }
                                    }

                                    System.out.println("DONE RECORDING");
                                    record.release();
                                    System.out.println("RECORDER STOPPED");

                                    System.out.println("Playing back recorded audio...");
                                    button.setTitle("Playing back recorded audio...", UIControlState.Normal);

                                    AudioTrack at = new AudioTrack();
                                    at.play();

                                    while (v.size() > 0) 
                                    {
                                        ba = v.remove(0);
                                        at.write(ba, 0, ba.length);
                                        Thread.yield();
                                    }
                                    at.stop();

                                    button.setTitle("DONE", UIControlState.Normal);
                                    System.out.println("FINISHED PIPING AUDIO");
                                }
                                catch (Exception x)
                                {
                                    x.printStackTrace();
                                    button.setTitle("ERROR: " + x.getMessage(), UIControlState.Normal);
                                }

                                clickCount = 0;
                            }
                        };

                        new Thread(r).start();
                    }
                }
            });

            window = new UIWindow(UIScreen.getMainScreen().getBounds());
            window.setBackgroundColor(UIColor.lightGray());
            window.addSubview(button);
            window.makeKeyAndVisible();

            return true;
        }

        public static void main(String[] args) 
        {
            try (NSAutoreleasePool pool = new NSAutoreleasePool()) 
            {
                UIApplication.main(args, null, TestAudioQueueCrash.class);
            }
        }

}

AQRecorderState:

/*<imports>*/
import java.util.Hashtable;

import org.robovm.rt.bro.*;
import org.robovm.rt.bro.annotation.*;
import org.robovm.rt.bro.ptr.*;
/*</imports>*/

/*<javadoc>*/

/*</javadoc>*/
/*<annotations>*//*</annotations>*/
/*<visibility>*/public/*</visibility>*/ class /*<name>*/AQRecorderState/*</name>*/ 
    extends /*<extends>*/Struct<AQRecorderState>/*</extends>*/ 
    /*<implements>*//*</implements>*/ {

    protected static Hashtable<Integer, AudioRecord> mAudioRecords = new Hashtable<>();
    protected static int mLastID = 0;

    /*<ptr>*/public static class AQRecorderStatePtr extends Ptr<AQRecorderState, AQRecorderStatePtr> {}/*</ptr>*/
    /*<bind>*/
    /*</bind>*/
    /*<constants>*//*</constants>*/
    /*<constructors>*/
    public AQRecorderState() {}
    public AQRecorderState(AudioRecord ar) 
    {
        this.mID(++mLastID);
        mAudioRecords.put(mID(), ar);
    }
    /*</constructors>*/
    /*<properties>*//*</properties>*/
    /*<members>*/
    @StructMember(0) public native int mID();
    @StructMember(0) public native AQRecorderState mID(int mID);
    /*</members>*/
    /*<methods>*//*</methods>*/

    public AudioRecord getRecord()
    {
        return mAudioRecords.get(mID());
    }

    public static void drop(int mStateID) 
    {
        mAudioRecords.remove(mStateID);
    }
}

AudioRecord:

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.lang.reflect.Method;

import org.robovm.apple.audiotoolbox.AudioQueue;
import org.robovm.apple.audiotoolbox.AudioQueue.AudioQueuePtr;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer.AudioQueueBufferPtr;
import org.robovm.apple.audiotoolbox.AudioQueueError;
import org.robovm.apple.coreaudio.AudioFormat;
import org.robovm.apple.coreaudio.AudioStreamBasicDescription;
import org.robovm.apple.coreaudio.AudioStreamPacketDescription.AudioStreamPacketDescriptionPtr;
import org.robovm.apple.coreaudio.AudioTimeStamp.AudioTimeStampPtr;
import org.robovm.apple.coreaudio.CoreAudio;
import org.robovm.apple.corefoundation.CFRunLoopMode;
import org.robovm.rt.VM;
import org.robovm.rt.bro.Bro;
import org.robovm.rt.bro.Struct;
import org.robovm.rt.bro.annotation.Callback;
import org.robovm.rt.bro.annotation.Library;
import org.robovm.rt.bro.annotation.Pointer;
import org.robovm.rt.bro.ptr.FunctionPtr;
import org.robovm.rt.bro.ptr.VoidPtr;

/*<annotations>*/@Library("AudioToolbox")/*</annotations>*/
public class AudioRecord
{
    protected double mSampleRate;
    protected AudioFormat mFormatID;
    protected int mFormatFlags;
    protected int mBytesPerPacket;
    protected int mFramesPerPacket;
    protected int mBytesPerFrame;
    protected int mChannelsPerFrame;
    protected int mBitsPerChannel;  

    protected AudioQueue mQueue = null;

    private int kNumberBuffers = 3;
    private PipedInputStream mPIS;
    private PipedOutputStream mPOS;
    private int mStateID = -1;

    private boolean mRunning = false;

    public AudioRecord() throws IOException 
    {
        mSampleRate = 44100;
        mFormatID = AudioFormat.LinearPCM;
        mFormatFlags = CoreAudio.AudioFormatFlagIsPacked | CoreAudio.AudioFormatFlagIsSignedInteger;
        mBytesPerPacket = 2;
        mFramesPerPacket = 1;
        mBytesPerFrame = 2;
        mChannelsPerFrame = 1;
        mBitsPerChannel = 16;    

        mPOS = new PipedOutputStream();
        mPIS = new PipedInputStream(mPOS);
    }

    public static int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat) 
    {
        // TODO Auto-generated method stub
        return 0;
    }

    public int deriveBufferSize(AudioQueue audioQueue, AudioStreamBasicDescription ASBDescription, double seconds)
    {
        int maxBufferSize = 0x50000;
        int maxPacketSize = ASBDescription.getMBytesPerPacket();
        System.out.println(3);
        double numBytesForTime = ASBDescription.getMSampleRate() * maxPacketSize * seconds;
        return (int)(numBytesForTime < maxBufferSize ? numBytesForTime : maxBufferSize);
    }

    public void release() 
    {
        System.out.println("RECORD QUEUE STOPPING...");
        mRunning = false;
        mQueue.stop(true);
//      mQueue.dispose(true);
        System.out.println("RECORD QUEUE STOPPED");
        try
        {
            mPOS.close();
            mPIS.close();
            AQRecorderState.drop(mStateID);
        }
        catch (Exception x) { x.printStackTrace(); }
    }

    public int read(byte[] abData, int i, int length) throws IOException 
    {
        return mPIS.read(abData, i, length);
    }

    /*<bind>*/static { Bro.bind(AudioRecord.class); }/*</bind>*/
    /*<constants>*//*</constants>*/
    /*<constructors>*//*</constructors>*/
    /*<properties>*//*</properties>*/
    /*<members>*//*</members>*/
    @Callback
    public static void callbackMethod(
            @Pointer long                     refcon,
            AudioQueue                        inAQ,
            AudioQueueBuffer                  inBuffer,
            AudioTimeStampPtr                 inStartTime,
            int                               inNumPackets,
            AudioStreamPacketDescriptionPtr   inPacketDesc
        )
    {
        try
        {
            System.out.println("a");
            AQRecorderState.AQRecorderStatePtr ptr = new AQRecorderState.AQRecorderStatePtr();
            ptr.set(refcon);
            System.out.println("b");
            AQRecorderState aqrs = ptr.get();
            System.out.println("c");
            byte[] ba = VM.newByteArray(inBuffer.getMAudioData().getHandle(), inBuffer.getMAudioDataByteSize());
            System.out.println("d");
            aqrs.getRecord().receive(ba);
            System.out.println("e");
        }
        catch (Exception x) { x.printStackTrace(); }

        inAQ.enqueueBuffer(inBuffer, 0, null);
        System.out.println("f");
    }

    private void receive(byte[] ba) 
    {
        if (mRunning) try { mPOS.write(ba); } catch (Exception x) { x.printStackTrace(); }
    }

    public void startRecording() throws Exception
    {
        AudioStreamBasicDescription asbd = new AudioStreamBasicDescription(mSampleRate, mFormatID, mFormatFlags, mBytesPerPacket, mFramesPerPacket, mBytesPerFrame, mChannelsPerFrame, mBitsPerChannel, 0);
        AudioQueuePtr mQueuePtr = new AudioQueuePtr();
        AudioQueueBufferPtr mBuffers = Struct.allocate(AudioQueueBufferPtr.class, kNumberBuffers);
        System.out.println(11);
        AQRecorderState aqData = new AQRecorderState(this);
        mStateID = aqData.mID();
        System.out.println(12);
        Method callbackMethod = null;
        Method[] methods = this.getClass().getMethods();
        int i = methods.length;
        while (i-->0) if (methods[i].getName().equals("callbackMethod")) 
        {
            callbackMethod = methods[i];
            break;
        }
        FunctionPtr fp = new FunctionPtr(callbackMethod );
        System.out.println(13);

        VoidPtr vp = aqData.as(VoidPtr.class);
        System.out.println(14);

        AudioQueueError aqe = AudioQueue.newInput(asbd, fp, vp, null, null, 0, mQueuePtr);
        System.out.println(CFRunLoopMode.Common.value());
        System.out.println(aqe.name());
        mQueue = mQueuePtr.get();
        System.out.println(2);
        int bufferByteSize = deriveBufferSize(mQueue, asbd, 0.5);
        System.out.println("BUFFER SIZE: "+bufferByteSize);

        AudioQueueBufferPtr[] buffers = mBuffers.toArray(kNumberBuffers);
        for (i = 0; i < kNumberBuffers; ++i) 
        {
            mQueue.allocateBuffer(bufferByteSize, buffers[i]);
            mQueue.enqueueBuffer(buffers[i].get(), 0, null);
        }

        mRunning = true;
        mQueue.start(null);
    }

}

AQPlayerState:

/*<imports>*/
import java.util.Hashtable;

import org.robovm.rt.bro.*;
import org.robovm.rt.bro.annotation.*;
import org.robovm.rt.bro.ptr.*;
/*</imports>*/

/*<javadoc>*/

/*</javadoc>*/
/*<annotations>*//*</annotations>*/
/*<visibility>*/public/*</visibility>*/ class /*<name>*/AQPlayerState/*</name>*/ 
    extends /*<extends>*/Struct<AQPlayerState>/*</extends>*/ 
    /*<implements>*//*</implements>*/ {

    protected static Hashtable<Integer, AudioTrack> mAudioTracks = new Hashtable<>();
    protected static int mLastID = 0;

    /*<ptr>*/public static class AQPlayerStatePtr extends Ptr<AQPlayerState, AQPlayerStatePtr> {}/*</ptr>*/
    /*<bind>*/
    /*</bind>*/
    /*<constants>*//*</constants>*/
    /*<constructors>*/
    public AQPlayerState() {}
    public AQPlayerState(AudioTrack ar) 
    {
        this.mID(++mLastID);
        this.mID2(mLastID);
        mAudioTracks.put(mID(), ar);
    }
    /*</constructors>*/
    /*<properties>*//*</properties>*/
    /*<members>*/
    @StructMember(0) public native int mID();
    @StructMember(0) public native AQPlayerState mID(int mID);
    @StructMember(1) public native int mID2();
    @StructMember(1) public native AQPlayerState mID2(int mID2);
    /*</members>*/
    /*<methods>*//*</methods>*/

    public AudioTrack getTrack()
    {
        return mAudioTracks.get(mID());
    }

    public static void drop(int mStateID) 
    {
        mAudioTracks.remove(mStateID);
    }
}

AudioTrack:

import java.lang.reflect.Method;
import java.util.Vector;

import org.robovm.apple.audiotoolbox.AudioQueue;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer;
import org.robovm.apple.audiotoolbox.AudioQueue.AudioQueuePtr;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer.AudioQueueBufferPtr;
import org.robovm.apple.audiotoolbox.AudioQueueError;
import org.robovm.apple.audiotoolbox.AudioQueueParam;
import org.robovm.apple.coreaudio.AudioFormat;
import org.robovm.apple.coreaudio.AudioStreamBasicDescription;
import org.robovm.apple.coreaudio.CoreAudio;
import org.robovm.rt.bro.Bro;
import org.robovm.rt.bro.Struct;
import org.robovm.rt.bro.annotation.Callback;
import org.robovm.rt.bro.annotation.Pointer;
import org.robovm.rt.bro.ptr.BytePtr;
import org.robovm.rt.bro.ptr.FunctionPtr;
import org.robovm.rt.bro.ptr.VoidPtr;


public class AudioTrack {

    public static final int MODE_STREAM = -1;

    private int kNumberBuffers = 3;
    private Vector<byte[]> mData = new Vector<>();
    private int mStateID = -1;
    private boolean mRunning = false;

    protected double mSampleRate;
    protected AudioFormat mFormatID;
    protected int mFormatFlags;
    protected int mBytesPerPacket;
    protected int mFramesPerPacket;
    protected int mBytesPerFrame;
    protected int mChannelsPerFrame;
    protected int mBitsPerChannel;  

    protected AudioQueue mQueue = null;

    public AudioTrack() 
    {
        mSampleRate = 44100;
        mFormatID = AudioFormat.LinearPCM;
        mFormatFlags = CoreAudio.AudioFormatFlagIsPacked | CoreAudio.AudioFormatFlagIsSignedInteger;
        mBytesPerPacket = 2;
        mFramesPerPacket = 1;
        mBytesPerFrame = 2;
        mChannelsPerFrame = 1;
        mBitsPerChannel = 16;    
    }

    public static int getMinBufferSize(int sampleRate, int channelConfigurationMono, int encodingPcm16bit) 
    {
        // TODO Auto-generated method stub
        return 0;
    }

    public int deriveBufferSize(AudioStreamBasicDescription ASBDescription, int maxPacketSize, double seconds)
    {
        int maxBufferSize = 0x50000;
        int minBufferSize = 0x4000;

        double numPacketsForTime = ASBDescription.getMSampleRate() / ASBDescription.getMFramesPerPacket() * seconds;
        int outBufferSize = (int)(numPacketsForTime * maxPacketSize);
        if (outBufferSize > maxBufferSize) return maxBufferSize;
        if (outBufferSize < minBufferSize) return minBufferSize;
        return outBufferSize;
    }

    /*<bind>*/static { Bro.bind(AudioTrack.class); }/*</bind>*/
    /*<constants>*//*</constants>*/
    /*<constructors>*//*</constructors>*/
    /*<properties>*//*</properties>*/
    /*<members>*//*</members>*/
    @Callback
    public static void callbackMethod(
            @Pointer long                     refcon,
            AudioQueue                        inAQ,
            AudioQueueBuffer                  inBuffer
        )
    {
        System.out.println("In Callback");
        AQPlayerState.AQPlayerStatePtr ptr = new AQPlayerState.AQPlayerStatePtr();
        ptr.set(refcon);
        AQPlayerState aqps = ptr.get();
        AudioTrack me = aqps.getTrack();
        me.nextChunk(inAQ, inBuffer);
    }

    private void nextChunk(AudioQueue inAQ, AudioQueueBuffer inBuffer) 
    {
        byte[] ba = null;
        long when = System.currentTimeMillis() + 30000;
        while (mRunning && System.currentTimeMillis() < when)
        {
            if (mData.size() > 0)
            {
                ba = mData.remove(0);
                break;
            }
            try { Thread.yield(); } catch (Exception x) { x.printStackTrace(); }
        }
        if (ba == null) ba = new byte[0];
        System.out.println("PLAYING BYTES: "+ba.length);

        if (ba.length>0)
        {
            VoidPtr vp = inBuffer.getMAudioData();
            BytePtr bp = vp.as(BytePtr.class); //Struct.allocate(BytePtr.class, ba.length);
            bp.set(ba);
//          inBuffer.setMAudioData(vp);
            inBuffer.setMAudioDataByteSize(ba.length);
        }
        mQueue.enqueueBuffer(inBuffer, 0, null);
    }

    public void play() 
    {
        final AudioTrack me = this;

        Runnable r = new Runnable() 
        {
            public void run() 
            {
                AudioStreamBasicDescription asbd = new AudioStreamBasicDescription(mSampleRate, mFormatID, mFormatFlags, mBytesPerPacket, mFramesPerPacket, mBytesPerFrame, mChannelsPerFrame, mBitsPerChannel, 0);
                AudioQueuePtr mQueuePtr = new AudioQueuePtr();
                Method callbackMethod = null;
                Method[] methods = me.getClass().getMethods();
                int i = methods.length;
                while (i-->0) if (methods[i].getName().equals("callbackMethod")) 
                {
                    callbackMethod = methods[i];
                    break;
                }

                FunctionPtr fp = new FunctionPtr(callbackMethod );

                AQPlayerState aqData = new AQPlayerState(me);
                mStateID = aqData.mID();
                VoidPtr vp = aqData.as(VoidPtr.class);
//              AudioQueueError aqe = AudioQueue.newOutput(asbd, fp, vp, CFRunLoop.getCurrent(), new CFString(CFRunLoopMode.Common.value()), 0, mQueuePtr);
                AudioQueueError aqe = AudioQueue.newOutput(asbd, fp, vp, null, null, 0, mQueuePtr);
                System.out.println(aqe.name());
                mQueue = mQueuePtr.get();

                int bufferByteSize = deriveBufferSize(asbd, 2, 0.5);
                System.out.println("BUFFER SIZE: "+bufferByteSize);

                System.out.println("Volume PARAM:"+(int)AudioQueueParam.Volume.value());
                mQueue.setParameter((int)AudioQueueParam.Volume.value(), 1.0f);

                mRunning = true;

                AudioQueueBufferPtr mBuffers = Struct.allocate(AudioQueueBufferPtr.class, kNumberBuffers);
                AudioQueueBufferPtr[] buffers = mBuffers.toArray(kNumberBuffers);

                for (i = 0; i < kNumberBuffers; ++i) 
                {
                    mQueue.allocateBuffer(bufferByteSize, buffers[i]);
                    nextChunk(mQueue, buffers[i].get());
                }

                System.out.println("STARTING QUEUE");
                mQueue.start(null);
                System.out.println("QUEUE STARTED");
/*              
                System.out.println("RUNNING LOOP");

                do
                {
                    System.out.print(".");

                    CFRunLoop.runInMode(CFRunLoopMode.Default, 0.25, false);

                    System.out.print("#");

                }
                while (mRunning);

                System.out.println("!!!");

                CFRunLoop.runInMode(CFRunLoopMode.Default, 1, false);

                System.out.println("DONE RUNNING LOOP");

                mQueue.stop(true);
                AQPlayerState.drop(mStateID);

                System.out.println("QUEUE STOPPED");
*/
            }
        };

        new Thread(r).start();
    }

    public void write(byte[] ba, int i, int length) 
    {
        while (mData.size() > 10) Thread.yield();

        System.out.println("SOUND IN: "+length+" bytes");
        mData.addElement(ba);
    }

    public void stop() 
    {
        System.out.println("STOPPING AUDIO PLAYER");
        mRunning = false;
        mQueue.stop(true);
        AQPlayerState.drop(mStateID);
    }

    public void release() 
    {
        // TODO Auto-generated method stub

    }

}
  • Thanks for the tip. Implemented your revisions and am now getting the same exact error as you mentioned. No idea where to go from here as of this moment. – schmoofer Dec 03 '14 at 12:21
  • Also, how have you implemented your "startRecord" method? With RoboVM, you need direct access to the mQueue object in order to call its "start" method. – schmoofer Dec 03 '14 at 12:49
  • @schmoofer I was able to fix the audio format error by changing the following parameters: mSampleRate = 44100; mFormatID = AudioFormat.LinearPCM; mFormatFlags = CoreAudio.AudioFormatFlagIsBigEndian | CoreAudio.AudioFormatFlagIsPacked | CoreAudio.AudioFormatFlagIsSignedInteger; mBytesPerPacket = 2; mFramesPerPacket = 1; mBytesPerFrame = 2; mChannelsPerFrame = 1; mBitsPerChannel = 16; – Marc Raiser Dec 03 '14 at 15:18
  • @schmoofer You're supposed to be able to get the AudioQueue instance by passing the AudioQueuePtr into AudioQueue.newInput, and then calling mQueuePtr.get(). However, that is causing my app to crash and I'm stuck at that point. I asked a question about that here as well, but no one has been able to answer it yet: [link](http://stackoverflow.com/questions/27239166/robovm-app-crashes-on-ptr-get) – Marc Raiser Dec 03 '14 at 15:21
  • Thanks for submitting the issue on their Github. Glad they fixed that error. Have you had any luck in getting the callback method working? It doesn't seemed to get called after starting the recording ("aqData.mQueue().get().start(null);") – schmoofer Dec 06 '14 at 05:14
  • @Callback public static void handleInputBuffer( AQRecorderState aqData, AudioQueue inAQ, AudioQueueBuffer inBuffer, AudioTimeStamp inStartTime, int inNumPackets, AudioStreamPacketDescription inPacketDesc ) { System.out.println("This never prints"); if (!aqData.mIsRunning()) return; } – schmoofer Dec 06 '14 at 05:16
  • @schmoofer try this for the callback signature: `@Callback public static void callbackMethod( @Pointer long refcon, AudioQueue inAQ, AudioQueueBuffer inBuffer, AudioTimeStampPtr inStartTime, int inNumPackets, AudioStreamPacketDescriptionPtr inPacketDesc ) ` – Marc Raiser Dec 06 '14 at 15:48
  • No luck, Marc. The code inside the callback isn't executing. Have you already tested it? – schmoofer Dec 07 '14 at 07:37
  • I actually got it working and streamed audio from iOS to a PC. I edited my answer above with the full working source code. PLAYING audio on iOS, on the other hand, still not working... – Marc Raiser Dec 07 '14 at 17:58
  • So IIUC you are trying to replicate what is done in the SpeakHere sample? If that's the case it would be awesome if we could get some help porting that sample to RoboVM (as far as possible). We could then have a look at it and fix whatever bugs are there to get it to work properly. Anyone of you up for it? If yes please send a PR to the samples repos here: https://github.com/robovm/robovm-samples. – ntherning Dec 09 '14 at 05:51
  • @marc-raiser Is the callback method supposed to execute continuously? It only executes once for me, using your example. – schmoofer Dec 12 '14 at 11:04
  • It should call the callback repeatedly until you call `release()` at which point it would stop. the `inAQ.enqueueBuffer(inBuffer, 0, null);` returns the buffer to the queue so it can be filled again, so if that's not getting called it could be running out of buffers and not calling the callback, but in my example there's 3 buffers so it should at least get called 3 times. – Marc Raiser Dec 12 '14 at 19:58
  • @marc-raiser Excuse the delay, I actually never got the callback to get called more than once. I've created a github for what I have (https://github.com/schmoofer/speakhere-robovm). Would it be possible for you to take a look? – schmoofer Jan 20 '15 at 09:04
  • @ntherning I'm open to implementing the SpeakHere sample, but won't be able to do so until I iron out the callback issue. – schmoofer Jan 20 '15 at 09:05
  • @schmoofer Try upgrading robovm to 1.0.0-beta-03, and replace `CoreFoundation.RunLoopCommonModes()` with null in `AudioQueue.newInput`. – Marc Raiser Jan 21 '15 at 20:36
  • @ntherning We are also having problems getting audio going on IOS. We're also in Gothenburg (Drottninggatan) and using your latest cutting edge releases as soon as they are out. We get error -50 when starting the AudioQueue, using almost the exact same code as is posted above. We wonder if there is a chance to work together on RoboVM's IOS Audio support, since we're in the same city? – c.fogelklou Jan 22 '15 at 12:07
  • @schmoofer I updated my answer with code that both records and then plays back audio. Works on my iphone 4 and in simulator. – Marc Raiser Jan 22 '15 at 19:53
  • @ntherning I created a pull request with a preliminary SpeakHere sample using marc-raiser's code. The UI does not yet reflect that of the original SpeakHere sample, and I haven't figured out exactly how to access the floating point PCM values of audio data (to display the volume). – schmoofer Jan 23 '15 at 10:37
  • 1
    @c.fogelklou Here's the link to the mic/playback demo using Marc's working code: https://github.com/schmoofer/robovm-samples/tree/master/SpeakHere – schmoofer Jan 23 '15 at 10:39
  • Are import org.robovm.apple.audiotoolbox.AudioQueueError and import org.robovm.apple.coreaudio.CoreAudio deprecated in the newest versions of RoboVM? My project is not resolving those (using RoboVM 1.9) Are there alternatives in the newest versions? – jeudyx Nov 06 '15 at 04:41