Implementing the AudioQueue recording demo with RoboVM fails with a "No @Marshaler found" error


I am trying to implement iOS audio recording with RoboVM, following Apple's AudioQueue guide and their SpeakHere sample project, and I am running into this error:

No @Marshaler found for parameter 1 of @Callback method <AQRecorder: void HandleInputBuffer(AQRecorder,org.robovm.apple.audiotoolbox.AudioQueue,org.robovm.apple.audiotoolbox.AudioQueueBuffer,org.robovm.apple.coreaudio.AudioTimeStamp,int,org.robovm.apple.coreaudio.AudioStreamPacketDescription)>

Any ideas? Here is the code I am using:

Main.java:

import org.robovm.apple.coregraphics.CGRect;
import org.robovm.apple.foundation.NSAutoreleasePool;
import org.robovm.apple.uikit.UIApplication;
import org.robovm.apple.uikit.UIApplicationDelegateAdapter;
import org.robovm.apple.uikit.UIApplicationLaunchOptions;
import org.robovm.apple.uikit.UIButton;
import org.robovm.apple.uikit.UIButtonType;
import org.robovm.apple.uikit.UIColor;
import org.robovm.apple.uikit.UIControl;
import org.robovm.apple.uikit.UIControlState;
import org.robovm.apple.uikit.UIEvent;
import org.robovm.apple.uikit.UIScreen;
import org.robovm.apple.uikit.UIWindow;

public class IOSDemo extends UIApplicationDelegateAdapter {

    private UIWindow window = null;

    @Override
    public boolean didFinishLaunching(UIApplication application, 
            UIApplicationLaunchOptions launchOptions) {

        final AQRecorder aqRecorder = new AQRecorder();

        final UIButton button = UIButton.create(UIButtonType.RoundedRect);
        button.setFrame(new CGRect(115.0f, 121.0f, 91.0f, 37.0f));
        button.setTitle("Start", UIControlState.Normal);

        button.addOnTouchUpInsideListener(new UIControl.OnTouchUpInsideListener() {
            @Override
            public void onTouchUpInside(UIControl control, UIEvent event) {
                if ("Stop".equals(button.getTitle(UIControlState.Normal))) {
                    aqRecorder.stopRecord();
                    button.setTitle("Start", UIControlState.Normal);
                }
                else{
                    aqRecorder.startRecord();
                    button.setTitle("Stop", UIControlState.Normal);
                }
            }
        });

        window = new UIWindow(UIScreen.getMainScreen().getBounds());
        window.setBackgroundColor(UIColor.lightGray());
        window.addSubview(button);
        window.makeKeyAndVisible();

        try {
            aqRecorder.setUpAudioFormat();
        } catch (NoSuchMethodException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        return true;
    }

    public static void main(String[] args) {
        try (NSAutoreleasePool pool = new NSAutoreleasePool()) {
            UIApplication.main(args, null, IOSDemo.class);
        }
    }
}

AQRecorder.java:

import org.robovm.apple.audiotoolbox.AudioFile;
import org.robovm.apple.audiotoolbox.AudioQueue;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer;
import org.robovm.apple.audiotoolbox.AudioQueue.AudioQueuePtr;
import org.robovm.apple.coreaudio.AudioFormat;
import org.robovm.apple.coreaudio.AudioStreamBasicDescription;
import org.robovm.apple.coreaudio.AudioStreamPacketDescription;
import org.robovm.apple.coreaudio.AudioTimeStamp;
import org.robovm.rt.bro.annotation.Callback;
import org.robovm.rt.bro.ptr.FunctionPtr;
import org.robovm.rt.bro.ptr.VoidPtr;

public class AQRecorder {

    AudioStreamBasicDescription mDataFormat;           // 2
    AudioQueue mQueue;                                  // 3
    // AudioQueueBufferRef mBuffers[kNumberBuffers];    // 4
    AudioFile mAudioFile;                               // 5
    int bufferByteSize;                                 // 6
    int mCurrentPacket;                                 // 7
    boolean mIsRunning;                                 // 8

    public void startRecord() {
        mQueue.start(null);
    }

    public void stopRecord() {
        mQueue.stop(true);
    }

    @Callback
    static void HandleInputBuffer(
            AQRecorder aqData,
            AudioQueue inAQ,
            AudioQueueBuffer inBuffer,
            AudioTimeStamp inStartTime,
            int inNumPackets,
            AudioStreamPacketDescription inPacketDesc
    ) {
        AQRecorder pAqData = aqData; // 1
        if (inNumPackets == 0 && pAqData.mDataFormat.mBytesPerPacket() != 0)
            inNumPackets = inBuffer.mAudioDataByteSize() / pAqData.mDataFormat.mBytesPerPacket();
        if (!aqData.mIsRunning) // 5
            return;
        System.out.println(inBuffer.mAudioData());
    }

    void setUpAudioFormat() throws NoSuchMethodException {
        mDataFormat = new AudioStreamBasicDescription(
                16000,                  // mSampleRate
                AudioFormat.LinearPCM,  // mFormatID
                (1 << 2),               // mFormatFlags
                512,                    // mBytesPerPacket
                1,                      // mFramesPerPacket
                512,                    // mBytesPerFrame
                1,                      // mChannelsPerFrame
                16,                     // mBitsPerChannel
                0                       // mReserved
        );

        AudioQueuePtr mQueuePtr = new AudioQueuePtr();
        mQueuePtr.set(mQueue);
        VoidPtr self = new VoidPtr();

        @SuppressWarnings("rawtypes")
        Class[] cArg = new Class[6];
        cArg[0] = AQRecorder.class;
        cArg[1] = AudioQueue.class;
        cArg[2] = AudioQueueBuffer.class;
        cArg[3] = AudioTimeStamp.class;
        cArg[4] = int.class;
        cArg[5] = AudioStreamPacketDescription.class;
        FunctionPtr handleInputBuffer = new FunctionPtr(
                (AQRecorder.class).getDeclaredMethod("HandleInputBuffer", cArg));

        AudioQueue.newInput(mDataFormat, handleInputBuffer, self, null, "", 0, mQueuePtr);
    }
}
Tags:
robovm
audioqueue

1 Answer


With RoboVM 1.0.0-beta-3 I was finally able to get audio recording and playback working. I'm not sure why the recording queue takes up to 20 seconds, but here is sample code that works in the simulator and on my iPhone 4:
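The crucial difference from the question's HandleInputBuffer is the callback signature: RoboVM's Bro bridge has no marshaler for an ordinary Java class such as AQRecorder in a @Callback signature, which is most likely what the error is complaining about. The user-data argument therefore has to arrive as a raw @Pointer long (and the Core Audio structs as ...Ptr types); the Java-side state is then recovered through the small AQRecorderState struct defined below. The following is only a condensed excerpt of the AudioRecord class further down, to highlight the shape of the callback:

@Callback
public static void callbackMethod(
        @Pointer long                     refcon,      // user data arrives as a raw pointer
        AudioQueue                        inAQ,
        AudioQueueBuffer                  inBuffer,
        AudioTimeStampPtr                 inStartTime,
        int                               inNumPackets,
        AudioStreamPacketDescriptionPtr   inPacketDesc) {
    // Turn the raw pointer back into the struct that carries the lookup key,
    // then hand the captured bytes to the owning AudioRecord and re-enqueue the buffer.
    AQRecorderState.AQRecorderStatePtr ptr = new AQRecorderState.AQRecorderStatePtr();
    ptr.set(refcon);
    AQRecorderState state = ptr.get();
    // ... state.getRecord().receive(bytes); inAQ.enqueueBuffer(inBuffer, 0, null);
}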

Main class:

import java.util.Vector;

import org.robovm.apple.coregraphics.*;
import org.robovm.apple.foundation.*;
import org.robovm.apple.uikit.*;

public class TestAudioQueueCrash extends UIApplicationDelegateAdapter
{
        private UIWindow window = null;
        private int clickCount = 0;

        @Override
        public boolean didFinishLaunching(UIApplication application, UIApplicationLaunchOptions launchOptions) 
        {

            final UIButton button = UIButton.create(UIButtonType.RoundedRect);
            button.setFrame(new CGRect(15.0f, 121.0f, 291.0f, 37.0f));
            button.setTitle("Click me!", UIControlState.Normal);

            button.addOnTouchUpInsideListener(new UIControl.OnTouchUpInsideListener() 
            {
                @Override
                public void onTouchUpInside(UIControl control, UIEvent event) 
                {
                    if (clickCount == 0)
                    {
                        button.setTitle("Recording for 5 seconds... (SPEAK!)", UIControlState.Normal);

                        Runnable r = new Runnable() 
                        {
                            public void run() 
                            {
                                try
                                {
                                    clickCount = 1;

                                    AudioRecord record = new AudioRecord();
                                    record.startRecording();

                                    long when = System.currentTimeMillis() + 5000;
                                    final Vector<byte[]> v = new Vector<>();
                                    byte[] ba = new byte[3072];
                                    while (System.currentTimeMillis() < when)
                                    {
                                        int n = 0;
                                        while (n<3072)
                                        {
                                            int i = record.read(ba, n, 3072-n);
                                            if (i==-1 || i == 0) break;
                                            n += i;
                                        }

                                        if (n>0)
                                        {
                                            byte[] ba2 = new byte[n];
                                            System.arraycopy(ba, 0, ba2, 0, n);
                                            v.addElement(ba2);
                                        }
                                    }

                                    System.out.println("DONE RECORDING");
                                    record.release();
                                    System.out.println("RECORDER STOPPED");

                                    System.out.println("Playing back recorded audio...");
                                    button.setTitle("Playing back recorded audio...", UIControlState.Normal);

                                    AudioTrack at = new AudioTrack();
                                    at.play();

                                    while (v.size() > 0) 
                                    {
                                        ba = v.remove(0);
                                        at.write(ba, 0, ba.length);
                                        Thread.yield();
                                    }
                                    at.stop();

                                    button.setTitle("DONE", UIControlState.Normal);
                                    System.out.println("FINISHED PIPING AUDIO");
                                }
                                catch (Exception x)
                                {
                                    x.printStackTrace();
                                    button.setTitle("ERROR: " + x.getMessage(), UIControlState.Normal);
                                }

                                clickCount = 0;
                            }
                        };

                        new Thread(r).start();
                    }
                }
            });

            window = new UIWindow(UIScreen.getMainScreen().getBounds());
            window.setBackgroundColor(UIColor.lightGray());
            window.addSubview(button);
            window.makeKeyAndVisible();

            return true;
        }

        public static void main(String[] args) 
        {
            try (NSAutoreleasePool pool = new NSAutoreleasePool()) 
            {
                UIApplication.main(args, null, TestAudioQueueCrash.class);
            }
        }

}

AQRecorderState:

/*<imports>*/
import java.util.Hashtable;

import org.robovm.rt.bro.*;
import org.robovm.rt.bro.annotation.*;
import org.robovm.rt.bro.ptr.*;
/*</imports>*/

/*<javadoc>*/

/*</javadoc>*/
/*<annotations>*//*</annotations>*/
/*<visibility>*/public/*</visibility>*/ class /*<name>*/AQRecorderState/*</name>*/ 
    extends /*<extends>*/Struct<AQRecorderState>/*</extends>*/ 
    /*<implements>*//*</implements>*/ {

    protected static Hashtable<Integer, AudioRecord> mAudioRecords = new Hashtable<>();
    protected static int mLastID = 0;

    /*<ptr>*/public static class AQRecorderStatePtr extends Ptr<AQRecorderState, AQRecorderStatePtr> {}/*</ptr>*/
    /*<bind>*/
    /*</bind>*/
    /*<constants>*//*</constants>*/
    /*<constructors>*/
    public AQRecorderState() {}
    public AQRecorderState(AudioRecord ar) 
    {
        this.mID(++mLastID);
        mAudioRecords.put(mID(), ar);
    }
    /*</constructors>*/
    /*<properties>*//*</properties>*/
    /*<members>*/
    @StructMember(0) public native int mID();
    @StructMember(0) public native AQRecorderState mID(int mID);
    /*</members>*/
    /*<methods>*//*</methods>*/

    public AudioRecord getRecord()
    {
        return mAudioRecords.get(mID());
    }

    public static void drop(int mStateID) 
    {
        mAudioRecords.remove(mStateID);
    }
}

AudioRecord:

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.lang.reflect.Method;

import org.robovm.apple.audiotoolbox.AudioQueue;
import org.robovm.apple.audiotoolbox.AudioQueue.AudioQueuePtr;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer.AudioQueueBufferPtr;
import org.robovm.apple.audiotoolbox.AudioQueueError;
import org.robovm.apple.coreaudio.AudioFormat;
import org.robovm.apple.coreaudio.AudioStreamBasicDescription;
import org.robovm.apple.coreaudio.AudioStreamPacketDescription.AudioStreamPacketDescriptionPtr;
import org.robovm.apple.coreaudio.AudioTimeStamp.AudioTimeStampPtr;
import org.robovm.apple.coreaudio.CoreAudio;
import org.robovm.apple.corefoundation.CFRunLoopMode;
import org.robovm.rt.VM;
import org.robovm.rt.bro.Bro;
import org.robovm.rt.bro.Struct;
import org.robovm.rt.bro.annotation.Callback;
import org.robovm.rt.bro.annotation.Library;
import org.robovm.rt.bro.annotation.Pointer;
import org.robovm.rt.bro.ptr.FunctionPtr;
import org.robovm.rt.bro.ptr.VoidPtr;

/*<annotations>*/@Library("AudioToolbox")/*</annotations>*/
public class AudioRecord
{
    protected double mSampleRate;
    protected AudioFormat mFormatID;
    protected int mFormatFlags;
    protected int mBytesPerPacket;
    protected int mFramesPerPacket;
    protected int mBytesPerFrame;
    protected int mChannelsPerFrame;
    protected int mBitsPerChannel;  

    protected AudioQueue mQueue = null;

    private int kNumberBuffers = 3;
    private PipedInputStream mPIS;
    private PipedOutputStream mPOS;
    private int mStateID = -1;

    private boolean mRunning = false;

    public AudioRecord() throws IOException 
    {
        mSampleRate = 44100;
        mFormatID = AudioFormat.LinearPCM;
        mFormatFlags = CoreAudio.AudioFormatFlagIsPacked | CoreAudio.AudioFormatFlagIsSignedInteger;
        mBytesPerPacket = 2;
        mFramesPerPacket = 1;
        mBytesPerFrame = 2;
        mChannelsPerFrame = 1;
        mBitsPerChannel = 16;    

        mPOS = new PipedOutputStream();
        mPIS = new PipedInputStream(mPOS);
    }

    public static int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat) 
    {
        // TODO Auto-generated method stub
        return 0;
    }

    public int deriveBufferSize(AudioQueue audioQueue, AudioStreamBasicDescription ASBDescription, double seconds)
    {
        int maxBufferSize = 0x50000;
        int maxPacketSize = ASBDescription.getMBytesPerPacket();
        System.out.println(3);
        double numBytesForTime = ASBDescription.getMSampleRate() * maxPacketSize * seconds;
        return (int)(numBytesForTime < maxBufferSize ? numBytesForTime : maxBufferSize);
    }

    public void release() 
    {
        System.out.println("RECORD QUEUE STOPPING...");
        mRunning = false;
        mQueue.stop(true);
//      mQueue.dispose(true);
        System.out.println("RECORD QUEUE STOPPED");
        try
        {
            mPOS.close();
            mPIS.close();
            AQRecorderState.drop(mStateID);
        }
        catch (Exception x) { x.printStackTrace(); }
    }

    public int read(byte[] abData, int i, int length) throws IOException 
    {
        return mPIS.read(abData, i, length);
    }

    /*<bind>*/static { Bro.bind(AudioRecord.class); }/*</bind>*/
    /*<constants>*//*</constants>*/
    /*<constructors>*//*</constructors>*/
    /*<properties>*//*</properties>*/
    /*<members>*//*</members>*/
    @Callback
    public static void callbackMethod(
            @Pointer long                     refcon,
            AudioQueue                        inAQ,
            AudioQueueBuffer                  inBuffer,
            AudioTimeStampPtr                 inStartTime,
            int                               inNumPackets,
            AudioStreamPacketDescriptionPtr   inPacketDesc
        )
    {
        try
        {
            System.out.println("a");
            AQRecorderState.AQRecorderStatePtr ptr = new AQRecorderState.AQRecorderStatePtr();
            ptr.set(refcon);
            System.out.println("b");
            AQRecorderState aqrs = ptr.get();
            System.out.println("c");
            byte[] ba = VM.newByteArray(inBuffer.getMAudioData().getHandle(), inBuffer.getMAudioDataByteSize());
            System.out.println("d");
            aqrs.getRecord().receive(ba);
            System.out.println("e");
        }
        catch (Exception x) { x.printStackTrace(); }

        inAQ.enqueueBuffer(inBuffer, 0, null);
        System.out.println("f");
    }

    private void receive(byte[] ba) 
    {
        if (mRunning) try { mPOS.write(ba); } catch (Exception x) { x.printStackTrace(); }
    }

    public void startRecording() throws Exception
    {
        AudioStreamBasicDescription asbd = new AudioStreamBasicDescription(mSampleRate, mFormatID, mFormatFlags, mBytesPerPacket, mFramesPerPacket, mBytesPerFrame, mChannelsPerFrame, mBitsPerChannel, 0);
        AudioQueuePtr mQueuePtr = new AudioQueuePtr();
        AudioQueueBufferPtr mBuffers = Struct.allocate(AudioQueueBufferPtr.class, kNumberBuffers);
        System.out.println(11);
        AQRecorderState aqData = new AQRecorderState(this);
        mStateID = aqData.mID();
        System.out.println(12);
        Method callbackMethod = null;
        Method[] methods = this.getClass().getMethods();
        int i = methods.length;
        while (i-->0) if (methods[i].getName().equals("callbackMethod")) 
        {
            callbackMethod = methods[i];
            break;
        }
        FunctionPtr fp = new FunctionPtr(callbackMethod);
        System.out.println(13);

        VoidPtr vp = aqData.as(VoidPtr.class);
        System.out.println(14);

        AudioQueueError aqe = AudioQueue.newInput(asbd, fp, vp, null, null, 0, mQueuePtr);
        System.out.println(CFRunLoopMode.Common.value());
        System.out.println(aqe.name());
        mQueue = mQueuePtr.get();
        System.out.println(2);
        int bufferByteSize = deriveBufferSize(mQueue, asbd, 0.5);
        System.out.println("BUFFER SIZE: "+bufferByteSize);

        AudioQueueBufferPtr[] buffers = mBuffers.toArray(kNumberBuffers);
        for (i = 0; i < kNumberBuffers; ++i) 
        {
            mQueue.allocateBuffer(bufferByteSize, buffers[i]);
            mQueue.enqueueBuffer(buffers[i].get(), 0, null);
        }

        mRunning = true;
        mQueue.start(null);
    }

}

AQPlayerState:

/*<imports>*/
import java.util.Hashtable;

import org.robovm.rt.bro.*;
import org.robovm.rt.bro.annotation.*;
import org.robovm.rt.bro.ptr.*;
/*</imports>*/

/*<javadoc>*/

/*</javadoc>*/
/*<annotations>*//*</annotations>*/
/*<visibility>*/public/*</visibility>*/ class /*<name>*/AQPlayerState/*</name>*/ 
    extends /*<extends>*/Struct<AQPlayerState>/*</extends>*/ 
    /*<implements>*//*</implements>*/ {

    protected static Hashtable<Integer, AudioTrack> mAudioTracks = new Hashtable<>();
    protected static int mLastID = 0;

    /*<ptr>*/public static class AQPlayerStatePtr extends Ptr<AQPlayerState, AQPlayerStatePtr> {}/*</ptr>*/
    /*<bind>*/
    /*</bind>*/
    /*<constants>*//*</constants>*/
    /*<constructors>*/
    public AQPlayerState() {}
    public AQPlayerState(AudioTrack ar) 
    {
        this.mID(++mLastID);
        this.mID2(mLastID);
        mAudioTracks.put(mID(), ar);
    }
    /*</constructors>*/
    /*<properties>*//*</properties>*/
    /*<members>*/
    @StructMember(0) public native int mID();
    @StructMember(0) public native AQPlayerState mID(int mID);
    @StructMember(1) public native int mID2();
    @StructMember(1) public native AQPlayerState mID2(int mID2);
    /*</members>*/
    /*<methods>*//*</methods>*/

    public AudioTrack getTrack()
    {
        return mAudioTracks.get(mID());
    }

    public static void drop(int mStateID) 
    {
        mAudioTracks.remove(mStateID);
    }
}

AudioTrack:

import java.lang.reflect.Method;
import java.util.Vector;

import org.robovm.apple.audiotoolbox.AudioQueue;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer;
import org.robovm.apple.audiotoolbox.AudioQueue.AudioQueuePtr;
import org.robovm.apple.audiotoolbox.AudioQueueBuffer.AudioQueueBufferPtr;
import org.robovm.apple.audiotoolbox.AudioQueueError;
import org.robovm.apple.audiotoolbox.AudioQueueParam;
import org.robovm.apple.coreaudio.AudioFormat;
import org.robovm.apple.coreaudio.AudioStreamBasicDescription;
import org.robovm.apple.coreaudio.CoreAudio;
import org.robovm.rt.bro.Bro;
import org.robovm.rt.bro.Struct;
import org.robovm.rt.bro.annotation.Callback;
import org.robovm.rt.bro.annotation.Pointer;
import org.robovm.rt.bro.ptr.BytePtr;
import org.robovm.rt.bro.ptr.FunctionPtr;
import org.robovm.rt.bro.ptr.VoidPtr;


public class AudioTrack {

    public static final int MODE_STREAM = -1;

    private int kNumberBuffers = 3;
    private Vector<byte[]> mData = new Vector<>();
    private int mStateID = -1;
    private boolean mRunning = false;

    protected double mSampleRate;
    protected AudioFormat mFormatID;
    protected int mFormatFlags;
    protected int mBytesPerPacket;
    protected int mFramesPerPacket;
    protected int mBytesPerFrame;
    protected int mChannelsPerFrame;
    protected int mBitsPerChannel;  

    protected AudioQueue mQueue = null;

    public AudioTrack() 
    {
        mSampleRate = 44100;
        mFormatID = AudioFormat.LinearPCM;
        mFormatFlags = CoreAudio.AudioFormatFlagIsPacked | CoreAudio.AudioFormatFlagIsSignedInteger;
        mBytesPerPacket = 2;
        mFramesPerPacket = 1;
        mBytesPerFrame = 2;
        mChannelsPerFrame = 1;
        mBitsPerChannel = 16;    
    }

    public static int getMinBufferSize(int sampleRate, int channelConfigurationMono, int encodingPcm16bit) 
    {
        // TODO Auto-generated method stub
        return 0;
    }

    public int deriveBufferSize(AudioStreamBasicDescription ASBDescription, int maxPacketSize, double seconds)
    {
        int maxBufferSize = 0x50000;
        int minBufferSize = 0x4000;

        double numPacketsForTime = ASBDescription.getMSampleRate() / ASBDescription.getMFramesPerPacket() * seconds;
        int outBufferSize = (int)(numPacketsForTime * maxPacketSize);
        if (outBufferSize > maxBufferSize) return maxBufferSize;
        if (outBufferSize < minBufferSize) return minBufferSize;
        return outBufferSize;
    }

    /*<bind>*/static { Bro.bind(AudioTrack.class); }/*</bind>*/
    /*<constants>*//*</constants>*/
    /*<constructors>*//*</constructors>*/
    /*<properties>*//*</properties>*/
    /*<members>*//*</members>*/
    @Callback
    public static void callbackMethod(
            @Pointer long                     refcon,
            AudioQueue                        inAQ,
            AudioQueueBuffer                  inBuffer
        )
    {
        System.out.println("In Callback");
        AQPlayerState.AQPlayerStatePtr ptr = new AQPlayerState.AQPlayerStatePtr();
        ptr.set(refcon);
        AQPlayerState aqps = ptr.get();
        AudioTrack me = aqps.getTrack();
        me.nextChunk(inAQ, inBuffer);
    }

    private void nextChunk(AudioQueue inAQ, AudioQueueBuffer inBuffer) 
    {
        byte[] ba = null;
        long when = System.currentTimeMillis() + 30000;
        while (mRunning && System.currentTimeMillis() < when)
        {
            if (mData.size() > 0)
            {
                ba = mData.remove(0);
                break;
            }
            try { Thread.yield(); } catch (Exception x) { x.printStackTrace(); }
        }
        if (ba == null) ba = new byte[0];
        System.out.println("PLAYING BYTES: "+ba.length);

        if (ba.length>0)
        {
            VoidPtr vp = inBuffer.getMAudioData();
            BytePtr bp = vp.as(BytePtr.class); //Struct.allocate(BytePtr.class, ba.length);
            bp.set(ba);
//          inBuffer.setMAudioData(vp);
            inBuffer.setMAudioDataByteSize(ba.length);
        }
        mQueue.enqueueBuffer(inBuffer, 0, null);
    }

    public void play() 
    {
        final AudioTrack me = this;

        Runnable r = new Runnable() 
        {
            public void run() 
            {
                AudioStreamBasicDescription asbd = new AudioStreamBasicDescription(mSampleRate, mFormatID, mFormatFlags, mBytesPerPacket, mFramesPerPacket, mBytesPerFrame, mChannelsPerFrame, mBitsPerChannel, 0);
                AudioQueuePtr mQueuePtr = new AudioQueuePtr();
                Method callbackMethod = null;
                Method[] methods = me.getClass().getMethods();
                int i = methods.length;
                while (i-->0) if (methods[i].getName().equals("callbackMethod")) 
                {
                    callbackMethod = methods[i];
                    break;
                }

                FunctionPtr fp = new FunctionPtr(callbackMethod);

                AQPlayerState aqData = new AQPlayerState(me);
                mStateID = aqData.mID();
                VoidPtr vp = aqData.as(VoidPtr.class);
//              AudioQueueError aqe = AudioQueue.newOutput(asbd, fp, vp, CFRunLoop.getCurrent(), new CFString(CFRunLoopMode.Common.value()), 0, mQueuePtr);
                AudioQueueError aqe = AudioQueue.newOutput(asbd, fp, vp, null, null, 0, mQueuePtr);
                System.out.println(aqe.name());
                mQueue = mQueuePtr.get();

                int bufferByteSize = deriveBufferSize(asbd, 2, 0.5);
                System.out.println("BUFFER SIZE: "+bufferByteSize);

                System.out.println("Volume PARAM:"+(int)AudioQueueParam.Volume.value());
                mQueue.setParameter((int)AudioQueueParam.Volume.value(), 1.0f);

                mRunning = true;

                AudioQueueBufferPtr mBuffers = Struct.allocate(AudioQueueBufferPtr.class, kNumberBuffers);
                AudioQueueBufferPtr[] buffers = mBuffers.toArray(kNumberBuffers);

                for (i = 0; i < kNumberBuffers; ++i) 
                {
                    mQueue.allocateBuffer(bufferByteSize, buffers[i]);
                    nextChunk(mQueue, buffers[i].get());
                }

                System.out.println("STARTING QUEUE");
                mQueue.start(null);
                System.out.println("QUEUE STARTED");
/*              
                System.out.println("RUNNING LOOP");

                do
                {
                    System.out.print(".");

                    CFRunLoop.runInMode(CFRunLoopMode.Default, 0.25, false);

                    System.out.print("#");

                }
                while (mRunning);

                System.out.println("!!!");

                CFRunLoop.runInMode(CFRunLoopMode.Default, 1, false);

                System.out.println("DONE RUNNING LOOP");

                mQueue.stop(true);
                AQPlayerState.drop(mStateID);

                System.out.println("QUEUE STOPPED");
*/
            }
        };

        new Thread(r).start();
    }

    public void write(byte[] ba, int i, int length) 
    {
        while (mData.size() > 10) Thread.yield();

        System.out.println("SOUND IN: "+length+" bytes");
        mData.addElement(ba);
    }

    public void stop() 
    {
        System.out.println("STOPPING AUDIO PLAYER");
        mRunning = false;
        mQueue.stop(true);
        AQPlayerState.drop(mStateID);
    }

    public void release() 
    {
        // TODO Auto-generated method stub

    }

}
  • Thanks for the tip. I applied your revisions and now I get the same error you mentioned. Not sure where to go from here.
  • Also, how did you implement your "startRecord" method? With RoboVM you need direct access to the mQueue object in order to call its "start" method. (A sketch of that wiring follows below.)
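For illustration, a minimal sketch of how the queue handle can be obtained and start/stop wired up, following the same pattern as the AudioRecord class above. The class and field names here are made up for the example and are not taken from the asker's project:

import org.robovm.apple.audiotoolbox.AudioQueue;
import org.robovm.apple.audiotoolbox.AudioQueue.AudioQueuePtr;
import org.robovm.apple.coreaudio.AudioStreamBasicDescription;
import org.robovm.rt.bro.ptr.FunctionPtr;
import org.robovm.rt.bro.ptr.VoidPtr;

// Hypothetical helper: keeps a direct reference to the AudioQueue so that
// start/stop can be called later, mirroring AudioRecord.startRecording() above.
public class RecorderControl {

    private AudioQueue mQueue;

    public void init(AudioStreamBasicDescription asbd, FunctionPtr callback, VoidPtr userData) {
        AudioQueuePtr queuePtr = new AudioQueuePtr();
        // newInput fills queuePtr with the native queue handle.
        AudioQueue.newInput(asbd, callback, userData, null, null, 0, queuePtr);
        mQueue = queuePtr.get();
    }

    public void startRecord() {
        mQueue.start(null);   // null start time = start as soon as possible
    }

    public void stopRecord() {
        mQueue.stop(true);    // true = stop immediately rather than draining
    }
}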