0

I use MediaCodec in a standard form such as:

/**
 * Decodes the first track of /sdcard/video-only.mpg (expected to be H.264)
 * with MediaCodec and renders it to {@code mSurface}, sleeping between
 * frames so output appears at roughly the stream's presentation timestamps.
 */
public void run() {
    MediaExtractor extractor = new MediaExtractor();
    try {
        extractor.setDataSource("/sdcard/video-only.mpg");
    } catch (Exception e) {
        // If the source cannot be opened, getTrackFormat() below would
        // throw anyway; report the failure and stop instead of silently
        // swallowing it.
        e.printStackTrace();
        extractor.release();
        return;
    }
    MediaFormat format = extractor.getTrackFormat(0);
    extractor.selectTrack(0);
    MediaCodec decoder = MediaCodec.createDecoderByType("video/avc");
    decoder.configure(format, mSurface, null, 0);
    decoder.start();
    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
    BufferInfo info = new BufferInfo();
    boolean isEOS = false;
    long startMs = System.currentTimeMillis();
    while (!Thread.interrupted()) {
        // Input side: feed samples until the extractor runs out.
        if (!isEOS) {
            int inIndex = decoder.dequeueInputBuffer(10000);
            if (inIndex >= 0) {
                ByteBuffer buffer = inputBuffers[inIndex];
                int sampleSize = extractor.readSampleData(buffer, 0);
                if (sampleSize < 0) {
                    // No more samples: signal end-of-stream so the codec
                    // can flush its remaining output frames.
                    decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    isEOS = true;
                } else {
                    decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                    extractor.advance();
                }
            }
        }
        // Output side: render decoded frames (or handle codec state changes).
        int outIndex = decoder.dequeueOutputBuffer(info, 10000);
        switch (outIndex) {
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            // The codec re-allocated its output buffers; refresh our copy.
            outputBuffers = decoder.getOutputBuffers();
            break;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            break;
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            break;
        default:
            // Crude pacing: wait until wall-clock time catches up with the
            // frame's presentation time before rendering it.  (The output
            // ByteBuffer itself is never read here — when decoding to a
            // Surface the frame data is delivered via releaseOutputBuffer.)
            while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so the outer loop exits too.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
            // true = render this buffer to mSurface.
            decoder.releaseOutputBuffer(outIndex, true);
            break;
        }
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
            break;
    }
    decoder.stop();
    decoder.release();
    extractor.release();
}

I'm now getting the input buffer from a SEPARATE thread running in JNI. In this thread, I receive a callback and I get the (already parsed) h264 data. I intend to modify the java code to the following (no more extractor):

public void run() {
    MediaFormat format = MediaFormat format = MediaFormat.createVideoFormat("video/avc", 640, 480);
    MediaCodec decoder = MediaCodec.createDecoderByType("video/avc");
    decoder.configure(format, mSurface, null, 0);
    decoder.start();
    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
    BufferInfo info = new BufferInfo();
    boolean isEOS = false;
    long startMs = System.currentTimeMillis();
    while (!Thread.interrupted()) {
        if (!isEOS) {
            int inIndex = decoder.dequeueInputBuffer(10000);
            if (inIndex >= 0) {
                ByteBuffer buffer = inputBuffers[inIndex];
                int sampleSize = ??????? // How to memcpy from JNI
                if (sampleSize < 0) {
                    decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    isEOS = true;
                }
            }
        }
        int outIndex = decoder.dequeueOutputBuffer(info, 10000);
        switch (outIndex) {
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            outputBuffers = decoder.getOutputBuffers();
            break;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            break;
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            break;
        default:
            ByteBuffer buffer = outputBuffers[outIndex];
            while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                try {
                    Thread.sleep(10);
                } catch (Exception e) {
                    break;
                }
            }
            decoder.releaseOutputBuffer(outIndex, true);
            break;
        }
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
            break;
    }
    decoder.stop();
    decoder.release();
}

In the JNI code, I receive the following callback:

/*
 * Native callback invoked on a separate (non-Java-created) thread whenever a
 * parsed H.264 buffer is available.  `buffer` and `size` are presumably only
 * valid for the duration of the call, so the data must be copied before
 * returning — TODO confirm against the producing library's contract.
 * (`uint8`/`uint` look like project typedefs for uint8_t/unsigned int.)
 */
static void callback_with_buffer (uint8 *buffer, uint size) {
}

What should I do to memcpy the JNI buffer to the Java inputBuffers[inIndex], especially with the constraint that both threads are not really synchronized?

gregoiregentil
  • 1,793
  • 1
  • 26
  • 56

1 Answer

1

In JNI:

    /* Obtain a JNIEnv valid for *this* thread.  A native callback thread must
       be attached to the VM before making JNI calls; get_jni_env() presumably
       handles AttachCurrentThread/caching — TODO confirm. */
    JNIEnv *env = get_jni_env();
    /* Allocate a Java byte[] and memcpy the native data into it.
       SetByteArrayRegion performs the copy from map.data, so the native
       buffer can be released as soon as this block finishes. */
    jbyteArray jBuf = (*env)->NewByteArray(env, map.size);
    (*env)->SetByteArrayRegion(env, jBuf, 0, map.size, (jbyte*) map.data);
    /* Hand the array to the Java side (the setBuffer method); the Java
       object now owns its own copy of the data. */
    (*env)->CallVoidMethod(env, data->app, set_buffer_method_id, jBuf);
    if ((*env)->ExceptionCheck(env)) {
        GST_ERROR("Failed to call Java method");
        (*env)->ExceptionClear(env);
    }
    /* Release the local reference explicitly — long-lived native threads
       would otherwise exhaust the JNI local-reference table. */
    (*env)->DeleteLocalRef(env, jBuf);

In Java:

// Queue of parsed H.264 chunks handed over from the JNI callback thread.
// The JNI thread add()s while the MediaCodec thread isEmpty()/removeFirst()s
// with no external locking, so the collection itself must be thread-safe.
// A plain LinkedList here is a data race and can be corrupted under
// concurrent access; ConcurrentLinkedDeque keeps the same add/isEmpty/
// removeFirst call sites working.
public final java.util.concurrent.ConcurrentLinkedDeque<byte[]> mData =
        new java.util.concurrent.ConcurrentLinkedDeque<byte[]>();

// Invoked (via CallVoidMethod) from the native callback thread.
private void setBuffer(byte[] buf) {
    mData.add(buf);
}

In the MediaCodec decoder thread:

                ByteBuffer buffer = inputBuffers[inIndex];
                int sampleSize = 0;
                // Reset position/limit so the codec input buffer is filled
                // from the start.
                buffer.clear();
                // Drain every chunk the JNI thread has queued so far.
                // NOTE(review): isEmpty()-then-removeFirst() from this thread
                // while the JNI thread add()s is only safe if mData is a
                // concurrent collection — with a plain LinkedList this races.
                // NOTE(review): buffer.put throws BufferOverflowException if
                // the queued data exceeds the input buffer's capacity; there
                // is no guard here — TODO confirm chunk sizes.
                while (!((IVESampleActivity) mContext).mData.isEmpty()) {
                    byte[] buf = ((IVESampleActivity) mContext).mData.removeFirst();
                    buffer.put(buf);
                    sampleSize += buf.length;
                }

I'm wondering if I could avoid some duplication...

gregoiregentil
  • 1,793
  • 1
  • 26
  • 56