
Example 6 with BufferInfo

Use of android.media.MediaCodec.BufferInfo in project android_frameworks_base by DirtyUnicorns.

The class TrackDecoder, method drainOutputBuffer.

public boolean drainOutputBuffer() {
    BufferInfo outputInfo = new BufferInfo();
    int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(outputInfo, TIMEOUT_US);
    // The codec sets this flag on the last output buffer of the stream.
    if ((outputInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mListener.onEndOfStream(this);
        return false;
    }
    if (mShouldEnqueueEndOfStream) {
        tryEnqueueEndOfStream();
    }
    if (outputBufferIndex >= 0) {
        // A decoded buffer is ready: hand it to the subclass for consumption.
        return onDataAvailable(mMediaCodec, mCodecOutputBuffers, outputBufferIndex, outputInfo);
    } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        // The set of output buffers changed; refresh the cached array (pre-API 21 pattern).
        mCodecOutputBuffers = mMediaCodec.getOutputBuffers();
        return true;
    } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        mOutputFormat = mMediaCodec.getOutputFormat();
        Log.d(LOG_TAG, "Output format has changed to " + mOutputFormat);
        return true;
    }
    // INFO_TRY_AGAIN_LATER: no output was available within TIMEOUT_US.
    return false;
}
Also used : BufferInfo(android.media.MediaCodec.BufferInfo)
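
For context, the method above is one pass of the standard MediaCodec output-handling pattern. Below is a minimal, self-contained sketch of the same pattern; DrainHelper, drainOnce and the TIMEOUT_US value are illustrative names of mine, not part of TrackDecoder, and the getOutputBuffer(index) call assumes API 21+ rather than the deprecated getOutputBuffers() array used above.

import java.nio.ByteBuffer;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;

public final class DrainHelper {
    private static final long TIMEOUT_US = 10_000; // assumed timeout; the snippet above does not show its value

    /** Drains at most one output buffer; returns false once end-of-stream has been reached. */
    public static boolean drainOnce(MediaCodec codec) {
        BufferInfo info = new BufferInfo();
        int index = codec.dequeueOutputBuffer(info, TIMEOUT_US);
        if (index >= 0) {
            ByteBuffer buffer = codec.getOutputBuffer(index); // API 21+ replacement for getOutputBuffers()
            // ... consume info.size bytes starting at info.offset from buffer ...
            codec.releaseOutputBuffer(index, /* render = */ false);
            return (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
        } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = codec.getOutputFormat(); // react to the format change if needed
        }
        // INFO_TRY_AGAIN_LATER (or, pre-API 21, INFO_OUTPUT_BUFFERS_CHANGED): nothing to consume yet.
        return true;
    }
}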

Example 7 with BufferInfo

Use of android.media.MediaCodec.BufferInfo in project android_frameworks_base by ResurrectionRemix.

The class TrackDecoder, method drainOutputBuffer.

public boolean drainOutputBuffer() {
    BufferInfo outputInfo = new BufferInfo();
    int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(outputInfo, TIMEOUT_US);
    if ((outputInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mListener.onEndOfStream(this);
        return false;
    }
    if (mShouldEnqueueEndOfStream) {
        tryEnqueueEndOfStream();
    }
    if (outputBufferIndex >= 0) {
        return onDataAvailable(mMediaCodec, mCodecOutputBuffers, outputBufferIndex, outputInfo);
    } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        mCodecOutputBuffers = mMediaCodec.getOutputBuffers();
        return true;
    } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        mOutputFormat = mMediaCodec.getOutputFormat();
        Log.d(LOG_TAG, "Output format has changed to " + mOutputFormat);
        return true;
    }
    return false;
}
Also used : BufferInfo(android.media.MediaCodec.BufferInfo)

Example 8 with BufferInfo

Use of android.media.MediaCodec.BufferInfo in project android_frameworks_base by crdroidandroid.

The class TrackDecoder, method drainOutputBuffer.

public boolean drainOutputBuffer() {
    BufferInfo outputInfo = new BufferInfo();
    int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(outputInfo, TIMEOUT_US);
    if ((outputInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mListener.onEndOfStream(this);
        return false;
    }
    if (mShouldEnqueueEndOfStream) {
        tryEnqueueEndOfStream();
    }
    if (outputBufferIndex >= 0) {
        return onDataAvailable(mMediaCodec, mCodecOutputBuffers, outputBufferIndex, outputInfo);
    } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        mCodecOutputBuffers = mMediaCodec.getOutputBuffers();
        return true;
    } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        mOutputFormat = mMediaCodec.getOutputFormat();
        Log.d(LOG_TAG, "Output format has changed to " + mOutputFormat);
        return true;
    }
    return false;
}
Also used : BufferInfo(android.media.MediaCodec.BufferInfo)

Example 9 with BufferInfo

Use of android.media.MediaCodec.BufferInfo in project libstreaming by fyhertz.

The class EncoderDebugger, method decode.

/**
 * @param withPrefix If set to true, the decoder will be fed with NALs preceded by 0x00000001.
 * @return How long it took to decode all the NALs.
 */
private long decode(boolean withPrefix) {
    int n = 0, i = 0, j = 0;
    long elapsed = 0, now = timestamp();
    int decInputIndex = 0, decOutputIndex = 0;
    ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers();
    ByteBuffer[] decOutputBuffers = mDecoder.getOutputBuffers();
    BufferInfo info = new BufferInfo();
    while (elapsed < 3000000) {
        // Feeds the decoder with a NAL unit
        if (i < NB_ENCODED) {
            decInputIndex = mDecoder.dequeueInputBuffer(1000000 / FRAMERATE);
            if (decInputIndex >= 0) {
                int l1 = decInputBuffers[decInputIndex].capacity();
                int l2 = mVideo[i].length;
                decInputBuffers[decInputIndex].clear();
                if ((withPrefix && hasPrefix(mVideo[i])) || (!withPrefix && !hasPrefix(mVideo[i]))) {
                    check(l1 >= l2, "The decoder input buffer is not big enough (nal=" + l2 + ", capacity=" + l1 + ").");
                    decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length);
                } else if (withPrefix && !hasPrefix(mVideo[i])) {
                    check(l1 >= l2 + 4, "The decoder input buffer is not big enough (nal=" + (l2 + 4) + ", capacity=" + l1 + ").");
                    decInputBuffers[decInputIndex].put(new byte[] { 0, 0, 0, 1 });
                    decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length);
                } else if (!withPrefix && hasPrefix(mVideo[i])) {
                    check(l1 >= l2 - 4, "The decoder input buffer is not big enough (nal=" + (l2 - 4) + ", capacity=" + l1 + ").");
                    decInputBuffers[decInputIndex].put(mVideo[i], 4, mVideo[i].length - 4);
                }
                // Queue exactly what was written; the prefix handling above can change the payload size.
                mDecoder.queueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].position(), timestamp(), 0);
                i++;
            } else {
                if (VERBOSE)
                    Log.d(TAG, "No buffer available!");
            }
        }
        // Tries to get a decoded image
        decOutputIndex = mDecoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE);
        if (decOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            decOutputBuffers = mDecoder.getOutputBuffers();
        } else if (decOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            mDecOutputFormat = mDecoder.getOutputFormat();
        } else if (decOutputIndex >= 0) {
            if (n > 2) {
                // We have successfully encoded and decoded an image (the first few frames are skipped).
                int length = info.size;
                mDecodedVideo[j] = new byte[length];
                decOutputBuffers[decOutputIndex].clear();
                decOutputBuffers[decOutputIndex].get(mDecodedVideo[j], 0, length);
                // Converts the decoded frame to NV21
                convertToNV21(j);
                if (j >= NB_DECODED - 1) {
                    flushMediaCodec(mDecoder);
                    if (VERBOSE)
                        Log.v(TAG, "Decoding " + n + " frames took " + elapsed / 1000 + " ms");
                    return elapsed;
                }
                j++;
            }
            mDecoder.releaseOutputBuffer(decOutputIndex, false);
            n++;
        }
        elapsed = timestamp() - now;
    }
    throw new RuntimeException("The decoder did not decode anything.");
}
Also used : BufferInfo(android.media.MediaCodec.BufferInfo) ByteBuffer(java.nio.ByteBuffer) SuppressLint(android.annotation.SuppressLint)
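
The branches in decode() hinge on hasPrefix(), i.e. on whether a NAL unit already starts with the 4-byte Annex-B start code 0x00000001. That helper is not shown in the snippet; a minimal sketch of such a check (the actual hasPrefix in EncoderDebugger may differ) would be:

// Hypothetical helper: true if the NAL unit begins with the Annex-B start code 0x00000001.
private static boolean startsWithAnnexBPrefix(byte[] nal) {
    return nal != null && nal.length >= 4
            && nal[0] == 0 && nal[1] == 0 && nal[2] == 0 && nal[3] == 1;
}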

Example 10 with BufferInfo

Use of android.media.MediaCodec.BufferInfo in project libstreaming by fyhertz.

The class EncoderDebugger, method searchSPSandPPS.

/**
	 * Tries to obtain the SPS and the PPS for the encoder.
	 */
private long searchSPSandPPS() {
    ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
    BufferInfo info = new BufferInfo();
    byte[] csd = new byte[128];
    int len = 0, p = 4, q = 4;
    long elapsed = 0, now = timestamp();
    while (elapsed < 3000000 && (mSPS == null || mPPS == null)) {
        // Some encoders won't give us the SPS and PPS unless they receive something to encode first...
        int bufferIndex = mEncoder.dequeueInputBuffer(1000000 / FRAMERATE);
        if (bufferIndex >= 0) {
            check(inputBuffers[bufferIndex].capacity() >= mData.length, "The input buffer is not big enough.");
            inputBuffers[bufferIndex].clear();
            inputBuffers[bufferIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
        } else {
            if (VERBOSE)
                Log.e(TAG, "No buffer available!");
        }
        // We are looking for the SPS and the PPS here. As always, Android is very inconsistent: I have observed that some
        // encoders will give those parameters through the MediaFormat object (that is the normal behaviour),
        // but some others will not, in which case we try to find a NAL unit of type 7 or 8 in the byte stream output by the encoder...
        int index = mEncoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // The SPS and PPS should be here
            MediaFormat format = mEncoder.getOutputFormat();
            ByteBuffer spsb = format.getByteBuffer("csd-0");
            ByteBuffer ppsb = format.getByteBuffer("csd-1");
            mSPS = new byte[spsb.capacity() - 4];
            spsb.position(4);
            spsb.get(mSPS, 0, mSPS.length);
            mPPS = new byte[ppsb.capacity() - 4];
            ppsb.position(4);
            ppsb.get(mPPS, 0, mPPS.length);
            break;
        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mEncoder.getOutputBuffers();
        } else if (index >= 0) {
            len = info.size;
            if (len < 128) {
                outputBuffers[index].get(csd, 0, len);
                if (len > 0 && csd[0] == 0 && csd[1] == 0 && csd[2] == 0 && csd[3] == 1) {
                    // Parses the SPS and PPS; they can come in either order depending on the phone, so we don't make any assumption about that
                    while (p < len) {
                        while (!(csd[p + 0] == 0 && csd[p + 1] == 0 && csd[p + 2] == 0 && csd[p + 3] == 1) && p + 3 < len) p++;
                        if (p + 3 >= len)
                            p = len;
                        if ((csd[q] & 0x1F) == 7) {
                            mSPS = new byte[p - q];
                            System.arraycopy(csd, q, mSPS, 0, p - q);
                        } else {
                            mPPS = new byte[p - q];
                            System.arraycopy(csd, q, mPPS, 0, p - q);
                        }
                        p += 4;
                        q = p;
                    }
                }
            }
            mEncoder.releaseOutputBuffer(index, false);
        }
        elapsed = timestamp() - now;
    }
    check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
    mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
    mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);
    return elapsed;
}
Also used : MediaFormat(android.media.MediaFormat) BufferInfo(android.media.MediaCodec.BufferInfo) ByteBuffer(java.nio.ByteBuffer) SuppressLint(android.annotation.SuppressLint)
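
The branch that fills mSPS rather than mPPS relies on the H.264 NAL header layout: the low five bits of the first byte after a start code hold the NAL unit type, with 7 meaning SPS and 8 meaning PPS. A small illustrative helper (the name and switch are mine, not part of EncoderDebugger):

/** Classifies an H.264 NAL unit from its header byte (the byte right after the 0x00000001 prefix). */
static String nalUnitKind(byte headerByte) {
    int nalType = headerByte & 0x1F; // low 5 bits = nal_unit_type
    switch (nalType) {
        case 7:  return "SPS"; // sequence parameter set, matched by (csd[q] & 0x1F) == 7 above
        case 8:  return "PPS"; // picture parameter set
        default: return "other (type " + nalType + ")";
    }
}

In libstreaming the resulting Base64 strings (mB64SPS, mB64PPS) typically end up in the SDP sprop-parameter-sets attribute announced to RTSP clients.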

Aggregations

BufferInfo (android.media.MediaCodec.BufferInfo): 14 uses
MediaFormat (android.media.MediaFormat): 6 uses
SuppressLint (android.annotation.SuppressLint): 5 uses
IOException (java.io.IOException): 5 uses
Rect (android.graphics.Rect): 4 uses
Surface (android.view.Surface): 4 uses
ByteBuffer (java.nio.ByteBuffer): 4 uses
MediaCodec (android.media.MediaCodec): 1 use
MediaExtractor (android.media.MediaExtractor): 1 use
ParcelFileDescriptor (android.os.ParcelFileDescriptor): 1 use
CalledByNative (org.chromium.base.CalledByNative): 1 use