Example 86 with MediaFormat

use of android.media.MediaFormat in project libstreaming by fyhertz.

the class EncoderDebugger, method convertToNV21.

/**
 * Converts the image obtained from the decoder to NV21.
 */
private void convertToNV21(int k) {
    // NV21 stores mSize bytes of luma followed by mSize/2 bytes of interleaved V,U chroma
    byte[] buffer = new byte[3 * mSize / 2];
    int stride = mWidth, sliceHeight = mHeight;
    int colorFormat = mDecoderColorFormat;
    boolean planar = false;
    if (mDecOutputFormat != null) {
        MediaFormat format = mDecOutputFormat;
        if (format != null) {
            if (format.containsKey("slice-height")) {
                sliceHeight = format.getInteger("slice-height");
                if (sliceHeight < mHeight)
                    sliceHeight = mHeight;
            }
            if (format.containsKey("stride")) {
                stride = format.getInteger("stride");
                if (stride < mWidth)
                    stride = mWidth;
            }
            if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT) && format.getInteger(MediaFormat.KEY_COLOR_FORMAT) > 0) {
                colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
            }
        }
    }
    // Semi-planar formats interleave U and V in one chroma plane;
    // planar formats keep U and V in two separate planes
    switch (colorFormat) {
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
            planar = false;
            break;
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            planar = true;
            break;
    }
    // Copy the luma plane; the skip accounts for stride padding (it adds 0 when stride == mWidth)
    for (int i = 0; i < mSize; i++) {
        if (i % mWidth == 0)
            i += stride - mWidth;
        buffer[i] = mDecodedVideo[k][i];
    }
    if (!planar) {
        // Semi-planar source (NV12-like): chroma is already interleaved as U,V;
        // each pair is swapped to get NV21's V-first order.
        // Note: "i % mWidth / 2" parses as "(i % mWidth) / 2", not "i % (mWidth / 2)";
        // the skip is harmless when stride == mWidth since it then adds 0.
        for (int i = 0, j = 0; j < mSize / 4; i += 1, j += 1) {
            if (i % mWidth / 2 == 0)
                i += (stride - mWidth) / 2;
            buffer[mSize + 2 * j + 1] = mDecodedVideo[k][stride * sliceHeight + 2 * i];
            buffer[mSize + 2 * j] = mDecodedVideo[k][stride * sliceHeight + 2 * i + 1];
        }
    } else {
        // Planar source (I420-like): U and V are separate planes; interleave them as V,U
        for (int i = 0, j = 0; j < mSize / 4; i += 1, j += 1) {
            if (i % mWidth / 2 == 0)
                i += (stride - mWidth) / 2;
            buffer[mSize + 2 * j + 1] = mDecodedVideo[k][stride * sliceHeight + i];
            buffer[mSize + 2 * j] = mDecodedVideo[k][stride * sliceHeight * 5 / 4 + i];
        }
    }
    mDecodedVideo[k] = buffer;
}
Also used : MediaFormat(android.media.MediaFormat), SuppressLint(android.annotation.SuppressLint)
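The stride and slice-height corrections above make the conversion harder to follow than the underlying idea. Below is a minimal sketch of the planar case for a tightly packed frame, assuming stride == width and sliceHeight == height; i420ToNV21 is a hypothetical helper for illustration, not part of libstreaming:

public static byte[] i420ToNV21(byte[] i420, int width, int height) {
    int size = width * height;
    byte[] nv21 = new byte[3 * size / 2];
    // The luma plane is identical in both layouts
    System.arraycopy(i420, 0, nv21, 0, size);
    // In I420, the U plane follows Y and the V plane follows U
    int uOffset = size;
    int vOffset = size + size / 4;
    for (int j = 0; j < size / 4; j++) {
        nv21[size + 2 * j] = i420[vOffset + j];     // V comes first in NV21
        nv21[size + 2 * j + 1] = i420[uOffset + j]; // then U
    }
    return nv21;
}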

Example 87 with MediaFormat

use of android.media.MediaFormat in project libstreaming by fyhertz.

the class VideoStream, method encodeWithMediaCodecMethod2.

/**
 * Video encoding is done by a MediaCodec.
 * But here the encoder is fed through an input Surface rather than through input buffers
 */
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {
    Log.d(TAG, "Video encoded using the MediaCodec API with a surface");
    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();
    // Estimates the frame rate of the camera
    measureFramerate();
    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // The encoder supplies its own input Surface; frames rendered to it are encoded
    // directly, so no manual color format conversion is needed
    Surface surface = mMediaCodec.createInputSurface();
    ((SurfaceView) mSurfaceView).addMediaCodecSurface(surface);
    mMediaCodec.start();
    // The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
    mPacketizer.start();
    mStreaming = true;
}
Also used : MediaFormat(android.media.MediaFormat), EncoderDebugger(net.majorkernelpanic.streaming.hw.EncoderDebugger), MediaCodecInputStream(net.majorkernelpanic.streaming.rtp.MediaCodecInputStream), SurfaceView(net.majorkernelpanic.streaming.gl.SurfaceView), Surface(android.view.Surface), SuppressLint(android.annotation.SuppressLint)
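MediaCodecInputStream is what lets the packetizer consume the encoder's output as an ordinary InputStream. Conceptually it wraps a drain loop like the sketch below; this is illustrative only (the method name drain is hypothetical, and getOutputBuffer requires API 21+), not libstreaming's actual implementation:

// Requires android.media.MediaCodec, android.media.MediaFormat, java.nio.ByteBuffer
static void drain(MediaCodec codec) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while (true) {
        int index = codec.dequeueOutputBuffer(info, 10000 /* timeout in µs */);
        if (index >= 0) {
            ByteBuffer out = codec.getOutputBuffer(index); // API 21+
            byte[] chunk = new byte[info.size];
            out.position(info.offset);
            out.get(chunk, 0, info.size);
            // 'chunk' now holds one encoded unit; this is what the packetizer
            // reads and wraps into RTP packets
            codec.releaseOutputBuffer(index, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
                break;
        } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // For H.264 the new format carries SPS/PPS as "csd-0"/"csd-1"
            MediaFormat newFormat = codec.getOutputFormat();
        }
    }
}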

Aggregations

MediaFormat (android.media.MediaFormat): 87
IOException (java.io.IOException): 27
ByteBuffer (java.nio.ByteBuffer): 18
MediaExtractor (android.media.MediaExtractor): 16
MediaCodec (android.media.MediaCodec): 15
InputStream (java.io.InputStream): 12
TargetApi (android.annotation.TargetApi): 11
Context (android.content.Context): 10
Handler (android.os.Handler): 10
HandlerThread (android.os.HandlerThread): 10
Message (android.os.Message): 10
Pair (android.util.Pair): 10
Runnable (java.lang.Runnable): 10
SuppressLint (android.annotation.SuppressLint): 9
File (java.io.File): 8
BufferInfo (android.media.MediaCodec.BufferInfo): 7
FileInputStream (java.io.FileInputStream): 7
MediaPlayer (android.media.MediaPlayer): 6
Surface (android.view.Surface): 6
AssetFileDescriptor (android.content.res.AssetFileDescriptor): 5