Use of android.media.MediaFormat in project libstreaming by fyhertz.
Class EncoderDebugger, method convertToNV21.
/**
 * Converts the image obtained from the decoder to NV21.
 */
private void convertToNV21(int k) {
    byte[] buffer = new byte[3 * mSize / 2];
    int stride = mWidth, sliceHeight = mHeight;
    int colorFormat = mDecoderColorFormat;
    boolean planar = false;
    if (mDecOutputFormat != null) {
        MediaFormat format = mDecOutputFormat;
        // The decoder may pad each row to "stride" bytes and the luma plane
        // to "slice-height" rows; read both from the output format if present.
        if (format.containsKey("slice-height")) {
            sliceHeight = format.getInteger("slice-height");
            if (sliceHeight < mHeight) sliceHeight = mHeight;
        }
        if (format.containsKey("stride")) {
            stride = format.getInteger("stride");
            if (stride < mWidth) stride = mWidth;
        }
        if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT) && format.getInteger(MediaFormat.KEY_COLOR_FORMAT) > 0) {
            colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
        }
    }
    switch (colorFormat) {
        // Semi-planar formats: the chroma samples are already interleaved in pairs
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
            planar = false;
            break;
        // Planar formats: U and V are stored in two separate planes
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            planar = true;
            break;
    }
    // Copy the luma (Y) plane, jumping over the stride padding at each row boundary
    for (int i = 0; i < mSize; i++) {
        if (i % mWidth == 0) i += stride - mWidth;
        buffer[i] = mDecodedVideo[k][i];
    }
    if (!planar) {
        // Semi-planar source: swap the bytes of each interleaved chroma pair
        // to obtain the VU order that NV21 expects
        for (int i = 0, j = 0; j < mSize / 4; i++, j++) {
            // Skip the padding at the start of each chroma row
            // (each row holds mWidth/2 chroma samples)
            if (i % (mWidth / 2) == 0) i += (stride - mWidth) / 2;
            buffer[mSize + 2 * j + 1] = mDecodedVideo[k][stride * sliceHeight + 2 * i];
            buffer[mSize + 2 * j] = mDecodedVideo[k][stride * sliceHeight + 2 * i + 1];
        }
    } else {
        // Planar source: interleave the two separate chroma planes into VU pairs
        for (int i = 0, j = 0; j < mSize / 4; i++, j++) {
            if (i % (mWidth / 2) == 0) i += (stride - mWidth) / 2;
            buffer[mSize + 2 * j + 1] = mDecodedVideo[k][stride * sliceHeight + i];
            buffer[mSize + 2 * j] = mDecodedVideo[k][stride * sliceHeight * 5 / 4 + i];
        }
    }
    mDecodedVideo[k] = buffer;
}
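For context, the mDecOutputFormat field read above is normally captured while draining the decoder, at the moment MediaCodec reports that its output format has changed. A minimal sketch of that standard pattern follows; the names decoder, info, drainDecoderOnce, and the timeout value are placeholders, not libstreaming identifiers.

import android.media.MediaCodec;
import android.media.MediaFormat;

// Sketch: capture the decoder's real output geometry (stride, slice-height,
// color format) so that a conversion routine like convertToNV21() can use it.
private MediaFormat mDecOutputFormat;

private int drainDecoderOnce(MediaCodec decoder, MediaCodec.BufferInfo info) {
    int index = decoder.dequeueOutputBuffer(info, 10000 /* timeout in us */);
    if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // The decoder announces its actual output layout here
        mDecOutputFormat = decoder.getOutputFormat();
    }
    return index;
}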
Use of android.media.MediaFormat in project libstreaming by fyhertz.
Class VideoStream, method encodeWithMediaCodecMethod2.
/**
 * Video encoding is done by a MediaCodec.
 * But here we will use the buffer-to-surface method.
 */
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {
    Log.d(TAG, "Video encoded using the MediaCodec API with a surface");
    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();
    // Estimates the frame rate of the camera
    measureFramerate();
    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    // COLOR_FormatSurface: the encoder takes its input from a Surface instead of ByteBuffers
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // The camera preview is rendered to the encoder's input surface
    Surface surface = mMediaCodec.createInputSurface();
    ((SurfaceView) mSurfaceView).addMediaCodecSurface(surface);
    mMediaCodec.start();
    // The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
    mPacketizer.start();
    mStreaming = true;
}
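MediaCodecInputStream is a libstreaming helper that exposes the encoder's output as an InputStream for the packetizer. The pattern it builds on is the standard MediaCodec drain loop; below is a minimal sketch of that pattern, not libstreaming's actual implementation. The name drainEncoderOnce and the timeout are hypothetical, and getOutputBuffer requires API 21+.

import java.nio.ByteBuffer;
import android.media.MediaCodec;

// Sketch: pull one encoded access unit out of the encoder and release the
// buffer back to the codec.
static byte[] drainEncoderOnce(MediaCodec encoder) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = encoder.dequeueOutputBuffer(info, 10000 /* timeout in us */);
    if (index < 0) return null; // no output ready yet
    ByteBuffer output = encoder.getOutputBuffer(index);
    byte[] chunk = new byte[info.size];
    output.position(info.offset);
    output.get(chunk, 0, info.size);
    encoder.releaseOutputBuffer(index, false);
    return chunk; // one chunk of the H.264 bit stream, ready to be packetized
}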