use of android.media.MediaCodec.BufferInfo in project robolectric by robolectric.
the class ShadowMediaCodecTest method inSyncMode_flushDiscardsQueuedInputBuffer.
@Test
public void inSyncMode_flushDiscardsQueuedInputBuffer() throws IOException {
    MediaCodec codec = createSyncEncoder();
    // Dequeue the output format.
    codec.dequeueOutputBuffer(new BufferInfo(), /* timeoutUs= */ 0);
    int inputBufferIndex = codec.dequeueInputBuffer(/* timeoutUs= */ 0);
    codec.getInputBuffer(inputBufferIndex).put(generateByteArray(/* size= */ 128));
    codec.queueInputBuffer(
            inputBufferIndex,
            /* offset= */ 0,
            /* size= */ 128,
            /* presentationTimeUs= */ 123456,
            /* flags= */ 0);
    codec.flush();
    // After flush() the queued input is discarded: no output is available, and
    // the same input buffer comes back empty, with its position reset to 0.
    assertThat(codec.dequeueOutputBuffer(new BufferInfo(), /* timeoutUs= */ 0))
            .isEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
    assertThat(codec.dequeueInputBuffer(/* timeoutUs= */ 0)).isEqualTo(inputBufferIndex);
    assertThat(codec.getInputBuffer(inputBufferIndex).position()).isEqualTo(0);
}
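The test above assumes a createSyncEncoder() helper that registers a fake codec with Robolectric's ShadowMediaCodec and starts it in synchronous mode (no callback). Below is a minimal sketch of what such a helper could look like; the MIME type, format values, and buffer sizes are illustrative assumptions, not the test's actual configuration.

private static MediaCodec createSyncEncoder() throws IOException {
    // Register a pass-through fake encoder with the shadow before creating the codec.
    ShadowMediaCodec.addEncoder(
            "audio/mp4a-latm", // assumed MIME type for illustration
            new ShadowMediaCodec.CodecConfig(
                    /* inputBufferSize= */ 512,
                    /* outputBufferSize= */ 512,
                    (in, out) -> out.put(in))); // copy input straight to output
    MediaFormat format = MediaFormat.createAudioFormat(
            "audio/mp4a-latm", /* sampleRate= */ 44100, /* channelCount= */ 2);
    MediaCodec codec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    codec.configure(format, /* surface= */ null, /* crypto= */ null,
            MediaCodec.CONFIGURE_FLAG_ENCODE);
    codec.start(); // no callback registered, so the codec runs in synchronous mode
    return codec;
}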
use of android.media.MediaCodec.BufferInfo in project android_frameworks_base by DirtyUnicorns (identical code also ships in the ResurrectionRemix and crdroidandroid forks of android_frameworks_base, so it is shown once here).
the class DisplaySinkService method updateSurfaceFromUi.
private void updateSurfaceFromUi(SurfaceHolder holder) {
    Surface surface = null;
    int width = 0, height = 0;
    if (holder != null && !holder.isCreating()) {
        surface = holder.getSurface();
        if (surface.isValid()) {
            final Rect frame = holder.getSurfaceFrame();
            width = frame.width();
            height = frame.height();
        } else {
            surface = null;
        }
    }
    synchronized (mSurfaceAndCodecLock) {
        if (mSurface == surface && mSurfaceWidth == width && mSurfaceHeight == height) {
            return;
        }
        mSurface = surface;
        mSurfaceWidth = width;
        mSurfaceHeight = height;
        if (mCodec != null) {
            mCodec.stop();
            mCodec = null;
            mCodecInputBuffers = null;
            mCodecBufferInfo = null;
        }
        if (mSurface != null) {
            MediaFormat format = MediaFormat.createVideoFormat(
                    "video/avc", mSurfaceWidth, mSurfaceHeight);
            try {
                mCodec = MediaCodec.createDecoderByType("video/avc");
            } catch (IOException e) {
                throw new RuntimeException("failed to create video/avc decoder", e);
            }
            mCodec.configure(format, mSurface, null, 0);
            mCodec.start();
            mCodecBufferInfo = new BufferInfo();
        }
        mTransportHandler.post(new Runnable() {
            @Override
            public void run() {
                sendSinkStatus();
            }
        });
    }
}
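The fields reset above (mCodec, mCodecInputBuffers, mCodecBufferInfo) are consumed on the transport side once encoded frames arrive. The following is a hedged sketch of what that consumer might look like; the method name decodeChunk and its body are illustrative assumptions, not the actual DisplaySinkService code.

private void decodeChunk(ByteBuffer content, long presentationTimeUs) {
    synchronized (mSurfaceAndCodecLock) {
        if (mCodec == null) {
            // No valid surface, so there is no decoder to feed.
            return;
        }
        if (mCodecInputBuffers == null) {
            mCodecInputBuffers = mCodec.getInputBuffers();
        }
        // Copy the encoded chunk into a free input buffer, if one is available.
        int inputIndex = mCodec.dequeueInputBuffer(/* timeoutUs= */ 0);
        if (inputIndex >= 0) {
            ByteBuffer input = mCodecInputBuffers[inputIndex];
            input.clear();
            input.put(content);
            mCodec.queueInputBuffer(inputIndex, 0, input.position(), presentationTimeUs, 0);
        }
        // Render any decoded frame straight to the surface the codec was
        // configured with; mCodecBufferInfo receives the frame metadata.
        int outputIndex = mCodec.dequeueOutputBuffer(mCodecBufferInfo, /* timeoutUs= */ 0);
        if (outputIndex >= 0) {
            mCodec.releaseOutputBuffer(outputIndex, /* render= */ true);
        }
    }
}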
use of android.media.MediaCodec.BufferInfo in project libstreaming by fyhertz.
the class EncoderDebugger method decode.
/**
 * @param withPrefix If set to true, the decoder will be fed NALs preceded by the 0x00000001 start code.
 * @return How long it took to decode all the NALs.
 */
private long decode(boolean withPrefix) {
    int n = 0, i = 0, j = 0;
    long elapsed = 0, now = timestamp();
    int decInputIndex = 0, decOutputIndex = 0;
    ByteBuffer[] decInputBuffers = mDecoder.getInputBuffers();
    ByteBuffer[] decOutputBuffers = mDecoder.getOutputBuffers();
    BufferInfo info = new BufferInfo();
    // Run for at most 3 seconds (timestamps are in microseconds).
    while (elapsed < 3000000) {
        // Feeds the decoder with a NAL unit.
        if (i < NB_ENCODED) {
            decInputIndex = mDecoder.dequeueInputBuffer(1000000 / FRAMERATE);
            if (decInputIndex >= 0) {
                int l1 = decInputBuffers[decInputIndex].capacity();
                int l2 = mVideo[i].length;
                decInputBuffers[decInputIndex].clear();
                // The size passed to queueInputBuffer() must match the number of
                // bytes actually written, which changes when the 0x00000001
                // start code is added or stripped below.
                int size = l2;
                if ((withPrefix && hasPrefix(mVideo[i])) || (!withPrefix && !hasPrefix(mVideo[i]))) {
                    check(l1 >= l2, "The decoder input buffer is not big enough (nal=" + l2 + ", capacity=" + l1 + ").");
                    decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length);
                } else if (withPrefix && !hasPrefix(mVideo[i])) {
                    check(l1 >= l2 + 4, "The decoder input buffer is not big enough (nal=" + (l2 + 4) + ", capacity=" + l1 + ").");
                    decInputBuffers[decInputIndex].put(new byte[] { 0, 0, 0, 1 });
                    decInputBuffers[decInputIndex].put(mVideo[i], 0, mVideo[i].length);
                    size = l2 + 4;
                } else if (!withPrefix && hasPrefix(mVideo[i])) {
                    check(l1 >= l2 - 4, "The decoder input buffer is not big enough (nal=" + (l2 - 4) + ", capacity=" + l1 + ").");
                    decInputBuffers[decInputIndex].put(mVideo[i], 4, mVideo[i].length - 4);
                    size = l2 - 4;
                }
                mDecoder.queueInputBuffer(decInputIndex, 0, size, timestamp(), 0);
                i++;
            } else if (VERBOSE) {
                Log.d(TAG, "No buffer available!");
            }
        }
        // Tries to get a decoded image.
        decOutputIndex = mDecoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE);
        if (decOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            decOutputBuffers = mDecoder.getOutputBuffers();
        } else if (decOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            mDecOutputFormat = mDecoder.getOutputFormat();
        } else if (decOutputIndex >= 0) {
            // Skip the first few decoded frames.
            if (n > 2) {
                // We have successfully encoded and decoded an image!
                int length = info.size;
                mDecodedVideo[j] = new byte[length];
                decOutputBuffers[decOutputIndex].clear();
                decOutputBuffers[decOutputIndex].get(mDecodedVideo[j], 0, length);
                // Converts the decoded frame to NV21.
                convertToNV21(j);
                if (j >= NB_DECODED - 1) {
                    flushMediaCodec(mDecoder);
                    if (VERBOSE) Log.v(TAG, "Decoding " + n + " frames took " + elapsed / 1000 + " ms");
                    return elapsed;
                }
                j++;
            }
            mDecoder.releaseOutputBuffer(decOutputIndex, false);
            n++;
        }
        elapsed = timestamp() - now;
    }
    throw new RuntimeException("The decoder did not decode anything.");
}
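The loop above branches on hasPrefix() to decide whether the 0x00000001 start code must be added or stripped. Below is a minimal sketch of that check, assuming a NAL unit counts as prefixed when it begins with the four-byte Annex B start code; the real helper in EncoderDebugger may differ.

private static boolean hasPrefix(byte[] nal) {
    // Annex B start code: 0x00 0x00 0x00 0x01.
    return nal.length >= 4 && nal[0] == 0 && nal[1] == 0 && nal[2] == 0 && nal[3] == 1;
}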