Use of android.media.MediaCodec.BufferInfo in project chromeview by pwnall: class WebAudioMediaCodecBridge, method decodeAudioFile.
// Decodes the encoded audio referenced by |inputFD| and streams the decoded
// PCM chunks to the native WebAudio destination via nativeOnChunkDecoded().
//
// @param ctx                    Android context (unused here; kept for the JNI signature).
// @param nativeMediaCodecBridge Native-side bridge pointer receiving decoded chunks.
// @param inputFD                Raw file descriptor of the encoded audio. Ownership is
//                               taken with adoptFd() and handed back with detachFd() —
//                               the native caller is responsible for closing it.
// @param dataSize               Length in bytes of the encoded data.
// @return true if the whole stream decoded successfully, false otherwise.
@CalledByNative
private static boolean decodeAudioFile(Context ctx, int nativeMediaCodecBridge, int inputFD, long dataSize) {
    // setDataSource() takes a long, but sizes beyond 2^31-1 are rejected to
    // keep downstream int arithmetic (sample sizes, chunk sizes) safe.
    if (dataSize < 0 || dataSize > 0x7fffffff)
        return false;
    MediaExtractor extractor = new MediaExtractor();
    ParcelFileDescriptor encodedFD = ParcelFileDescriptor.adoptFd(inputFD);
    try {
        extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize);
    } catch (Exception e) {
        e.printStackTrace();
        extractor.release();  // was leaked on this path in the original
        encodedFD.detachFd();
        return false;
    }
    if (extractor.getTrackCount() <= 0) {
        extractor.release();
        encodedFD.detachFd();
        return false;
    }
    MediaFormat format = extractor.getTrackFormat(0);
    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    String mime = format.getString(MediaFormat.KEY_MIME);
    if (mime == null) {
        // Without a MIME type no decoder can be created; bail out cleanly
        // instead of letting createDecoderByType() throw.
        extractor.release();
        encodedFD.detachFd();
        return false;
    }
    long durationMicroseconds = 0;
    if (format.containsKey(MediaFormat.KEY_DURATION)) {
        try {
            durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION);
        } catch (Exception e) {
            // Some containers report no duration; proceed with 0.
            Log.d(LOG_TAG, "Cannot get duration");
        }
    }
    if (DEBUG) {
        Log.d(LOG_TAG, "Tracks: " + extractor.getTrackCount() + " Rate: " + sampleRate
                + " Channels: " + channelCount + " Mime: " + mime
                + " Duration: " + durationMicroseconds + " microsec");
    }
    nativeInitializeDestination(nativeMediaCodecBridge, channelCount, sampleRate, durationMicroseconds);
    // Create the decoder. The finally block below guarantees codec/extractor
    // teardown and fd hand-back on every exit path, including exceptions
    // thrown from inside the decode loop (the original leaked all three).
    MediaCodec codec = MediaCodec.createDecoderByType(mime);
    try {
        codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
        codec.start();
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        // A track must be selected before readSampleData() returns data.
        extractor.selectTrack(0);
        // Hoisted out of the loop: dequeueOutputBuffer() refills it each call,
        // so one instance suffices (the original allocated one per iteration).
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        // Keep processing until the output side signals end-of-stream.
        while (!sawOutputEOS) {
            if (!sawInputEOS) {
                // Input side: feed the next encoded sample, or an empty
                // buffer flagged END_OF_STREAM once the source is drained.
                int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                    int sampleSize = extractor.readSampleData(dstBuf, 0);
                    long presentationTimeMicroSec = 0;
                    if (sampleSize < 0) {
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeMicroSec = extractor.getSampleTime();
                    }
                    codec.queueInputBuffer(inputBufIndex, 0 /* offset */, sampleSize,
                            presentationTimeMicroSec,
                            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                    if (!sawInputEOS) {
                        extractor.advance();
                    }
                }
            }
            // Output side: drain decoded PCM into the native destination.
            final int outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS);
            if (outputBufIndex >= 0) {
                ByteBuffer buf = codecOutputBuffers[outputBufIndex];
                if (info.size > 0) {
                    nativeOnChunkDecoded(nativeMediaCodecBridge, buf, info.size);
                }
                buf.clear();
                codec.releaseOutputBuffer(outputBufIndex, false /* render */);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }
            } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // The output buffer set was re-allocated; refresh our references.
                codecOutputBuffers = codec.getOutputBuffers();
            }
        }
        codec.stop();
    } finally {
        // Hand the fd back to native code (which closes it) and free codec
        // and extractor resources on success and failure alike.
        encodedFD.detachFd();
        codec.release();
        extractor.release();
    }
    return true;
}
Use of android.media.MediaCodec.BufferInfo in project android_frameworks_base by DirtyUnicorns: class DisplaySinkService, method updateSurfaceFromUi.
// Re-binds the video decoder to the current UI surface. Tears down any
// existing codec, then (re)creates an AVC decoder targeting the new surface
// and notifies the source of the updated sink status.
//
// @param holder the UI's surface holder, or null when the surface is gone.
//               A holder still in the middle of creation is treated as
//               "no surface yet".
private void updateSurfaceFromUi(SurfaceHolder holder) {
    Surface surface = null;
    int width = 0, height = 0;
    if (holder != null && !holder.isCreating()) {
        surface = holder.getSurface();
        if (surface.isValid()) {
            final Rect frame = holder.getSurfaceFrame();
            width = frame.width();
            height = frame.height();
        } else {
            // An invalid surface is equivalent to having none.
            surface = null;
        }
    }
    synchronized (mSurfaceAndCodecLock) {
        if (mSurface == surface && mSurfaceWidth == width && mSurfaceHeight == height) {
            // Nothing changed; avoid a needless codec restart.
            return;
        }
        mSurface = surface;
        mSurfaceWidth = width;
        mSurfaceHeight = height;
        if (mCodec != null) {
            mCodec.stop();
            // release() frees the codec's native resources; stop() alone
            // leaks them on every reconfiguration (the original omitted this).
            mCodec.release();
            mCodec = null;
            mCodecInputBuffers = null;
            mCodecBufferInfo = null;
        }
        if (mSurface != null) {
            MediaFormat format = MediaFormat.createVideoFormat("video/avc", mSurfaceWidth, mSurfaceHeight);
            try {
                mCodec = MediaCodec.createDecoderByType("video/avc");
            } catch (IOException e) {
                throw new RuntimeException("failed to create video/avc decoder", e);
            }
            mCodec.configure(format, mSurface, null, 0);
            mCodec.start();
            mCodecBufferInfo = new BufferInfo();
        }
        // Status is sent from the transport thread via its handler.
        mTransportHandler.post(new Runnable() {
            @Override
            public void run() {
                sendSinkStatus();
            }
        });
    }
}
Use of android.media.MediaCodec.BufferInfo in project android_frameworks_base by ResurrectionRemix: class DisplaySinkService, method updateSurfaceFromUi.
// Re-binds the video decoder to the current UI surface. Tears down any
// existing codec, then (re)creates an AVC decoder targeting the new surface
// and notifies the source of the updated sink status.
//
// @param holder the UI's surface holder, or null when the surface is gone.
//               A holder still in the middle of creation is treated as
//               "no surface yet".
private void updateSurfaceFromUi(SurfaceHolder holder) {
    Surface surface = null;
    int width = 0, height = 0;
    if (holder != null && !holder.isCreating()) {
        surface = holder.getSurface();
        if (surface.isValid()) {
            final Rect frame = holder.getSurfaceFrame();
            width = frame.width();
            height = frame.height();
        } else {
            // An invalid surface is equivalent to having none.
            surface = null;
        }
    }
    synchronized (mSurfaceAndCodecLock) {
        if (mSurface == surface && mSurfaceWidth == width && mSurfaceHeight == height) {
            // Nothing changed; avoid a needless codec restart.
            return;
        }
        mSurface = surface;
        mSurfaceWidth = width;
        mSurfaceHeight = height;
        if (mCodec != null) {
            mCodec.stop();
            // release() frees the codec's native resources; stop() alone
            // leaks them on every reconfiguration (the original omitted this).
            mCodec.release();
            mCodec = null;
            mCodecInputBuffers = null;
            mCodecBufferInfo = null;
        }
        if (mSurface != null) {
            MediaFormat format = MediaFormat.createVideoFormat("video/avc", mSurfaceWidth, mSurfaceHeight);
            try {
                mCodec = MediaCodec.createDecoderByType("video/avc");
            } catch (IOException e) {
                throw new RuntimeException("failed to create video/avc decoder", e);
            }
            mCodec.configure(format, mSurface, null, 0);
            mCodec.start();
            mCodecBufferInfo = new BufferInfo();
        }
        // Status is sent from the transport thread via its handler.
        mTransportHandler.post(new Runnable() {
            @Override
            public void run() {
                sendSinkStatus();
            }
        });
    }
}
Use of android.media.MediaCodec.BufferInfo in project android_frameworks_base by crdroidandroid: class DisplaySinkService, method updateSurfaceFromUi.
// Re-binds the video decoder to the current UI surface. Tears down any
// existing codec, then (re)creates an AVC decoder targeting the new surface
// and notifies the source of the updated sink status.
//
// @param holder the UI's surface holder, or null when the surface is gone.
//               A holder still in the middle of creation is treated as
//               "no surface yet".
private void updateSurfaceFromUi(SurfaceHolder holder) {
    Surface surface = null;
    int width = 0, height = 0;
    if (holder != null && !holder.isCreating()) {
        surface = holder.getSurface();
        if (surface.isValid()) {
            final Rect frame = holder.getSurfaceFrame();
            width = frame.width();
            height = frame.height();
        } else {
            // An invalid surface is equivalent to having none.
            surface = null;
        }
    }
    synchronized (mSurfaceAndCodecLock) {
        if (mSurface == surface && mSurfaceWidth == width && mSurfaceHeight == height) {
            // Nothing changed; avoid a needless codec restart.
            return;
        }
        mSurface = surface;
        mSurfaceWidth = width;
        mSurfaceHeight = height;
        if (mCodec != null) {
            mCodec.stop();
            // release() frees the codec's native resources; stop() alone
            // leaks them on every reconfiguration (the original omitted this).
            mCodec.release();
            mCodec = null;
            mCodecInputBuffers = null;
            mCodecBufferInfo = null;
        }
        if (mSurface != null) {
            MediaFormat format = MediaFormat.createVideoFormat("video/avc", mSurfaceWidth, mSurfaceHeight);
            try {
                mCodec = MediaCodec.createDecoderByType("video/avc");
            } catch (IOException e) {
                throw new RuntimeException("failed to create video/avc decoder", e);
            }
            mCodec.configure(format, mSurface, null, 0);
            mCodec.start();
            mCodecBufferInfo = new BufferInfo();
        }
        // Status is sent from the transport thread via its handler.
        mTransportHandler.post(new Runnable() {
            @Override
            public void run() {
                sendSinkStatus();
            }
        });
    }
}
Aggregations