Use of android.media.MediaCodec in project grafika by google.
The class MoviePlayer, method play().
/**
 * Decodes the video stream, sending frames to the surface.
 * <p>
 * Does not return until video playback is complete, or we get a "stop" signal from
 * frameCallback.
 */
public void play() throws IOException {
    MediaExtractor extractor = null;
    MediaCodec decoder = null;
    // The MediaExtractor error messages aren't very useful.  Check to see if the input
    // file exists so we can throw a better one if it's not there.
    if (!mSourceFile.canRead()) {
        throw new FileNotFoundException("Unable to read " + mSourceFile);
    }
    try {
        extractor = new MediaExtractor();
        extractor.setDataSource(mSourceFile.toString());
        int trackIndex = selectTrack(extractor);
        if (trackIndex < 0) {
            throw new RuntimeException("No video track found in " + mSourceFile);
        }
        extractor.selectTrack(trackIndex);
        MediaFormat format = extractor.getTrackFormat(trackIndex);
        // Create a MediaCodec decoder, and configure it with the MediaFormat from the
        // extractor.  It's very important to use the format from the extractor because
        // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
        String mime = format.getString(MediaFormat.KEY_MIME);
        decoder = MediaCodec.createDecoderByType(mime);
        decoder.configure(format, mOutputSurface, null, 0);
        decoder.start();
        doExtract(extractor, trackIndex, decoder, mFrameCallback);
    } finally {
        // release everything we grabbed
        if (decoder != null) {
            decoder.stop();
            decoder.release();
            decoder = null;
        }
        if (extractor != null) {
            extractor.release();
            extractor = null;
        }
    }
}
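For context, play() blocks until playback completes, so callers normally run it on a worker thread. Below is a minimal, hypothetical caller sketch, assuming grafika's MoviePlayer constructor takes (File sourceFile, Surface outputSurface, FrameCallback frameCallback); startPlayback and TAG are illustrative names and not part of the snippet above.

// Hypothetical caller; play() blocks, so run it off the main thread.
void startPlayback(final File sourceFile, final Surface videoSurface,
        final MoviePlayer.FrameCallback frameCallback) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                // Assumed constructor shape: (source file, output surface, frame callback).
                MoviePlayer player = new MoviePlayer(sourceFile, videoSurface, frameCallback);
                player.play();
            } catch (IOException ioe) {
                Log.e(TAG, "Unable to play " + sourceFile, ioe);
            }
        }
    }, "movie-player").start();
}

In practice the caller also needs a way to stop playback early; the javadoc above refers to a "stop" signal delivered through frameCallback.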
Use of android.media.MediaCodec in project chromeview by pwnall.
The class WebAudioMediaCodecBridge, method decodeAudioFile().
@CalledByNative
private static boolean decodeAudioFile(Context ctx, int nativeMediaCodecBridge, int inputFD,
        long dataSize) {
    if (dataSize < 0 || dataSize > 0x7fffffff) {
        return false;
    }
    MediaExtractor extractor = new MediaExtractor();
    ParcelFileDescriptor encodedFD = ParcelFileDescriptor.adoptFd(inputFD);
    try {
        extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize);
    } catch (Exception e) {
        e.printStackTrace();
        encodedFD.detachFd();
        return false;
    }
    if (extractor.getTrackCount() <= 0) {
        encodedFD.detachFd();
        return false;
    }
    MediaFormat format = extractor.getTrackFormat(0);
    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    String mime = format.getString(MediaFormat.KEY_MIME);
    long durationMicroseconds = 0;
    if (format.containsKey(MediaFormat.KEY_DURATION)) {
        try {
            durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION);
        } catch (Exception e) {
            Log.d(LOG_TAG, "Cannot get duration");
        }
    }
    if (DEBUG) {
        Log.d(LOG_TAG, "Tracks: " + extractor.getTrackCount()
                + " Rate: " + sampleRate
                + " Channels: " + channelCount
                + " Mime: " + mime
                + " Duration: " + durationMicroseconds + " microsec");
    }
    nativeInitializeDestination(nativeMediaCodecBridge, channelCount, sampleRate,
            durationMicroseconds);
    // Create decoder.
    MediaCodec codec = MediaCodec.createDecoderByType(mime);
    codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
    codec.start();
    ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
    ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
    // A track must be selected and will be used to read samples.
    extractor.selectTrack(0);
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;
    // Keep processing until the output is done.
    while (!sawOutputEOS) {
        if (!sawInputEOS) {
            // Input side.
            int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS);
            if (inputBufIndex >= 0) {
                ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                int sampleSize = extractor.readSampleData(dstBuf, 0);
                long presentationTimeMicroSec = 0;
                if (sampleSize < 0) {
                    sawInputEOS = true;
                    sampleSize = 0;
                } else {
                    presentationTimeMicroSec = extractor.getSampleTime();
                }
                codec.queueInputBuffer(inputBufIndex, 0 /* offset */, sampleSize,
                        presentationTimeMicroSec,
                        sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                if (!sawInputEOS) {
                    extractor.advance();
                }
            }
        }
        // Output side.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        final int outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS);
        if (outputBufIndex >= 0) {
            ByteBuffer buf = codecOutputBuffers[outputBufIndex];
            if (info.size > 0) {
                nativeOnChunkDecoded(nativeMediaCodecBridge, buf, info.size);
            }
            buf.clear();
            codec.releaseOutputBuffer(outputBufIndex, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                sawOutputEOS = true;
            }
        } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = codec.getOutputBuffers();
        }
    }
    encodedFD.detachFd();
    codec.stop();
    codec.release();
    codec = null;
    return true;
}
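The getInputBuffers()/getOutputBuffers() arrays used above were deprecated in API 21 in favor of per-index accessors. Below is a sketch of just the output drain written against that newer API; TIMEOUT_MICROSECONDS and nativeOnChunkDecoded are the same members used in the method above, and the standalone helper shape is purely illustrative.

// Sketch only: drains decoded chunks with the API 21+ per-index buffer accessors.
private static void drainOutput(MediaCodec codec, int nativeMediaCodecBridge) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean sawOutputEOS = false;
    while (!sawOutputEOS) {
        int outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS);
        if (outputBufIndex >= 0) {
            // getOutputBuffer(int) replaces the cached getOutputBuffers() array.
            ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
            if (info.size > 0) {
                nativeOnChunkDecoded(nativeMediaCodecBridge, buf, info.size);
            }
            codec.releaseOutputBuffer(outputBufIndex, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                sawOutputEOS = true;
            }
        }
        // INFO_OUTPUT_BUFFERS_CHANGED can be ignored with per-index access, so the
        // getOutputBuffers() refresh branch from the original loop is no longer needed.
    }
}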
Use of android.media.MediaCodec in project platform_frameworks_base by android.
The class AudioTrackDecoder, method initMediaCodec().
@Override
protected MediaCodec initMediaCodec(MediaFormat format) {
    MediaCodec mediaCodec;
    try {
        mediaCodec = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
    } catch (IOException e) {
        throw new RuntimeException(
                "failed to create decoder for " + format.getString(MediaFormat.KEY_MIME), e);
    }
    mediaCodec.configure(format, null, null, 0);
    return mediaCodec;
}
Use of android.media.MediaCodec in project platform_frameworks_base by android.
The class CpuVideoTrackDecoder, method initMediaCodec().
@Override
protected MediaCodec initMediaCodec(MediaFormat format) {
    // Find a codec for our video that can output to one of our supported color-spaces.
    MediaCodec mediaCodec = findDecoderCodec(format, new int[] {
            CodecCapabilities.COLOR_Format32bitARGB8888,
            CodecCapabilities.COLOR_FormatYUV420Planar });
    if (mediaCodec == null) {
        throw new RuntimeException(
                "Could not find a suitable decoder for format: " + format + "!");
    }
    mediaCodec.configure(format, null, null, 0);
    return mediaCodec;
}
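findDecoderCodec() is a private helper elsewhere in CpuVideoTrackDecoder; it has to locate a decoder whose output color formats intersect the requested set, because configuring with a null surface means the app reads the raw buffers itself. The following is a rough reconstruction of that kind of lookup using the pre-Lollipop MediaCodecList API; it is a sketch under that assumption, not the AOSP implementation.

// Hypothetical reconstruction of a "find decoder by supported color format" helper.
private static MediaCodec findDecoderForColorFormats(MediaFormat format,
        int[] requiredColorFormats) {
    String mime = format.getString(MediaFormat.KEY_MIME);
    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
        MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
        if (info.isEncoder()) {
            continue;
        }
        for (String type : info.getSupportedTypes()) {
            if (!type.equalsIgnoreCase(mime)) {
                continue;
            }
            CodecCapabilities caps = info.getCapabilitiesForType(type);
            for (int supported : caps.colorFormats) {
                for (int required : requiredColorFormats) {
                    if (supported == required) {
                        try {
                            return MediaCodec.createByCodecName(info.getName());
                        } catch (IOException e) {
                            // Could not instantiate this codec; keep looking.
                        }
                    }
                }
            }
        }
    }
    return null;
}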
Use of android.media.MediaCodec in project platform_frameworks_base by android.
The class GpuVideoTrackDecoder, method initMediaCodec().
@Override
protected MediaCodec initMediaCodec(MediaFormat format) {
    MediaCodec mediaCodec;
    try {
        mediaCodec = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
    } catch (IOException e) {
        throw new RuntimeException(
                "failed to create decoder for " + format.getString(MediaFormat.KEY_MIME), e);
    }
    Surface surface = new Surface(mSurfaceTexture);
    mediaCodec.configure(format, surface, null, 0);
    // configure() leaves the codec holding its own reference to the underlying surface,
    // so the local Surface wrapper can be released right away.
    surface.release();
    return mediaCodec;
}
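Here the decoded frames go to the SurfaceTexture rather than into ByteBuffers, so the consumer side is driven by the frame-available callback and updateTexImage() on the GL thread. A hypothetical consumer sketch follows; listenForFrames(), requestRender(), and the transform-matrix storage are illustrative stand-ins for whatever the surrounding class actually provides.

// Hypothetical consumer side for a SurfaceTexture-backed decoder output.
void listenForFrames(final SurfaceTexture surfaceTexture) {
    surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            // updateTexImage() must run on the thread that owns the GL context,
            // so only signal the GL thread here.
            requestRender();
        }
    });
}

// On the GL thread, once per available frame:
void consumeFrame(SurfaceTexture surfaceTexture, float[] transformMatrix) {
    surfaceTexture.updateTexImage();
    surfaceTexture.getTransformMatrix(transformMatrix);
    // ... sample the external texture with the transform applied ...
}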