Example use of android.media.MediaExtractor in the project android_frameworks_base by ResurrectionRemix: the onStart method of the MediaDecoder class.
private void onStart() throws Exception {
if (mOpenGLEnabled) {
getRenderTarget().focus();
}
mMediaExtractor = new MediaExtractor();
mMediaExtractor.setDataSource(mContext, mUri, null);
mVideoTrackIndex = -1;
mAudioTrackIndex = -1;
for (int i = 0; i < mMediaExtractor.getTrackCount(); i++) {
MediaFormat format = mMediaExtractor.getTrackFormat(i);
if (DEBUG) {
Log.i(LOG_TAG, "Uri " + mUri + ", track " + i + ": " + format);
}
if (DecoderUtil.isVideoFormat(format) && mVideoTrackIndex == -1) {
mVideoTrackIndex = i;
} else if (DecoderUtil.isAudioFormat(format) && mAudioTrackIndex == -1) {
mAudioTrackIndex = i;
}
}
if (mVideoTrackIndex == -1 && mAudioTrackIndex == -1) {
throw new IllegalArgumentException("Couldn't find a video or audio track in the provided file");
}
if (mVideoTrackIndex != -1) {
MediaFormat videoFormat = mMediaExtractor.getTrackFormat(mVideoTrackIndex);
mVideoTrackDecoder = mOpenGLEnabled ? new GpuVideoTrackDecoder(mVideoTrackIndex, videoFormat, this) : new CpuVideoTrackDecoder(mVideoTrackIndex, videoFormat, this);
mVideoTrackDecoder.init();
mMediaExtractor.selectTrack(mVideoTrackIndex);
if (Build.VERSION.SDK_INT >= 17) {
retrieveDefaultRotation();
}
}
if (mAudioTrackIndex != -1) {
MediaFormat audioFormat = mMediaExtractor.getTrackFormat(mAudioTrackIndex);
mAudioTrackDecoder = new AudioTrackDecoder(mAudioTrackIndex, audioFormat, this);
mAudioTrackDecoder.init();
mMediaExtractor.selectTrack(mAudioTrackIndex);
}
if (mStartMicros > 0) {
mMediaExtractor.seekTo(mStartMicros, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
}
mStarted = true;
mListener.onDecodingStarted();
}
Example use of android.media.MediaExtractor in the project grafika by google: the play method of the MoviePlayer class.
/**
* Decodes the video stream, sending frames to the surface.
* <p>
* Does not return until video playback is complete, or we get a "stop" signal from
* frameCallback.
*/
public void play() throws IOException {
MediaExtractor extractor = null;
MediaCodec decoder = null;
// file exists so we can throw a better one if it's not there.
if (!mSourceFile.canRead()) {
throw new FileNotFoundException("Unable to read " + mSourceFile);
}
try {
extractor = new MediaExtractor();
extractor.setDataSource(mSourceFile.toString());
int trackIndex = selectTrack(extractor);
if (trackIndex < 0) {
throw new RuntimeException("No video track found in " + mSourceFile);
}
extractor.selectTrack(trackIndex);
MediaFormat format = extractor.getTrackFormat(trackIndex);
// Create a MediaCodec decoder, and configure it with the MediaFormat from the
// extractor. It's very important to use the format from the extractor because
// it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
String mime = format.getString(MediaFormat.KEY_MIME);
decoder = MediaCodec.createDecoderByType(mime);
decoder.configure(format, mOutputSurface, null, 0);
decoder.start();
doExtract(extractor, trackIndex, decoder, mFrameCallback);
} finally {
// release everything we grabbed
if (decoder != null) {
decoder.stop();
decoder.release();
decoder = null;
}
if (extractor != null) {
extractor.release();
extractor = null;
}
}
}
Example use of android.media.MediaExtractor in the project platform_frameworks_base by android: the onStart method of the MediaDecoder class.
// Opens the media source, finds the first video and/or audio track, creates and
// initializes the matching track decoders, seeks to the requested start position,
// and notifies the listener that decoding has begun.
// Throws IllegalArgumentException if the source has neither a video nor an audio track.
private void onStart() throws Exception {
// GPU path: make the render target current on this thread before decoder setup.
if (mOpenGLEnabled) {
getRenderTarget().focus();
}
mMediaExtractor = new MediaExtractor();
mMediaExtractor.setDataSource(mContext, mUri, null);
mVideoTrackIndex = -1;
mAudioTrackIndex = -1;
// Scan all tracks; remember only the FIRST video track and the FIRST audio track.
for (int i = 0; i < mMediaExtractor.getTrackCount(); i++) {
MediaFormat format = mMediaExtractor.getTrackFormat(i);
if (DEBUG) {
Log.i(LOG_TAG, "Uri " + mUri + ", track " + i + ": " + format);
}
if (DecoderUtil.isVideoFormat(format) && mVideoTrackIndex == -1) {
mVideoTrackIndex = i;
} else if (DecoderUtil.isAudioFormat(format) && mAudioTrackIndex == -1) {
mAudioTrackIndex = i;
}
}
if (mVideoTrackIndex == -1 && mAudioTrackIndex == -1) {
throw new IllegalArgumentException("Couldn't find a video or audio track in the provided file");
}
if (mVideoTrackIndex != -1) {
MediaFormat videoFormat = mMediaExtractor.getTrackFormat(mVideoTrackIndex);
// Choose a GPU- or CPU-backed video decoder depending on the OpenGL setting.
mVideoTrackDecoder = mOpenGLEnabled ? new GpuVideoTrackDecoder(mVideoTrackIndex, videoFormat, this) : new CpuVideoTrackDecoder(mVideoTrackIndex, videoFormat, this);
mVideoTrackDecoder.init();
mMediaExtractor.selectTrack(mVideoTrackIndex);
// Rotation metadata is only available on API 17+.
if (Build.VERSION.SDK_INT >= 17) {
retrieveDefaultRotation();
}
}
if (mAudioTrackIndex != -1) {
MediaFormat audioFormat = mMediaExtractor.getTrackFormat(mAudioTrackIndex);
mAudioTrackDecoder = new AudioTrackDecoder(mAudioTrackIndex, audioFormat, this);
mAudioTrackDecoder.init();
mMediaExtractor.selectTrack(mAudioTrackIndex);
}
if (mStartMicros > 0) {
// Seek to the nearest sync frame at or before the requested start time.
mMediaExtractor.seekTo(mStartMicros, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
}
mStarted = true;
mListener.onDecodingStarted();
}
Example use of android.media.MediaExtractor in the project chromeview by pwnall: the decodeAudioFile method of the WebAudioMediaCodecBridge class.
@CalledByNative
/**
 * Decodes an encoded audio file (read from the given file descriptor) to PCM and
 * streams the decoded chunks to the native WebAudio destination.
 *
 * @param ctx                   application context (unused here, kept for the native signature)
 * @param nativeMediaCodecBridge pointer to the native bridge receiving decoded data
 * @param inputFD               raw file descriptor of the encoded audio; adopted, then
 *                              detached back so the native side retains ownership
 * @param dataSize              number of encoded bytes to read from the descriptor
 * @return true on successful decode, false on any setup failure
 */
private static boolean decodeAudioFile(Context ctx, int nativeMediaCodecBridge, int inputFD, long dataSize) {
    // Reject sizes that don't fit the extractor's expectations.
    if (dataSize < 0 || dataSize > 0x7fffffff)
        return false;
    MediaExtractor extractor = new MediaExtractor();
    ParcelFileDescriptor encodedFD;
    encodedFD = ParcelFileDescriptor.adoptFd(inputFD);
    try {
        extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize);
    } catch (Exception e) {
        e.printStackTrace();
        // Give the fd back to the caller before bailing out.
        encodedFD.detachFd();
        return false;
    }
    if (extractor.getTrackCount() <= 0) {
        encodedFD.detachFd();
        return false;
    }
    MediaFormat format = extractor.getTrackFormat(0);
    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    String mime = format.getString(MediaFormat.KEY_MIME);
    long durationMicroseconds = 0;
    if (format.containsKey(MediaFormat.KEY_DURATION)) {
        try {
            durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION);
        } catch (Exception e) {
            Log.d(LOG_TAG, "Cannot get duration");
        }
    }
    if (DEBUG) {
        Log.d(LOG_TAG, "Tracks: " + extractor.getTrackCount() + " Rate: " + sampleRate + " Channels: " + channelCount + " Mime: " + mime + " Duration: " + durationMicroseconds + " microsec");
    }
    nativeInitializeDestination(nativeMediaCodecBridge, channelCount, sampleRate, durationMicroseconds);
    // Create decoder
    MediaCodec codec = MediaCodec.createDecoderByType(mime);
    try {
        codec.configure(format, null, /* surface */
        null, /* crypto */
        0);
        codec.start();
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        // A track must be selected and will be used to read samples.
        extractor.selectTrack(0);
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        // Reuse one BufferInfo across iterations instead of allocating per loop.
        MediaCodec.BufferInfo info = new BufferInfo();
        // Keep processing until the output is done.
        while (!sawOutputEOS) {
            if (!sawInputEOS) {
                // Input side: feed one encoded sample (or EOS) to the codec.
                int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                    int sampleSize = extractor.readSampleData(dstBuf, 0);
                    long presentationTimeMicroSec = 0;
                    if (sampleSize < 0) {
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeMicroSec = extractor.getSampleTime();
                    }
                    codec.queueInputBuffer(inputBufIndex, 0, /* offset */
                    sampleSize, presentationTimeMicroSec, sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                    if (!sawInputEOS) {
                        extractor.advance();
                    }
                }
            }
            // Output side: drain decoded PCM and hand it to the native destination.
            final int outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS);
            if (outputBufIndex >= 0) {
                ByteBuffer buf = codecOutputBuffers[outputBufIndex];
                if (info.size > 0) {
                    nativeOnChunkDecoded(nativeMediaCodecBridge, buf, info.size);
                }
                buf.clear();
                codec.releaseOutputBuffer(outputBufIndex, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }
            } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // Output buffers were reallocated; refresh our references.
                codecOutputBuffers = codec.getOutputBuffers();
            }
        }
        encodedFD.detachFd();
    } finally {
        // Always tear down the codec, even if decoding threw, so we don't leak it.
        codec.stop();
        codec.release();
    }
    return true;
}
Example use of android.media.MediaExtractor in the project platform_frameworks_base by android: the validateRecording method of the Camera2RecordingTest class.
/**
 * Verifies that a recorded video file exists, has the expected frame size, and
 * (on non-legacy devices) a duration within the allowed margin.
 *
 * @param sz                 expected video frame size
 * @param expectedDurationMs expected recording duration in milliseconds
 * @throws Exception if the extractor fails to open or parse the recorded file
 */
private void validateRecording(Size sz, int expectedDurationMs) throws Exception {
    File outFile = new File(mOutMediaFileName);
    assertTrue("No video is recorded", outFile.exists());
    MediaExtractor videoExtractor = new MediaExtractor();
    try {
        videoExtractor.setDataSource(mOutMediaFileName);
        long durationUs = 0;
        int width = -1;
        int height = -1;
        final String VIDEO_MIME_TYPE = "video";
        // Inspect the first video track found; ignore audio/metadata tracks.
        final int trackCount = videoExtractor.getTrackCount();
        for (int track = 0; track < trackCount; track++) {
            MediaFormat trackFormat = videoExtractor.getTrackFormat(track);
            String trackMime = trackFormat.getString(MediaFormat.KEY_MIME);
            if (!trackMime.contains(VIDEO_MIME_TYPE)) {
                continue;
            }
            Log.i(TAG, "video format is: " + trackFormat.toString());
            durationUs = trackFormat.getLong(MediaFormat.KEY_DURATION);
            width = trackFormat.getInteger(MediaFormat.KEY_WIDTH);
            height = trackFormat.getInteger(MediaFormat.KEY_HEIGHT);
            break;
        }
        Size videoSz = new Size(width, height);
        assertTrue("Video size doesn't match, expected " + sz.toString() + " got " + videoSz.toString(), videoSz.equals(sz));
        int duration = (int) (durationUs / 1000);
        if (VERBOSE) {
            Log.v(TAG, String.format("Video duration: recorded %dms, expected %dms", duration, expectedDurationMs));
        }
        // TODO: Don't skip this for video snapshot
        if (!mStaticInfo.isHardwareLevelLegacy()) {
            assertTrue(String.format("Camera %s: Video duration doesn't match: recorded %dms, expected %dms.", mCamera.getId(), duration, expectedDurationMs), Math.abs(duration - expectedDurationMs) < DURATION_MARGIN * expectedDurationMs);
        }
    } finally {
        videoExtractor.release();
        if (!DEBUG_DUMP) {
            outFile.delete();
        }
    }
}
Aggregations