Use of android.media.MediaExtractor in project android_frameworks_base by crdroidandroid.
The class Camera2RecordingTest, method validateRecording:
private void validateRecording(Size sz, int expectedDurationMs) throws Exception {
    File outFile = new File(mOutMediaFileName);
    assertTrue("No video is recorded", outFile.exists());
    MediaExtractor extractor = new MediaExtractor();
    try {
        extractor.setDataSource(mOutMediaFileName);
        long durationUs = 0;
        int width = -1, height = -1;
        int numTracks = extractor.getTrackCount();
        final String VIDEO_MIME_TYPE = "video";
        for (int i = 0; i < numTracks; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.contains(VIDEO_MIME_TYPE)) {
                Log.i(TAG, "video format is: " + format.toString());
                durationUs = format.getLong(MediaFormat.KEY_DURATION);
                width = format.getInteger(MediaFormat.KEY_WIDTH);
                height = format.getInteger(MediaFormat.KEY_HEIGHT);
                break;
            }
        }
        Size videoSz = new Size(width, height);
        assertTrue("Video size doesn't match, expected " + sz.toString() + " got " + videoSz.toString(),
                videoSz.equals(sz));
        int duration = (int) (durationUs / 1000);
        if (VERBOSE) {
            Log.v(TAG, String.format("Video duration: recorded %dms, expected %dms",
                    duration, expectedDurationMs));
        }
        // TODO: Don't skip this for video snapshot
        if (!mStaticInfo.isHardwareLevelLegacy()) {
            assertTrue(String.format("Camera %s: Video duration doesn't match: recorded %dms, expected %dms.",
                    mCamera.getId(), duration, expectedDurationMs),
                    Math.abs(duration - expectedDurationMs) < DURATION_MARGIN * expectedDurationMs);
        }
    } finally {
        extractor.release();
        if (!DEBUG_DUMP) {
            outFile.delete();
        }
    }
}
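The test above walks the track list to read duration and dimensions. For metadata-only checks like this, MediaMetadataRetriever is a lighter alternative; a minimal sketch (the helper name and the path parameter are ours, not the test's, and key availability depends on the container):

// Sketch: read duration and dimensions without walking tracks.
// extractMetadata() can return null for unsupported keys; parsing would then throw.
private static int[] readVideoMetadata(String path) {
    MediaMetadataRetriever retriever = new MediaMetadataRetriever();
    try {
        retriever.setDataSource(path);
        int durationMs = Integer.parseInt(
                retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION));
        int width = Integer.parseInt(
                retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
        int height = Integer.parseInt(
                retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
        return new int[] { durationMs, width, height };
    } finally {
        retriever.release();
    }
}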
Use of android.media.MediaExtractor in project android_frameworks_base by crdroidandroid.
The class MediaDecoder, method onStart:
private void onStart() throws Exception {
    if (mOpenGLEnabled) {
        getRenderTarget().focus();
    }
    mMediaExtractor = new MediaExtractor();
    mMediaExtractor.setDataSource(mContext, mUri, null);
    mVideoTrackIndex = -1;
    mAudioTrackIndex = -1;
    for (int i = 0; i < mMediaExtractor.getTrackCount(); i++) {
        MediaFormat format = mMediaExtractor.getTrackFormat(i);
        if (DEBUG) {
            Log.i(LOG_TAG, "Uri " + mUri + ", track " + i + ": " + format);
        }
        if (DecoderUtil.isVideoFormat(format) && mVideoTrackIndex == -1) {
            mVideoTrackIndex = i;
        } else if (DecoderUtil.isAudioFormat(format) && mAudioTrackIndex == -1) {
            mAudioTrackIndex = i;
        }
    }
    if (mVideoTrackIndex == -1 && mAudioTrackIndex == -1) {
        throw new IllegalArgumentException("Couldn't find a video or audio track in the provided file");
    }
    if (mVideoTrackIndex != -1) {
        MediaFormat videoFormat = mMediaExtractor.getTrackFormat(mVideoTrackIndex);
        mVideoTrackDecoder = mOpenGLEnabled
                ? new GpuVideoTrackDecoder(mVideoTrackIndex, videoFormat, this)
                : new CpuVideoTrackDecoder(mVideoTrackIndex, videoFormat, this);
        mVideoTrackDecoder.init();
        mMediaExtractor.selectTrack(mVideoTrackIndex);
        if (Build.VERSION.SDK_INT >= 17) {
            retrieveDefaultRotation();
        }
    }
    if (mAudioTrackIndex != -1) {
        MediaFormat audioFormat = mMediaExtractor.getTrackFormat(mAudioTrackIndex);
        mAudioTrackDecoder = new AudioTrackDecoder(mAudioTrackIndex, audioFormat, this);
        mAudioTrackDecoder.init();
        mMediaExtractor.selectTrack(mAudioTrackIndex);
    }
    if (mStartMicros > 0) {
        mMediaExtractor.seekTo(mStartMicros, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
    }
    mStarted = true;
    mListener.onDecodingStarted();
}
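After selectTrack() and the optional seekTo(), decoding proceeds by draining the extractor sample by sample. A minimal sketch of the loop that typically follows (the feedDecoder helper is hypothetical; buffer sizing is elided):

// Sketch of the extraction loop after track selection.
// `extractor` has its tracks selected; `buffer` is a ByteBuffer large enough for one sample.
while (true) {
    int sampleSize = extractor.readSampleData(buffer, /* offset= */ 0);
    if (sampleSize < 0) {
        break; // end of stream
    }
    int trackIndex = extractor.getSampleTrackIndex(); // route to the matching decoder
    long presentationTimeUs = extractor.getSampleTime();
    feedDecoder(trackIndex, buffer, sampleSize, presentationTimeUs); // hypothetical helper
    extractor.advance();
}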
Use of android.media.MediaExtractor in project Camera-Roll-Android-App by kollerlukas.
The class Video, method retrieveFrameRate:
public int retrieveFrameRate() {
    MediaExtractor extractor = new MediaExtractor();
    int frameRate = -1;
    try {
        // Adjust the data source as required (file path, URI, etc.).
        extractor.setDataSource(getPath());
        int numTracks = extractor.getTrackCount();
        for (int i = 0; i < numTracks; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            if (format.containsKey(MediaFormat.KEY_FRAME_RATE)) {
                frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // Always release the extractor.
        extractor.release();
    }
    return frameRate;
}
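KEY_FRAME_RATE is only present when the container records it, so this method can return -1 for valid videos. One fallback (a sketch, not from the project) is to count samples on the video track and divide by the track duration; note this walks every sample, which can be slow for long files:

// Fallback sketch: estimate frame rate from sample count over track duration.
// Assumes a fresh `extractor` with its data source set and no other tracks selected.
private static int estimateFrameRate(MediaExtractor extractor, int videoTrackIndex) {
    MediaFormat format = extractor.getTrackFormat(videoTrackIndex);
    long durationUs = format.containsKey(MediaFormat.KEY_DURATION)
            ? format.getLong(MediaFormat.KEY_DURATION) : 0;
    if (durationUs <= 0) {
        return -1;
    }
    extractor.selectTrack(videoTrackIndex);
    int frames = 0;
    // getSampleTrackIndex() returns -1 once no more samples are available.
    while (extractor.getSampleTrackIndex() != -1) {
        frames++;
        extractor.advance();
    }
    return Math.round(frames / (durationUs / 1_000_000f));
}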
Use of android.media.MediaExtractor in project ExoPlayer by google.
The class FrameEditorDataProcessingTest, method setUpAndPrepareFirstFrame:
private void setUpAndPrepareFirstFrame(Matrix transformationMatrix) throws Exception {
    // Set up the extractor to read the first video frame and get its format.
    MediaExtractor mediaExtractor = new MediaExtractor();
    @Nullable MediaCodec mediaCodec = null;
    try (AssetFileDescriptor afd = getApplicationContext().getAssets().openFd(INPUT_MP4_ASSET_STRING)) {
        mediaExtractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
        for (int i = 0; i < mediaExtractor.getTrackCount(); i++) {
            if (MimeTypes.isVideo(mediaExtractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME))) {
                mediaFormat = mediaExtractor.getTrackFormat(i);
                mediaExtractor.selectTrack(i);
                break;
            }
        }
        int width = checkNotNull(mediaFormat).getInteger(MediaFormat.KEY_WIDTH);
        int height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
        frameEditorOutputImageReader =
                ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1);
        frameEditor = FrameEditor.create(
                getApplicationContext(), width, height, PIXEL_WIDTH_HEIGHT_RATIO, transformationMatrix,
                frameEditorOutputImageReader.getSurface(), /* enableExperimentalHdrEditing= */ false,
                Transformer.DebugViewProvider.NONE);
        frameEditor.registerInputFrame();
        // Queue the first video frame from the extractor.
        String mimeType = checkNotNull(mediaFormat.getString(MediaFormat.KEY_MIME));
        mediaCodec = MediaCodec.createDecoderByType(mimeType);
        mediaCodec.configure(mediaFormat, frameEditor.getInputSurface(), /* crypto= */ null, /* flags= */ 0);
        mediaCodec.start();
        int inputBufferIndex = mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
        assertThat(inputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
        ByteBuffer inputBuffer = checkNotNull(mediaCodec.getInputBuffers()[inputBufferIndex]);
        int sampleSize = mediaExtractor.readSampleData(inputBuffer, /* offset= */ 0);
        mediaCodec.queueInputBuffer(inputBufferIndex, /* offset= */ 0, sampleSize,
                mediaExtractor.getSampleTime(), mediaExtractor.getSampleFlags());
        // Queue an end-of-stream buffer to force the codec to produce output.
        inputBufferIndex = mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
        assertThat(inputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
        mediaCodec.queueInputBuffer(inputBufferIndex, /* offset= */ 0, /* size= */ 0,
                /* presentationTimeUs= */ 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        // Dequeue and render the output video frame.
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex;
        do {
            outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
            assertThat(outputBufferIndex).isNotEqualTo(MediaCodec.INFO_TRY_AGAIN_LATER);
        } while (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
                || outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED);
        mediaCodec.releaseOutputBuffer(outputBufferIndex, /* render= */ true);
        // Sleep to give time for the surface texture to be populated.
        Thread.sleep(SURFACE_WAIT_MS);
        assertThat(frameEditor.canProcessData()).isTrue();
    } finally {
        mediaExtractor.release();
        if (mediaCodec != null) {
            mediaCodec.release();
        }
    }
}
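Note that getInputBuffers() has been deprecated since API 21; on modern API levels the per-index accessor avoids holding the whole array. A sketch of the equivalent input-buffer lines:

// Equivalent input-buffer access on API 21+.
int inputBufferIndex = mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
ByteBuffer inputBuffer = checkNotNull(mediaCodec.getInputBuffer(inputBufferIndex));
int sampleSize = mediaExtractor.readSampleData(inputBuffer, /* offset= */ 0);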
Use of android.media.MediaExtractor in project Signal-Android by WhisperSystems.
The class AudioWaveForm, method generateWaveForm:
/**
 * Based on decode sample from:
 * <p>
 * https://android.googlesource.com/platform/cts/+/jb-mr2-release/tests/tests/media/src/android/media/cts/DecoderTest.java
 */
@WorkerThread
@RequiresApi(api = 23)
@NonNull
private AudioFileInfo generateWaveForm(@NonNull Uri uri) throws IOException {
    try (MediaInput dataSource = DecryptableUriMediaInput.createForUri(context, uri)) {
        long[] wave = new long[BAR_COUNT];
        int[] waveSamples = new int[BAR_COUNT];
        MediaExtractor extractor = dataSource.createExtractor();
        if (extractor.getTrackCount() == 0) {
            throw new IOException("No audio track");
        }
        MediaFormat format = extractor.getTrackFormat(0);
        if (!format.containsKey(MediaFormat.KEY_DURATION)) {
            throw new IOException("Unknown duration");
        }
        long totalDurationUs = format.getLong(MediaFormat.KEY_DURATION);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (!mime.startsWith("audio/")) {
            throw new IOException("Mime not audio");
        }
        MediaCodec codec = MediaCodec.createDecoderByType(mime);
        if (totalDurationUs == 0) {
            throw new IOException("Zero duration");
        }
        codec.configure(format, null, null, 0);
        codec.start();
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        extractor.selectTrack(0);
        long kTimeOutUs = 5000;
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        int noOutputCounter = 0;
        while (!sawOutputEOS && noOutputCounter < 50) {
            noOutputCounter++;
            if (!sawInputEOS) {
                int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                    int sampleSize = extractor.readSampleData(dstBuf, 0);
                    long presentationTimeUs = 0;
                    if (sampleSize < 0) {
                        sawInputEOS = true;
                        sampleSize = 0;
                    } else {
                        presentationTimeUs = extractor.getSampleTime();
                    }
                    codec.queueInputBuffer(inputBufIndex, 0, sampleSize, presentationTimeUs,
                            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                    if (!sawInputEOS) {
                        // Skip ahead until the next sample falls into a different waveform bar.
                        int barSampleIndex = (int) (SAMPLES_PER_BAR * (wave.length * extractor.getSampleTime()) / totalDurationUs);
                        sawInputEOS = !extractor.advance();
                        int nextBarSampleIndex = (int) (SAMPLES_PER_BAR * (wave.length * extractor.getSampleTime()) / totalDurationUs);
                        while (!sawInputEOS && nextBarSampleIndex == barSampleIndex) {
                            sawInputEOS = !extractor.advance();
                            if (!sawInputEOS) {
                                nextBarSampleIndex = (int) (SAMPLES_PER_BAR * (wave.length * extractor.getSampleTime()) / totalDurationUs);
                            }
                        }
                    }
                }
            }
            int outputBufferIndex;
            do {
                outputBufferIndex = codec.dequeueOutputBuffer(info, kTimeOutUs);
                if (outputBufferIndex >= 0) {
                    if (info.size > 0) {
                        noOutputCounter = 0;
                    }
                    ByteBuffer buf = codecOutputBuffers[outputBufferIndex];
                    int barIndex = (int) ((wave.length * info.presentationTimeUs) / totalDurationUs);
                    long total = 0;
                    // Sample every fourth 16-bit value to keep the scan cheap.
                    for (int i = 0; i < info.size; i += 2 * 4) {
                        short aShort = buf.getShort(i);
                        total += Math.abs(aShort);
                    }
                    if (barIndex >= 0 && barIndex < wave.length) {
                        wave[barIndex] += total;
                        waveSamples[barIndex] += info.size / 2;
                    }
                    codec.releaseOutputBuffer(outputBufferIndex, false);
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        sawOutputEOS = true;
                    }
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    codecOutputBuffers = codec.getOutputBuffers();
                } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    Log.d(TAG, "output format has changed to " + codec.getOutputFormat());
                }
            } while (outputBufferIndex >= 0);
        }
        codec.stop();
        codec.release();
        extractor.release();
        float[] floats = new float[BAR_COUNT];
        byte[] bytes = new byte[BAR_COUNT];
        float max = 0;
        for (int i = 0; i < BAR_COUNT; i++) {
            if (waveSamples[i] == 0)
                continue;
            floats[i] = wave[i] / (float) waveSamples[i];
            if (floats[i] > max) {
                max = floats[i];
            }
        }
        for (int i = 0; i < BAR_COUNT; i++) {
            float normalized = floats[i] / max;
            bytes[i] = (byte) (255 * normalized);
        }
        return new AudioFileInfo(totalDurationUs, bytes);
    }
}
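One caveat with the snippet above: codec.stop()/codec.release() and extractor.release() only run if decoding completes normally, so an exception mid-loop would leak the codec. A defensive variant (a sketch, not Signal's code) would wrap the loop:

// Sketch: guarantee release even when decoding throws.
MediaCodec codec = MediaCodec.createDecoderByType(mime);
try {
    codec.configure(format, null, null, 0);
    codec.start();
    // ... decode loop as above ...
    codec.stop();
} finally {
    codec.release();
    extractor.release();
}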