use of android.media.videoeditor.VideoEditor.MediaProcessingProgressListener in project android_frameworks_base by ParanoidAndroid.
the class VideoEditorPreviewTest method testPreviewDuringExport.
/**
 * To test Preview : Export during preview
 */
@LargeTest
public void testPreviewDuringExport() throws Exception {
    final String videoItemFileName = INPUT_FILE_PATH + "H264_BP_640x480_30fps_192kbps_1_5.mp4";
    final Semaphore blockTillPreviewCompletes = new Semaphore(1);
    previewStart = false;
    previewStop = false;
    previewError = false;
    final MediaVideoItem mediaVideoItem1 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m1",
        videoItemFileName, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mediaVideoItem1.setExtractBoundaries(0, 20000);
    mVideoEditor.addMediaItem(mediaVideoItem1);
    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });
    long waitingTime = minWaitingTime + mVideoEditor.getDuration();
    blockTillPreviewCompletes.acquire();
    final String fileName = mVideoEditor.getPath() + "/test.3gp";
    final int height = MediaProperties.HEIGHT_480;
    final int bitrate = MediaProperties.BITRATE_512K;
    try {
        mVideoEditor.export(fileName, height, bitrate, new ExportProgressListener() {
            public void onProgress(VideoEditor ve, String outFileName, int progress) {
            }
        });
    } catch (IOException e) {
        assertTrue("Unexpected Error in Export " + e.toString(), false);
    }
    final SurfaceHolder surfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
    try {
        mVideoEditor.startPreview(surfaceHolder, 5000, -1, false, 1, new PreviewProgressListener() {
            public void onProgress(VideoEditor videoEditor, long timeMs, OverlayData overlayData) {
            }

            public void onStart(VideoEditor videoEditor) {
                setPreviewStart();
            }

            public void onStop(VideoEditor videoEditor) {
                setPreviewStop();
                blockTillPreviewCompletes.release();
            }

            public void onError(VideoEditor videoEditor, int error) {
                setPreviewError();
                blockTillPreviewCompletes.release();
            }
        });
    } catch (Exception e) {
        blockTillPreviewCompletes.release();
    }
    blockTillPreviewCompletes.tryAcquire(waitingTime, TimeUnit.MILLISECONDS);
    mVideoEditor.stopPreview();
    assertTrue("Preview Failed to start", previewStart);
    assertTrue("Preview Failed to stop", previewStop);
    assertFalse("Preview Error occurred", previewError);
    blockTillPreviewCompletes.release();
}
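All of the snippets on this page hand generatePreview() an empty MediaProcessingProgressListener. For reference, here is a minimal standalone sketch of a listener that actually reports progress; it is not taken from the tests above, and the ACTION_ENCODE/ACTION_DECODE constants are assumed to be the ones defined on the listener interface.

import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
import android.util.Log;

/**
 * Minimal MediaProcessingProgressListener that logs generatePreview() progress.
 * Sketch only; not part of the AOSP tests quoted on this page.
 */
public class LoggingProcessingListener implements MediaProcessingProgressListener {
    private static final String TAG = "LoggingProcessing";

    public void onProgress(Object item, int action, int progress) {
        // 'item' is the storyboard element currently being processed
        // (a media item, transition or audio track); 'progress' runs from 0 to 100.
        final String phase =
                (action == MediaProcessingProgressListener.ACTION_ENCODE) ? "encode" : "decode";
        Log.i(TAG, phase + " " + progress + "% for " + item);
    }
}

With this in place, mVideoEditor.generatePreview(new LoggingProcessingListener()) reports which storyboard item is being transcoded instead of discarding the callbacks.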
use of android.media.videoeditor.VideoEditor.MediaProcessingProgressListener in project android_frameworks_base by ParanoidAndroid.
the class VideoEditorPreviewTest method testPreviewWithRenderPreviewFrame.
/**
 * To test Preview : Preview of current Effects applied (with Render Preview
 * Frame)
 */
@LargeTest
public void testPreviewWithRenderPreviewFrame() throws Exception {
    final String videoItemFileName = INPUT_FILE_PATH + "H264_BP_640x480_30fps_256kbps_1_17.mp4";
    final Semaphore blockTillPreviewCompletes = new Semaphore(1);
    boolean flagForException = false;
    OverlayData overlayData1 = new OverlayData();
    previewStart = false;
    previewStop = false;
    final String overlayFilename1 = INPUT_FILE_PATH + "IMG_640x480_Overlay1.png";
    final MediaVideoItem mediaVideoItem = mVideoEditorHelper.createMediaItem(mVideoEditor, "m1",
        videoItemFileName, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mVideoEditor.addMediaItem(mediaVideoItem);
    final EffectColor effectPink = mVideoEditorHelper.createEffectItem(mediaVideoItem,
        "effectNegativeOnMvi", 1000, 3000, EffectColor.TYPE_COLOR, EffectColor.PINK);
    mediaVideoItem.addEffect(effectPink);
    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });
    final SurfaceHolder surfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
    assertEquals("Render preview Frame at 5 Sec", 5000,
        mVideoEditor.renderPreviewFrame(surfaceHolder, 5000, overlayData1));
    assertEquals("Render preview Frame at 7 Sec", 7000,
        mVideoEditor.renderPreviewFrame(surfaceHolder, 7000, overlayData1));
    validatePreviewProgress(5000, -1, false, mVideoEditor.getDuration());
}
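The assertions above rely on renderPreviewFrame() returning the timestamp it actually rendered, with any overlay pixels handed back through the OverlayData argument. A small helper along those lines is sketched below; it assumes an already populated VideoEditor and a valid SurfaceHolder, and the needsRendering()/release() calls on OverlayData are recalled from the API documentation rather than taken from the test.

import android.media.videoeditor.VideoEditor;
import android.media.videoeditor.VideoEditor.OverlayData;
import android.view.SurfaceHolder;

/**
 * Renders a single storyboard frame on a surface. Sketch only; assumes the
 * editor already contains media items and that generatePreview() has run.
 */
public final class PreviewFrameHelper {

    private PreviewFrameHelper() {
    }

    /** Returns the time stamp of the frame that was actually rendered. */
    public static long renderFrameAt(VideoEditor editor, SurfaceHolder holder, long timeMs) {
        final OverlayData overlayData = new OverlayData();
        final long renderedMs = editor.renderPreviewFrame(holder, timeMs, overlayData);
        // If an overlay intersects the rendered frame, its pixels come back through
        // overlayData and must be drawn by the caller; release the buffer when done.
        if (overlayData.needsRendering()) {
            overlayData.release();
        }
        return renderedMs;
    }
}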
use of android.media.videoeditor.VideoEditor.MediaProcessingProgressListener in project android_frameworks_base by ParanoidAndroid.
the class VideoEditorStressTest method testStressAudioTrackVideo.
/**
 * To test the performance : With an audio track with Video
 *
 * @throws Exception
 */
@LargeTest
public void testStressAudioTrackVideo() throws Exception {
    final String videoItemFileName1 = INPUT_FILE_PATH + "H264_BP_1080x720_30fps_800kbps_1_17.mp4";
    final String audioFilename1 = INPUT_FILE_PATH + "AACLC_44.1kHz_256kbps_s_1_17.mp4";
    final String audioFilename2 = INPUT_FILE_PATH + "AMRNB_8KHz_12.2Kbps_m_1_17.3gp";
    final int renderingMode = MediaItem.RENDERING_MODE_BLACK_BORDER;
    final int audioVolume = 50;
    final String[] loggingInfo = new String[1];
    int i = 1;
    writeTestCaseHeader("testStressAudioTrackVideo");
    getBeginMemory();
    final MediaVideoItem mediaVideoItem = new MediaVideoItem(mVideoEditor, "mediaItem1",
        videoItemFileName1, renderingMode);
    mVideoEditor.addMediaItem(mediaVideoItem);
    final AudioTrack audioTrack1 = new AudioTrack(mVideoEditor, "Audio Track1", audioFilename1);
    audioTrack1.disableDucking();
    audioTrack1.setVolume(audioVolume);
    mVideoEditor.addAudioTrack(audioTrack1);
    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });
    mVideoEditor.removeAudioTrack("Audio Track1");
    final AudioTrack audioTrack2 = new AudioTrack(mVideoEditor, "Audio Track2", audioFilename2);
    audioTrack2.enableLoop();
    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });
    getAfterMemory_updateLog(loggingInfo, false, i);
    /** Remove items and check for memory leak if any */
    mVideoEditor.removeMediaItem("mediaItem1");
    getAfterMemory_updateLog(loggingInfo, true, i);
}
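For reference, the AudioTrack configuration exercised above (fixed volume, no ducking, looping) can be collected into a small helper. This is a sketch that only uses calls appearing in the test; the track id "backgroundTrack" and the caller-supplied path are illustrative.

import android.media.videoeditor.AudioTrack;
import android.media.videoeditor.VideoEditor;

import java.io.IOException;

/**
 * Adds a looping background audio track at half volume. Sketch only; not part
 * of the AOSP tests quoted on this page.
 */
public final class BackgroundAudioHelper {

    private BackgroundAudioHelper() {
    }

    public static AudioTrack addBackgroundTrack(VideoEditor editor, String audioFilePath)
            throws IOException {
        final AudioTrack track = new AudioTrack(editor, "backgroundTrack", audioFilePath);
        track.setVolume(50);     // 50% of the recorded level
        track.disableDucking();  // keep the level constant while clips play their own audio
        track.enableLoop();      // repeat the track for the whole storyboard
        editor.addAudioTrack(track);
        return track;
    }
}

Note that the storyboard supports only one audio track at a time, which is why the stress test removes "Audio Track1" before working with "Audio Track2".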
use of android.media.videoeditor.VideoEditor.MediaProcessingProgressListener in project android_frameworks_base by ParanoidAndroid.
the class VideoEditorAPITest method testAudioTrackWaveFormData.
/**
 * To Test the Audio Track API: Extract waveform data
 */
@LargeTest
public void testAudioTrackWaveFormData() throws Exception {
    /** An image item is added as a dummy because an audio track cannot be added
     * without a media item in the storyboard.
     */
    final String imageItemFileName = INPUT_FILE_PATH + "IMG_640x480.jpg";
    final int imageItemRenderingMode = MediaItem.RENDERING_MODE_BLACK_BORDER;
    final MediaImageItem mediaImageItem = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "mediaImageItem1", imageItemFileName, 5000, imageItemRenderingMode);
    mVideoEditor.addMediaItem(mediaImageItem);
    final String audioFileName = INPUT_FILE_PATH + "AACLC_48KHz_256Kbps_s_1_17.3gp";
    final AudioTrack audioTrack = mVideoEditorHelper.createAudio(mVideoEditor, "audioTrack",
        audioFileName);
    mVideoEditor.addAudioTrack(audioTrack);
    assertNull("WaveForm data", audioTrack.getWaveformData());
    final int[] progressUpdate = new int[105];
    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        int i = 0;

        public void onProgress(Object item, int action, int progress) {
            progressUpdate[i++] = progress;
        }
    });
    final int[] progressWaveform = new int[105];
    audioTrack.extractAudioWaveform(new ExtractAudioWaveformProgressListener() {
        int i = 0;

        public void onProgress(int progress) {
            Log.i("AudioWaveformData", "progress=" + progress);
            progressWaveform[i++] = progress;
        }
    });
    assertTrue("Progress of WaveForm data",
        mVideoEditorHelper.checkProgressCBValues(progressWaveform));
    assertNotNull("WaveForm data", audioTrack.getWaveformData());
    assertTrue("WaveForm Frame Duration",
        (audioTrack.getWaveformData().getFrameDuration() > 0 ? true : false));
    assertTrue("WaveForm Frame Count",
        (audioTrack.getWaveformData().getFramesCount() > 0 ? true : false));
    assertTrue("WaveForm Gain",
        (audioTrack.getWaveformData().getFrameGains().length > 0 ? true : false));
}
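The waveform calls used above can be wrapped in a small utility. This is a sketch that reuses the AudioTrack and WaveformData methods exercised by the test; the log tag is illustrative.

import android.media.videoeditor.AudioTrack;
import android.media.videoeditor.ExtractAudioWaveformProgressListener;
import android.media.videoeditor.WaveformData;
import android.util.Log;

import java.io.IOException;

/**
 * Extracts and logs the waveform of an audio track. Sketch only; assumes the
 * track has already been added to a storyboard.
 */
public final class WaveformHelper {
    private static final String TAG = "WaveformHelper";

    private WaveformHelper() {
    }

    public static WaveformData extractAndLog(AudioTrack audioTrack) throws IOException {
        if (audioTrack.getWaveformData() == null) {
            // Decodes the audio and builds the per-frame gain table; progress is
            // reported from 0 to 100 while the extraction runs.
            audioTrack.extractAudioWaveform(new ExtractAudioWaveformProgressListener() {
                public void onProgress(int progress) {
                    Log.i(TAG, "waveform extraction " + progress + "%");
                }
            });
        }
        final WaveformData data = audioTrack.getWaveformData();
        Log.i(TAG, data.getFramesCount() + " frames, " + data.getFrameDuration()
                + " ms per frame, " + data.getFrameGains().length + " gain values");
        return data;
    }
}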
use of android.media.videoeditor.VideoEditor.MediaProcessingProgressListener in project android_frameworks_base by ParanoidAndroid.
the class VideoEditorExportTest method testExportMergeTrim.
/**
 * To Test export : Merge and Trim different types of Video and Image files
 */
@LargeTest
public void testExportMergeTrim() throws Exception {
    final String videoItemFilename1 = INPUT_FILE_PATH
        + "H263_profile0_176x144_15fps_256kbps_AACLC_32kHz_128kbps_s_0_26.3gp";
    final String imageItemFilename1 = INPUT_FILE_PATH + "IMG_1600x1200.jpg";
    final String videoItemFilename2 = INPUT_FILE_PATH
        + "H264_BP_640x480_12.5fps_256kbps_AACLC_16khz_24kbps_s_0_26.mp4";
    final String videoItemFilename3 = INPUT_FILE_PATH
        + "MPEG4_SP_720x480_30fps_280kbps_AACLC_48kHz_96kbps_s_0_21.mp4";
    final String imageItemFilename2 = INPUT_FILE_PATH + "IMG_176x144.jpg";
    final String imageItemFilename3 = INPUT_FILE_PATH + "IMG_640x480.jpg";
    final String outFilename = mVideoEditorHelper.createRandomFile(mVideoEditor.getPath() + "/")
        + ".3gp";
    final MediaVideoItem mediaVideoItem1 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m1",
        videoItemFilename1, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mediaVideoItem1.setExtractBoundaries(2000, 7000);
    mVideoEditor.addMediaItem(mediaVideoItem1);
    final MediaImageItem mediaImageItem2 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m2",
        imageItemFilename1, 3000, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mVideoEditor.addMediaItem(mediaImageItem2);
    final MediaVideoItem mediaVideoItem3 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m3",
        videoItemFilename2, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mediaVideoItem3.setExtractBoundaries(0, 2000);
    mVideoEditor.addMediaItem(mediaVideoItem3);
    final MediaVideoItem mediaVideoItem4 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m4",
        videoItemFilename3, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mediaVideoItem4.setExtractBoundaries(mediaVideoItem4.getDuration() - 5000,
        mediaVideoItem4.getDuration());
    mVideoEditor.addMediaItem(mediaVideoItem4);
    final MediaImageItem mediaImageItem5 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m5",
        imageItemFilename2, 4000, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mVideoEditor.addMediaItem(mediaImageItem5);
    final MediaImageItem mediaImageItem6 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m6",
        imageItemFilename3, 2000, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mVideoEditor.addMediaItem(mediaImageItem6);
    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });
    try {
        final int[] progressUpdate = new int[100];
        mVideoEditor.export(outFilename, MediaProperties.HEIGHT_720, MediaProperties.BITRATE_800K,
            new ExportProgressListener() {
                int i = 0;

                public void onProgress(VideoEditor ve, String outFileName, int progress) {
                    progressUpdate[i++] = progress;
                }
            });
        mVideoEditorHelper.checkProgressCBValues(progressUpdate);
    } catch (Exception e) {
        assertTrue("Error in Export" + e.toString(), false);
    }
    final long storyBoardDuration = mediaVideoItem1.getTimelineDuration()
        + mediaImageItem2.getDuration() + mediaVideoItem3.getTimelineDuration()
        + mediaVideoItem4.getTimelineDuration() + mediaImageItem5.getDuration()
        + mediaImageItem6.getDuration();
    mVideoEditorHelper.validateExport(mVideoEditor, outFilename, MediaProperties.HEIGHT_720, 0,
        storyBoardDuration, MediaProperties.VCODEC_H264, MediaProperties.ACODEC_AAC_LC);
    mVideoEditorHelper.checkDeleteExistingFile(outFilename);
}
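Finally, the export() pattern shared by these tests can be isolated into a small helper. This is a sketch; the output file name "export_sketch.3gp", the log tag, and the 480p/512 kbps settings are illustrative rather than the 720p/800 kbps values used by the test above.

import android.media.videoeditor.MediaProperties;
import android.media.videoeditor.VideoEditor;
import android.media.videoeditor.VideoEditor.ExportProgressListener;
import android.util.Log;

import java.io.IOException;

/**
 * Exports a populated storyboard to a 3GP file under the project path.
 * Sketch only; not part of the AOSP tests quoted on this page.
 */
public final class ExportHelper {
    private static final String TAG = "ExportHelper";

    private ExportHelper() {
    }

    public static String exportStoryboard(VideoEditor editor) throws IOException {
        final String outFilename = editor.getPath() + "/export_sketch.3gp";
        editor.export(outFilename, MediaProperties.HEIGHT_480, MediaProperties.BITRATE_512K,
                new ExportProgressListener() {
                    public void onProgress(VideoEditor videoEditor, String filename, int progress) {
                        // Called repeatedly while the export runs; progress is 0-100.
                        Log.i(TAG, "exporting " + filename + ": " + progress + "%");
                    }
                });
        return outFilename;
    }
}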