use of android.media.videoeditor.VideoEditor.MediaProcessingProgressListener in project android_frameworks_base by ParanoidAndroid.
the class VideoEditorExportTest method testExportEffectOverlay.
/**
 * To test export: with effects and overlays on different media items.
 */
@LargeTest
public void testExportEffectOverlay() throws Exception {
    final String videoItemFilename1 = INPUT_FILE_PATH
        + "H263_profile0_176x144_15fps_256kbps_AACLC_32kHz_128kbps_s_0_26.3gp";
    final String imageItemFilename1 = INPUT_FILE_PATH + "IMG_1600x1200.jpg";
    final String videoItemFilename2 = INPUT_FILE_PATH
        + "H264_BP_640x480_15fps_1200Kbps_AACLC_48KHz_64kps_m_0_27.3gp";
    final String videoItemFilename3 = INPUT_FILE_PATH
        + "MPEG4_SP_720x480_30fps_280kbps_AACLC_48kHz_96kbps_s_0_21.mp4";
    final String imageItemFilename2 = INPUT_FILE_PATH + "IMG_176x144.jpg";
    final String imageItemFilename3 = INPUT_FILE_PATH + "IMG_640x480.jpg";
    final String outFilename = mVideoEditorHelper
        .createRandomFile(mVideoEditor.getPath() + "/") + ".3gp";
    final String overlayFile = INPUT_FILE_PATH + "IMG_640x480_Overlay1.png";

    // Media item 1: video clip trimmed to 2s-7s with a pink color effect and a negative effect.
    final MediaVideoItem mediaVideoItem1 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m1", videoItemFilename1, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mediaVideoItem1.setExtractBoundaries(2000, 7000);
    mVideoEditor.addMediaItem(mediaVideoItem1);
    final EffectColor effectPink = mVideoEditorHelper.createEffectItem(mediaVideoItem1,
        "effectPink", 0, 2000, EffectColor.TYPE_COLOR, EffectColor.PINK);
    mediaVideoItem1.addEffect(effectPink);
    final EffectColor effectNegative = mVideoEditorHelper.createEffectItem(mediaVideoItem1,
        "effectNegative", 3000, 4000, EffectColor.TYPE_NEGATIVE, 0);
    mediaVideoItem1.addEffect(effectNegative);

    // Media item 2: still image shown for 3s with a "fifties" effect.
    final MediaImageItem mediaImageItem2 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m2", imageItemFilename1, 3000, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mVideoEditor.addMediaItem(mediaImageItem2);
    final EffectColor effectFifties = mVideoEditorHelper.createEffectItem(mediaImageItem2,
        "effectFifties", 0, 3000, EffectColor.TYPE_FIFTIES, 0);
    mediaImageItem2.addEffect(effectFifties);

    // Media item 3: video clip with a PNG overlay from 2s for 5s and a green color effect.
    final MediaVideoItem mediaVideoItem3 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m3", videoItemFilename2, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mVideoEditor.addMediaItem(mediaVideoItem3);
    mediaVideoItem3.setExtractBoundaries(0, 8000);
    final Bitmap mBitmap = mVideoEditorHelper.getBitmap(overlayFile, 640, 480);
    final OverlayFrame overlayFrame = mVideoEditorHelper.createOverlay(mediaVideoItem3,
        "overlay", mBitmap, 2000, 5000);
    mediaVideoItem3.addOverlay(overlayFrame);
    final EffectColor effectGreen = mVideoEditorHelper.createEffectItem(mediaVideoItem3,
        "effectGreen", 0, 2000, EffectColor.TYPE_COLOR, EffectColor.GREEN);
    mediaVideoItem3.addEffect(effectGreen);

    // Media item 4: last 5s of a video clip with a sepia effect.
    final MediaVideoItem mediaVideoItem4 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m4", videoItemFilename3, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mediaVideoItem4.setExtractBoundaries(mediaVideoItem4.getDuration() - 5000,
        mediaVideoItem4.getDuration());
    mVideoEditor.addMediaItem(mediaVideoItem4);
    final EffectColor effectSepia = mVideoEditorHelper.createEffectItem(mediaVideoItem4,
        "effectSepia", 0, 2000, EffectColor.TYPE_SEPIA, 0);
    mediaVideoItem4.addEffect(effectSepia);

    // Media item 5: still image shown for 4s with a gray color effect.
    final MediaImageItem mediaImageItem5 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m5", imageItemFilename2, 4000, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mVideoEditor.addMediaItem(mediaImageItem5);
    final EffectColor effectGray = mVideoEditorHelper.createEffectItem(mediaImageItem5,
        "effectGray", 0, 2000, EffectColor.TYPE_COLOR, EffectColor.GRAY);
    mediaImageItem5.addEffect(effectGray);

    // Media item 6: still image shown for 2s with a pink gradient effect.
    final MediaImageItem mediaImageItem6 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m6", imageItemFilename3, 2000, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mVideoEditor.addMediaItem(mediaImageItem6);
    final EffectColor effectGradient = mVideoEditorHelper.createEffectItem(mediaImageItem6,
        "effectGradient", 0, 2000, EffectColor.TYPE_GRADIENT, EffectColor.PINK);
    mediaImageItem6.addEffect(effectGradient);

    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });

    // Export the storyboard and verify that the progress callback values are sane.
    try {
        final int[] progressUpdate = new int[100];
        mVideoEditor.export(outFilename, MediaProperties.HEIGHT_720,
            MediaProperties.BITRATE_800K, new ExportProgressListener() {
                int i = 0;
                public void onProgress(VideoEditor ve, String outFileName, int progress) {
                    progressUpdate[i++] = progress;
                }
            });
        mVideoEditorHelper.checkProgressCBValues(progressUpdate);
    } catch (Exception e) {
        assertTrue("Error in Export" + e.toString(), false);
    }
    final long storyBoardDuration = mediaVideoItem1.getTimelineDuration()
        + mediaImageItem2.getDuration() + mediaVideoItem3.getTimelineDuration()
        + mediaVideoItem4.getTimelineDuration() + mediaImageItem5.getDuration()
        + mediaImageItem6.getDuration();
    mVideoEditorHelper.validateExport(mVideoEditor, outFilename, MediaProperties.HEIGHT_720,
        0, storyBoardDuration, MediaProperties.VCODEC_H264, MediaProperties.ACODEC_AAC_LC);
    mVideoEditorHelper.checkDeleteExistingFile(outFilename);
}
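The createEffectItem helper used above presumably just wraps the EffectColor constructor from android.media.videoeditor. A minimal sketch of the direct API, assuming a media item whose timeline duration is at least startTimeMs + durationMs (the id "effectPinkDirect" is an illustrative name, not from the test):

    // Effect start times are relative to the beginning of the media item on the
    // timeline (i.e. after setExtractBoundaries), and startTimeMs + durationMs
    // must fit within the item's timeline duration.
    final EffectColor pink = new EffectColor(mediaVideoItem1, "effectPinkDirect",
            0 /* startTimeMs */, 2000 /* durationMs */,
            EffectColor.TYPE_COLOR, EffectColor.PINK);
    mediaVideoItem1.addEffect(pink);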
use of android.media.videoeditor.VideoEditor.MediaProcessingProgressListener in project android_frameworks_base by ParanoidAndroid.
the class VideoEditorExportTest method testExportAudio.
/**
 * To test export: with video, an image, and an audio background track.
 */
@LargeTest
public void testExportAudio() throws Exception {
    final String videoItemFileName = INPUT_FILE_PATH
        + "H263_profile0_176x144_15fps_256kbps_AACLC_32kHz_128kbps_s_0_26.3gp";
    final String imageItemFileName = INPUT_FILE_PATH + "IMG_1600x1200.jpg";
    final String outFilename = mVideoEditorHelper
        .createRandomFile(mVideoEditor.getPath() + "/") + ".3gp";
    final String audioTrackFilename = INPUT_FILE_PATH + "AMRNB_8KHz_12.2Kbps_m_1_17.3gp";

    // Storyboard: a 10s video clip followed by a 5s still image.
    final MediaVideoItem mediaVideoItem = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m1", videoItemFileName, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mediaVideoItem.setExtractBoundaries(0, 10000);
    mVideoEditor.addMediaItem(mediaVideoItem);
    final MediaImageItem mediaImageItem = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m2", imageItemFileName, 5000, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mVideoEditor.addMediaItem(mediaImageItem);

    // Background audio track: a 3s AMR-NB extract, looped, ducking disabled, volume at 75%.
    final AudioTrack audioTrack = mVideoEditorHelper.createAudio(mVideoEditor, "a1",
        audioTrackFilename);
    audioTrack.setExtractBoundaries(2000, 5000);
    mVideoEditor.addAudioTrack(audioTrack);
    audioTrack.disableDucking();
    audioTrack.enableLoop();
    audioTrack.setVolume(75);

    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });

    try {
        final int[] progressUpdate = new int[100];
        mVideoEditor.export(outFilename, MediaProperties.HEIGHT_720,
            MediaProperties.BITRATE_800K, new ExportProgressListener() {
                int i = 0;
                public void onProgress(VideoEditor ve, String outFileName, int progress) {
                    progressUpdate[i++] = progress;
                }
            });
        mVideoEditorHelper.checkProgressCBValues(progressUpdate);
    } catch (Exception e) {
        assertTrue("Error in Export" + e.toString(), false);
    }
    mVideoEditorHelper.validateExport(mVideoEditor, outFilename, MediaProperties.HEIGHT_720,
        0, (mediaVideoItem.getTimelineDuration() + mediaImageItem.getDuration()),
        MediaProperties.VCODEC_H264, MediaProperties.ACODEC_AAC_LC);
    mVideoEditorHelper.checkDeleteExistingFile(outFilename);
}
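This test explicitly disables ducking on the background track. If the music should instead drop underneath the media items' own audio, AudioTrack exposes enableDucking; a hedged sketch with illustrative values (the threshold and ducked volume below are not taken from the test):

    // Duck the background track to 25% of its volume whenever the relative energy
    // of the media items' audio rises above the threshold.
    audioTrack.enableDucking(20 /* duckingThreshold */, 25 /* duckedTrackVolume */);
    audioTrack.setVolume(100);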
use of android.media.videoeditor.VideoEditor.MediaProcessingProgressListener in project android_frameworks_base by ParanoidAndroid.
the class VideoEditorPerformance method testPerformanceWithKenBurn.
/**
 * To test the performance of generatePreview: with a Ken Burns effect.
 *
 * @throws Exception
 */
@LargeTest
public void testPerformanceWithKenBurn() throws Exception {
    final String videoItemFileName = INPUT_FILE_PATH + "H264_BP_1080x720_30fps_800kbps_1_17.mp4";
    final String imageItemFileName = INPUT_FILE_PATH + "IMG_1600x1200.jpg";
    final int renderingMode = MediaItem.RENDERING_MODE_BLACK_BORDER;
    long averageTime = 0;
    final String[] loggingInfo = new String[1];

    // Storyboard: a 10s video clip followed by a 10s still image with a Ken Burns effect.
    final MediaVideoItem mediaVideoItem = new MediaVideoItem(mVideoEditor, "mediaItem1",
        videoItemFileName, renderingMode);
    mediaVideoItem.setExtractBoundaries(0, 10000);
    mVideoEditor.addMediaItem(mediaVideoItem);
    final MediaImageItem mediaImageItem = new MediaImageItem(mVideoEditor, "mediaItem2",
        imageItemFileName, 10000, renderingMode);
    mVideoEditor.addMediaItem(mediaImageItem);
    final Rect startRect = new Rect((mediaImageItem.getHeight() / 3),
        (mediaImageItem.getWidth() / 3), (mediaImageItem.getHeight() / 2),
        (mediaImageItem.getWidth() / 2));
    final Rect endRect = new Rect(0, 0, mediaImageItem.getWidth(), mediaImageItem.getHeight());
    final EffectKenBurns kbEffectOnMediaItem = new EffectKenBurns(mediaImageItem, "KBOnM2",
        startRect, endRect, 500, 3000);
    mediaImageItem.addEffect(kbEffectOnMediaItem);

    // Time generatePreview() over NUM_OF_ITERATIONS runs. Removing and re-adding the
    // effect marks the storyboard as changed so each iteration regenerates the preview.
    for (int i = 0; i < NUM_OF_ITERATIONS; i++) {
        final long duration1 = SystemClock.uptimeMillis();
        mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
            public void onProgress(Object item, int action, int progress) {
            }
        });
        final long duration2 = SystemClock.uptimeMillis();
        mediaImageItem.removeEffect(kbEffectOnMediaItem.getId());
        mediaImageItem.addEffect(kbEffectOnMediaItem);
        averageTime += duration2 - duration1;
    }
    final long durationToAddObjects = (averageTime);
    final float timeTaken = (float) durationToAddObjects * 1.0f / (float) NUM_OF_ITERATIONS;
    loggingInfo[0] = "Time taken to Generate KenBurn Effect :" + timeTaken;
    writeTimingInfo("testPerformanceWithKenBurn", loggingInfo);
}
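Note that averageTime accumulates the total elapsed wall-clock time across all iterations; only the final division by NUM_OF_ITERATIONS turns it into a per-call figure. For example (illustrative numbers only), if NUM_OF_ITERATIONS were 20 and the summed time 84,000 ms, timeTaken would be logged as 4200.0 ms per generatePreview call.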
use of android.media.videoeditor.VideoEditor.MediaProcessingProgressListener in project android_frameworks_base by ParanoidAndroid.
the class VideoEditorExportTest method testExportEffectKenBurn.
/**
 * To test export: with an image with a Ken Burns effect.
 */
@LargeTest
public void testExportEffectKenBurn() throws Exception {
    final String imageItemFileName = INPUT_FILE_PATH + "IMG_640x480.jpg";
    final int imageItemRenderingMode = MediaItem.RENDERING_MODE_BLACK_BORDER;
    final String outFilename = mVideoEditorHelper
        .createRandomFile(mVideoEditor.getPath() + "/") + ".3gp";

    // A 5s still image with a Ken Burns effect that starts at 500ms and lasts 3s.
    final MediaImageItem mediaImageItem = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "mediaImageItem1", imageItemFileName, 5000, imageItemRenderingMode);
    mVideoEditor.addMediaItem(mediaImageItem);
    final Rect startRect = new Rect((mediaImageItem.getHeight() / 3),
        (mediaImageItem.getWidth() / 3), (mediaImageItem.getHeight() / 2),
        (mediaImageItem.getWidth() / 2));
    final Rect endRect = new Rect(0, 0, mediaImageItem.getWidth(), mediaImageItem.getHeight());
    final EffectKenBurns kbEffectOnMediaItem = new EffectKenBurns(mediaImageItem, "KBOnM2",
        startRect, endRect, 500, 3000);
    assertNotNull("EffectKenBurns", kbEffectOnMediaItem);
    mediaImageItem.addEffect(kbEffectOnMediaItem);
    assertEquals("KenBurn Start Rect", startRect, kbEffectOnMediaItem.getStartRect());
    assertEquals("KenBurn End Rect", endRect, kbEffectOnMediaItem.getEndRect());

    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });

    try {
        final int[] progressUpdate = new int[100];
        mVideoEditor.export(outFilename, MediaProperties.HEIGHT_720,
            MediaProperties.BITRATE_800K, new ExportProgressListener() {
                int i = 0;
                public void onProgress(VideoEditor ve, String outFileName, int progress) {
                    progressUpdate[i++] = progress;
                }
            });
        mVideoEditorHelper.checkProgressCBValues(progressUpdate);
    } catch (Exception e) {
        assertTrue("Error in Export" + e.toString(), false);
    }
    mVideoEditorHelper.validateExport(mVideoEditor, outFilename, MediaProperties.HEIGHT_720,
        0, mediaImageItem.getDuration(), MediaProperties.VCODEC_H264,
        MediaProperties.ACODEC_AAC_LC);
    mVideoEditorHelper.checkDeleteExistingFile(outFilename);
}
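EffectKenBurns animates the rendered region from startRect to endRect over the effect duration, so ending on the full frame as above produces a zoom-out. Since android.graphics.Rect takes (left, top, right, bottom), a centered zoom-in built from the image dimensions might look like the following sketch (illustrative values and id, shown in isolation; it would overlap the effect already added in the test):

    final int w = mediaImageItem.getWidth();
    final int h = mediaImageItem.getHeight();
    // Start on the full frame and end on the centered middle half of the image: a zoom-in.
    final Rect fullFrame = new Rect(0, 0, w, h);
    final Rect center = new Rect(w / 4, h / 4, (3 * w) / 4, (3 * h) / 4);
    final EffectKenBurns zoomIn = new EffectKenBurns(mediaImageItem, "kbZoomIn",
            fullFrame, center, 500 /* startTimeMs */, 3000 /* durationMs */);
    mediaImageItem.addEffect(zoomIn);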
use of android.media.videoeditor.VideoEditor.MediaProcessingProgressListener in project android_frameworks_base by ParanoidAndroid.
the class VideoEditorPreviewTest method testRenderPreviewFrame.
/**
 * To test rendering a preview frame.
 */
@LargeTest
public void testRenderPreviewFrame() throws Exception {
    final String videoItemFileName1 = INPUT_FILE_PATH + "H264_BP_1080x720_30fps_800kbps_1_17.mp4";
    final String videoItemFileName2 = INPUT_FILE_PATH
        + "MPEG4_SP_800x480_515kbps_15fps_AMR_NB_8KHz_12.2kbps_m_0_26.mp4";
    final String videoItemFileName3 = INPUT_FILE_PATH + "H264_BP_640x480_30fps_256kbps_1_17.mp4";
    final String imageItemFilename1 = INPUT_FILE_PATH + "IMG_1600x1200.jpg";
    final String imageItemFilename2 = INPUT_FILE_PATH + "IMG_176x144.jpg";
    final String audioFilename = INPUT_FILE_PATH + "AMRNB_8KHz_12.2Kbps_m_1_17.3gp";
    OverlayData overlayData1 = new OverlayData();
    previewStart = false;
    previewStop = false;

    // Add three video items to the storyboard.
    final MediaVideoItem mediaVideoItem1 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m1", videoItemFileName1, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mediaVideoItem1.setExtractBoundaries(0, 10000);
    mVideoEditor.addMediaItem(mediaVideoItem1);
    final MediaVideoItem mediaVideoItem2 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m2", videoItemFileName2, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mediaVideoItem1.setExtractBoundaries(mediaVideoItem2.getDuration() / 4,
        mediaVideoItem2.getDuration() / 2);
    mVideoEditor.addMediaItem(mediaVideoItem2);
    final MediaVideoItem mediaVideoItem3 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m3", videoItemFileName3, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mediaVideoItem1.setExtractBoundaries(mediaVideoItem2.getDuration() / 2,
        mediaVideoItem2.getDuration());
    mVideoEditor.addMediaItem(mediaVideoItem3);

    // Insert two image items and verify the resulting storyboard order after each insertion.
    final MediaImageItem mediaImageItem4 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m4", imageItemFilename1, 5000, MediaItem.RENDERING_MODE_BLACK_BORDER);
    final MediaImageItem mediaImageItem5 = mVideoEditorHelper.createMediaItem(mVideoEditor,
        "m5", imageItemFilename2, 5000, MediaItem.RENDERING_MODE_BLACK_BORDER);
    List<MediaItem> mediaList = mVideoEditor.getAllMediaItems();
    assertEquals("Media Item List Size", 3, mediaList.size());

    mVideoEditor.insertMediaItem(mediaImageItem4, mediaVideoItem2.getId());
    mediaList = mVideoEditor.getAllMediaItems();
    assertEquals("Media Item List Size", 4, mediaList.size());
    assertEquals("Media item 1", mediaVideoItem1, mediaList.get(0));
    assertEquals("Media item 2", mediaVideoItem2, mediaList.get(1));
    assertEquals("Media item 4", mediaImageItem4, mediaList.get(2));
    assertEquals("Media item 3", mediaVideoItem3, mediaList.get(3));

    mVideoEditor.insertMediaItem(mediaImageItem5, mediaImageItem4.getId());
    mediaList = mVideoEditor.getAllMediaItems();
    assertEquals("Media Item List Size", 5, mediaList.size());
    assertEquals("Media item 1", mediaVideoItem1, mediaList.get(0));
    assertEquals("Media item 2", mediaVideoItem2, mediaList.get(1));
    assertEquals("Media item 4", mediaImageItem4, mediaList.get(2));
    assertEquals("Media item 5", mediaImageItem5, mediaList.get(3));
    assertEquals("Media item 3", mediaVideoItem3, mediaList.get(4));

    // Move media item 1 after media item 5 and verify the new order.
    mVideoEditor.moveMediaItem(mediaVideoItem1.getId(), mediaImageItem5.getId());
    mediaList = mVideoEditor.getAllMediaItems();
    assertEquals("Media Item List Size", 5, mediaList.size());
    assertEquals("Media item 2", mediaVideoItem2, mediaList.get(0));
    assertEquals("Media item 4", mediaImageItem4, mediaList.get(1));
    assertEquals("Media item 5", mediaImageItem5, mediaList.get(2));
    assertEquals("Media item 1", mediaVideoItem1, mediaList.get(3));
    assertEquals("Media item 3", mediaVideoItem3, mediaList.get(4));

    // Add two crossfade transitions and a looping background audio track.
    final TransitionCrossfade transition2And4CrossFade = mVideoEditorHelper.createTCrossFade(
        "transition2And4CrossFade", mediaVideoItem2, mediaImageItem4, 2000,
        Transition.BEHAVIOR_MIDDLE_FAST);
    mVideoEditor.addTransition(transition2And4CrossFade);
    final TransitionCrossfade transition1And3CrossFade = mVideoEditorHelper.createTCrossFade(
        "transition1And3CrossFade", mediaVideoItem1, mediaVideoItem3, 5000,
        Transition.BEHAVIOR_MIDDLE_FAST);
    mVideoEditor.addTransition(transition1And3CrossFade);
    final AudioTrack audioTrack = mVideoEditorHelper.createAudio(mVideoEditor, "a1",
        audioFilename);
    audioTrack.setExtractBoundaries(0, 2000);
    mVideoEditor.addAudioTrack(audioTrack);
    audioTrack.enableLoop();

    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });

    // Render preview frames at one quarter, one half, and the end of the storyboard.
    final SurfaceHolder surfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
    mVideoEditor.renderPreviewFrame(surfaceHolder, mVideoEditor.getDuration() / 4, overlayData1);
    Thread.sleep(1000);
    mVideoEditor.renderPreviewFrame(surfaceHolder, mVideoEditor.getDuration() / 2, overlayData1);
    Thread.sleep(1000);
    mVideoEditor.renderPreviewFrame(surfaceHolder, mVideoEditor.getDuration(), overlayData1);
}
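renderPreviewFrame returns the timestamp of the frame that was actually drawn, which can differ slightly from the requested time, and it fills the OverlayData argument when an overlay has to be composited at that position; note the test calls generatePreview before rendering any frames. A hedged sketch of checking both results (the compositing step is only indicated, and the OverlayData helper methods are assumed from the framework implementation rather than taken from this test):

    final long requested = mVideoEditor.getDuration() / 2;
    // Returns the time of the frame that was actually rendered to the surface.
    final long rendered = mVideoEditor.renderPreviewFrame(surfaceHolder, requested, overlayData1);
    if (overlayData1.needsRendering()) {
        // An application would composite the overlay here, e.g. by drawing it into a
        // Bitmap of the surface size via overlayData1.renderOverlay(bitmap).
    }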