Usage example of android.test.suitebuilder.annotation.LargeTest from the android_frameworks_base project (ParanoidAndroid): class VideoEditorAPITest, method testMediaVideoItemGetWaveformData.
/**
 * Tests the media video waveform APIs: {@code getWaveformData()} and
 * {@code extractAudioWaveform()}.
 *
 * <p>Verifies that no waveform data exists before extraction, that the
 * extraction progress callback reports valid values, and that the extracted
 * waveform has a positive frame duration, frame count, and gain array length.
 */
@LargeTest
public void testMediaVideoItemGetWaveformData() throws Exception {
final String videoItemFileName = INPUT_FILE_PATH + "H263_profile0_176x144_15fps_256kbps_AACLC_32kHz_128kbps_s_0_26.3gp";
final int videoItemRenderingMode = MediaItem.RENDERING_MODE_BLACK_BORDER;
final MediaVideoItem mediaVideoItem1 = mVideoEditorHelper.createMediaItem(mVideoEditor, "mediaVideoItem1", videoItemFileName, videoItemRenderingMode);
mVideoEditor.addMediaItem(mediaVideoItem1);
// Before extraction, no waveform data should be available.
assertNull("WaveForm data", mediaVideoItem1.getWaveformData());
// Collects every progress callback value; sized at 105 on the assumption
// that at most that many callbacks occur for this clip — TODO confirm.
final int[] progressWaveform = new int[105];
mediaVideoItem1.extractAudioWaveform(new ExtractAudioWaveformProgressListener() {
int i = 0;
public void onProgress(int progress) {
Log.i("WaveformData", "progress=" + progress);
progressWaveform[i++] = progress;
}
});
assertTrue("Progress of WaveForm data", mVideoEditorHelper.checkProgressCBValues(progressWaveform));
// After extraction the waveform data must be populated and non-trivial.
assertNotNull("WaveForm data", mediaVideoItem1.getWaveformData());
assertTrue("WaveForm Frame Duration", mediaVideoItem1.getWaveformData().getFrameDuration() > 0);
assertTrue("WaveForm Frame Count", mediaVideoItem1.getWaveformData().getFramesCount() > 0);
assertTrue("WaveForm Gain", mediaVideoItem1.getWaveformData().getFrameGains().length > 0);
}
Usage example of android.test.suitebuilder.annotation.LargeTest from the android_frameworks_base project (ParanoidAndroid): class VideoEditorAPITest, method testFrameOverlaySetAndGetImage.
/**
 * Tests overlay duration set/get on a frame overlay attached to a media
 * image item, including rejection of out-of-range durations.
 *
 * <p>Checks that a valid duration round-trips, that a duration longer than
 * the media item throws {@code IllegalArgumentException} without altering the
 * stored value, and that a negative duration is likewise rejected.
 */
@LargeTest
public void testFrameOverlaySetAndGetImage() throws Exception {
final String videoItemFilename1 = INPUT_FILE_PATH + "IMG_640x480.jpg";
final String overlayFile1 = INPUT_FILE_PATH + "IMG_640x480_Overlay1.png";
boolean sawIllegalArgument = false;
final MediaImageItem mediaImageItem1 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m1", videoItemFilename1, 10000, MediaItem.RENDERING_MODE_BLACK_BORDER);
mVideoEditor.addMediaItem(mediaImageItem1);
final Bitmap overlayBitmap = mVideoEditorHelper.getBitmap(overlayFile1, 640, 480);
final OverlayFrame overlayFrame1 = mVideoEditorHelper.createOverlay(mediaImageItem1, "overlayId1", overlayBitmap, 5000, 5000);
mediaImageItem1.addOverlay(overlayFrame1);
// A duration within the media item's length must round-trip unchanged.
overlayFrame1.setDuration(5000);
assertEquals("Overlay Duration", 5000, overlayFrame1.getDuration());
// A duration exceeding the media item's length must be rejected ...
try {
overlayFrame1.setDuration(mediaImageItem1.getDuration() + 10000);
} catch (IllegalArgumentException e) {
sawIllegalArgument = true;
}
assertTrue("Overlay Duration > Media Item Duration", sawIllegalArgument);
// ... and must leave the previously set duration intact.
assertEquals("Overlay Duration", 5000, overlayFrame1.getDuration());
// A negative duration must also be rejected.
sawIllegalArgument = false;
try {
overlayFrame1.setDuration(-1);
} catch (IllegalArgumentException e) {
sawIllegalArgument = true;
}
assertTrue("Overlay Duration = -1", sawIllegalArgument);
}
Usage example of android.test.suitebuilder.annotation.LargeTest from the android_frameworks_base project (ParanoidAndroid): class VideoEditorAPITest, method testMediaVideoItemRenderingModes.
/**
 * Tests set/get of the rendering mode on a media video item.
 *
 * <p>Verifies that valid modes (CROPPING, STRETCH) round-trip, and that
 * out-of-range mode values throw {@code IllegalArgumentException} without
 * changing the previously stored mode.
 */
@LargeTest
public void testMediaVideoItemRenderingModes() throws Exception {
final String videoItemFileName = INPUT_FILE_PATH + "H263_profile0_176x144_15fps_256kbps_AACLC_32kHz_128kbps_s_0_26.3gp";
final int videoItemRenderingMode = MediaItem.RENDERING_MODE_BLACK_BORDER;
boolean caughtInvalidMode = false;
final MediaVideoItem mediaVideoItem1 = mVideoEditorHelper.createMediaItem(mVideoEditor, "mediaVideoItem1", videoItemFileName, videoItemRenderingMode);
mVideoEditor.addMediaItem(mediaVideoItem1);
// A valid mode must round-trip.
mediaVideoItem1.setRenderingMode(MediaItem.RENDERING_MODE_CROPPING);
assertEquals("MediaVideo Item rendering Mode", MediaItem.RENDERING_MODE_CROPPING, mediaVideoItem1.getRenderingMode());
// A mode value above the valid range must be rejected.
try {
mediaVideoItem1.setRenderingMode(MediaItem.RENDERING_MODE_CROPPING + 911);
} catch (IllegalArgumentException e) {
caughtInvalidMode = true;
}
assertTrue("Media Item Invalid rendering Mode", caughtInvalidMode);
// A mode value below the valid range must be rejected as well.
caughtInvalidMode = false;
try {
mediaVideoItem1.setRenderingMode(MediaItem.RENDERING_MODE_BLACK_BORDER - 11);
} catch (IllegalArgumentException e) {
caughtInvalidMode = true;
}
assertTrue("Media Item Invalid rendering Mode", caughtInvalidMode);
// Failed sets must not have disturbed the stored mode.
assertEquals("MediaVideo Item rendering Mode", MediaItem.RENDERING_MODE_CROPPING, mediaVideoItem1.getRenderingMode());
// A different valid mode must also round-trip.
mediaVideoItem1.setRenderingMode(MediaItem.RENDERING_MODE_STRETCH);
assertEquals("MediaVideo Item rendering Mode", MediaItem.RENDERING_MODE_STRETCH, mediaVideoItem1.getRenderingMode());
}
Usage example of android.test.suitebuilder.annotation.LargeTest from the android_frameworks_base project (ParanoidAndroid): class VideoEditorAPITest, method testAudioTrackLooping.
/**
 * Tests the audio track looping API: {@code enableLoop()},
 * {@code disableLoop()}, and {@code isLooping()}.
 *
 * <p>Verifies that a freshly added track is not looping, that enabling and
 * then disabling looping is reflected by {@code isLooping()}.
 */
@LargeTest
public void testAudioTrackLooping() throws Exception {
final String trackFilename = INPUT_FILE_PATH + "AACLC_48KHz_256Kbps_s_1_17.3gp";
final AudioTrack track = mVideoEditorHelper.createAudio(mVideoEditor, "audioTrack", trackFilename);
mVideoEditor.addAudioTrack(track);
// Looping is off by default for a newly added track.
assertFalse("Audio Looping", track.isLooping());
// Enabling looping must be observable ...
track.enableLoop();
assertTrue("Audio Looping", track.isLooping());
// ... and disabling must restore the non-looping state.
track.disableLoop();
assertFalse("Audio Looping", track.isLooping());
}
Usage example of android.test.suitebuilder.annotation.LargeTest from the android_frameworks_base project (ParanoidAndroid): class VideoEditorAPITest, method testFrameOverlayVideoItem.
/**
 * Tests a frame overlay attached to a media video item.
 *
 * <p>Verifies the overlay's accessors (media item, id, bitmap, start time,
 * duration) after creation, then updates the overlay bitmap and confirms the
 * new bitmap is returned.
 */
@LargeTest
public void testFrameOverlayVideoItem() throws Exception {
final String videoItemFilename1 = INPUT_FILE_PATH + "H263_profile0_176x144_10fps_256kbps_0_25.3gp";
final String overlayFile1 = INPUT_FILE_PATH + "IMG_176x144_Overlay1.png";
final String overlayFile2 = INPUT_FILE_PATH + "IMG_176x144_Overlay2.png";
final MediaVideoItem mediaVideoItem1 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m1", videoItemFilename1, MediaItem.RENDERING_MODE_BLACK_BORDER);
mVideoEditor.addMediaItem(mediaVideoItem1);
final Bitmap initialBitmap = mVideoEditorHelper.getBitmap(overlayFile1, 176, 144);
final OverlayFrame overlayFrame1 = mVideoEditorHelper.createOverlay(mediaVideoItem1, "overlayId1", initialBitmap, 5000, 5000);
mediaVideoItem1.addOverlay(overlayFrame1);
// All creation-time properties must be reflected by the accessors.
assertEquals("Overlay : Media Item", mediaVideoItem1, overlayFrame1.getMediaItem());
assertTrue("Overlay Id", overlayFrame1.getId().equals("overlayId1"));
assertEquals("Overlay Bitmap", initialBitmap, overlayFrame1.getBitmap());
assertEquals("Overlay Start Time", 5000, overlayFrame1.getStartTime());
assertEquals("Overlay Duration", 5000, overlayFrame1.getDuration());
// Replacing the overlay bitmap must be observable via getBitmap().
Bitmap updatedBitmap = mVideoEditorHelper.getBitmap(overlayFile2, 176, 144);
overlayFrame1.setBitmap(updatedBitmap);
assertEquals("Overlay Update Bitmap", updatedBitmap, overlayFrame1.getBitmap());
updatedBitmap.recycle();
}
Aggregations