Usage of android.media.videoeditor.AudioTrack in the android_frameworks_base project by ParanoidAndroid.
From class VideoEditorStressTest, method testStressAudioTrackVideo:
/**
 * Stress test: generates previews for a storyboard holding one H.264 video
 * item combined with, in turn, an AAC audio track and a looping AMR-NB audio
 * track, sampling memory before and after to detect leaks.
 *
 * @throws Exception on any video-editor failure
 */
@LargeTest
public void testStressAudioTrackVideo() throws Exception {
    final String videoItemFileName1 = INPUT_FILE_PATH + "H264_BP_1080x720_30fps_800kbps_1_17.mp4";
    final String audioFilename1 = INPUT_FILE_PATH + "AACLC_44.1kHz_256kbps_s_1_17.mp4";
    final String audioFilename2 = INPUT_FILE_PATH + "AMRNB_8KHz_12.2Kbps_m_1_17.3gp";
    final int renderingMode = MediaItem.RENDERING_MODE_BLACK_BORDER;
    final int audioVolume = 50;
    final String[] loggingInfo = new String[1];
    int i = 1;
    writeTestCaseHeader("testStressAudioTrackVideo");
    getBeginMemory();
    final MediaVideoItem mediaVideoItem =
            new MediaVideoItem(mVideoEditor, "mediaItem1", videoItemFileName1, renderingMode);
    mVideoEditor.addMediaItem(mediaVideoItem);
    // First pass: AAC track at half volume, ducking disabled.
    final AudioTrack audioTrack1 = new AudioTrack(mVideoEditor, "Audio Track1", audioFilename1);
    audioTrack1.disableDucking();
    audioTrack1.setVolume(audioVolume);
    mVideoEditor.addAudioTrack(audioTrack1);
    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });
    mVideoEditor.removeAudioTrack("Audio Track1");
    // Second pass: looping AMR-NB track.
    final AudioTrack audioTrack2 = new AudioTrack(mVideoEditor, "Audio Track2", audioFilename2);
    audioTrack2.enableLoop();
    // BUGFIX: the track must be attached to the editor; without this call the
    // second generatePreview() exercised a storyboard with no audio track at all.
    mVideoEditor.addAudioTrack(audioTrack2);
    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        public void onProgress(Object item, int action, int progress) {
        }
    });
    getAfterMemory_updateLog(loggingInfo, false, i);
    /** Remove items and check for memory leak if any */
    mVideoEditor.removeMediaItem("mediaItem1");
    getAfterMemory_updateLog(loggingInfo, true, i);
}
Usage of android.media.videoeditor.AudioTrack in the android_frameworks_base project by ParanoidAndroid.
From class VideoEditorAPITest, method testAudioTrackWaveFormData:
/**
 * Tests the AudioTrack waveform-extraction API: waveform data is absent
 * before extraction, extraction reports monotonic progress callbacks, and
 * the extracted data exposes a positive frame duration, frame count and
 * per-frame gain array.
 *
 * @throws Exception on any video-editor failure
 */
@LargeTest
public void testAudioTrackWaveFormData() throws Exception {
    /** Image item is added as dummy as Audio track cannot be added without
     * a media item in the story board
     */
    final String imageItemFileName = INPUT_FILE_PATH + "IMG_640x480.jpg";
    final int imageItemRenderingMode = MediaItem.RENDERING_MODE_BLACK_BORDER;
    final MediaImageItem mediaImageItem = mVideoEditorHelper.createMediaItem(
            mVideoEditor, "mediaImageItem1", imageItemFileName, 5000, imageItemRenderingMode);
    mVideoEditor.addMediaItem(mediaImageItem);
    final String audioFileName = INPUT_FILE_PATH + "AACLC_48KHz_256Kbps_s_1_17.3gp";
    final AudioTrack audioTrack =
            mVideoEditorHelper.createAudio(mVideoEditor, "audioTrack", audioFileName);
    mVideoEditor.addAudioTrack(audioTrack);
    // Waveform data must not exist until extractAudioWaveform() has run.
    assertNull("WaveForm data", audioTrack.getWaveformData());
    final int[] progressUpdate = new int[105];
    mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
        int i = 0;
        public void onProgress(Object item, int action, int progress) {
            progressUpdate[i++] = progress;
        }
    });
    final int[] progressWaveform = new int[105];
    audioTrack.extractAudioWaveform(new ExtractAudioWaveformProgressListener() {
        int i = 0;
        public void onProgress(int progress) {
            Log.i("AudioWaveformData", "progress=" + progress);
            progressWaveform[i++] = progress;
        }
    });
    assertTrue("Progress of WaveForm data",
            mVideoEditorHelper.checkProgressCBValues(progressWaveform));
    // Hoist the repeated getWaveformData() lookup; the redundant
    // "(cond ? true : false)" wrappers are replaced by the conditions themselves.
    final WaveformData waveformData = audioTrack.getWaveformData();
    assertNotNull("WaveForm data", waveformData);
    assertTrue("WaveForm Frame Duration", waveformData.getFrameDuration() > 0);
    assertTrue("WaveForm Frame Count", waveformData.getFramesCount() > 0);
    assertTrue("WaveForm Gain", waveformData.getFrameGains().length > 0);
}
Usage of android.media.videoeditor.AudioTrack in the android_frameworks_base project by ParanoidAndroid.
From class VideoEditorAPITest, method testAudioTrackGetSetVolume:
/**
 * Tests the AudioTrack volume API: setVolume/getVolume round-trip for valid
 * values (0 and 45), volume changes do not affect the mute flag, and every
 * out-of-range value (-1, 101, 1000) throws IllegalArgumentException while
 * leaving the stored volume untouched.
 *
 * @throws Exception on any video-editor failure
 */
@LargeTest
public void testAudioTrackGetSetVolume() throws Exception {
    final String audioFileName = INPUT_FILE_PATH + "AACLC_48KHz_256Kbps_s_1_17.3gp";
    boolean flagForException = false;
    final AudioTrack audioTrack =
            mVideoEditorHelper.createAudio(mVideoEditor, "audioTrack", audioFileName);
    // Volume 0 is valid and distinct from muting.
    audioTrack.setVolume(0);
    assertEquals("Audio Volume", 0, audioTrack.getVolume());
    assertFalse("Audio Track UnMute", audioTrack.isMuted());
    audioTrack.setVolume(45);
    assertEquals("Audio Volume", 45, audioTrack.getVolume());
    assertFalse("Audio Track UnMute", audioTrack.isMuted());
    // Below-range value must be rejected without altering the volume.
    try {
        audioTrack.setVolume(-1);
    } catch (IllegalArgumentException e) {
        flagForException = true;
    }
    assertTrue("Volume = -1", flagForException);
    assertEquals("Audio Volume", 45, audioTrack.getVolume());
    flagForException = false;
    // Just-above-range value must be rejected without altering the volume.
    try {
        audioTrack.setVolume(101);
    } catch (IllegalArgumentException e) {
        flagForException = true;
    }
    assertTrue("Volume = 101", flagForException);
    // Consistency with the other out-of-range cases: volume is unchanged.
    assertEquals("Audio Volume", 45, audioTrack.getVolume());
    flagForException = false;
    // Far-above-range value must be rejected without altering the volume.
    try {
        audioTrack.setVolume(1000);
    } catch (IllegalArgumentException e) {
        flagForException = true;
    }
    // BUGFIX: message previously said "Volume = 10000" but the tested value is 1000.
    assertTrue("Volume = 1000", flagForException);
    assertEquals("Audio Volume", 45, audioTrack.getVolume());
}
Usage of android.media.videoeditor.AudioTrack in the android_frameworks_base project by ParanoidAndroid.
From class VideoEditorAPITest, method testAudioTrackMute:
/**
 * Verifies the AudioTrack mute API: a freshly created track starts unmuted,
 * setMute(true) mutes it, and setMute(false) restores the unmuted state.
 *
 * @throws Exception on any video-editor failure
 */
@LargeTest
public void testAudioTrackMute() throws Exception {
    final String trackFileName = INPUT_FILE_PATH + "AACLC_48KHz_256Kbps_s_1_17.3gp";
    final AudioTrack track =
            mVideoEditorHelper.createAudio(mVideoEditor, "audioTrack", trackFileName);
    // A newly created track must not be muted.
    assertFalse("Audio Track UnMute", track.isMuted());
    // Turning mute on is observable through isMuted().
    track.setMute(true);
    assertTrue("Audio Track Mute", track.isMuted());
    // Turning mute back off restores the original state.
    track.setMute(false);
    assertFalse("Audio Track UnMute", track.isMuted());
}
Usage of android.media.videoeditor.AudioTrack in the android_frameworks_base project by ParanoidAndroid.
From class VideoEditorStressTest, method testStressMediaProperties:
/**
 * Stress-tests media-property queries: over 50 iterations, cycles through a
 * video item, an image item and an audio track (one per iteration, chosen by
 * iteration index modulo 3), verifies each item's reported properties against
 * known values for the fixture files, and samples memory every ten iterations
 * to detect leaks.
 *
 * @throws Exception on any video-editor failure
 */
@LargeTest
public void testStressMediaProperties() throws Exception {
    final int renderingMode = MediaItem.RENDERING_MODE_BLACK_BORDER;
    final String VideoItemFileName1 = INPUT_FILE_PATH + "H264_BP_1080x720_30fps_800kbps_1_17.mp4";
    final String ImageItemFileName2 = INPUT_FILE_PATH + "IMG_640x480.jpg";
    final String AudioItemFileName3 = INPUT_FILE_PATH + "AACLC_44.1kHz_256kbps_s_1_17.mp4";
    final String[] loggingInfo = new String[1];
    int i = 0;
    // Expected properties of the three fixture files above.
    final int videoAspectRatio = MediaProperties.ASPECT_RATIO_3_2;
    final int videoFileType = MediaProperties.FILE_MP4;
    final int videoCodecType = MediaProperties.VCODEC_H264;
    final int videoDuration = 77366;
    final int videoProfile = MediaProperties.H264Profile.H264ProfileBaseline;
    final int videoLevel = MediaProperties.H264Level.H264Level13;
    final int videoHeight = MediaProperties.HEIGHT_720;
    final int videoWidth = 1080;
    final int imageAspectRatio = MediaProperties.ASPECT_RATIO_4_3;
    final int imageFileType = MediaProperties.FILE_JPEG;
    final int imageWidth = 640;
    final int imageHeight = MediaProperties.HEIGHT_480;
    final int audioDuration = 77554;
    final int audioCodecType = MediaProperties.ACODEC_AAC_LC;
    final int audioSamplingFrequency = 44100;
    final int audioChannel = 2;
    writeTestCaseHeader("testStressMediaProperties");
    getBeginMemory();
    for (i = 0; i < 50; i++) {
        switch (i % 3) {
            case 0: {
                // Video item: check all reported stream properties.
                final MediaVideoItem videoItem =
                        new MediaVideoItem(mVideoEditor, "m1" + i, VideoItemFileName1, renderingMode);
                mVideoEditor.addMediaItem(videoItem);
                videoItem.setExtractBoundaries(0, 20000);
                assertEquals("Aspect Ratio Mismatch", videoAspectRatio, videoItem.getAspectRatio());
                assertEquals("File Type Mismatch", videoFileType, videoItem.getFileType());
                assertEquals("VideoCodec Mismatch", videoCodecType, videoItem.getVideoType());
                assertEquals("duration Mismatch", videoDuration, videoItem.getDuration());
                assertEquals("Video Profile ", videoProfile, videoItem.getVideoProfile());
                assertEquals("Video Level ", videoLevel, videoItem.getVideoLevel());
                assertEquals("Video height ", videoHeight, videoItem.getHeight());
                assertEquals("Video width ", videoWidth, videoItem.getWidth());
                mVideoEditor.removeMediaItem("m1" + i);
                break;
            }
            case 1: {
                // Image item: check dimensions and file type.
                final MediaImageItem imageItem =
                        new MediaImageItem(mVideoEditor, "m2" + i, ImageItemFileName2, 10000, renderingMode);
                mVideoEditor.addMediaItem(imageItem);
                assertEquals("Aspect Ratio Mismatch", imageAspectRatio, imageItem.getAspectRatio());
                assertEquals("File Type Mismatch", imageFileType, imageItem.getFileType());
                assertEquals("Image height", imageHeight, imageItem.getHeight());
                assertEquals("Image width", imageWidth, imageItem.getWidth());
                mVideoEditor.removeMediaItem("m2" + i);
                break;
            }
            case 2: {
                // Audio track: check codec, sampling rate, channels, duration.
                final AudioTrack audioItem =
                        new AudioTrack(mVideoEditor, "m3" + i, AudioItemFileName3);
                mVideoEditor.addAudioTrack(audioItem);
                assertEquals("AudioType Mismatch", audioCodecType, audioItem.getAudioType());
                assertEquals("Audio Sampling", audioSamplingFrequency,
                        audioItem.getAudioSamplingFrequency());
                assertEquals("Audio Channels", audioChannel, audioItem.getAudioChannels());
                assertEquals("duration Mismatch", audioDuration, audioItem.getDuration());
                mVideoEditor.removeAudioTrack("m3" + i);
                break;
            }
        }
        // Sample memory every ten iterations (including the first, i == 0).
        if (i % 10 == 0) {
            getAfterMemory_updateLog(loggingInfo, false, i);
        }
    }
    getAfterMemory_updateLog(loggingInfo, false, i);
}
Aggregations (end of collected usage examples).