Use of android.view.SurfaceHolder in project android_frameworks_base by ParanoidAndroid.
From class CameraFunctionalTest, method testFunctionalCameraExposureCompensation.
/**
 * Functional test iterating over the range of supported exposure compensation
 * levels, capturing one photo at each whole-EV compensation index.
 *
 * Fails (via assertFalse) if the camera reports no adjustable exposure range.
 */
@LargeTest
public void testFunctionalCameraExposureCompensation() throws Exception {
    try {
        SurfaceHolder surfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
        Parameters params = mCameraTestHelper.getCameraParameters();
        int min = params.getMinExposureCompensation();
        int max = params.getMaxExposureCompensation();
        // min == max == 0 means the driver does not support exposure compensation.
        assertFalse("Adjusting exposure not supported", (max == 0 && min == 0));
        float step = params.getExposureCompensationStep();
        // Number of compensation indices per whole EV, e.g. step == 0.5f gives 2.
        // 1/step is clearer than the original Math.pow(step, -1); Math.round(float)
        // already returns int, so no cast is needed.
        int stepsPerEV = Math.round(1.0f / step);
        // Only visit integer EV values within [min, max].
        for (int i = min; i <= max; i += stepsPerEV) {
            // Camera setup must happen on the looper thread owning the camera.
            runOnLooper(new Runnable() {
                @Override
                public void run() {
                    mCameraTestHelper.setupCameraTest();
                }
            });
            Log.v(TAG, "Setting exposure compensation index to " + i);
            params.setExposureCompensation(i);
            mCameraTestHelper.setParameters(params);
            mCameraTestHelper.startCameraPreview(surfaceHolder);
            mCameraTestHelper.capturePhoto();
        }
        mCameraTestHelper.cleanupTestImages();
    } catch (Exception e) {
        // Details are preserved in the log; fail() marks the test failed.
        Log.e(TAG, e.toString());
        fail("Camera exposure compensation test Exception");
    }
}
Use of android.view.SurfaceHolder in project android_frameworks_base by ParanoidAndroid.
From class VideoEditorPreviewTest, method testPreviewWithRenderPreviewFrameWithoutGenerate.
/**
 * To test Preview : renderPreviewFrame() called without a prior
 * generatePreview().
 *
 * Expects an IllegalStateException when preview has not been generated, and
 * an IllegalArgumentException when the requested time exceeds the storyboard
 * duration.
 */
@LargeTest
public void testPreviewWithRenderPreviewFrameWithoutGenerate() throws Exception {
    final String videoItemFileName = INPUT_FILE_PATH + "H264_BP_640x480_30fps_256kbps_1_17.mp4";
    boolean flagForException = false;
    long duration = 0;
    OverlayData overlayData1 = new OverlayData();
    final MediaVideoItem mediaVideoItem = mVideoEditorHelper.createMediaItem(mVideoEditor, "m1", videoItemFileName, MediaItem.RENDERING_MODE_BLACK_BORDER);
    mVideoEditor.addMediaItem(mediaVideoItem);
    final SurfaceHolder surfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
    duration = mVideoEditor.getDuration();
    /* renderPreviewFrame() must fail before generatePreview() is called */
    try {
        mVideoEditor.renderPreviewFrame(surfaceHolder, duration, overlayData1);
    } catch (IllegalStateException e) {
        flagForException = true;
    }
    assertTrue(" Render Preview Frame without generate", flagForException);
    // BUG FIX: reset the flag before the second scenario; otherwise the next
    // assertion passes on the stale 'true' even if no IllegalArgumentException
    // is thrown below.
    flagForException = false;
    // Request a frame 1 second past the end of the storyboard.
    duration = mVideoEditor.getDuration() + 1000;
    try {
        mVideoEditor.renderPreviewFrame(surfaceHolder, duration, overlayData1);
    } catch (IllegalArgumentException e) {
        flagForException = true;
    }
    assertTrue(" Preview time greater than duration", flagForException);
}
Use of android.view.SurfaceHolder in project android_frameworks_base by ParanoidAndroid.
From class VideoEditorPreviewTest, method testPreviewWithTransition.
/**
 * To test Preview : Preview of current Transitions applied (with multiple
 * generatePreview)
 *
 * Builds a three-item storyboard (video, image, video), applies a crossfade,
 * an alpha transition and a fade-from-black, generates and previews it, then
 * removes one transition and adds a sliding transition, validating preview
 * progress after each change.
 */
@LargeTest
public void testPreviewWithTransition() throws Exception {
final String videoItemFileName1 = INPUT_FILE_PATH + "H263_profile0_176x144_10fps_96kbps_0_25.3gp";
final String imageItemFileName1 = INPUT_FILE_PATH + "IMG_1600x1200.jpg";
final String videoItemFileName2 = INPUT_FILE_PATH + "MPEG4_SP_800x480_515kbps_15fps_AMR_NB_8KHz_12.2kbps_m_0_26.mp4";
final String maskFilename = INPUT_FILE_PATH + "TransitionSpiral_QVGA.jpg";
// Shared flags set by the preview callbacks below; reset before each run.
previewStart = false;
previewStop = false;
previewError = false;
// Single-permit semaphore: acquired before startPreview(), released by
// onStop()/onError(), so tryAcquire() below blocks until preview ends.
final Semaphore blockTillPreviewCompletes = new Semaphore(1);
// Storyboard item 1: 10 s video clip.
final MediaVideoItem mediaVideoItem1 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m1", videoItemFileName1, MediaItem.RENDERING_MODE_BLACK_BORDER);
mediaVideoItem1.setExtractBoundaries(0, 10000);
mVideoEditor.addMediaItem(mediaVideoItem1);
// Storyboard item 2: still image shown for 10 s.
final MediaImageItem mediaImageItem1 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m2", imageItemFileName1, 10000, MediaItem.RENDERING_MODE_BLACK_BORDER);
mVideoEditor.addMediaItem(mediaImageItem1);
// Storyboard item 3: another 10 s video clip.
final MediaVideoItem mediaVideoItem2 = mVideoEditorHelper.createMediaItem(mVideoEditor, "m3", videoItemFileName2, MediaItem.RENDERING_MODE_BLACK_BORDER);
mediaVideoItem2.setExtractBoundaries(0, 10000);
mVideoEditor.addMediaItem(mediaVideoItem2);
// Three transitions: crossfade (item1->item2), alpha mask (item2->item3),
// and fade-from-black at the start of item1.
final TransitionCrossfade transition1And2CrossFade = mVideoEditorHelper.createTCrossFade("transition_1_2_CF", mediaVideoItem1, mediaImageItem1, 2000, Transition.BEHAVIOR_MIDDLE_FAST);
mVideoEditor.addTransition(transition1And2CrossFade);
final TransitionAlpha transition2And3Alpha = mVideoEditorHelper.createTAlpha("transition_2_3", mediaImageItem1, mediaVideoItem2, 4000, Transition.BEHAVIOR_SPEED_UP, maskFilename, 50, true);
mVideoEditor.addTransition(transition2And3Alpha);
final TransitionFadeBlack transition1FadeBlack = mVideoEditorHelper.createTFadeBlack("transition_1FB", null, mediaVideoItem1, 2000, Transition.BEHAVIOR_MIDDLE_FAST);
mVideoEditor.addTransition(transition1FadeBlack);
// Transitions are returned in insertion order.
List<Transition> transitionList = mVideoEditor.getAllTransitions();
assertEquals("Transition List Size", 3, transitionList.size());
assertEquals("Transition 1", transition1And2CrossFade, transitionList.get(0));
assertEquals("Transition 2", transition2And3Alpha, transitionList.get(1));
assertEquals("Transition 3", transition1FadeBlack, transitionList.get(2));
mVideoEditor.setAspectRatio(MediaProperties.ASPECT_RATIO_3_2);
// Progress callbacks record per-transition encode progress for later checks.
// NOTE(review): 300 entries is assumed to be enough for all callbacks;
// the shared index 'i' advances on every callback regardless of item type.
final int[] progressValues = new int[300];
mVideoEditor.generatePreview(new MediaProcessingProgressListener() {
int i = 0;
public void onProgress(Object item, int action, int progress) {
if (item instanceof TransitionCrossfade) {
progressValues[i] = progress;
assertEquals("Object", item, transition1And2CrossFade);
assertEquals("Action", action, MediaProcessingProgressListener.ACTION_ENCODE);
} else if (item instanceof TransitionAlpha) {
progressValues[i] = progress;
assertEquals("Object", item, transition2And3Alpha);
assertEquals("Action", action, MediaProcessingProgressListener.ACTION_ENCODE);
} else if (item instanceof TransitionFadeBlack) {
progressValues[i] = progress;
assertEquals("Object", item, transition1FadeBlack);
assertEquals("Action", action, MediaProcessingProgressListener.ACTION_ENCODE);
}
i++;
}
});
mVideoEditorHelper.checkProgressCBValues(progressValues);
final SurfaceHolder surfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
/* Transitions take longer to render, so a 20 s buffer (2 x 10000 ms) is
 * added on top of the minimum waiting time. */
long waitingTime = minWaitingTime + 10000 + 10000;
// Take the only permit so tryAcquire() below blocks until a callback
// releases it.
blockTillPreviewCompletes.acquire();
try {
mVideoEditor.startPreview(surfaceHolder, 0, 10000, false, 1, new PreviewProgressListener() {
public void onProgress(VideoEditor videoEditor, long timeMs, OverlayData overlayData) {
}
public void onStart(VideoEditor videoEditor) {
setPreviewStart();
}
public void onStop(VideoEditor videoEditor) {
setPreviewStop();
blockTillPreviewCompletes.release();
}
public void onError(VideoEditor videoEditor, int error) {
setPreviewError();
blockTillPreviewCompletes.release();
}
});
} catch (Exception e) {
// If startPreview throws, release the permit so the wait below returns
// immediately; the assertions afterwards will report the failure.
blockTillPreviewCompletes.release();
}
// Wait (bounded) for preview completion, then stop and release the permit.
blockTillPreviewCompletes.tryAcquire(waitingTime, TimeUnit.MILLISECONDS);
mVideoEditor.stopPreview();
blockTillPreviewCompletes.release();
assertTrue("Preview Failed to start", previewStart);
assertTrue("Preview Failed to stop", previewStop);
assertFalse("Preview Error occurred", previewError);
// Remove the crossfade and verify the remaining transitions shift down.
assertEquals("Removing Transition " + transition1And2CrossFade.getId(), transition1And2CrossFade, mVideoEditor.removeTransition(transition1And2CrossFade.getId()));
transitionList = mVideoEditor.getAllTransitions();
assertEquals("Transition List Size", 2, transitionList.size());
assertEquals("Transition 1", transition2And3Alpha, transitionList.get(0));
assertEquals("Transition 2", transition1FadeBlack, transitionList.get(1));
// Re-validate a full preview pass after the removal (implicit regenerate).
validatePreviewProgress(0, -1, false, mVideoEditor.getDuration());
// Add a sliding transition and validate preview again from 5 s onwards.
final TransitionSliding transition1And2Sliding = mVideoEditorHelper.createTSliding("transition_1_2Sliding", mediaVideoItem1, mediaImageItem1, 4000, Transition.BEHAVIOR_MIDDLE_FAST, TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN);
mVideoEditor.addTransition(transition1And2Sliding);
transitionList = mVideoEditor.getAllTransitions();
assertEquals("Transition List Size", 3, transitionList.size());
assertEquals("Transition 1", transition2And3Alpha, transitionList.get(0));
assertEquals("Transition 2", transition1FadeBlack, transitionList.get(1));
assertEquals("Transition 3", transition1And2Sliding, transitionList.get(2));
validatePreviewProgress(5000, -1, false, (mVideoEditor.getDuration()));
}
Use of android.view.SurfaceHolder in project android_frameworks_base by ParanoidAndroid.
From class MediaRecorderStressTest, method recordVideoAndPlayback.
// Helper method for record & playback testing with different camcorder profiles.
// Records 'iterations' clips with the resolved settings, plays each one back,
// validates the recorded file, and optionally deletes it.
// 'profile' is either a CamcorderProfile quality level, or
// USE_TEST_RUNNER_PROFILE to take all settings from the test runner.
private void recordVideoAndPlayback(int profile) throws Exception {
int iterations;
int recordDuration;
boolean removeVideo;
int videoEncoder;
int audioEncoder;
int frameRate;
int videoWidth;
int videoHeight;
int bitRate;
if (profile != USE_TEST_RUNNER_PROFILE) {
// Resolve settings from the named camcorder profile; skip unsupported ones.
assertTrue(String.format("Camera doesn't support profile %d", profile), CamcorderProfile.hasProfile(CAMERA_ID, profile));
CamcorderProfile camcorderProfile = CamcorderProfile.get(CAMERA_ID, profile);
videoEncoder = camcorderProfile.videoCodec;
audioEncoder = camcorderProfile.audioCodec;
frameRate = camcorderProfile.videoFrameRate;
videoWidth = camcorderProfile.videoFrameWidth;
videoHeight = camcorderProfile.videoFrameHeight;
bitRate = camcorderProfile.videoBitRate;
} else {
// Settings supplied by the instrumentation test runner.
videoEncoder = MediaRecorderStressTestRunner.mVideoEncoder;
audioEncoder = MediaRecorderStressTestRunner.mAudioEncoder;
frameRate = MediaRecorderStressTestRunner.mFrameRate;
videoWidth = MediaRecorderStressTestRunner.mVideoWidth;
videoHeight = MediaRecorderStressTestRunner.mVideoHeight;
bitRate = MediaRecorderStressTestRunner.mBitRate;
}
iterations = MediaRecorderStressTestRunner.mIterations;
recordDuration = MediaRecorderStressTestRunner.mDuration;
removeVideo = MediaRecorderStressTestRunner.mRemoveVideo;
SurfaceHolder surfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
mOutput.write("Total number of loops: " + iterations + "\n");
try {
mOutput.write("No of loop: ");
for (int i = 0; i < iterations; i++) {
// One output file per iteration, e.g. .../temp0.3gp, .../temp1.3gp, ...
String fileName = String.format("%s/temp%d%s", Environment.getExternalStorageDirectory(), i, OUTPUT_FILE_EXT);
Log.v(TAG, fileName);
// MediaRecorder must be created on the looper thread that owns it.
runOnLooper(new Runnable() {
@Override
public void run() {
mRecorder = new MediaRecorder();
}
});
Log.v(TAG, "iterations : " + iterations);
Log.v(TAG, "video encoder : " + videoEncoder);
Log.v(TAG, "audio encoder : " + audioEncoder);
Log.v(TAG, "frame rate : " + frameRate);
Log.v(TAG, "video width : " + videoWidth);
Log.v(TAG, "video height : " + videoHeight);
Log.v(TAG, "bit rate : " + bitRate);
Log.v(TAG, "record duration : " + recordDuration);
// NOTE: the order of the calls below follows the MediaRecorder state
// machine (sources before output format, format before encoders,
// everything before prepare()) — do not reorder.
mRecorder.setOnErrorListener(mRecorderErrorCallback);
mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mRecorder.setOutputFile(fileName);
mRecorder.setVideoFrameRate(frameRate);
mRecorder.setVideoSize(videoWidth, videoHeight);
mRecorder.setVideoEncoder(videoEncoder);
mRecorder.setAudioEncoder(audioEncoder);
mRecorder.setVideoEncodingBitRate(bitRate);
Log.v(TAG, "mediaRecorder setPreview");
mRecorder.setPreviewDisplay(surfaceHolder.getSurface());
mRecorder.prepare();
mRecorder.start();
// Record for the configured duration, then stop and release.
Thread.sleep(recordDuration);
Log.v(TAG, "Before stop");
mRecorder.stop();
mRecorder.release();
//start the playback
MediaPlayer mp = new MediaPlayer();
mp.setDataSource(fileName);
mp.setDisplay(MediaFrameworkTest.mSurfaceView.getHolder());
mp.prepare();
mp.start();
// Play back roughly as long as we recorded before releasing the player.
Thread.sleep(recordDuration);
mp.release();
validateRecordedVideo(fileName);
if (removeVideo) {
removeRecordedVideo(fileName);
}
// Write a running, comma-separated iteration count to the output log.
if (i == 0) {
mOutput.write(i + 1);
} else {
mOutput.write(String.format(", %d", (i + 1)));
}
}
} catch (Exception e) {
// Details are preserved in the log; fail() marks the test failed.
Log.e(TAG, e.toString());
fail("Record and playback");
}
}
Use of android.view.SurfaceHolder in project android_frameworks_base by ParanoidAndroid.
From class MediaRecorderStressTest, method testStressTimeLapse.
// Test case for stressing time lapse recording: for every camera on the
// device, record NUMBER_OF_TIME_LAPSE_LOOPS time-lapse clips, play each one
// back, validate it, and optionally delete it.
@LargeTest
public void testStressTimeLapse() throws Exception {
    SurfaceHolder surfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
    int recordDuration = MediaRecorderStressTestRunner.mTimeLapseDuration;
    boolean removeVideo = MediaRecorderStressTestRunner.mRemoveVideo;
    double captureRate = MediaRecorderStressTestRunner.mCaptureRate;
    Log.v(TAG, "Start camera time lapse stress:");
    mOutput.write("Total number of loops: " + NUMBER_OF_TIME_LAPSE_LOOPS + "\n");
    try {
        // Outer loop: one pass per physical camera (i is the camera id).
        for (int i = 0, n = Camera.getNumberOfCameras(); i < n; i++) {
            mOutput.write("No of loop: camera " + i);
            for (int j = 0; j < NUMBER_OF_TIME_LAPSE_LOOPS; j++) {
                // One output file per camera/iteration, e.g. .../temp0_1.3gp
                String fileName = String.format("%s/temp%d_%d%s", Environment.getExternalStorageDirectory(), i, j, OUTPUT_FILE_EXT);
                Log.v(TAG, fileName);
                // MediaRecorder must be created on the looper thread that owns it.
                runOnLooper(new Runnable() {
                    @Override
                    public void run() {
                        mRecorder = new MediaRecorder();
                    }
                });
                // Set callback
                mRecorder.setOnErrorListener(mRecorderErrorCallback);
                // Set video source
                mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
                // Set camcorder profile for time lapse.
                // BUG FIX: query the profile for the camera under test (i),
                // not the inner loop counter (j), which is not a camera id and
                // would throw / return the wrong profile past camera count.
                CamcorderProfile profile = CamcorderProfile.get(i, CamcorderProfile.QUALITY_TIME_LAPSE_HIGH);
                mRecorder.setProfile(profile);
                // Set the timelapse setting; 0.1 = 10 sec timelapse, 0.5 = 2 sec timelapse, etc
                // http://developer.android.com/guide/topics/media/camera.html#time-lapse-video
                mRecorder.setCaptureRate(captureRate);
                // Set output file
                mRecorder.setOutputFile(fileName);
                // Set the preview display
                Log.v(TAG, "mediaRecorder setPreviewDisplay");
                mRecorder.setPreviewDisplay(surfaceHolder.getSurface());
                mRecorder.prepare();
                mRecorder.start();
                // Record for the configured duration, then stop and release.
                Thread.sleep(recordDuration);
                Log.v(TAG, "Before stop");
                mRecorder.stop();
                mRecorder.release();
                // Start the playback
                MediaPlayer mp = new MediaPlayer();
                mp.setDataSource(fileName);
                mp.setDisplay(surfaceHolder);
                mp.prepare();
                mp.start();
                Thread.sleep(TIME_LAPSE_PLAYBACK_WAIT_TIME);
                mp.release();
                validateRecordedVideo(fileName);
                if (removeVideo) {
                    removeRecordedVideo(fileName);
                }
                // Write a running, comma-separated iteration count to the log.
                if (j == 0) {
                    mOutput.write(j + 1);
                } else {
                    mOutput.write(String.format(", %d", (j + 1)));
                }
            }
        }
    } catch (IllegalStateException e) {
        Log.e(TAG, e.toString());
        fail("Camera time lapse stress test IllegalStateException");
    } catch (IOException e) {
        Log.e(TAG, e.toString());
        fail("Camera time lapse stress test IOException");
    } catch (Exception e) {
        Log.e(TAG, e.toString());
        fail("Camera time lapse stress test Exception");
    }
}
Aggregations