use of com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback in project android_frameworks_base by ResurrectionRemix.
the class Camera2SwitchPreviewTest method basicRecordingPreviewTestByCamera.
/**
* Test camera recording preview by using each available CamcorderProfile for a
given camera. The preview size is set to the video size.
*/
private void basicRecordingPreviewTestByCamera(int[] camcorderProfileList) throws Exception {
Size maxPreviewSize = mOrderedPreviewSizes.get(0);
List<Range<Integer>> fpsRanges = Arrays.asList(mStaticInfo.getAeAvailableTargetFpsRangesChecked());
int cameraId = Integer.parseInt(mCamera.getId());
int maxVideoFrameRate = -1;
int profileId = camcorderProfileList[0];
if (!CamcorderProfile.hasProfile(cameraId, profileId) || allowedUnsupported(cameraId, profileId)) {
return;
}
CamcorderProfile profile = CamcorderProfile.get(cameraId, profileId);
Size videoSz = new Size(profile.videoFrameWidth, profile.videoFrameHeight);
Range<Integer> fpsRange = new Range<Integer>(profile.videoFrameRate, profile.videoFrameRate);
if (maxVideoFrameRate < profile.videoFrameRate) {
maxVideoFrameRate = profile.videoFrameRate;
}
if (mStaticInfo.isHardwareLevelLegacy() && (videoSz.getWidth() > maxPreviewSize.getWidth() || videoSz.getHeight() > maxPreviewSize.getHeight())) {
// Skip. Legacy mode can only do recording up to max preview size
return;
}
assertTrue("Video size " + videoSz.toString() + " for profile ID " + profileId + " must be one of the camera device supported video size!", mSupportedVideoSizes.contains(videoSz));
assertTrue("Frame rate range " + fpsRange + " (for profile ID " + profileId + ") must be one of the camera device available FPS range!", fpsRanges.contains(fpsRange));
if (VERBOSE) {
Log.v(TAG, "Testing camera recording with video size " + videoSz.toString());
}
// Configure preview and recording surfaces.
mOutMediaFileName = VIDEO_FILE_PATH + "/test_video.mp4";
if (DEBUG_DUMP) {
mOutMediaFileName = VIDEO_FILE_PATH + "/test_video_" + cameraId + "_" + videoSz.toString() + ".mp4";
}
prepareRecordingWithProfile(profile);
// prepare preview surface by using video size.
updatePreviewSurfaceWithVideo(videoSz, profile.videoFrameRate);
CaptureRequest.Builder previewRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
CaptureRequest.Builder recordingRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
SimpleImageReaderListener imageListener = new SimpleImageReaderListener();
prepareVideoPreview(previewRequest, recordingRequest, resultListener, imageListener);
// Can reuse the MediaRecorder object after reset.
mMediaRecorder.reset();
if (maxVideoFrameRate != -1) {
// At least one CamcorderProfile is present, check FPS
assertTrue("At least one CamcorderProfile must support >= 24 FPS", maxVideoFrameRate >= 24);
}
}
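The caller that builds camcorderProfileList is not part of this snippet. As a rough sketch (assuming the usual CamcorderProfile quality constants and the openDevice/closeDevice/releaseRecorder fixture methods visible elsewhere in this listing), a per-camera driver might look like the following; note that the helper above only reads the first entry of the array it is given.
// Hypothetical driver for basicRecordingPreviewTestByCamera(); not part of the original class.
private static final int[] RECORDING_PROFILE_IDS = {
        CamcorderProfile.QUALITY_HIGH,
        CamcorderProfile.QUALITY_1080P,
        CamcorderProfile.QUALITY_720P,
        CamcorderProfile.QUALITY_480P,
        CamcorderProfile.QUALITY_LOW,
};

public void testRecordingPreviewForAllCameras() throws Exception {
    for (String id : mCameraIds) {
        try {
            openDevice(id);
            mMediaRecorder = new MediaRecorder();
            // The helper only consumes camcorderProfileList[0], so pass one profile at a time.
            for (int profileId : RECORDING_PROFILE_IDS) {
                basicRecordingPreviewTestByCamera(new int[] { profileId });
            }
        } finally {
            closeDevice();
            releaseRecorder();
        }
    }
}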
use of com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback in project android_frameworks_base by ResurrectionRemix.
the class Camera2RecordingTest method constrainedHighSpeedRecording.
private void constrainedHighSpeedRecording() throws Exception {
for (String id : mCameraIds) {
try {
Log.i(TAG, "Testing constrained high speed recording for camera " + id);
// Re-use the MediaRecorder object for the same camera device.
mMediaRecorder = new MediaRecorder();
openDevice(id);
if (!mStaticInfo.isConstrainedHighSpeedVideoSupported()) {
Log.i(TAG, "Camera " + id + " doesn't support high speed recording, skipping.");
continue;
}
// Test iteration starts...
for (int iteration = 0; iteration < getIterationCount(); ++iteration) {
Log.v(TAG, String.format("Constrained high speed recording: %d/%d", iteration + 1, getIterationCount()));
StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] highSpeedVideoSizes = config.getHighSpeedVideoSizes();
for (Size size : highSpeedVideoSizes) {
List<Range<Integer>> fixedFpsRanges = getHighSpeedFixedFpsRangeForSize(config, size);
mCollector.expectTrue("Unable to find the fixed frame rate fps range for " + "size " + size, fixedFpsRanges.size() > 0);
// Test recording for each FPS range
for (Range<Integer> fpsRange : fixedFpsRanges) {
int captureRate = fpsRange.getLower();
final int VIDEO_FRAME_RATE = 30;
// Skip the test if this FPS range exceeds the highest recording FPS supported by CamcorderProfile
if (fpsRange.getUpper() > getFpsFromHighSpeedProfileForSize(size)) {
Log.w(TAG, "high speed recording " + size + "@" + captureRate + "fps" + " is not supported by CamcorderProfile");
continue;
}
mOutMediaFileName = VIDEO_FILE_PATH + "/test_cslowMo_video_" + captureRate + "fps_" + id + "_" + size.toString() + ".mp4";
prepareRecording(size, VIDEO_FRAME_RATE, captureRate);
// prepare preview surface by using video size.
updatePreviewSurfaceWithVideo(size, captureRate);
// Start recording
SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
startSlowMotionRecording(/*useMediaRecorder*/ true, VIDEO_FRAME_RATE, captureRate, fpsRange, resultListener, /*useHighSpeedSession*/ true);
// Record certain duration.
SystemClock.sleep(RECORDING_DURATION_MS);
// Stop recording and preview
stopRecording(/*useMediaRecorder*/ true);
// Convert number of frames camera produced into the duration in unit of ms.
int durationMs = (int) (resultListener.getTotalNumFrames() * 1000.0f / VIDEO_FRAME_RATE);
// Validation.
validateRecording(size, durationMs);
}
getResultPrinter().printStatus(getIterationCount(), iteration + 1, id);
Thread.sleep(getTestWaitIntervalMs());
}
}
} finally {
closeDevice();
releaseRecorder();
}
}
}
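getHighSpeedFixedFpsRangeForSize() is not shown in this listing; judging from the StreamConfigurationMap API, it presumably keeps only the "fixed" ranges (lower bound equal to upper bound) reported by getHighSpeedVideoFpsRangesFor(). A minimal sketch under that assumption:
// Assumed behavior of the helper used above, not the actual implementation.
private List<Range<Integer>> getHighSpeedFixedFpsRangeForSize(StreamConfigurationMap config, Size size) {
    List<Range<Integer>> fixedRanges = new ArrayList<>();
    for (Range<Integer> range : config.getHighSpeedVideoFpsRangesFor(size)) {
        // A "fixed" range records at a constant rate, e.g. [120, 120].
        if (range.getLower().equals(range.getUpper())) {
            fixedRanges.add(range);
        }
    }
    return fixedRanges;
}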
use of com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback in project android_frameworks_base by ResurrectionRemix.
the class Camera2ReprocessCaptureTest method testReprocessRequestKeys.
/**
* Test that the following keys in reprocess results match the keys in reprocess requests:
* 1. EDGE_MODE
* 2. NOISE_REDUCTION_MODE
* 3. REPROCESS_EFFECTIVE_EXPOSURE_FACTOR (only for YUV reprocess)
*/
private void testReprocessRequestKeys(String cameraId, Size inputSize, int inputFormat, Size reprocessOutputSize, int reprocessOutputFormat) throws Exception {
if (VERBOSE) {
Log.v(TAG, "testReprocessRequestKeys: cameraId: " + cameraId + " inputSize: " + inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " + reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat);
}
final Integer[] EDGE_MODES = { CaptureRequest.EDGE_MODE_FAST, CaptureRequest.EDGE_MODE_HIGH_QUALITY, CaptureRequest.EDGE_MODE_OFF, CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG };
final Integer[] NR_MODES = { CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY, CaptureRequest.NOISE_REDUCTION_MODE_OFF, CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG, CaptureRequest.NOISE_REDUCTION_MODE_FAST };
final Float[] EFFECTIVE_EXP_FACTORS = { null, 1.0f, 2.5f, 4.0f };
int numFrames = EDGE_MODES.length;
try {
setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat, numFrames);
setupReprocessableSession(/*previewSurface*/ null, numFrames);
// Prepare reprocess capture requests.
ArrayList<CaptureRequest> reprocessRequests = new ArrayList<>(numFrames);
for (int i = 0; i < numFrames; i++) {
TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(), /*inputResult*/ null);
mImageWriter.queueInputImage(mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
CaptureRequest.Builder builder = mCamera.createReprocessCaptureRequest(result);
builder.addTarget(getReprocessOutputImageReader().getSurface());
// Set reprocess request keys
builder.set(CaptureRequest.EDGE_MODE, EDGE_MODES[i]);
builder.set(CaptureRequest.NOISE_REDUCTION_MODE, NR_MODES[i]);
if (inputFormat == ImageFormat.YUV_420_888) {
builder.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, EFFECTIVE_EXP_FACTORS[i]);
}
reprocessRequests.add(builder.build());
}
// Submit reprocess requests.
SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
mSession.captureBurst(reprocessRequests, captureCallback, mHandler);
TotalCaptureResult[] reprocessResults = captureCallback.getTotalCaptureResultsForRequests(reprocessRequests, CAPTURE_TIMEOUT_FRAMES);
for (int i = 0; i < numFrames; i++) {
// Verify result's keys
Integer resultEdgeMode = reprocessResults[i].get(CaptureResult.EDGE_MODE);
Integer resultNoiseReductionMode = reprocessResults[i].get(CaptureResult.NOISE_REDUCTION_MODE);
assertEquals("Reprocess result edge mode (" + resultEdgeMode + ") doesn't match requested edge mode (" + EDGE_MODES[i] + ")", resultEdgeMode, EDGE_MODES[i]);
assertEquals("Reprocess result noise reduction mode (" + resultNoiseReductionMode + ") doesn't match requested noise reduction mode (" + NR_MODES[i] + ")", resultNoiseReductionMode, NR_MODES[i]);
if (inputFormat == ImageFormat.YUV_420_888) {
Float resultEffectiveExposureFactor = reprocessResults[i].get(CaptureResult.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR);
assertEquals("Reprocess effective exposure factor (" + resultEffectiveExposureFactor + ") doesn't match requested " + "effective exposure factor (" + EFFECTIVE_EXP_FACTORS[i] + ")", resultEffectiveExposureFactor, EFFECTIVE_EXP_FACTORS[i]);
}
}
} finally {
closeReprossibleSession();
closeImageReaders();
}
}
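The test above requests all four edge and noise-reduction modes unconditionally. On devices that advertise fewer modes, a more defensive variant could filter the requested values against the static metadata first; a sketch (the filterSupportedModes helper is hypothetical, the CameraCharacteristics keys are standard):
// Hypothetical helper: keep only the requested modes that the device advertises.
private static Integer[] filterSupportedModes(Integer[] requested, int[] available) {
    List<Integer> supported = new ArrayList<>();
    for (int mode : available) {
        supported.add(mode);
    }
    List<Integer> result = new ArrayList<>();
    for (Integer mode : requested) {
        if (supported.contains(mode)) {
            result.add(mode);
        }
    }
    return result.toArray(new Integer[0]);
}

// Possible usage with the arrays defined in testReprocessRequestKeys():
// Integer[] edgeModes = filterSupportedModes(EDGE_MODES,
//         mStaticInfo.getCharacteristics().get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES));
// Integer[] nrModes = filterSupportedModes(NR_MODES,
//         mStaticInfo.getCharacteristics().get(
//                 CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES));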
use of com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback in project android_frameworks_base by DirtyUnicorns.
the class Camera2ReprocessCaptureTest method testReprocessTimestamps.
/**
* Test timestamps for reprocess requests. Reprocess request's shutter timestamp, result's
* sensor timestamp, and output image's timestamp should match the reprocess input's timestamp.
*/
private void testReprocessTimestamps(String cameraId, Size inputSize, int inputFormat, Size reprocessOutputSize, int reprocessOutputFormat) throws Exception {
if (VERBOSE) {
Log.v(TAG, "testReprocessTimestamps: cameraId: " + cameraId + " inputSize: " + inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " + reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat);
}
try {
setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat, NUM_REPROCESS_CAPTURES);
setupReprocessableSession(/*previewSurface*/ null, NUM_REPROCESS_CAPTURES);
// Prepare reprocess capture requests.
ArrayList<CaptureRequest> reprocessRequests = new ArrayList<>(NUM_REPROCESS_CAPTURES);
ArrayList<Long> expectedTimestamps = new ArrayList<>(NUM_REPROCESS_CAPTURES);
for (int i = 0; i < NUM_REPROCESS_CAPTURES; i++) {
TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(), /*inputResult*/ null);
mImageWriter.queueInputImage(mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
CaptureRequest.Builder builder = mCamera.createReprocessCaptureRequest(result);
builder.addTarget(getReprocessOutputImageReader().getSurface());
reprocessRequests.add(builder.build());
// Reprocess result's timestamp should match input image's timestamp.
expectedTimestamps.add(result.get(CaptureResult.SENSOR_TIMESTAMP));
}
// Submit reprocess requests.
SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
mSession.captureBurst(reprocessRequests, captureCallback, mHandler);
// Verify we get the expected timestamps.
for (int i = 0; i < reprocessRequests.size(); i++) {
captureCallback.waitForCaptureStart(reprocessRequests.get(i), expectedTimestamps.get(i), CAPTURE_TIMEOUT_FRAMES);
}
TotalCaptureResult[] reprocessResults = captureCallback.getTotalCaptureResultsForRequests(reprocessRequests, CAPTURE_TIMEOUT_FRAMES);
for (int i = 0; i < expectedTimestamps.size(); i++) {
// Verify the result timestamps match the input image's timestamps.
long expected = expectedTimestamps.get(i);
long timestamp = reprocessResults[i].get(CaptureResult.SENSOR_TIMESTAMP);
assertEquals("Reprocess result timestamp (" + timestamp + ") doesn't match input " + "image's timestamp (" + expected + ")", expected, timestamp);
// Verify the reprocess output image timestamps match the input image's timestamps.
Image image = getReprocessOutputImageReaderListener().getImage(CAPTURE_TIMEOUT_MS);
timestamp = image.getTimestamp();
image.close();
assertEquals("Reprocess output timestamp (" + timestamp + ") doesn't match input " + "image's timestamp (" + expected + ")", expected, timestamp);
}
// Make sure all input surfaces are released.
for (int i = 0; i < NUM_REPROCESS_CAPTURES; i++) {
mImageWriterListener.waitForImageReleased(CAPTURE_TIMEOUT_MS);
}
} finally {
closeReprossibleSession();
closeImageReaders();
}
}
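setupReprocessableSession() is not included in this listing. Based on the public camera2 reprocessing API, it most likely creates a reprocessable session whose input matches the first ImageReader and wraps the session's input surface in an ImageWriter; the sketch below shows that shape (simplified: the real helper presumably blocks until onConfigured before returning):
// Assumed shape of setupReprocessableSession(); not the original helper.
private void setupReprocessableSessionSketch(Surface previewSurface, final int maxImages)
        throws CameraAccessException {
    List<Surface> outputs = new ArrayList<>();
    if (previewSurface != null) {
        outputs.add(previewSurface);
    }
    outputs.add(mFirstImageReader.getSurface());
    outputs.add(getReprocessOutputImageReader().getSurface());

    // The session input must match the reprocess input stream (size and format).
    InputConfiguration inputConfig = new InputConfiguration(mFirstImageReader.getWidth(),
            mFirstImageReader.getHeight(), mFirstImageReader.getImageFormat());
    mCamera.createReprocessableCaptureSession(inputConfig, outputs,
            new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(CameraCaptureSession session) {
                    mSession = session;
                    // The session's input surface is where queued reprocess input images go.
                    mImageWriter = ImageWriter.newInstance(session.getInputSurface(), maxImages);
                    mImageWriter.setOnImageReleasedListener(mImageWriterListener, mHandler);
                }

                @Override
                public void onConfigureFailed(CameraCaptureSession session) {
                    fail("Failed to configure reprocessable session");
                }
            }, mHandler);
}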
use of com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback in project android_frameworks_base by DirtyUnicorns.
the class Camera2StillCaptureTest method takePictureTestByCamera.
/**
* Take a picture for a given set of 3A regions for a particular camera.
* <p>
* Before taking a still capture, it triggers an auto focus run and locks it first,
* then waits for AWB to converge and locks it, then triggers a precapture
* metering sequence and waits for AE to converge. After the capture is received, the
* capture result and image are validated. If {@code addAeTriggerCancel} is true,
* a precapture trigger cancel will be inserted between two adjacent triggers, which
* should effectively cancel the first trigger.
* </p>
*
* @param aeRegions AE regions for this capture
* @param awbRegions AWB regions for this capture
* @param afRegions AF regions for this capture
* @param addAeTriggerCancel Whether an AE precapture trigger cancel is sent after the trigger.
*/
private void takePictureTestByCamera(MeteringRectangle[] aeRegions, MeteringRectangle[] awbRegions, MeteringRectangle[] afRegions, boolean addAeTriggerCancel) throws Exception {
boolean hasFocuser = mStaticInfo.hasFocuser();
Size maxStillSz = mOrderedStillSizes.get(0);
Size maxPreviewSz = mOrderedPreviewSizes.get(0);
CaptureResult result;
SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
SimpleImageReaderListener imageListener = new SimpleImageReaderListener();
CaptureRequest.Builder previewRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
CaptureRequest.Builder stillRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
prepareStillCaptureAndStartPreview(previewRequest, stillRequest, maxPreviewSz, maxStillSz, resultListener, imageListener);
// Set AE mode to ON_AUTO_FLASH if flash is available.
if (mStaticInfo.hasFlash()) {
previewRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
stillRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
}
Camera2Focuser focuser = null;
/**
* Step 1: trigger an auto focus run, and wait for AF locked.
*/
boolean canSetAfRegion = hasFocuser && (afRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AF_INDEX);
if (hasFocuser) {
SimpleAutoFocusListener afListener = new SimpleAutoFocusListener();
focuser = new Camera2Focuser(mCamera, mSession, mPreviewSurface, afListener, mStaticInfo.getCharacteristics(), mHandler);
if (canSetAfRegion) {
stillRequest.set(CaptureRequest.CONTROL_AF_REGIONS, afRegions);
}
focuser.startAutoFocus(afRegions);
afListener.waitForAutoFocusDone(WAIT_FOR_FOCUS_DONE_TIMEOUT_MS);
}
/**
* Have to get the current AF mode to be used for the other 3A repeating
* requests; otherwise, the new AF mode in the AE/AWB request could
* differ from the repeating requests already being sent by the focuser,
* which could unlock AF too early. Besides that, for still capture,
* the AF mode must not differ from the one in the current repeating
* request; otherwise, the still capture itself would trigger an AF
* mode change, and the AF lock would be lost for this capture.
*/
int currentAfMode = CaptureRequest.CONTROL_AF_MODE_OFF;
if (hasFocuser) {
currentAfMode = focuser.getCurrentAfMode();
}
previewRequest.set(CaptureRequest.CONTROL_AF_MODE, currentAfMode);
stillRequest.set(CaptureRequest.CONTROL_AF_MODE, currentAfMode);
/**
* Step 2: AF is already locked, wait for AWB converged, then lock it.
*/
resultListener = new SimpleCaptureCallback();
boolean canSetAwbRegion = (awbRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AWB_INDEX);
if (canSetAwbRegion) {
previewRequest.set(CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
stillRequest.set(CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
}
mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
waitForResultValue(resultListener, CaptureResult.CONTROL_AWB_STATE, CaptureResult.CONTROL_AWB_STATE_CONVERGED, NUM_RESULTS_WAIT_TIMEOUT);
} else {
// LEGACY devices don't have AWB_STATE reported in results, so just wait
waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
}
boolean canSetAwbLock = mStaticInfo.isAwbLockSupported();
if (canSetAwbLock) {
previewRequest.set(CaptureRequest.CONTROL_AWB_LOCK, true);
}
mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
// Validate the next result immediately for region and mode.
result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
mCollector.expectEquals("AWB mode in result and request should be same", previewRequest.get(CaptureRequest.CONTROL_AWB_MODE), result.get(CaptureResult.CONTROL_AWB_MODE));
if (canSetAwbRegion) {
MeteringRectangle[] resultAwbRegions = getValueNotNull(result, CaptureResult.CONTROL_AWB_REGIONS);
mCollector.expectEquals("AWB regions in result and request should be same", awbRegions, resultAwbRegions);
}
/**
* Step 3: trigger an AE precapture metering sequence and wait for AE converged.
*/
resultListener = new SimpleCaptureCallback();
boolean canSetAeRegion = (aeRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AE_INDEX);
if (canSetAeRegion) {
previewRequest.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
stillRequest.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
}
mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
mSession.capture(previewRequest.build(), resultListener, mHandler);
if (addAeTriggerCancel) {
// Cancel the current precapture trigger, then send another trigger.
// The camera device should behave as if the first trigger is not sent.
// Wait one request to make the trigger start doing something before cancel.
waitForNumResults(resultListener, /*numResultsWait*/ 1);
previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL);
mSession.capture(previewRequest.build(), resultListener, mHandler);
waitForResultValue(resultListener, CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
// Issue another trigger
previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
mSession.capture(previewRequest.build(), resultListener, mHandler);
}
waitForAeStable(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
// Validate the next result immediately for region and mode.
result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
mCollector.expectEquals("AE mode in result and request should be same", previewRequest.get(CaptureRequest.CONTROL_AE_MODE), result.get(CaptureResult.CONTROL_AE_MODE));
if (canSetAeRegion) {
MeteringRectangle[] resultAeRegions = getValueNotNull(result, CaptureResult.CONTROL_AE_REGIONS);
mCollector.expectMeteringRegionsAreSimilar("AE regions in result and request should be similar", aeRegions, resultAeRegions, METERING_REGION_ERROR_PERCENT_DELTA);
}
/**
* Step 4: take a picture when all 3A are in good state.
*/
resultListener = new SimpleCaptureCallback();
CaptureRequest request = stillRequest.build();
mSession.capture(request, resultListener, mHandler);
// Validate the next result immediately for region and mode.
result = resultListener.getCaptureResultForRequest(request, WAIT_FOR_RESULT_TIMEOUT_MS);
mCollector.expectEquals("AF mode in result and request should be same", stillRequest.get(CaptureRequest.CONTROL_AF_MODE), result.get(CaptureResult.CONTROL_AF_MODE));
if (canSetAfRegion) {
MeteringRectangle[] resultAfRegions = getValueNotNull(result, CaptureResult.CONTROL_AF_REGIONS);
mCollector.expectMeteringRegionsAreSimilar("AF regions in result and request should be similar", afRegions, resultAfRegions, METERING_REGION_ERROR_PERCENT_DELTA);
}
if (hasFocuser) {
// Unlock auto focus.
focuser.cancelAutoFocus();
}
// validate image
Image image = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
validateJpegCapture(image, maxStillSz);
// Free image resources
image.close();
stopPreview();
}
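Helpers such as waitForResultValue() and waitForAeStable() used above are part of the test base class and are not shown here; conceptually they drain results from the SimpleCaptureCallback until a key reaches the desired value. A rough sketch of that idea (signature is illustrative, not the real helper):
// Illustrative only: poll results from the listener until the key hits the expected value.
private static <T> void waitForResultValueSketch(SimpleCaptureCallback listener,
        CaptureResult.Key<T> key, T expected, int maxResults, long perResultTimeoutMs) {
    for (int i = 0; i < maxResults; i++) {
        CaptureResult result = listener.getCaptureResult(perResultTimeoutMs);
        if (expected.equals(result.get(key))) {
            return;
        }
    }
    fail("Key " + key.getName() + " did not reach value " + expected
            + " within " + maxResults + " results");
}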