Use of android.hardware.camera2.CaptureResult in project platform_frameworks_base by android.
From class Camera2SurfaceViewTestCase, method waitForAnyResultValue.
/**
 * Wait for any expected result key value to become available within a certain number of results.
 *
 * <p>
 * Checks the result immediately if numResultsWait is 0.
 * </p>
 *
 * @param listener The capture listener to get capture results from.
 * @param resultKey The capture result key associated with the result value.
 * @param expectedValues The list of result values to wait for;
 * returns immediately once any of them is seen.
 * @param numResultsWait Number of results to wait for before timing out.
 * @throws TimeoutRuntimeException If more than numResultsWait results are
 * seen before a result containing one of the expected values arrives, or an
 * individual wait for a result times out after {@value #WAIT_FOR_RESULT_TIMEOUT_MS}ms.
 */
protected static <T> void waitForAnyResultValue(SimpleCaptureCallback listener, CaptureResult.Key<T> resultKey, List<T> expectedValues, int numResultsWait) {
    if (numResultsWait < 0 || listener == null || expectedValues == null) {
        throw new IllegalArgumentException("Input must be non-negative number and listener/expectedValues " + "must be non-null");
    }
    int i = 0;
    CaptureResult result;
    do {
        result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        T value = result.get(resultKey);
        if (VERBOSE) {
            // Log once per result (the original logged once per expected value).
            Log.v(TAG, "Current result value for key " + resultKey.getName() + " is: " + (value == null ? "null" : value.toString()));
        }
        // CaptureResult.get() may return null if the device did not report this
        // key in the result; guard against NPE and keep waiting for the next result.
        if (value != null) {
            for (T expectedValue : expectedValues) {
                if (value.equals(expectedValue)) {
                    return;
                }
            }
        }
    } while (i++ < numResultsWait);
    throw new TimeoutRuntimeException("Unable to get the expected result value " + expectedValues + " for key " + resultKey.getName() + " after waiting for " + numResultsWait + " results");
}
Use of android.hardware.camera2.CaptureResult in project platform_frameworks_base by android.
From class Camera2CaptureRequestTest, method aeManualControlTest.
/**
 * Iterate through exposure times and sensitivities for manual AE control.
 * <p>
 * Uses one-shot capture requests rather than a repeating request so the
 * manual exposure values can be changed and verified on a per-frame basis.
 * </p>
 */
private void aeManualControlTest() throws Exception {
    CaptureRequest.Builder requestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
    configurePreviewOutput(requestBuilder);
    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    long[] expTimes = getExposureTimeTestValues();
    int[] sensitivities = getSensitivityTestValues();
    // Submit one request per (exposure time, sensitivity) pair, verifying each result.
    for (long expTime : expTimes) {
        for (int sensitivity : sensitivities) {
            if (VERBOSE) {
                Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity " + sensitivity + ", exposure time " + expTime + "ns");
            }
            changeExposure(requestBuilder, expTime, sensitivity);
            mSession.capture(requestBuilder.build(), resultListener, mHandler);
            // Stretch the timeout by the exposure time so long exposures don't time out.
            CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS + expTime);
            long resultExpTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
            int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
            validateExposureTime(expTime, resultExpTime);
            validateSensitivity(sensitivity, resultSensitivity);
            validateFrameDurationForCapture(result);
        }
    }
    // TODO: Add another case to test where we can submit all requests, then wait for
    // results, which will hide the pipeline latency. this is not only faster, but also
    // test high speed per frame control and synchronization.
}
Use of android.hardware.camera2.CaptureResult in project platform_frameworks_base by android.
From class Camera2CaptureRequestTest, method autoAeMultipleCapturesThenTestLock.
/**
 * Issue multiple auto AE captures, then lock AE, validate the AE lock vs.
 * the first capture result after the AE lock. The right AE lock behavior is:
 * When it is locked, it locks to the current exposure value, and all subsequent
 * requests with lock ON will have the same exposure value locked.
 *
 * @param requestBuilder Request builder reused for every capture in this test;
 * its AE lock setting is reset to OFF on entry (when supported).
 * @param aeMode AE mode under test (used for logging only).
 * @param numCapturesDuringLock Number of captures to issue while AE is locked; must be >= 1.
 * @throws IllegalArgumentException If numCapturesDuringLock is less than 1.
 */
private void autoAeMultipleCapturesThenTestLock(CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock) throws Exception {
    if (numCapturesDuringLock < 1) {
        // Fix: message previously referred to "numCapturesBeforeLock", which is
        // not the parameter being validated here.
        throw new IllegalArgumentException("numCapturesDuringLock must be no less than 1");
    }
    if (VERBOSE) {
        Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode " + aeMode + " with " + numCapturesDuringLock + " captures before lock");
    }
    final int NUM_CAPTURES_BEFORE_LOCK = 2;
    SimpleCaptureCallback listener = new SimpleCaptureCallback();
    CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
    boolean canSetAeLock = mStaticInfo.isAeLockSupported();
    // Reset the AE lock to OFF, since we are reusing this builder many times
    if (canSetAeLock) {
        requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
    }
    // Just send several captures with auto AE, lock off.
    CaptureRequest request = requestBuilder.build();
    for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
        mSession.capture(request, listener, mHandler);
    }
    waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);
    if (!canSetAeLock) {
        // Without AE lock, the remaining tests items won't work
        return;
    }
    // Then fire several captures to lock the AE.
    requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    int requestCount = captureRequestsSynchronized(requestBuilder.build(), numCapturesDuringLock, listener, mHandler);
    int[] sensitivities = new int[numCapturesDuringLock];
    long[] expTimes = new long[numCapturesDuringLock];
    Arrays.fill(sensitivities, -1);
    Arrays.fill(expTimes, -1L);
    // Skip the synchronization latency results; only the last numCapturesDuringLock
    // results are the ones captured with AE actually locked.
    waitForNumResults(listener, requestCount - numCapturesDuringLock);
    for (int i = 0; i < resultsDuringLock.length; i++) {
        resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
    }
    // Every locked result must report AE lock ON.
    for (int i = 0; i < numCapturesDuringLock; i++) {
        mCollector.expectKeyValueEquals(resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true);
    }
    // Can't read manual sensor/exposure settings without manual sensor
    if (mStaticInfo.isCapabilitySupported(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
        // All locked results must share the exposure values of the first locked result.
        int sensitivityLocked = getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY);
        long expTimeLocked = getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME);
        for (int i = 1; i < resultsDuringLock.length; i++) {
            mCollector.expectKeyValueEquals(resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked);
            mCollector.expectKeyValueEquals(resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked);
        }
    }
}
Use of android.hardware.camera2.CaptureResult in project platform_frameworks_base by android.
From class Camera2RecordingTest, method validateFrameDropAroundVideoSnapshot.
/**
 * Validate whether the video snapshot caused frame drops in the recording stream.
 * <p>
 * Here a frame drop is defined as a frame duration >= 2 * the expected frame duration.
 * Walks the capture results until the one whose SENSOR_TIMESTAMP matches the snapshot
 * image, then checks the frame duration immediately before and after that result.
 * </p>
 *
 * @param resultListener Capture listener receiving the recording capture results.
 * @param imageTimeStamp SENSOR_TIMESTAMP of the video snapshot image to locate.
 * @return The estimated number of frames dropped around the video snapshot (never negative).
 * @throws AssertionFailedError If no capture result matches the snapshot timestamp.
 */
private int validateFrameDropAroundVideoSnapshot(SimpleCaptureCallback resultListener, long imageTimeStamp) {
    // Expected per-frame duration in ms at the configured video frame rate.
    double expectedDurationMs = 1000.0 / mVideoFrameRate;
    CaptureResult prevResult = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
    long prevTS = getValueNotNull(prevResult, CaptureResult.SENSOR_TIMESTAMP);
    // NOTE(review): this negated condition looks suspicious — it keeps looping while
    // the listener has NO queued results (relying on getCaptureResult to block for
    // the next one) and exits once results are queued. Confirm this is the intended
    // draining strategy rather than an inverted condition.
    while (!resultListener.hasMoreResults()) {
        CaptureResult currentResult = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        long currentTS = getValueNotNull(currentResult, CaptureResult.SENSOR_TIMESTAMP);
        if (currentTS == imageTimeStamp) {
            // Found the snapshot frame: validate the durations on either side of it.
            CaptureResult nextResult = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            long nextTS = getValueNotNull(nextResult, CaptureResult.SENSOR_TIMESTAMP);
            // Timestamps are in ns; convert the delta to ms.
            double durationMs = (currentTS - prevTS) / 1000000.0;
            int totalFramesDropped = 0;
            // NOTE(review): the original comment here appears truncated; the intent
            // seems to be that LEGACY devices are exempt from the frame duration
            // requirements, hence the hardware-level guard below — confirm.
            if (!mStaticInfo.isHardwareLevelLegacy()) {
                // The snapshot frame must arrive within the allowed multiple of the
                // expected frame duration after the previous frame.
                mCollector.expectTrue(String.format("Video %dx%d Frame drop detected before video snapshot: " + "duration %.2fms (expected %.2fms)", mVideoSize.getWidth(), mVideoSize.getHeight(), durationMs, expectedDurationMs), durationMs <= (expectedDurationMs * MAX_NUM_FRAME_DROP_INTERVAL_ALLOWED));
                // Log a warning if there is any frame drop detected.
                if (durationMs >= expectedDurationMs * 2) {
                    Log.w(TAG, String.format("Video %dx%d Frame drop detected before video snapshot: " + "duration %.2fms (expected %.2fms)", mVideoSize.getWidth(), mVideoSize.getHeight(), durationMs, expectedDurationMs));
                }
                // Same check for the frame that follows the snapshot.
                durationMs = (nextTS - currentTS) / 1000000.0;
                mCollector.expectTrue(String.format("Video %dx%d Frame drop detected after video snapshot: " + "duration %.2fms (expected %.2fms)", mVideoSize.getWidth(), mVideoSize.getHeight(), durationMs, expectedDurationMs), durationMs <= (expectedDurationMs * MAX_NUM_FRAME_DROP_INTERVAL_ALLOWED));
                // Log a warning if there is any frame drop detected.
                if (durationMs >= expectedDurationMs * 2) {
                    Log.w(TAG, String.format("Video %dx%d Frame drop detected after video snapshot: " + "duration %fms (expected %fms)", mVideoSize.getWidth(), mVideoSize.getHeight(), durationMs, expectedDurationMs));
                }
                // Estimate dropped frames from the prev->next span.
                double totalDurationMs = (nextTS - prevTS) / 1000000.0;
                // Minus 2 for the expected 2 frames interval
                totalFramesDropped = (int) (totalDurationMs / expectedDurationMs) - 2;
                if (totalFramesDropped < 0) {
                    Log.w(TAG, "totalFrameDropped is " + totalFramesDropped + ". Video frame rate might be too fast.");
                }
                // Clamp to zero so a too-fast frame rate never yields a negative count.
                totalFramesDropped = Math.max(0, totalFramesDropped);
            }
            return totalFramesDropped;
        }
        prevTS = currentTS;
    }
    throw new AssertionFailedError("Video snapshot timestamp does not match any of capture results!");
}
Use of android.hardware.camera2.CaptureResult in project platform_frameworks_base by android.
From class Camera2StillCaptureTest, method takePictureTestByCamera.
/**
 * Take a picture for a given set of 3A regions for a particular camera.
 * <p>
 * Before taking a still capture, it triggers an auto focus and locks it first,
 * then waits for AWB to converge and locks it, then triggers a precapture
 * metering sequence and waits for AE to converge. After the capture is received,
 * the capture result and image are validated. If {@code addAeTriggerCancel} is true,
 * a precapture trigger cancel will be inserted between two adjacent triggers, which
 * should effectively cancel the first trigger.
 * </p>
 *
 * @param aeRegions AE regions for this capture
 * @param awbRegions AWB regions for this capture
 * @param afRegions AF regions for this capture
 * @param addAeTriggerCancel If an AE precapture trigger cancel is sent after the trigger.
 */
private void takePictureTestByCamera(MeteringRectangle[] aeRegions, MeteringRectangle[] awbRegions, MeteringRectangle[] afRegions, boolean addAeTriggerCancel) throws Exception {
    boolean hasFocuser = mStaticInfo.hasFocuser();
    // Use the largest supported still and preview sizes for this test.
    Size maxStillSz = mOrderedStillSizes.get(0);
    Size maxPreviewSz = mOrderedPreviewSizes.get(0);
    CaptureResult result;
    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    SimpleImageReaderListener imageListener = new SimpleImageReaderListener();
    CaptureRequest.Builder previewRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    CaptureRequest.Builder stillRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    prepareStillCaptureAndStartPreview(previewRequest, stillRequest, maxPreviewSz, maxStillSz, resultListener, imageListener);
    // Set AE mode to ON_AUTO_FLASH if flash is available.
    if (mStaticInfo.hasFlash()) {
        previewRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
        stillRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
    }
    Camera2Focuser focuser = null;
    /**
     * Step 1: trigger an auto focus run, and wait for AF locked.
     */
    boolean canSetAfRegion = hasFocuser && (afRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AF_INDEX);
    if (hasFocuser) {
        SimpleAutoFocusListener afListener = new SimpleAutoFocusListener();
        focuser = new Camera2Focuser(mCamera, mSession, mPreviewSurface, afListener, mStaticInfo.getCharacteristics(), mHandler);
        if (canSetAfRegion) {
            // Only the still request carries the AF regions; the focuser drives
            // AF for the repeating preview stream itself.
            stillRequest.set(CaptureRequest.CONTROL_AF_REGIONS, afRegions);
        }
        focuser.startAutoFocus(afRegions);
        afListener.waitForAutoFocusDone(WAIT_FOR_FOCUS_DONE_TIMEOUT_MS);
    }
    /**
     * Have to get the current AF mode to be used for other 3A repeating
     * request, otherwise, the new AF mode in AE/AWB request could be
     * different with existing repeating requests being sent by focuser,
     * then it could make AF unlocked too early. Beside that, for still
     * capture, AF mode must not be different with the one in current
     * repeating request, otherwise, the still capture itself would trigger
     * an AF mode change, and the AF lock would be lost for this capture.
     */
    int currentAfMode = CaptureRequest.CONTROL_AF_MODE_OFF;
    if (hasFocuser) {
        currentAfMode = focuser.getCurrentAfMode();
    }
    previewRequest.set(CaptureRequest.CONTROL_AF_MODE, currentAfMode);
    stillRequest.set(CaptureRequest.CONTROL_AF_MODE, currentAfMode);
    /**
     * Step 2: AF is already locked, wait for AWB converged, then lock it.
     */
    // Fresh listener so the AWB wait only sees results from this step onward.
    resultListener = new SimpleCaptureCallback();
    boolean canSetAwbRegion = (awbRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AWB_INDEX);
    if (canSetAwbRegion) {
        previewRequest.set(CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
        stillRequest.set(CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
    }
    mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
    if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
        waitForResultValue(resultListener, CaptureResult.CONTROL_AWB_STATE, CaptureResult.CONTROL_AWB_STATE_CONVERGED, NUM_RESULTS_WAIT_TIMEOUT);
    } else {
        // LEGACY Devices don't have the AWB_STATE reported in results, so just wait
        waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
    }
    boolean canSetAwbLock = mStaticInfo.isAwbLockSupported();
    if (canSetAwbLock) {
        previewRequest.set(CaptureRequest.CONTROL_AWB_LOCK, true);
    }
    mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
    // Validate the next result immediately for region and mode.
    result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
    mCollector.expectEquals("AWB mode in result and request should be same", previewRequest.get(CaptureRequest.CONTROL_AWB_MODE), result.get(CaptureResult.CONTROL_AWB_MODE));
    if (canSetAwbRegion) {
        MeteringRectangle[] resultAwbRegions = getValueNotNull(result, CaptureResult.CONTROL_AWB_REGIONS);
        mCollector.expectEquals("AWB regions in result and request should be same", awbRegions, resultAwbRegions);
    }
    /**
     * Step 3: trigger an AE precapture metering sequence and wait for AE converged.
     */
    // Fresh listener so the AE wait only sees results from this step onward.
    resultListener = new SimpleCaptureCallback();
    boolean canSetAeRegion = (aeRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AE_INDEX);
    if (canSetAeRegion) {
        previewRequest.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
        stillRequest.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
    }
    mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
    // The precapture trigger is sent via a single capture on top of the repeating stream.
    previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
    mSession.capture(previewRequest.build(), resultListener, mHandler);
    if (addAeTriggerCancel) {
        // Cancel the current precapture trigger, then send another trigger.
        // The camera device should behave as if the first trigger is not sent.
        // Wait one request to make the trigger start doing something before cancel.
        waitForNumResults(resultListener, /*numResultsWait*/
        1);
        previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL);
        mSession.capture(previewRequest.build(), resultListener, mHandler);
        waitForResultValue(resultListener, CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
        // Issue another trigger
        previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
        mSession.capture(previewRequest.build(), resultListener, mHandler);
    }
    waitForAeStable(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
    // Validate the next result immediately for region and mode.
    result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
    mCollector.expectEquals("AE mode in result and request should be same", previewRequest.get(CaptureRequest.CONTROL_AE_MODE), result.get(CaptureResult.CONTROL_AE_MODE));
    if (canSetAeRegion) {
        MeteringRectangle[] resultAeRegions = getValueNotNull(result, CaptureResult.CONTROL_AE_REGIONS);
        mCollector.expectMeteringRegionsAreSimilar("AE regions in result and request should be similar", aeRegions, resultAeRegions, METERING_REGION_ERROR_PERCENT_DELTA);
    }
    /**
     * Step 4: take a picture when all 3A are in good state.
     */
    resultListener = new SimpleCaptureCallback();
    CaptureRequest request = stillRequest.build();
    mSession.capture(request, resultListener, mHandler);
    // Validate the next result immediately for region and mode.
    result = resultListener.getCaptureResultForRequest(request, WAIT_FOR_RESULT_TIMEOUT_MS);
    mCollector.expectEquals("AF mode in result and request should be same", stillRequest.get(CaptureRequest.CONTROL_AF_MODE), result.get(CaptureResult.CONTROL_AF_MODE));
    if (canSetAfRegion) {
        MeteringRectangle[] resultAfRegions = getValueNotNull(result, CaptureResult.CONTROL_AF_REGIONS);
        mCollector.expectMeteringRegionsAreSimilar("AF regions in result and request should be similar", afRegions, resultAfRegions, METERING_REGION_ERROR_PERCENT_DELTA);
    }
    if (hasFocuser) {
        // Unlock auto focus.
        focuser.cancelAutoFocus();
    }
    // validate image
    Image image = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
    validateJpegCapture(image, maxStillSz);
    // Free image resources
    image.close();
    stopPreview();
}
Aggregations