Example usage of android.hardware.camera2.CaptureResult in the project android_frameworks_base (AOSPA): class Camera2SwitchPreviewTest, method stillCapturePreviewPreparer.
/**
 * Prepares a preview plus still-capture session for camera {@code id} and then
 * waits one test interval so the preview can run before the caller proceeds.
 *
 * @param id camera id under test (not read in this body; presumably kept for
 *           signature parity with sibling preparers — TODO confirm)
 * @throws Exception if request creation fails or the sleep is interrupted
 */
private void stillCapturePreviewPreparer(String id) throws Exception {
    // Fix: removed the unused local `CaptureResult result;` — it was declared
    // but never assigned or read.
    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    SimpleImageReaderListener imageListener = new SimpleImageReaderListener();
    // Build both the repeating-preview and the still-capture request templates.
    CaptureRequest.Builder previewRequest =
            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    CaptureRequest.Builder stillRequest =
            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    // Preview Setup:
    prepareCapturePreview(previewRequest, stillRequest, resultListener, imageListener);
    // Let the preview run for the configured interval before returning.
    Thread.sleep(getTestWaitIntervalMs());
}
Example usage of android.hardware.camera2.CaptureResult in the project android_frameworks_base (AOSPA): class CameraMetadata, method getKeysStatic.
/**
* Return a list of all the Key<?> that are declared as a field inside of the class
* {@code type}.
*
* <p>
* Optionally, if {@code instance} is not null, then filter out any keys with null values.
* </p>
*
* <p>
* Optionally, if {@code filterTags} is not {@code null}, then filter out any keys
* whose native {@code tag} is not in {@code filterTags}. The {@code filterTags} array will be
* sorted as a side effect.
* </p>
*/
/*package*/
@SuppressWarnings("unchecked")
/**
 * Return a list of all the Key<?> that are declared as a field inside of the class
 * {@code type}.
 *
 * <p>
 * Optionally, if {@code instance} is not null, then filter out any keys with null values.
 * </p>
 *
 * <p>
 * Optionally, if {@code filterTags} is not {@code null}, then filter out any keys
 * whose native {@code tag} is not in {@code filterTags}. The {@code filterTags} array will be
 * sorted as a side effect.
 * </p>
 */
/*package*/
// Unchecked cast of Field#get results to TKey; safe because fields are
// pre-filtered by isAssignableFrom(keyClass) below.
@SuppressWarnings("unchecked")
static <TKey> ArrayList<TKey> getKeysStatic(Class<?> type, Class<TKey> keyClass, CameraMetadata<TKey> instance, int[] filterTags) {
if (DEBUG)
Log.v(TAG, "getKeysStatic for " + type);
// TotalCaptureResult does not have any of the keys on it, use CaptureResult instead
if (type.equals(TotalCaptureResult.class)) {
type = CaptureResult.class;
}
// Sort once up front: binarySearch in shouldKeyBeAdded / the vendor-key loop
// below requires a sorted array (documented side effect on the caller's array).
if (filterTags != null) {
Arrays.sort(filterTags);
}
ArrayList<TKey> keyList = new ArrayList<TKey>();
// Reflect over the declared fields of `type` and keep the public Key fields.
Field[] fields = type.getDeclaredFields();
for (Field field : fields) {
// Filter for Keys that are public
if (field.getType().isAssignableFrom(keyClass) && (field.getModifiers() & Modifier.PUBLIC) != 0) {
TKey key;
try {
// Key fields are static, so `instance` may be null here; it is only
// consulted below for value filtering.
key = (TKey) field.get(instance);
} catch (IllegalAccessException e) {
// Should be impossible: the field was just checked to be public.
throw new AssertionError("Can't get IllegalAccessException", e);
} catch (IllegalArgumentException e) {
// Should be impossible: the field type was checked above.
throw new AssertionError("Can't get IllegalArgumentException", e);
}
// If an instance was supplied, drop keys whose value is null on it.
// NOTE(review): relies on getProtected(key) returning null for absent
// values — confirm against CameraMetadata's contract.
if (instance == null || instance.getProtected(key) != null) {
if (shouldKeyBeAdded(key, field, filterTags)) {
keyList.add(key);
if (DEBUG) {
Log.v(TAG, "getKeysStatic - key was added - " + key);
}
} else if (DEBUG) {
Log.v(TAG, "getKeysStatic - key was filtered - " + key);
}
}
}
}
// Vendor keys are not declared as fields; fetch them from the native layer
// and apply the same tag filter (filterTags is already sorted above).
ArrayList<TKey> vendorKeys = CameraMetadataNative.getAllVendorKeys(keyClass);
if (vendorKeys != null) {
for (TKey k : vendorKeys) {
String keyName;
// Only the three known key flavors carry a name we can map to a tag;
// anything else is skipped.
if (k instanceof CaptureRequest.Key<?>) {
keyName = ((CaptureRequest.Key<?>) k).getName();
} else if (k instanceof CaptureResult.Key<?>) {
keyName = ((CaptureResult.Key<?>) k).getName();
} else if (k instanceof CameraCharacteristics.Key<?>) {
keyName = ((CameraCharacteristics.Key<?>) k).getName();
} else {
continue;
}
if (filterTags == null || Arrays.binarySearch(filterTags, CameraMetadataNative.getTag(keyName)) >= 0) {
keyList.add(k);
}
}
}
return keyList;
}
Example usage of android.hardware.camera2.CaptureResult in the project android_frameworks_base (AOSPA): class Camera2CaptureRequestTest, method autoAeMultipleCapturesThenTestLock.
/**
* Issue multiple auto AE captures, then lock AE, validate the AE lock vs.
* the first capture result after the AE lock. The right AE lock behavior is:
* When it is locked, it locks to the current exposure value, and all subsequent
* request with lock ON will have the same exposure value locked.
*/
/**
 * Issue multiple auto AE captures, then lock AE, and validate the AE lock
 * against the capture results taken while locked. The correct AE lock
 * behavior is: when locked, AE locks to the current exposure value, and all
 * subsequent requests with lock ON keep that same locked exposure value.
 *
 * @param requestBuilder reusable request builder; AE lock is reset to OFF at
 *        the start because the builder is shared across many invocations
 * @param aeMode AE mode under test (used here for logging only)
 * @param numCapturesDuringLock number of captures to issue while AE is
 *        locked; must be at least 1
 * @throws Exception on capture or session failures
 */
private void autoAeMultipleCapturesThenTestLock(CaptureRequest.Builder requestBuilder,
        int aeMode, int numCapturesDuringLock) throws Exception {
    if (numCapturesDuringLock < 1) {
        // Fix: the message previously named a nonexistent parameter
        // ("numCapturesBeforeLock"); the validated parameter is
        // numCapturesDuringLock.
        throw new IllegalArgumentException(
                "numCapturesDuringLock must be no less than 1");
    }
    if (VERBOSE) {
        Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode "
                + aeMode + " with " + numCapturesDuringLock + " captures before lock");
    }
    final int NUM_CAPTURES_BEFORE_LOCK = 2;
    SimpleCaptureCallback listener = new SimpleCaptureCallback();
    CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
    boolean canSetAeLock = mStaticInfo.isAeLockSupported();
    // Reset the AE lock to OFF, since we are reusing this builder many times
    if (canSetAeLock) {
        requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
    }
    // Just send several captures with auto AE, lock off.
    CaptureRequest request = requestBuilder.build();
    for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
        mSession.capture(request, listener, mHandler);
    }
    waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);
    if (!canSetAeLock) {
        // Without AE lock, the remaining tests items won't work
        return;
    }
    // Then fire several captures to lock the AE.
    requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    int requestCount = captureRequestsSynchronized(requestBuilder.build(),
            numCapturesDuringLock, listener, mHandler);
    int[] sensitivities = new int[numCapturesDuringLock];
    long[] expTimes = new long[numCapturesDuringLock];
    Arrays.fill(sensitivities, -1);
    Arrays.fill(expTimes, -1L);
    // Drain the synchronization results so only the locked captures remain.
    waitForNumResults(listener, requestCount - numCapturesDuringLock);
    // Collect the locked capture results and validate the lock flag on each.
    for (int i = 0; i < resultsDuringLock.length; i++) {
        resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
    }
    for (int i = 0; i < numCapturesDuringLock; i++) {
        mCollector.expectKeyValueEquals(resultsDuringLock[i],
                CaptureResult.CONTROL_AE_LOCK, true);
    }
    // Can't read manual sensor/exposure settings without manual sensor
    if (mStaticInfo.isCapabilitySupported(
            CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
        // With the lock on, every result must report the same sensitivity and
        // exposure time as the first locked result.
        int sensitivityLocked =
                getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY);
        long expTimeLocked =
                getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME);
        for (int i = 1; i < resultsDuringLock.length; i++) {
            mCollector.expectKeyValueEquals(resultsDuringLock[i],
                    CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked);
            mCollector.expectKeyValueEquals(resultsDuringLock[i],
                    CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked);
        }
    }
}
Example usage of android.hardware.camera2.CaptureResult in the project android_frameworks_base (AOSPA): class Camera2RecordingTest, method validateFrameDropAroundVideoSnapshot.
/**
* Validate if video snapshot causes frame drop.
* Here frame drop is defined as frame duration >= 2 * expected frame duration.
* Return the estimated number of frames dropped during video snapshot
*/
/**
 * Validate if video snapshot causes frame drop.
 * Here frame drop is defined as frame duration >= 2 * expected frame duration.
 * Return the estimated number of frames dropped during video snapshot.
 *
 * @param resultListener callback already receiving the recording's capture results
 * @param imageTimeStamp SENSOR_TIMESTAMP of the video-snapshot image to locate
 * @return estimated number of dropped frames (0 on legacy devices, which are exempt)
 * @throws AssertionFailedError if no capture result matches {@code imageTimeStamp}
 */
private int validateFrameDropAroundVideoSnapshot(SimpleCaptureCallback resultListener, long imageTimeStamp) {
double expectedDurationMs = 1000.0 / mVideoFrameRate;
// Seed the previous timestamp from the first available result.
CaptureResult prevResult = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
long prevTS = getValueNotNull(prevResult, CaptureResult.SENSOR_TIMESTAMP);
// Walk the result stream looking for the result matching the snapshot.
// NOTE(review): the loop condition is `!hasMoreResults()` — this only makes
// sense if SimpleCaptureCallback.hasMoreResults() reports "queue is empty";
// confirm against CameraTestUtils before relying on or changing it.
while (!resultListener.hasMoreResults()) {
CaptureResult currentResult = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
long currentTS = getValueNotNull(currentResult, CaptureResult.SENSOR_TIMESTAMP);
if (currentTS == imageTimeStamp) {
// validate the timestamp before and after, then return
CaptureResult nextResult = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
long nextTS = getValueNotNull(nextResult, CaptureResult.SENSOR_TIMESTAMP);
// Frame duration leading into the snapshot, in milliseconds (timestamps are ns).
double durationMs = (currentTS - prevTS) / 1000000.0;
int totalFramesDropped = 0;
// Legacy-level devices are exempt from the frame-drop
// requirements for legacy mode unless this is fixed.
if (!mStaticInfo.isHardwareLevelLegacy()) {
mCollector.expectTrue(String.format("Video %dx%d Frame drop detected before video snapshot: " + "duration %.2fms (expected %.2fms)", mVideoSize.getWidth(), mVideoSize.getHeight(), durationMs, expectedDurationMs), durationMs <= (expectedDurationMs * MAX_NUM_FRAME_DROP_INTERVAL_ALLOWED));
// Log a warning if there is any frame drop detected.
if (durationMs >= expectedDurationMs * 2) {
Log.w(TAG, String.format("Video %dx%d Frame drop detected before video snapshot: " + "duration %.2fms (expected %.2fms)", mVideoSize.getWidth(), mVideoSize.getHeight(), durationMs, expectedDurationMs));
}
// Frame duration coming out of the snapshot.
durationMs = (nextTS - currentTS) / 1000000.0;
mCollector.expectTrue(String.format("Video %dx%d Frame drop detected after video snapshot: " + "duration %.2fms (expected %.2fms)", mVideoSize.getWidth(), mVideoSize.getHeight(), durationMs, expectedDurationMs), durationMs <= (expectedDurationMs * MAX_NUM_FRAME_DROP_INTERVAL_ALLOWED));
// Log a warning if there is any frame drop detected.
if (durationMs >= expectedDurationMs * 2) {
Log.w(TAG, String.format("Video %dx%d Frame drop detected after video snapshot: " + "duration %fms (expected %fms)", mVideoSize.getWidth(), mVideoSize.getHeight(), durationMs, expectedDurationMs));
}
// Estimate drops across the whole prev -> next window.
double totalDurationMs = (nextTS - prevTS) / 1000000.0;
// Minus 2 for the expected 2 frames interval
totalFramesDropped = (int) (totalDurationMs / expectedDurationMs) - 2;
if (totalFramesDropped < 0) {
Log.w(TAG, "totalFrameDropped is " + totalFramesDropped + ". Video frame rate might be too fast.");
}
totalFramesDropped = Math.max(0, totalFramesDropped);
}
return totalFramesDropped;
}
prevTS = currentTS;
}
throw new AssertionFailedError("Video snapshot timestamp does not match any of capture results!");
}
Example usage of android.hardware.camera2.CaptureResult in the project android_frameworks_base (AOSPA): class Camera2CaptureRequestTest, method aeManualControlTest.
/**
* Iterate through exposure times and sensitivities for manual AE control.
* <p>
* Use single request rather than repeating request to test manual exposure
* value change per frame control.
* </p>
*/
/**
 * Iterate through exposure times and sensitivities for manual AE control.
 *
 * <p>Each request is submitted individually (rather than as a repeating
 * request) so that per-frame manual exposure-value changes are exercised,
 * and each result is validated before the next request is issued.</p>
 *
 * @throws Exception on request creation, capture, or session failures
 */
private void aeManualControlTest() throws Exception {
    // Manual AE: disable auto-exposure on a preview-template request.
    CaptureRequest.Builder builder =
            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    builder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
    configurePreviewOutput(builder);
    SimpleCaptureCallback captureListener = new SimpleCaptureCallback();

    // One capture per (exposure time, sensitivity) combination; verify the
    // result of each before moving on.
    for (long expTime : getExposureTimeTestValues()) {
        for (int sensitivity : getSensitivityTestValues()) {
            if (VERBOSE) {
                Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity "
                        + sensitivity + ", exposure time " + expTime + "ns");
            }
            changeExposure(builder, expTime, sensitivity);
            mSession.capture(builder.build(), captureListener, mHandler);
            // make sure timeout is long enough for long exposure time
            long resultTimeoutMs = WAIT_FOR_RESULT_TIMEOUT_MS + expTime;
            CaptureResult captureResult = captureListener.getCaptureResult(resultTimeoutMs);
            validateExposureTime(expTime,
                    getValueNotNull(captureResult, CaptureResult.SENSOR_EXPOSURE_TIME));
            validateSensitivity(sensitivity,
                    getValueNotNull(captureResult, CaptureResult.SENSOR_SENSITIVITY));
            validateFrameDurationForCapture(captureResult);
        }
    }
    // TODO: Add another case to test where we can submit all requests, then wait for
    // results, which will hide the pipeline latency. this is not only faster, but also
    // test high speed per frame control and synchronization.
}
Aggregations