Use of com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback in project platform_frameworks_base by android.
Class Camera2ReprocessCaptureTest, method submitMixedCaptureBurstRequest.
/**
* Submit a burst request mixed with regular and reprocess requests.
*
* @param outputs An array of output surfaces. One output surface will be used in one request
* so the length of the array is the number of requests in a burst request.
* @param inputResults An array of input results. If it's null, all requests are regular
* requests. If an element is null, that element represents a regular
* request. If an element is not null, that element represents a reprocess
* request.
*
*/
private TotalCaptureResult[] submitMixedCaptureBurstRequest(Surface[] outputs, TotalCaptureResult[] inputResults) throws Exception {
if (outputs == null || outputs.length <= 0) {
throw new IllegalArgumentException("outputs must have at least 1 surface");
} else if (inputResults != null && inputResults.length != outputs.length) {
throw new IllegalArgumentException("The lengths of outputs and inputResults " + "don't match");
}
int numReprocessCaptures = 0;
SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
ArrayList<CaptureRequest> captureRequests = new ArrayList<>(outputs.length);
// Whether each request is a regular or a reprocess request is based on the inputResults array.
for (int i = 0; i < outputs.length; i++) {
CaptureRequest.Builder builder;
boolean isReprocess = (inputResults != null && inputResults[i] != null);
if (isReprocess) {
builder = mCamera.createReprocessCaptureRequest(inputResults[i]);
numReprocessCaptures++;
} else {
builder = mCamera.createCaptureRequest(CAPTURE_TEMPLATE);
}
builder.addTarget(outputs[i]);
CaptureRequest request = builder.build();
assertTrue("Capture request reprocess type " + request.isReprocess() + " is wrong.", request.isReprocess() == isReprocess);
captureRequests.add(request);
}
if (captureRequests.size() == 1) {
mSession.capture(captureRequests.get(0), captureCallback, mHandler);
} else {
mSession.captureBurst(captureRequests, captureCallback, mHandler);
}
TotalCaptureResult[] results;
if (numReprocessCaptures == 0 || numReprocessCaptures == outputs.length) {
results = new TotalCaptureResult[outputs.length];
// If the requests are not mixed, they should come in order.
for (int i = 0; i < results.length; i++) {
results[i] = captureCallback.getTotalCaptureResultForRequest(captureRequests.get(i), CAPTURE_TIMEOUT_FRAMES);
}
} else {
// If the requests are mixed, they may not come in order.
results = captureCallback.getTotalCaptureResultsForRequests(captureRequests, CAPTURE_TIMEOUT_FRAMES * captureRequests.size());
}
// make sure all input surfaces are released.
for (int i = 0; i < numReprocessCaptures; i++) {
mImageWriterListener.waitForImageReleased(CAPTURE_TIMEOUT_MS);
}
return results;
}
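For context, a minimal hedged sketch of how a test might call this helper to interleave regular and reprocess captures in one burst; the names regularOutput, reprocessOutput, and inputResult are hypothetical placeholders, not variables from the original test, and the reprocess input image is assumed to have already been queued to the session's ImageWriter.
// Illustrative only: mix one regular and one reprocess capture in a single burst.
// 'regularOutput' and 'reprocessOutput' are assumed Surfaces already configured in the session;
// 'inputResult' is a TotalCaptureResult from an earlier regular capture whose image was queued
// to the session's ImageWriter.
Surface[] outputs = new Surface[] { regularOutput, reprocessOutput };
TotalCaptureResult[] inputResults = new TotalCaptureResult[] { null, inputResult };
TotalCaptureResult[] results = submitMixedCaptureBurstRequest(outputs, inputResults);
// results[0] belongs to the regular request, results[1] to the reprocess request.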
Use of com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback in project platform_frameworks_base by android.
Class Camera2StillCaptureTest, method takePictureTestByCamera.
/**
* Take a picture for a given set of 3A regions for a particular camera.
* <p>
* Before taking a still capture, it triggers an auto focus run and locks AF first,
* then waits for AWB to converge and locks it, then triggers a precapture
* metering sequence and waits for AE to converge. After the capture is received, the
* capture result and image are validated. If {@code addAeTriggerCancel} is true,
* a precapture trigger cancel will be inserted between two adjacent triggers, which
* should effectively cancel the first trigger.
* </p>
*
* @param aeRegions AE regions for this capture
* @param awbRegions AWB regions for this capture
* @param afRegions AF regions for this capture
* @param addAeTriggerCancel Whether an AE precapture trigger cancel is sent after the trigger.
*/
private void takePictureTestByCamera(MeteringRectangle[] aeRegions, MeteringRectangle[] awbRegions, MeteringRectangle[] afRegions, boolean addAeTriggerCancel) throws Exception {
boolean hasFocuser = mStaticInfo.hasFocuser();
Size maxStillSz = mOrderedStillSizes.get(0);
Size maxPreviewSz = mOrderedPreviewSizes.get(0);
CaptureResult result;
SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
SimpleImageReaderListener imageListener = new SimpleImageReaderListener();
CaptureRequest.Builder previewRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
CaptureRequest.Builder stillRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
prepareStillCaptureAndStartPreview(previewRequest, stillRequest, maxPreviewSz, maxStillSz, resultListener, imageListener);
// Set AE mode to ON_AUTO_FLASH if flash is available.
if (mStaticInfo.hasFlash()) {
previewRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
stillRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
}
Camera2Focuser focuser = null;
/**
* Step 1: trigger an auto focus run, and wait for AF locked.
*/
boolean canSetAfRegion = hasFocuser && (afRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AF_INDEX);
if (hasFocuser) {
SimpleAutoFocusListener afListener = new SimpleAutoFocusListener();
focuser = new Camera2Focuser(mCamera, mSession, mPreviewSurface, afListener, mStaticInfo.getCharacteristics(), mHandler);
if (canSetAfRegion) {
stillRequest.set(CaptureRequest.CONTROL_AF_REGIONS, afRegions);
}
focuser.startAutoFocus(afRegions);
afListener.waitForAutoFocusDone(WAIT_FOR_FOCUS_DONE_TIMEOUT_MS);
}
/**
* The current AF mode must be carried over to the other 3A repeating
* requests; otherwise the new AF mode in the AE/AWB requests could
* differ from the repeating requests being sent by the focuser and
* unlock AF too early. Besides that, for still capture, the AF mode
* must match the one in the current repeating request; otherwise the
* still capture itself would trigger an AF mode change, and the AF
* lock would be lost for this capture.
*/
int currentAfMode = CaptureRequest.CONTROL_AF_MODE_OFF;
if (hasFocuser) {
currentAfMode = focuser.getCurrentAfMode();
}
previewRequest.set(CaptureRequest.CONTROL_AF_MODE, currentAfMode);
stillRequest.set(CaptureRequest.CONTROL_AF_MODE, currentAfMode);
/**
* Step 2: AF is already locked, wait for AWB converged, then lock it.
*/
resultListener = new SimpleCaptureCallback();
boolean canSetAwbRegion = (awbRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AWB_INDEX);
if (canSetAwbRegion) {
previewRequest.set(CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
stillRequest.set(CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
}
mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
waitForResultValue(resultListener, CaptureResult.CONTROL_AWB_STATE, CaptureResult.CONTROL_AWB_STATE_CONVERGED, NUM_RESULTS_WAIT_TIMEOUT);
} else {
// LEGACY devices don't report AWB_STATE in results, so just wait for settings to be applied.
waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
}
boolean canSetAwbLock = mStaticInfo.isAwbLockSupported();
if (canSetAwbLock) {
previewRequest.set(CaptureRequest.CONTROL_AWB_LOCK, true);
}
mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
// Validate the next result immediately for region and mode.
result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
mCollector.expectEquals("AWB mode in result and request should be same", previewRequest.get(CaptureRequest.CONTROL_AWB_MODE), result.get(CaptureResult.CONTROL_AWB_MODE));
if (canSetAwbRegion) {
MeteringRectangle[] resultAwbRegions = getValueNotNull(result, CaptureResult.CONTROL_AWB_REGIONS);
mCollector.expectEquals("AWB regions in result and request should be same", awbRegions, resultAwbRegions);
}
/**
* Step 3: trigger an AE precapture metering sequence and wait for AE converged.
*/
resultListener = new SimpleCaptureCallback();
boolean canSetAeRegion = (aeRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AE_INDEX);
if (canSetAeRegion) {
previewRequest.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
stillRequest.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
}
mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
mSession.capture(previewRequest.build(), resultListener, mHandler);
if (addAeTriggerCancel) {
// Cancel the current precapture trigger, then send another trigger.
// The camera device should behave as if the first trigger is not sent.
// Wait one request to make the trigger start doing something before cancel.
waitForNumResults(resultListener, /*numResultsWait*/ 1);
previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL);
mSession.capture(previewRequest.build(), resultListener, mHandler);
waitForResultValue(resultListener, CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
// Issue another trigger
previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
mSession.capture(previewRequest.build(), resultListener, mHandler);
}
waitForAeStable(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
// Validate the next result immediately for region and mode.
result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
mCollector.expectEquals("AE mode in result and request should be same", previewRequest.get(CaptureRequest.CONTROL_AE_MODE), result.get(CaptureResult.CONTROL_AE_MODE));
if (canSetAeRegion) {
MeteringRectangle[] resultAeRegions = getValueNotNull(result, CaptureResult.CONTROL_AE_REGIONS);
mCollector.expectMeteringRegionsAreSimilar("AE regions in result and request should be similar", aeRegions, resultAeRegions, METERING_REGION_ERROR_PERCENT_DELTA);
}
/**
* Step 4: take a picture when all 3A are in good state.
*/
resultListener = new SimpleCaptureCallback();
CaptureRequest request = stillRequest.build();
mSession.capture(request, resultListener, mHandler);
// Validate the next result immediately for region and mode.
result = resultListener.getCaptureResultForRequest(request, WAIT_FOR_RESULT_TIMEOUT_MS);
mCollector.expectEquals("AF mode in result and request should be same", stillRequest.get(CaptureRequest.CONTROL_AF_MODE), result.get(CaptureResult.CONTROL_AF_MODE));
if (canSetAfRegion) {
MeteringRectangle[] resultAfRegions = getValueNotNull(result, CaptureResult.CONTROL_AF_REGIONS);
mCollector.expectMeteringRegionsAreSimilar("AF regions in result and request should be similar", afRegions, resultAfRegions, METERING_REGION_ERROR_PERCENT_DELTA);
}
if (hasFocuser) {
// Unlock auto focus.
focuser.cancelAutoFocus();
}
// validate image
Image image = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
validateJpegCapture(image, maxStillSz);
// Free image resources
image.close();
stopPreview();
}
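As a hedged illustration of the AF trigger-and-lock work that Step 1 delegates to Camera2Focuser, here is a minimal sketch done directly with capture requests; previewBuilder, session, listener, and handler stand in for the test's own objects, and the real helper tracks AF state callbacks rather than the bare calls shown here.
// Minimal sketch (not the test's Camera2Focuser path): trigger AF once on top of a
// repeating preview request, then wait for a locked state in the results.
previewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
session.setRepeatingRequest(previewBuilder.build(), listener, handler);
previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
session.capture(previewBuilder.build(), listener, handler);
// AF is locked once CONTROL_AF_STATE reaches FOCUSED_LOCKED (or NOT_FOCUSED_LOCKED on failure).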
Use of com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback in project platform_frameworks_base by android.
Class Camera2StillCaptureTest, method fullRawCaptureTestByCamera.
private void fullRawCaptureTestByCamera() throws Exception {
Size maxPreviewSz = mOrderedPreviewSizes.get(0);
Size maxStillSz = mOrderedStillSizes.get(0);
SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
SimpleImageReaderListener jpegListener = new SimpleImageReaderListener();
SimpleImageReaderListener rawListener = new SimpleImageReaderListener();
Size size = mStaticInfo.getRawDimensChecked();
if (VERBOSE) {
Log.v(TAG, "Testing multi capture with size " + size.toString() + ", preview size " + maxPreviewSz);
}
// Prepare raw capture and start preview.
CaptureRequest.Builder previewBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
CaptureRequest.Builder multiBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
ImageReader rawReader = null;
ImageReader jpegReader = null;
try {
// Create ImageReaders.
rawReader = makeImageReader(size, ImageFormat.RAW_SENSOR, MAX_READER_IMAGES, rawListener, mHandler);
jpegReader = makeImageReader(maxStillSz, ImageFormat.JPEG, MAX_READER_IMAGES, jpegListener, mHandler);
updatePreviewSurface(maxPreviewSz);
// Configure output streams with preview and jpeg streams.
List<Surface> outputSurfaces = new ArrayList<Surface>();
outputSurfaces.add(rawReader.getSurface());
outputSurfaces.add(jpegReader.getSurface());
outputSurfaces.add(mPreviewSurface);
mSessionListener = new BlockingSessionCallback();
mSession = configureCameraSession(mCamera, outputSurfaces, mSessionListener, mHandler);
// Configure the requests.
previewBuilder.addTarget(mPreviewSurface);
multiBuilder.addTarget(mPreviewSurface);
multiBuilder.addTarget(rawReader.getSurface());
multiBuilder.addTarget(jpegReader.getSurface());
// Start preview.
mSession.setRepeatingRequest(previewBuilder.build(), null, mHandler);
// Poor man's 3A, wait 3 seconds for AE/AF (if any) to settle.
// TODO: Do proper 3A trigger and lock (see testTakePictureTest).
Thread.sleep(3000);
multiBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
CaptureRequest multiRequest = multiBuilder.build();
mSession.capture(multiRequest, resultListener, mHandler);
CaptureResult result = resultListener.getCaptureResultForRequest(multiRequest, NUM_RESULTS_WAIT_TIMEOUT);
Image jpegImage = jpegListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
basicValidateJpegImage(jpegImage, maxStillSz);
Image rawImage = rawListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
validateRaw16Image(rawImage, size);
verifyRawCaptureResult(multiRequest, result);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
try (DngCreator dngCreator = new DngCreator(mStaticInfo.getCharacteristics(), result)) {
dngCreator.writeImage(outputStream, rawImage);
}
if (DEBUG) {
byte[] rawBuffer = outputStream.toByteArray();
String rawFileName = DEBUG_FILE_NAME_BASE + "/raw16_" + TAG + size.toString() + "_cam_" + mCamera.getId() + ".dng";
Log.d(TAG, "Dump raw file into " + rawFileName);
dumpFile(rawFileName, rawBuffer);
byte[] jpegBuffer = getDataFromImage(jpegImage);
String jpegFileName = DEBUG_FILE_NAME_BASE + "/jpeg_" + TAG + size.toString() + "_cam_" + mCamera.getId() + ".jpg";
Log.d(TAG, "Dump jpeg file into " + rawFileName);
dumpFile(jpegFileName, jpegBuffer);
}
stopPreview();
} finally {
CameraTestUtils.closeImageReader(rawReader);
CameraTestUtils.closeImageReader(jpegReader);
rawReader = null;
jpegReader = null;
}
}
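For reference, a hedged sketch of how the DngCreator step above could be extended when writing the RAW image to a file; characteristics, result, rawImage, and dir are placeholders for the objects used in the test, and the orientation value is an arbitrary example, not something the original test sets.
// Hedged sketch: DngCreator can also carry EXIF metadata before writing the DNG.
// Assumes java.io.File/FileOutputStream and android.media.ExifInterface are available.
try (DngCreator dng = new DngCreator(characteristics, result)) {
    dng.setOrientation(ExifInterface.ORIENTATION_ROTATE_90); // optional orientation tag
    try (FileOutputStream out = new FileOutputStream(new File(dir, "capture.dng"))) {
        dng.writeImage(out, rawImage);
    }
}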
Use of com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback in project android_frameworks_base by DirtyUnicorns.
Class Camera2CaptureRequestTest, method aeManualControlTest.
/**
* Iterate through exposure times and sensitivities for manual AE control.
* <p>
* Use single request rather than repeating request to test manual exposure
* value change per frame control.
* </p>
*/
private void aeManualControlTest() throws Exception {
CaptureRequest.Builder requestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
configurePreviewOutput(requestBuilder);
SimpleCaptureCallback listener = new SimpleCaptureCallback();
long[] expTimes = getExposureTimeTestValues();
int[] sensitivities = getSensitivityTestValues();
// Submit single request at a time, then verify the result.
for (int i = 0; i < expTimes.length; i++) {
for (int j = 0; j < sensitivities.length; j++) {
if (VERBOSE) {
Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity " + sensitivities[j] + ", exposure time " + expTimes[i] + "ns");
}
changeExposure(requestBuilder, expTimes[i], sensitivities[j]);
mSession.capture(requestBuilder.build(), listener, mHandler);
// make sure timeout is long enough for long exposure time
long timeout = WAIT_FOR_RESULT_TIMEOUT_MS + expTimes[i];
CaptureResult result = listener.getCaptureResult(timeout);
long resultExpTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
validateExposureTime(expTimes[i], resultExpTime);
validateSensitivity(sensitivities[j], resultSensitivity);
validateFrameDurationForCapture(result);
}
}
// TODO: Add another case to test where we can submit all requests, then wait for
// results, which will hide the pipeline latency. This is not only faster, but also
// tests high-speed per-frame control and synchronization.
}
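A hedged sketch of what the changeExposure helper presumably does for manual AE control: switch AE off and write the manual exposure values into the request builder. The helper's actual body isn't shown in this snippet, and it may additionally clamp the values against the device's supported ranges, so treat this as an assumption.
// Assumed behavior of changeExposure(builder, expTimeNs, sensitivity):
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTimeNs);  // exposure time in nanoseconds
builder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);  // ISO sensitivity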
Use of com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback in project android_frameworks_base by AOSPA.
Class Camera2ReprocessCaptureTest, method testReprocessTimestamps.
/**
* Test timestamps for reprocess requests. Reprocess request's shutter timestamp, result's
* sensor timestamp, and output image's timestamp should match the reprocess input's timestamp.
*/
private void testReprocessTimestamps(String cameraId, Size inputSize, int inputFormat, Size reprocessOutputSize, int reprocessOutputFormat) throws Exception {
if (VERBOSE) {
Log.v(TAG, "testReprocessTimestamps: cameraId: " + cameraId + " inputSize: " + inputSize + " inputFormat: " + inputFormat + " reprocessOutputSize: " + reprocessOutputSize + " reprocessOutputFormat: " + reprocessOutputFormat);
}
try {
setupImageReaders(inputSize, inputFormat, reprocessOutputSize, reprocessOutputFormat, NUM_REPROCESS_CAPTURES);
setupReprocessableSession(/*previewSurface*/ null, NUM_REPROCESS_CAPTURES);
// Prepare reprocess capture requests.
ArrayList<CaptureRequest> reprocessRequests = new ArrayList<>(NUM_REPROCESS_CAPTURES);
ArrayList<Long> expectedTimestamps = new ArrayList<>(NUM_REPROCESS_CAPTURES);
for (int i = 0; i < NUM_REPROCESS_CAPTURES; i++) {
TotalCaptureResult result = submitCaptureRequest(mFirstImageReader.getSurface(), /*inputResult*/ null);
mImageWriter.queueInputImage(mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
CaptureRequest.Builder builder = mCamera.createReprocessCaptureRequest(result);
builder.addTarget(getReprocessOutputImageReader().getSurface());
reprocessRequests.add(builder.build());
// Reprocess result's timestamp should match input image's timestamp.
expectedTimestamps.add(result.get(CaptureResult.SENSOR_TIMESTAMP));
}
// Submit reprocess requests.
SimpleCaptureCallback captureCallback = new SimpleCaptureCallback();
mSession.captureBurst(reprocessRequests, captureCallback, mHandler);
// Verify we get the expected timestamps.
for (int i = 0; i < reprocessRequests.size(); i++) {
captureCallback.waitForCaptureStart(reprocessRequests.get(i), expectedTimestamps.get(i), CAPTURE_TIMEOUT_FRAMES);
}
TotalCaptureResult[] reprocessResults = captureCallback.getTotalCaptureResultsForRequests(reprocessRequests, CAPTURE_TIMEOUT_FRAMES);
for (int i = 0; i < expectedTimestamps.size(); i++) {
// Verify the result timestamps match the input image's timestamps.
long expected = expectedTimestamps.get(i);
long timestamp = reprocessResults[i].get(CaptureResult.SENSOR_TIMESTAMP);
assertEquals("Reprocess result timestamp (" + timestamp + ") doesn't match input " + "image's timestamp (" + expected + ")", expected, timestamp);
// Verify the reprocess output image timestamps match the input image's timestamps.
Image image = getReprocessOutputImageReaderListener().getImage(CAPTURE_TIMEOUT_MS);
timestamp = image.getTimestamp();
image.close();
assertEquals("Reprocess output timestamp (" + timestamp + ") doesn't match input " + "image's timestamp (" + expected + ")", expected, timestamp);
}
// Make sure all input surfaces are released.
for (int i = 0; i < NUM_REPROCESS_CAPTURES; i++) {
mImageWriterListener.waitForImageReleased(CAPTURE_TIMEOUT_MS);
}
} finally {
closeReprossibleSession();
closeImageReaders();
}
}
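For orientation, a hedged sketch of the reprocess plumbing this test relies on: the reprocessable session's input surface feeds an ImageWriter, a regular capture result seeds the reprocess request, and the reprocess output arrives on a second ImageReader. The names session, camera, seedResult, inputImage, outputReader, callback, and handler are placeholders for the test's own members, not identifiers from the original code.
// Hedged sketch of the reprocess flow with placeholder names:
ImageWriter writer = ImageWriter.newInstance(session.getInputSurface(), /*maxImages*/ 2);
writer.queueInputImage(inputImage);                          // image from the first, regular capture
CaptureRequest.Builder builder = camera.createReprocessCaptureRequest(seedResult);
builder.addTarget(outputReader.getSurface());
session.capture(builder.build(), callback, handler);
// The reprocess result's and output image's timestamps should match inputImage.getTimestamp().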