Use of android.view.Surface in project android_frameworks_base by DirtyUnicorns.
The class LegacyCameraDevice, method getExtrasFromRequest:
private CaptureResultExtras getExtrasFromRequest(RequestHolder holder, int errorCode, Object errorArg) {
    int errorStreamId = -1;
    if (errorCode == CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_BUFFER) {
        Surface errorTarget = (Surface) errorArg;
        int indexOfTarget = mConfiguredSurfaces.indexOfValue(errorTarget);
        if (indexOfTarget < 0) {
            Log.e(TAG, "Buffer drop error reported for unknown Surface");
        } else {
            errorStreamId = mConfiguredSurfaces.keyAt(indexOfTarget);
        }
    }
    if (holder == null) {
        return new CaptureResultExtras(ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE,
                ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE);
    }
    return new CaptureResultExtras(holder.getRequestId(), holder.getSubsequeceId(),
            /*afTriggerId*/ 0, /*precaptureTriggerId*/ 0, holder.getFrameNumber(),
            /*partialResultCount*/ 1, errorStreamId);
}
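The notable pattern above is the reverse lookup from the reported error Surface back to its configured stream id. Below is a minimal standalone sketch of that lookup, using String stand-ins for Surface so it compiles without a live camera; the class and helper names are hypothetical, not part of the framework.

import android.util.SparseArray;

final class StreamIdLookupSketch {
    // Mirrors the lookup in getExtrasFromRequest(): SparseArray.indexOfValue()
    // compares object values by identity (==), so the exact configured instance
    // must be passed in, just as the framework passes the original Surface.
    static int findStreamId(SparseArray<String> configuredTargets, String errorTarget) {
        int indexOfTarget = configuredTargets.indexOfValue(errorTarget);
        return (indexOfTarget < 0) ? -1 : configuredTargets.keyAt(indexOfTarget);
    }
}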
Use of android.view.Surface in project android_frameworks_base by DirtyUnicorns.
The class LegacyCameraDevice, method configureOutputs:
/**
* Configure the device with a set of output surfaces.
*
* <p>Using empty or {@code null} {@code outputs} is the same as unconfiguring.</p>
*
* <p>Every surface in {@code outputs} must be non-{@code null}.</p>
*
* @param outputs a list of surfaces to set. LegacyCameraDevice will take ownership of this
* list; it must not be modified by the caller once it's passed in.
* @return an error code for this binder operation, or {@link NO_ERROR}
* on success.
*/
public int configureOutputs(SparseArray<Surface> outputs) {
    List<Pair<Surface, Size>> sizedSurfaces = new ArrayList<>();
    if (outputs != null) {
        int count = outputs.size();
        for (int i = 0; i < count; i++) {
            Surface output = outputs.valueAt(i);
            if (output == null) {
                Log.e(TAG, "configureOutputs - null outputs are not allowed");
                return BAD_VALUE;
            }
            if (!output.isValid()) {
                Log.e(TAG, "configureOutputs - invalid output surfaces are not allowed");
                return BAD_VALUE;
            }
            StreamConfigurationMap streamConfigurations = mStaticCharacteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            // Validate surface size and format.
            try {
                Size s = getSurfaceSize(output);
                int surfaceType = detectSurfaceType(output);
                boolean flexibleConsumer = isFlexibleConsumer(output);
                Size[] sizes = streamConfigurations.getOutputSizes(surfaceType);
                if (sizes == null) {
                    // WAR: Override default format to IMPLEMENTATION_DEFINED for b/9487482
                    if ((surfaceType >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888
                            && surfaceType <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
                        // YUV_420_888 is always present in LEGACY for all
                        // IMPLEMENTATION_DEFINED output sizes, and is publicly visible in the
                        // API (i.e. {@code #getOutputSizes} works here).
                        sizes = streamConfigurations.getOutputSizes(ImageFormat.YUV_420_888);
                    } else if (surfaceType == LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB) {
                        sizes = streamConfigurations.getOutputSizes(ImageFormat.JPEG);
                    }
                }
                if (!ArrayUtils.contains(sizes, s)) {
                    if (flexibleConsumer && (s = findClosestSize(s, sizes)) != null) {
                        sizedSurfaces.add(new Pair<>(output, s));
                    } else {
                        String reason = (sizes == null) ? "format is invalid."
                                : ("size not in valid set: " + Arrays.toString(sizes));
                        Log.e(TAG, String.format("Surface with size (w=%d, h=%d) and format "
                                + "0x%x is not valid, %s", s.getWidth(), s.getHeight(),
                                surfaceType, reason));
                        return BAD_VALUE;
                    }
                } else {
                    sizedSurfaces.add(new Pair<>(output, s));
                }
                // Lock down the size before configuration
                setSurfaceDimens(output, s.getWidth(), s.getHeight());
            } catch (BufferQueueAbandonedException e) {
                Log.e(TAG, "Surface bufferqueue is abandoned, cannot configure as output: ", e);
                return BAD_VALUE;
            }
        }
    }
    boolean success = false;
    if (mDeviceState.setConfiguring()) {
        mRequestThreadManager.configure(sizedSurfaces);
        success = mDeviceState.setIdle();
    }
    if (success) {
        mConfiguredSurfaces = outputs;
    } else {
        return LegacyExceptionUtils.INVALID_OPERATION;
    }
    return LegacyExceptionUtils.NO_ERROR;
}
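For context, here is a hedged caller-side sketch: the SparseArray keys chosen by the caller are the stream ids that getExtrasFromRequest() later maps buffer-drop errors back to. The device and surfaces are assumed to exist already; this is illustrative, not the framework's actual call site.

import android.hardware.camera2.legacy.LegacyCameraDevice;
import android.hardware.camera2.legacy.LegacyExceptionUtils;
import android.util.SparseArray;
import android.view.Surface;

final class ConfigureOutputsSketch {
    // Hypothetical helper: configure a legacy device with a preview stream and a
    // JPEG stream, returning true only if configuration fully succeeded.
    static boolean configureTwoStreams(LegacyCameraDevice device,
            Surface previewSurface, Surface jpegSurface) {
        SparseArray<Surface> outputs = new SparseArray<>();
        outputs.put(/*streamId*/ 0, previewSurface);
        outputs.put(/*streamId*/ 1, jpegSurface);
        // configureOutputs returns BAD_VALUE for null/invalid/unsupported surfaces
        // and INVALID_OPERATION if the device state transition fails.
        return device.configureOutputs(outputs) == LegacyExceptionUtils.NO_ERROR;
    }
}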
Use of android.view.Surface in project android_frameworks_base by DirtyUnicorns.
The class CameraDeviceImpl, method createCaptureSession:
@Override
public void createCaptureSession(List<Surface> outputs,
        CameraCaptureSession.StateCallback callback, Handler handler)
        throws CameraAccessException {
    List<OutputConfiguration> outConfigurations = new ArrayList<>(outputs.size());
    for (Surface surface : outputs) {
        outConfigurations.add(new OutputConfiguration(surface));
    }
    createCaptureSessionInternal(null, outConfigurations, callback, handler,
            /*isConstrainedHighSpeed*/ false);
}
Use of android.view.Surface in project android_frameworks_base by DirtyUnicorns.
The class Camera2SurfaceViewTestCase, method prepareCaptureAndStartPreview:
/**
* Setup single capture configuration and start preview.
*
* @param previewRequest The capture request to be used for preview
* @param stillRequest The capture request to be used for still capture
* @param previewSz Preview size
* @param captureSz Still capture size
* @param format The single capture image format
* @param resultListener Capture result listener
* @param maxNumImages The max number of images set to the image reader
* @param imageListener The single capture image listener
*/
protected void prepareCaptureAndStartPreview(CaptureRequest.Builder previewRequest,
        CaptureRequest.Builder stillRequest, Size previewSz, Size captureSz, int format,
        CaptureCallback resultListener, int maxNumImages,
        ImageReader.OnImageAvailableListener imageListener) throws Exception {
    if (VERBOSE) {
        Log.v(TAG, String.format("Prepare single capture (%s) and preview (%s)",
                captureSz.toString(), previewSz.toString()));
    }
    // Update preview size.
    updatePreviewSurface(previewSz);
    // Create ImageReader.
    createImageReader(captureSz, format, maxNumImages, imageListener);
    // Configure output streams with preview and jpeg streams.
    List<Surface> outputSurfaces = new ArrayList<Surface>();
    outputSurfaces.add(mPreviewSurface);
    outputSurfaces.add(mReaderSurface);
    mSessionListener = new BlockingSessionCallback();
    mSession = configureCameraSession(mCamera, outputSurfaces, mSessionListener, mHandler);
    // Configure the requests.
    previewRequest.addTarget(mPreviewSurface);
    stillRequest.addTarget(mPreviewSurface);
    stillRequest.addTarget(mReaderSurface);
    // Start preview.
    mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
}
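A hedged example of how a test might drive this helper for a JPEG still capture follows. It assumes the SimpleCaptureCallback and SimpleImageReaderListener helpers from CameraTestUtils and the mOrderedPreviewSizes/mOrderedStillSizes lists kept by these CTS test cases, so treat the specific names as illustrative.

// Inside a test method of a Camera2SurfaceViewTestCase subclass, after the device is open.
CaptureRequest.Builder previewRequest =
        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
CaptureRequest.Builder stillRequest =
        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
SimpleImageReaderListener imageListener = new SimpleImageReaderListener();
prepareCaptureAndStartPreview(previewRequest, stillRequest,
        mOrderedPreviewSizes.get(0), mOrderedStillSizes.get(0),
        ImageFormat.JPEG, resultListener, /*maxNumImages*/ 1, imageListener);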
Use of android.view.Surface in project android_frameworks_base by DirtyUnicorns.
The class Camera2ReprocessCaptureTest, method doMixedReprocessBurstCapture:
/**
* Do a burst of captures that are mixed with regular and reprocess captures.
*
* @param isReprocessCaptures An array whose elements indicate whether it's a reprocess capture
* request. If the element is true, it represents a reprocess capture
* request. If the element is false, it represents a regular capture
* request. The size of the array is the number of capture requests
* in the burst.
*/
private ImageResultHolder[] doMixedReprocessBurstCapture(boolean[] isReprocessCaptures)
        throws Exception {
    if (isReprocessCaptures == null || isReprocessCaptures.length <= 0) {
        throw new IllegalArgumentException("isReprocessCaptures must have at least 1 capture.");
    }
    boolean hasReprocessRequest = false;
    boolean hasRegularRequest = false;
    TotalCaptureResult[] results = new TotalCaptureResult[isReprocessCaptures.length];
    for (int i = 0; i < isReprocessCaptures.length; i++) {
        // submit a capture and get the result if this entry is a reprocess capture.
        if (isReprocessCaptures[i]) {
            results[i] = submitCaptureRequest(mFirstImageReader.getSurface(),
                    /*inputResult*/ null);
            mImageWriter.queueInputImage(mFirstImageReaderListener.getImage(CAPTURE_TIMEOUT_MS));
            hasReprocessRequest = true;
        } else {
            hasRegularRequest = true;
        }
    }
    Surface[] outputSurfaces = new Surface[isReprocessCaptures.length];
    for (int i = 0; i < isReprocessCaptures.length; i++) {
        outputSurfaces[i] = getReprocessOutputImageReader().getSurface();
    }
    TotalCaptureResult[] finalResults = submitMixedCaptureBurstRequest(outputSurfaces, results);
    ImageResultHolder[] holders = new ImageResultHolder[isReprocessCaptures.length];
    for (int i = 0; i < isReprocessCaptures.length; i++) {
        Image image = getReprocessOutputImageReaderListener().getImage(CAPTURE_TIMEOUT_MS);
        if (hasReprocessRequest && hasRegularRequest) {
            // If there are mixed requests, images and results may not be in the same order.
            for (int j = 0; j < finalResults.length; j++) {
                if (finalResults[j] != null
                        && finalResults[j].get(CaptureResult.SENSOR_TIMESTAMP)
                                == image.getTimestamp()) {
                    holders[i] = new ImageResultHolder(image, finalResults[j]);
                    finalResults[j] = null;
                    break;
                }
            }
            assertNotNull("Cannot find a result matching output image's timestamp: "
                    + image.getTimestamp(), holders[i]);
        } else {
            // If no mixed requests, images and results should be in the same order.
            holders[i] = new ImageResultHolder(image, finalResults[i]);
        }
    }
    return holders;
}
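A short caller sketch for clarity, assuming ImageResultHolder exposes getImage() and getTotalCaptureResult() accessors as its constructor above suggests: a burst alternating regular and reprocess captures, with each returned image paired to the result that matched its sensor timestamp.

// Inside a test method, after the reprocess pipeline has been set up.
boolean[] mixedBurst = {false, true, false, true};
ImageResultHolder[] holders = doMixedReprocessBurstCapture(mixedBurst);
for (ImageResultHolder holder : holders) {
    Image image = holder.getImage();
    TotalCaptureResult result = holder.getTotalCaptureResult();
    // Verify or consume the paired image/result here, then release the buffer.
    image.close();
}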