Use of android.hardware.camera2.impl.CameraMetadataNative in project android_frameworks_base by AOSPA.
The class CameraConstrainedHighSpeedCaptureSessionImpl, method createHighSpeedRequestList:
@Override
public List<CaptureRequest> createHighSpeedRequestList(CaptureRequest request)
        throws CameraAccessException {
    if (request == null) {
        throw new IllegalArgumentException("Input capture request must not be null");
    }
    Collection<Surface> outputSurfaces = request.getTargets();
    Range<Integer> fpsRange = request.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
    StreamConfigurationMap config =
            mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    SurfaceUtils.checkConstrainedHighSpeedSurfaces(outputSurfaces, fpsRange, config);

    // Request list size: to limit the preview to 30fps, need use maxFps/30; to maximize
    // the preview frame rate, should use maxBatch size for that high speed stream
    // configuration. We choose the former for now.
    int requestListSize = fpsRange.getUpper() / 30;
    List<CaptureRequest> requestList = new ArrayList<CaptureRequest>();

    // Prepare the Request builders: need carry over the request controls.
    // First, create a request builder that will only include preview or recording target.
    CameraMetadataNative requestMetadata = new CameraMetadataNative(request.getNativeCopy());
    // Note that after this step, the requestMetadata is mutated (swapped) and can not be used
    // for next request builder creation.
    CaptureRequest.Builder singleTargetRequestBuilder = new CaptureRequest.Builder(
            requestMetadata, /*reprocess*/false, CameraCaptureSession.SESSION_ID_NONE);

    // Overwrite the capture intent to make sure a good value is set.
    Iterator<Surface> iterator = outputSurfaces.iterator();
    Surface firstSurface = iterator.next();
    Surface secondSurface = null;
    if (outputSurfaces.size() == 1 && SurfaceUtils.isSurfaceForHwVideoEncoder(firstSurface)) {
        singleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
    } else {
        // Video only, or preview + video
        singleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
    }
    singleTargetRequestBuilder.setPartOfCHSRequestList(/*partOfCHSList*/true);

    // Second, create a request builder that will include both preview and recording targets.
    CaptureRequest.Builder doubleTargetRequestBuilder = null;
    if (outputSurfaces.size() == 2) {
        // Have to create a new copy, the original one was mutated after a new
        // CaptureRequest.Builder creation.
        requestMetadata = new CameraMetadataNative(request.getNativeCopy());
        doubleTargetRequestBuilder = new CaptureRequest.Builder(
                requestMetadata, /*reprocess*/false, CameraCaptureSession.SESSION_ID_NONE);
        doubleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
        doubleTargetRequestBuilder.addTarget(firstSurface);
        secondSurface = iterator.next();
        doubleTargetRequestBuilder.addTarget(secondSurface);
        doubleTargetRequestBuilder.setPartOfCHSRequestList(/*partOfCHSList*/true);

        // Make sure singleTargetRequestBuilder contains only recording surface for
        // preview + recording case.
        Surface recordingSurface = firstSurface;
        if (!SurfaceUtils.isSurfaceForHwVideoEncoder(recordingSurface)) {
            recordingSurface = secondSurface;
        }
        singleTargetRequestBuilder.addTarget(recordingSurface);
    } else {
        // Single output case: either recording or preview.
        singleTargetRequestBuilder.addTarget(firstSurface);
    }

    // Generate the final request list.
    for (int i = 0; i < requestListSize; i++) {
        if (i == 0 && doubleTargetRequestBuilder != null) {
            // First request should be recording + preview request
            requestList.add(doubleTargetRequestBuilder.build());
        } else {
            requestList.add(singleTargetRequestBuilder.build());
        }
    }
    return Collections.unmodifiableList(requestList);
}
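For context, the list built above is consumed through the public CameraConstrainedHighSpeedCaptureSession API. The following is a minimal usage sketch, not taken from the project: the variables device, session, previewSurface and recorderSurface are assumed to be an open CameraDevice, an open constrained high-speed session, and two correctly configured output surfaces, and the FPS range is assumed to be one advertised by StreamConfigurationMap.getHighSpeedVideoFpsRanges().

// Usage sketch (assumed variables: device, session, previewSurface, recorderSurface).
CaptureRequest.Builder builder = device.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
builder.addTarget(previewSurface);
builder.addTarget(recorderSurface);
// Must be one of the ranges reported by getHighSpeedVideoFpsRanges().
builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range.create(30, 120));
// With an upper bound of 120fps, createHighSpeedRequestList() returns 120 / 30 = 4
// requests: the first targets preview + recording, the rest target recording only.
List<CaptureRequest> burst = session.createHighSpeedRequestList(builder.build());
session.setRepeatingBurst(burst, /*callback*/ null, /*handler*/ null);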
Use of android.hardware.camera2.impl.CameraMetadataNative in project android_frameworks_base by AOSPA.
The class CameraManager, method getOrCreateDeviceIdListLocked:
/**
 * Return or create the list of currently connected camera devices.
 *
 * <p>In case of errors connecting to the camera service, will return an empty list.</p>
 */
private ArrayList<String> getOrCreateDeviceIdListLocked() throws CameraAccessException {
    if (mDeviceIdList == null) {
        int numCameras = 0;
        ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
        ArrayList<String> deviceIdList = new ArrayList<>();
        // If no camera service, then no devices
        if (cameraService == null) {
            return deviceIdList;
        }
        try {
            numCameras = cameraService.getNumberOfCameras(CAMERA_TYPE_ALL);
            /* Expose only two cameras if the package name does not
             * fall into the aux-camera whitelist bucket.
             */
            boolean exposeAuxCamera = false;
            String packageName = ActivityThread.currentOpPackageName();
            String packageList = SystemProperties.get("camera.aux.packagelist");
            if (packageList.length() > 0) {
                TextUtils.StringSplitter splitter = new TextUtils.SimpleStringSplitter(',');
                splitter.setString(packageList);
                for (String str : splitter) {
                    if (packageName.equals(str)) {
                        exposeAuxCamera = true;
                        break;
                    }
                }
            }
            if (!exposeAuxCamera && (numCameras > 2)) {
                numCameras = 2;
            }
        } catch (ServiceSpecificException e) {
            throwAsPublicException(e);
        } catch (RemoteException e) {
            // camera service just died - if no camera service, then no devices
            return deviceIdList;
        }
        for (int i = 0; i < numCameras; ++i) {
            // Non-removable cameras use integers starting at 0 for their
            // identifiers
            boolean isDeviceSupported = false;
            try {
                CameraMetadataNative info = cameraService.getCameraCharacteristics(i);
                if (!info.isEmpty()) {
                    isDeviceSupported = true;
                } else {
                    throw new AssertionError("Expected to get non-empty characteristics");
                }
            } catch (ServiceSpecificException e) {
                // DISCONNECTED or ILLEGAL_ARGUMENT means this device is not supported and
                // should simply be skipped; any other error is propagated onward.
                if (e.errorCode != ICameraService.ERROR_DISCONNECTED
                        && e.errorCode != ICameraService.ERROR_ILLEGAL_ARGUMENT) {
                    throwAsPublicException(e);
                }
            } catch (RemoteException e) {
                // Camera service died - no devices to list
                deviceIdList.clear();
                return deviceIdList;
            }
            if (isDeviceSupported) {
                deviceIdList.add(String.valueOf(i));
            } else {
                Log.w(TAG, "Error querying camera device " + i + " for listing.");
            }
        }
        mDeviceIdList = deviceIdList;
    }
    return mDeviceIdList;
}
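This private helper backs the public CameraManager.getCameraIdList() call. A minimal sketch of the application-side enumeration, assuming a valid Context named context and a log tag TAG:

// Usage sketch (assumed variables: context, TAG).
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
    for (String cameraId : manager.getCameraIdList()) {
        CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
        Integer facing = chars.get(CameraCharacteristics.LENS_FACING);
        Log.d(TAG, "Camera " + cameraId + " facing " + facing);
    }
} catch (CameraAccessException e) {
    // Camera service unavailable or disconnected; treat as "no cameras".
    Log.w(TAG, "Unable to enumerate cameras", e);
}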
Use of android.hardware.camera2.impl.CameraMetadataNative in project android_frameworks_base by AOSPA.
The class CameraDeviceUserShim, method createDefaultRequest:
@Override
public CameraMetadataNative createDefaultRequest(int templateId) {
    if (DEBUG) {
        Log.d(TAG, "createDefaultRequest called.");
    }
    if (mLegacyDevice.isClosed()) {
        String err = "Cannot create default request, device has been closed.";
        Log.e(TAG, err);
        throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
    }
    CameraMetadataNative template;
    try {
        template = LegacyMetadataMapper.createRequestTemplate(mCameraCharacteristics, templateId);
    } catch (IllegalArgumentException e) {
        String err = "createDefaultRequest - invalid templateId specified";
        Log.e(TAG, err);
        throw new ServiceSpecificException(ICameraService.ERROR_ILLEGAL_ARGUMENT, err);
    }
    return template;
}
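On the application side this shim is reached indirectly through CameraDevice.createCaptureRequest(), which on LEGACY hardware is serviced by the shim path above. A minimal sketch, assuming an open CameraDevice named device, a configured Surface named previewSurface, and a log tag TAG:

// Usage sketch (assumed variables: device, previewSurface, TAG).
try {
    CaptureRequest.Builder previewBuilder =
            device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    previewBuilder.addTarget(previewSurface);
} catch (CameraAccessException e) {
    // The device was disconnected or the camera service is unavailable.
    Log.w(TAG, "Could not create default request", e);
}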
Use of android.hardware.camera2.impl.CameraMetadataNative in project android_frameworks_base by ResurrectionRemix.
The class LegacyMetadataMapper, method mapScalerStreamConfigs:
private static void mapScalerStreamConfigs(CameraMetadataNative m, Camera.Parameters p) {
    ArrayList<StreamConfiguration> availableStreamConfigs = new ArrayList<>();
    /*
     * Implementation-defined (preview, recording, etc) -> use camera1 preview sizes
     * YUV_420_888 cpu callbacks -> use camera1 preview sizes
     * Other preview callbacks (CPU) -> use camera1 preview sizes
     * JPEG still capture -> use camera1 still capture sizes
     *
     * Use platform-internal format constants here, since StreamConfigurationMap does the
     * remapping to public format constants.
     */
    List<Camera.Size> previewSizes = p.getSupportedPreviewSizes();
    List<Camera.Size> jpegSizes = p.getSupportedPictureSizes();
    /*
     * Work-around for b/17589233:
     * - Some HALs's largest preview size aspect ratio does not match the largest JPEG size AR
     * - This causes a large amount of problems with focus/metering because it's relative to
     *   preview, making the difference between the JPEG and preview viewport inaccessible
     * - This boils down to metering or focusing areas being "arbitrarily" cropped
     *   in the capture result.
     * - Work-around the HAL limitations by removing all of the largest preview sizes
     *   until we get one with the same aspect ratio as the jpeg size.
     */
    {
        SizeAreaComparator areaComparator = new SizeAreaComparator();
        // Sort preview to min->max
        Collections.sort(previewSizes, areaComparator);
        Camera.Size maxJpegSize = SizeAreaComparator.findLargestByArea(jpegSizes);
        float jpegAspectRatio = maxJpegSize.width * 1.0f / maxJpegSize.height;
        if (DEBUG) {
            Log.v(TAG, String.format("mapScalerStreamConfigs - largest JPEG area %dx%d, AR=%f",
                    maxJpegSize.width, maxJpegSize.height, jpegAspectRatio));
        }
        // Now remove preview sizes from the end (largest->smallest) until aspect ratio matches
        while (!previewSizes.isEmpty()) {
            // max is always at the end
            int index = previewSizes.size() - 1;
            Camera.Size size = previewSizes.get(index);
            float previewAspectRatio = size.width * 1.0f / size.height;
            if (Math.abs(jpegAspectRatio - previewAspectRatio) >= PREVIEW_ASPECT_RATIO_TOLERANCE) {
                // Assume removing from end is O(1)
                previewSizes.remove(index);
                if (DEBUG) {
                    Log.v(TAG, String.format("mapScalerStreamConfigs - removed preview size %dx%d, AR=%f "
                            + "was not the same", size.width, size.height, previewAspectRatio));
                }
            } else {
                break;
            }
        }
        if (previewSizes.isEmpty()) {
            // Fall-back to the original faulty behavior, but at least work
            Log.w(TAG, "mapScalerStreamConfigs - failed to find any preview size matching "
                    + "JPEG aspect ratio " + jpegAspectRatio);
            previewSizes = p.getSupportedPreviewSizes();
        }
        // Sort again, this time in descending order max->min
        Collections.sort(previewSizes, Collections.reverseOrder(areaComparator));
    }
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, previewSizes);
    appendStreamConfig(availableStreamConfigs, ImageFormat.YUV_420_888, previewSizes);
    for (int format : p.getSupportedPreviewFormats()) {
        if (ImageFormat.isPublicFormat(format) && format != ImageFormat.NV21) {
            appendStreamConfig(availableStreamConfigs, format, previewSizes);
        } else if (DEBUG) {
            /*
             * Do not add any formats unknown to us
             * (since it would fail runtime checks in StreamConfigurationMap)
             */
            Log.v(TAG, String.format("mapStreamConfigs - Skipping format %x", format));
        }
    }
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_BLOB, p.getSupportedPictureSizes());
    /*
     * scaler.availableStreamConfigurations
     */
    m.set(SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            availableStreamConfigs.toArray(new StreamConfiguration[0]));
    /*
     * scaler.availableMinFrameDurations
     */
    // No frame durations available
    m.set(SCALER_AVAILABLE_MIN_FRAME_DURATIONS, new StreamConfigurationDuration[0]);

    StreamConfigurationDuration[] jpegStalls = new StreamConfigurationDuration[jpegSizes.size()];
    int i = 0;
    long longestStallDuration = -1;
    for (Camera.Size s : jpegSizes) {
        long stallDuration = calculateJpegStallDuration(s);
        jpegStalls[i++] = new StreamConfigurationDuration(HAL_PIXEL_FORMAT_BLOB,
                s.width, s.height, stallDuration);
        if (longestStallDuration < stallDuration) {
            longestStallDuration = stallDuration;
        }
    }
    /*
     * scaler.availableStallDurations
     */
    // Set stall durations for jpeg, other formats use default stall duration
    m.set(SCALER_AVAILABLE_STALL_DURATIONS, jpegStalls);
    /*
     * sensor.info.maxFrameDuration
     */
    m.set(SENSOR_INFO_MAX_FRAME_DURATION, longestStallDuration);
}
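The configurations written here are what applications later read back through CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP. A minimal sketch of the consuming side, assuming a CameraCharacteristics instance named characteristics:

// Usage sketch (assumed variable: characteristics).
StreamConfigurationMap map =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// JPEG output sizes correspond to the HAL_PIXEL_FORMAT_BLOB entries appended above.
Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
// Per-size JPEG stall durations come from SCALER_AVAILABLE_STALL_DURATIONS.
long stallNs = map.getOutputStallDuration(ImageFormat.JPEG, jpegSizes[0]);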
Use of android.hardware.camera2.impl.CameraMetadataNative in project android_frameworks_base by ResurrectionRemix.
The class LegacyMetadataMapper, method createCharacteristics:
/**
 * Create characteristics for a legacy device by mapping the {@code parameters}
 * and {@code info}
 *
 * @param parameters A string parseable by {@link Camera.Parameters#unflatten}
 * @param info Camera info with camera facing direction and angle of orientation
 * @return static camera characteristics for a camera device
 *
 * @throws NullPointerException if any of the args were {@code null}
 */
public static CameraCharacteristics createCharacteristics(String parameters,
        android.hardware.CameraInfo info) {
    checkNotNull(parameters, "parameters must not be null");
    checkNotNull(info, "info must not be null");
    checkNotNull(info.info, "info.info must not be null");

    CameraMetadataNative m = new CameraMetadataNative();
    mapCharacteristicsFromInfo(m, info.info);

    Camera.Parameters params = Camera.getEmptyParameters();
    params.unflatten(parameters);
    mapCharacteristicsFromParameters(m, params);

    if (DEBUG) {
        Log.v(TAG, "createCharacteristics metadata:");
        Log.v(TAG, "--------------------------------------------------- (start)");
        m.dumpToLog();
        Log.v(TAG, "--------------------------------------------------- (end)");
    }
    return new CameraCharacteristics(m);
}
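Devices whose characteristics are built by this mapper are reported on the public API with the LEGACY supported-hardware level. A minimal sketch of checking for that from application code, assuming a CameraManager named manager and a log tag TAG:

// Usage sketch (assumed variables: manager, TAG).
try {
    CameraCharacteristics chars = manager.getCameraCharacteristics("0");
    Integer level = chars.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
    if (level != null && level == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
        // Characteristics for this camera were produced by the camera1 mapping path,
        // as in createCharacteristics() above.
        Log.i(TAG, "Camera 0 is a LEGACY-level device");
    }
} catch (CameraAccessException e) {
    Log.w(TAG, "Unable to query camera 0", e);
}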