Use of android.hardware.camera2.CameraCharacteristics in the project android_frameworks_base by ResurrectionRemix.
The convertRequestMetadata method of the class LegacyRequestMapper.
/**
* Set the legacy parameters using the {@link LegacyRequest legacy request}.
*
* <p>The legacy request's parameters are changed as a side effect of calling this
* method.</p>
*
* <p>Maps camera2 {@link CaptureRequest} keys (scaler crop/zoom, color correction,
* AE/AF/AWB controls, capture intent, video stabilization, focus distance,
* scene/effect modes, sensor test pattern, JPEG metadata, and noise reduction) onto
* the camera1 {@code Camera.Parameters} object held by {@code legacyRequest}.
* Unsupported or out-of-range values are logged and ignored rather than thrown.</p>
*
* @param legacyRequest a non-{@code null} legacy request
*/
public static void convertRequestMetadata(LegacyRequest legacyRequest) {
CameraCharacteristics characteristics = legacyRequest.characteristics;
CaptureRequest request = legacyRequest.captureRequest;
Size previewSize = legacyRequest.previewSize;
Camera.Parameters params = legacyRequest.parameters;
// The sensor active array defines the coordinate system used for crop and metering regions.
Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
/*
* scaler.cropRegion
*/
ParameterUtils.ZoomData zoomData;
{
zoomData = ParameterUtils.convertScalerCropRegion(activeArray, request.get(SCALER_CROP_REGION), previewSize, params);
if (params.isZoomSupported()) {
params.setZoom(zoomData.zoomIndex);
} else if (DEBUG) {
Log.v(TAG, "convertRequestToMetadata - zoom is not supported");
}
}
/*
* colorCorrection.*
*/
// colorCorrection.aberrationMode
{
int aberrationMode = ParamsUtils.getOrDefault(request, COLOR_CORRECTION_ABERRATION_MODE, /*defaultValue*/
COLOR_CORRECTION_ABERRATION_MODE_FAST);
// Only FAST and HIGH_QUALITY are representable here; any other value is dropped with a warning.
if (aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_FAST && aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported " + "colorCorrection.aberrationMode = " + aberrationMode);
}
}
/*
* control.ae*
*/
// control.aeAntibandingMode
{
String legacyMode;
Integer antiBandingMode = request.get(CONTROL_AE_ANTIBANDING_MODE);
if (antiBandingMode != null) {
legacyMode = convertAeAntiBandingModeToLegacy(antiBandingMode);
} else {
// No explicit request: pick the first supported mode in preference order AUTO, OFF, 50HZ, 60HZ.
legacyMode = ListUtils.listSelectFirstFrom(params.getSupportedAntibanding(), new String[] { Parameters.ANTIBANDING_AUTO, Parameters.ANTIBANDING_OFF, Parameters.ANTIBANDING_50HZ, Parameters.ANTIBANDING_60HZ });
}
if (legacyMode != null) {
params.setAntibanding(legacyMode);
}
}
/*
* control.aeRegions, afRegions
*/
{
// aeRegions
{
// awbRegions are not supported by the legacy API; warn if the request set them.
MeteringRectangle[] aeRegions = request.get(CONTROL_AE_REGIONS);
if (request.get(CONTROL_AWB_REGIONS) != null) {
Log.w(TAG, "convertRequestMetadata - control.awbRegions setting is not " + "supported, ignoring value");
}
int maxNumMeteringAreas = params.getMaxNumMeteringAreas();
List<Camera.Area> meteringAreaList = convertMeteringRegionsToLegacy(activeArray, zoomData, aeRegions, maxNumMeteringAreas, /*regionName*/
"AE");
// WAR: for b/17252693, some devices can't handle a null metering-area list.
if (maxNumMeteringAreas > 0) {
params.setMeteringAreas(meteringAreaList);
}
}
// afRegions
{
MeteringRectangle[] afRegions = request.get(CONTROL_AF_REGIONS);
int maxNumFocusAreas = params.getMaxNumFocusAreas();
List<Camera.Area> focusAreaList = convertMeteringRegionsToLegacy(activeArray, zoomData, afRegions, maxNumFocusAreas, /*regionName*/
"AF");
// WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
if (maxNumFocusAreas > 0) {
params.setFocusAreas(focusAreaList);
}
}
}
// control.aeTargetFpsRange
Range<Integer> aeFpsRange = request.get(CONTROL_AE_TARGET_FPS_RANGE);
if (aeFpsRange != null) {
int[] legacyFps = convertAeFpsRangeToLegacy(aeFpsRange);
int[] rangeToApply = null;
for (int[] range : params.getSupportedPreviewFpsRange()) {
// Round range up/down to integer FPS value (legacy ranges are in units of 1/1000 FPS)
int intRangeLow = (int) Math.floor(range[0] / 1000.0) * 1000;
int intRangeHigh = (int) Math.ceil(range[1] / 1000.0) * 1000;
if (legacyFps[0] == intRangeLow && legacyFps[1] == intRangeHigh) {
rangeToApply = range;
break;
}
}
if (rangeToApply != null) {
params.setPreviewFpsRange(rangeToApply[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], rangeToApply[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
} else {
Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]");
}
}
/*
* control
*/
// control.aeExposureCompensation
{
Range<Integer> compensationRange = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
int compensation = ParamsUtils.getOrDefault(request, CONTROL_AE_EXPOSURE_COMPENSATION, /*defaultValue*/
0);
// Out-of-range requests fall back to 0 (no compensation) instead of failing.
if (!compensationRange.contains(compensation)) {
Log.w(TAG, "convertRequestMetadata - control.aeExposureCompensation " + "is out of range, ignoring value");
compensation = 0;
}
params.setExposureCompensation(compensation);
}
// control.aeLock
{
Boolean aeLock = getIfSupported(request, CONTROL_AE_LOCK, /*defaultValue*/
false, params.isAutoExposureLockSupported(), /*allowedValue*/
false);
if (aeLock != null) {
params.setAutoExposureLock(aeLock);
}
if (DEBUG) {
Log.v(TAG, "convertRequestToMetadata - control.aeLock set to " + aeLock);
}
// TODO: Don't add control.aeLock to availableRequestKeys if it's not supported
}
// control.aeMode, flash.mode
mapAeAndFlashMode(request, /*out*/
params);
// control.afMode
{
int afMode = ParamsUtils.getOrDefault(request, CONTROL_AF_MODE, /*defaultValue*/
CONTROL_AF_MODE_OFF);
String focusMode = LegacyMetadataMapper.convertAfModeToLegacy(afMode, params.getSupportedFocusModes());
if (focusMode != null) {
params.setFocusMode(focusMode);
}
if (DEBUG) {
Log.v(TAG, "convertRequestToMetadata - control.afMode " + afMode + " mapped to " + focusMode);
}
}
// control.awbMode
{
Integer awbMode = getIfSupported(request, CONTROL_AWB_MODE, /*defaultValue*/
CONTROL_AWB_MODE_AUTO, params.getSupportedWhiteBalance() != null, /*allowedValue*/
CONTROL_AWB_MODE_AUTO);
String whiteBalanceMode = null;
if (awbMode != null) {
// null iff AWB is not supported by camera1 api
whiteBalanceMode = convertAwbModeToLegacy(awbMode);
params.setWhiteBalance(whiteBalanceMode);
}
if (DEBUG) {
Log.v(TAG, "convertRequestToMetadata - control.awbMode " + awbMode + " mapped to " + whiteBalanceMode);
}
}
// control.awbLock
{
Boolean awbLock = getIfSupported(request, CONTROL_AWB_LOCK, /*defaultValue*/
false, params.isAutoWhiteBalanceLockSupported(), /*allowedValue*/
false);
if (awbLock != null) {
params.setAutoWhiteBalanceLock(awbLock);
}
// TODO: Don't add control.awbLock to availableRequestKeys if it's not supported
}
// control.captureIntent
{
int captureIntent = ParamsUtils.getOrDefault(request, CONTROL_CAPTURE_INTENT, /*defaultValue*/
CONTROL_CAPTURE_INTENT_PREVIEW);
captureIntent = filterSupportedCaptureIntent(captureIntent);
// The recording hint is the only legacy knob affected by the capture intent.
params.setRecordingHint(captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_RECORD || captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
}
// control.videoStabilizationMode
{
Integer stabMode = getIfSupported(request, CONTROL_VIDEO_STABILIZATION_MODE, /*defaultValue*/
CONTROL_VIDEO_STABILIZATION_MODE_OFF, params.isVideoStabilizationSupported(), /*allowedValue*/
CONTROL_VIDEO_STABILIZATION_MODE_OFF);
if (stabMode != null) {
params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON);
}
}
// lens.focusDistance
{
boolean infinityFocusSupported = ListUtils.listContains(params.getSupportedFocusModes(), Parameters.FOCUS_MODE_INFINITY);
Float focusDistance = getIfSupported(request, LENS_FOCUS_DISTANCE, /*defaultValue*/
0f, infinityFocusSupported, /*allowedValue*/
0f);
if (focusDistance == null || focusDistance != 0f) {
// NOTE(review): this message interpolates infinityFocusSupported rather than the
// requested distance — possibly unintended; confirm against upstream before changing.
Log.w(TAG, "convertRequestToMetadata - Ignoring android.lens.focusDistance " + infinityFocusSupported + ", only 0.0f is supported");
}
}
// control.sceneMode, control.mode
{
if (params.getSupportedSceneModes() != null) {
int controlMode = ParamsUtils.getOrDefault(request, CONTROL_MODE, /*defaultValue*/
CONTROL_MODE_AUTO);
String modeToSet;
switch(controlMode) {
case CONTROL_MODE_USE_SCENE_MODE:
{
int sceneMode = ParamsUtils.getOrDefault(request, CONTROL_SCENE_MODE, /*defaultValue*/
CONTROL_SCENE_MODE_DISABLED);
String legacySceneMode = LegacyMetadataMapper.convertSceneModeToLegacy(sceneMode);
if (legacySceneMode != null) {
modeToSet = legacySceneMode;
} else {
// Unknown scene modes degrade gracefully to AUTO.
modeToSet = Parameters.SCENE_MODE_AUTO;
Log.w(TAG, "Skipping unknown requested scene mode: " + sceneMode);
}
break;
}
case CONTROL_MODE_AUTO:
{
modeToSet = Parameters.SCENE_MODE_AUTO;
break;
}
default:
{
Log.w(TAG, "Control mode " + controlMode + " is unsupported, defaulting to AUTO");
modeToSet = Parameters.SCENE_MODE_AUTO;
}
}
params.setSceneMode(modeToSet);
}
}
// control.effectMode
{
if (params.getSupportedColorEffects() != null) {
int effectMode = ParamsUtils.getOrDefault(request, CONTROL_EFFECT_MODE, /*defaultValue*/
CONTROL_EFFECT_MODE_OFF);
String legacyEffectMode = LegacyMetadataMapper.convertEffectModeToLegacy(effectMode);
if (legacyEffectMode != null) {
params.setColorEffect(legacyEffectMode);
} else {
// Unknown effect modes degrade gracefully to no effect.
params.setColorEffect(Parameters.EFFECT_NONE);
Log.w(TAG, "Skipping unknown requested effect mode: " + effectMode);
}
}
}
/*
* sensor
*/
// sensor.testPattern
{
int testPatternMode = ParamsUtils.getOrDefault(request, SENSOR_TEST_PATTERN_MODE, /*defaultValue*/
SENSOR_TEST_PATTERN_MODE_OFF);
if (testPatternMode != SENSOR_TEST_PATTERN_MODE_OFF) {
Log.w(TAG, "convertRequestToMetadata - ignoring sensor.testPatternMode " + testPatternMode + "; only OFF is supported");
}
}
/*
* jpeg.*
*/
// jpeg.gpsLocation
{
Location location = request.get(JPEG_GPS_LOCATION);
if (location != null) {
if (checkForCompleteGpsData(location)) {
params.setGpsAltitude(location.getAltitude());
params.setGpsLatitude(location.getLatitude());
params.setGpsLongitude(location.getLongitude());
params.setGpsProcessingMethod(location.getProvider().toUpperCase());
params.setGpsTimestamp(location.getTime());
} else {
Log.w(TAG, "Incomplete GPS parameters provided in location " + location);
}
} else {
// No location in the request: clear any stale GPS EXIF data.
params.removeGpsData();
}
}
// jpeg.orientation
{
// NOTE(review): JPEG_ORIENTATION is fetched twice here; getOrDefault's default already
// covers the null case, so the explicit get() is redundant (behavior is unchanged).
Integer orientation = request.get(CaptureRequest.JPEG_ORIENTATION);
params.setRotation(ParamsUtils.getOrDefault(request, JPEG_ORIENTATION, (orientation == null) ? 0 : orientation));
}
// jpeg.quality
{
params.setJpegQuality(0xFF & ParamsUtils.getOrDefault(request, JPEG_QUALITY, DEFAULT_JPEG_QUALITY));
}
// jpeg.thumbnailQuality
{
params.setJpegThumbnailQuality(0xFF & ParamsUtils.getOrDefault(request, JPEG_THUMBNAIL_QUALITY, DEFAULT_JPEG_QUALITY));
}
// jpeg.thumbnailSize
{
List<Camera.Size> sizes = params.getSupportedJpegThumbnailSizes();
if (sizes != null && sizes.size() > 0) {
Size s = request.get(JPEG_THUMBNAIL_SIZE);
boolean invalidSize = (s == null) ? false : !ParameterUtils.containsSize(sizes, s.getWidth(), s.getHeight());
if (invalidSize) {
Log.w(TAG, "Invalid JPEG thumbnail size set " + s + ", skipping thumbnail...");
}
if (s == null || invalidSize) {
// (0,0) = "no thumbnail" in Camera API 1
params.setJpegThumbnailSize(/*width*/
0, /*height*/
0);
} else {
params.setJpegThumbnailSize(s.getWidth(), s.getHeight());
}
}
}
/*
* noiseReduction.*
*/
// noiseReduction.mode
{
int mode = ParamsUtils.getOrDefault(request, NOISE_REDUCTION_MODE, /*defaultValue*/
NOISE_REDUCTION_MODE_FAST);
if (mode != NOISE_REDUCTION_MODE_FAST && mode != NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported " + "noiseReduction.mode = " + mode);
}
}
}
Use of android.hardware.camera2.CameraCharacteristics in the project android_frameworks_base by ResurrectionRemix.
The convertResultMetadata method of the class LegacyResultMapper.
/**
* Generate capture result metadata from the legacy camera request.
*
* <p>Translates the latest {@code Camera.Parameters} state and the originating
* {@link CaptureRequest} into camera2 {@link CaptureResult} keys, delegating the
* AE/AF/AWB and scaler groups to the {@code mapAe}, {@code mapAf}, {@code mapAwb}
* and {@code mapScaler} helpers.</p>
*
* @param legacyRequest a non-{@code null} legacy request containing the latest parameters
* @return a {@link CameraMetadataNative} object containing result metadata.
*/
private static CameraMetadataNative convertResultMetadata(LegacyRequest legacyRequest) {
CameraCharacteristics characteristics = legacyRequest.characteristics;
CaptureRequest request = legacyRequest.captureRequest;
Size previewSize = legacyRequest.previewSize;
Camera.Parameters params = legacyRequest.parameters;
CameraMetadataNative result = new CameraMetadataNative();
Rect activeArraySize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
// Recompute the zoom/crop mapping so region results use the same coordinates as the request.
ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArraySize, request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
/*
* colorCorrection
*/
// colorCorrection.aberrationMode
{
// Echo the requested aberration mode back as the result.
result.set(COLOR_CORRECTION_ABERRATION_MODE, request.get(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE));
}
/*
* control
*/
/*
* control.ae*
*/
mapAe(result, characteristics, request, activeArraySize, zoomData, /*out*/
params);
/*
* control.af*
*/
mapAf(result, activeArraySize, zoomData, /*out*/
params);
/*
* control.awb*
*/
mapAwb(result, /*out*/
params);
/*
* control.captureIntent
*/
{
int captureIntent = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_CAPTURE_INTENT, /*defaultValue*/
CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
captureIntent = LegacyRequestMapper.filterSupportedCaptureIntent(captureIntent);
result.set(CONTROL_CAPTURE_INTENT, captureIntent);
}
/*
* control.mode
*/
{
// Only AUTO and USE_SCENE_MODE are reportable; everything else collapses to AUTO.
int controlMode = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
if (controlMode == CaptureResult.CONTROL_MODE_USE_SCENE_MODE) {
result.set(CONTROL_MODE, CONTROL_MODE_USE_SCENE_MODE);
} else {
result.set(CONTROL_MODE, CONTROL_MODE_AUTO);
}
}
/*
* control.sceneMode
*/
{
String legacySceneMode = params.getSceneMode();
int mode = LegacyMetadataMapper.convertSceneModeFromLegacy(legacySceneMode);
if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
result.set(CaptureResult.CONTROL_SCENE_MODE, mode);
// In case of SCENE_MODE == FACE_PRIORITY, LegacyFaceDetectMapper will override
// the result to say SCENE_MODE == FACE_PRIORITY.
} else {
Log.w(TAG, "Unknown scene mode " + legacySceneMode + " returned by camera HAL, setting to disabled.");
result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
}
}
/*
* control.effectMode
*/
{
String legacyEffectMode = params.getColorEffect();
int mode = LegacyMetadataMapper.convertEffectModeFromLegacy(legacyEffectMode);
if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
result.set(CaptureResult.CONTROL_EFFECT_MODE, mode);
} else {
Log.w(TAG, "Unknown effect mode " + legacyEffectMode + " returned by camera HAL, setting to off.");
result.set(CaptureResult.CONTROL_EFFECT_MODE, CONTROL_EFFECT_MODE_OFF);
}
}
// control.videoStabilizationMode
{
int stabMode = (params.isVideoStabilizationSupported() && params.getVideoStabilization()) ? CONTROL_VIDEO_STABILIZATION_MODE_ON : CONTROL_VIDEO_STABILIZATION_MODE_OFF;
result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode);
}
/*
* flash
*/
{
// flash.mode, flash.state mapped in mapAeAndFlashMode
}
/*
* lens
*/
// lens.focusDistance
{
// Infinity focus is the only focus distance this mapper can report (as 0.0f).
if (Parameters.FOCUS_MODE_INFINITY.equals(params.getFocusMode())) {
result.set(CaptureResult.LENS_FOCUS_DISTANCE, 0.0f);
}
}
// lens.focalLength
result.set(CaptureResult.LENS_FOCAL_LENGTH, params.getFocalLength());
/*
* request
*/
// request.pipelineDepth
result.set(REQUEST_PIPELINE_DEPTH, characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH));
/*
* scaler
*/
mapScaler(result, zoomData, /*out*/
params);
/*
* sensor
*/
// sensor.timestamp varies every frame; mapping is done in #cachedConvertResultMetadata
{
// Unconditionally no test patterns
result.set(SENSOR_TEST_PATTERN_MODE, SENSOR_TEST_PATTERN_MODE_OFF);
}
/*
* jpeg
*/
// jpeg.gpsLocation
result.set(JPEG_GPS_LOCATION, request.get(CaptureRequest.JPEG_GPS_LOCATION));
// jpeg.orientation
result.set(JPEG_ORIENTATION, request.get(CaptureRequest.JPEG_ORIENTATION));
// jpeg.quality (narrowing cast: the quality result values are stored as bytes)
result.set(JPEG_QUALITY, (byte) params.getJpegQuality());
// jpeg.thumbnailQuality
result.set(JPEG_THUMBNAIL_QUALITY, (byte) params.getJpegThumbnailQuality());
// jpeg.thumbnailSize
Camera.Size s = params.getJpegThumbnailSize();
if (s != null) {
result.set(JPEG_THUMBNAIL_SIZE, ParameterUtils.convertSize(s));
} else {
Log.w(TAG, "Null thumbnail size received from parameters.");
}
/*
* noiseReduction.*
*/
// noiseReduction.mode
result.set(NOISE_REDUCTION_MODE, request.get(CaptureRequest.NOISE_REDUCTION_MODE));
return result;
}
Use of android.hardware.camera2.CameraCharacteristics in the project android_frameworks_base by ResurrectionRemix.
The createRequestTemplate method of the class LegacyMetadataMapper.
/**
* Create a request template
*
* <p>Fills in the default camera2 request keys (AE/AF/AWB modes and regions, capture
* intent, target FPS range, scene/effect/noise-reduction/aberration modes, focal length,
* JPEG thumbnail size) appropriate for the given template ID, using only values that a
* LEGACY device can honor.</p>
*
* @param c a non-{@code null} camera characteristics for this camera
* @param templateId a non-negative template ID
*
* @return a non-{@code null} request template
*
* @throws IllegalArgumentException if {@code templateId} was invalid
*
* @see android.hardware.camera2.CameraDevice#TEMPLATE_MANUAL
*/
public static CameraMetadataNative createRequestTemplate(CameraCharacteristics c, int templateId) {
if (!ArrayUtils.contains(sAllowedTemplates, templateId)) {
throw new IllegalArgumentException("templateId out of range");
}
CameraMetadataNative m = new CameraMetadataNative();
/*
* NOTE: If adding new code here and it needs to query the static info,
* query the camera characteristics, so we can reuse this for api2 code later
* to create our own templates in the framework
*/
/*
* control.*
*/
// control.awbMode
m.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO);
// AWB is always unconditionally available in API1 devices
// control.aeAntibandingMode
m.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, CONTROL_AE_ANTIBANDING_MODE_AUTO);
// control.aeExposureCompensation
m.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
// control.aeLock
m.set(CaptureRequest.CONTROL_AE_LOCK, false);
// control.aePrecaptureTrigger
m.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
// control.afTrigger
m.set(CaptureRequest.CONTROL_AF_TRIGGER, CONTROL_AF_TRIGGER_IDLE);
// (CONTROL_AWB_MODE was already set to AUTO above; the duplicate set was removed.)
// control.awbLock
m.set(CaptureRequest.CONTROL_AWB_LOCK, false);
// control.aeRegions, control.awbRegions, control.afRegions
{
Rect activeArray = c.get(SENSOR_INFO_ACTIVE_ARRAY_SIZE);
// A weight-0 rectangle over the whole active array = no specific metering region.
MeteringRectangle[] activeRegions = new MeteringRectangle[] { new MeteringRectangle(/*x*/
0, /*y*/
0, /*width*/
activeArray.width() - 1, /*height*/
activeArray.height() - 1, /*weight*/
0) };
m.set(CaptureRequest.CONTROL_AE_REGIONS, activeRegions);
m.set(CaptureRequest.CONTROL_AWB_REGIONS, activeRegions);
m.set(CaptureRequest.CONTROL_AF_REGIONS, activeRegions);
}
// control.captureIntent
{
int captureIntent;
switch(templateId) {
case CameraDevice.TEMPLATE_PREVIEW:
captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
break;
case CameraDevice.TEMPLATE_STILL_CAPTURE:
captureIntent = CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
break;
case CameraDevice.TEMPLATE_RECORD:
captureIntent = CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
break;
default:
// Can't get anything else since it's guarded by the IAE check
throw new AssertionError("Impossible; keep in sync with sAllowedTemplates");
}
m.set(CaptureRequest.CONTROL_CAPTURE_INTENT, captureIntent);
}
// control.aeMode
m.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
// AE is always unconditionally available in API1 devices
// control.mode
m.set(CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
// control.afMode
{
Float minimumFocusDistance = c.get(LENS_INFO_MINIMUM_FOCUS_DISTANCE);
int afMode;
if (minimumFocusDistance != null && minimumFocusDistance == LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS) {
// Cannot control auto-focus with fixed-focus cameras
afMode = CameraMetadata.CONTROL_AF_MODE_OFF;
} else {
// If a minimum focus distance is reported; the camera must have AF
afMode = CameraMetadata.CONTROL_AF_MODE_AUTO;
if (templateId == CameraDevice.TEMPLATE_RECORD || templateId == CameraDevice.TEMPLATE_VIDEO_SNAPSHOT) {
if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES), CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO;
}
} else if (templateId == CameraDevice.TEMPLATE_PREVIEW || templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES), CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
}
}
}
if (DEBUG) {
Log.v(TAG, "createRequestTemplate (templateId=" + templateId + ")," + " afMode=" + afMode + ", minimumFocusDistance=" + minimumFocusDistance);
}
m.set(CaptureRequest.CONTROL_AF_MODE, afMode);
}
{
// control.aeTargetFpsRange
Range<Integer>[] availableFpsRange = c.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
// Pick FPS range with highest max value, tiebreak on higher min value
Range<Integer> bestRange = availableFpsRange[0];
for (Range<Integer> r : availableFpsRange) {
if (bestRange.getUpper() < r.getUpper()) {
bestRange = r;
// BUGFIX: compare boxed Integers with equals(); '==' is reference equality and is
// only reliable for values inside the Integer cache (-128..127).
} else if (bestRange.getUpper().equals(r.getUpper()) && bestRange.getLower() < r.getLower()) {
bestRange = r;
}
}
m.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, bestRange);
}
// control.sceneMode -- DISABLED is always available
m.set(CaptureRequest.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
/*
* statistics.*
*/
// statistics.faceDetectMode
m.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, STATISTICS_FACE_DETECT_MODE_OFF);
/*
* flash.*
*/
// flash.mode
m.set(CaptureRequest.FLASH_MODE, FLASH_MODE_OFF);
/*
* noiseReduction.*
*/
// Still capture favors quality; all other templates favor speed.
if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_HIGH_QUALITY);
} else {
m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_FAST);
}
/*
* colorCorrection.*
*/
if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY);
} else {
m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, COLOR_CORRECTION_ABERRATION_MODE_FAST);
}
/*
* lens.*
*/
// lens.focalLength
m.set(CaptureRequest.LENS_FOCAL_LENGTH, c.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)[0]);
/*
* jpeg.*
*/
// jpeg.thumbnailSize - set smallest non-zero size if possible
// NOTE(review): assumes the advertised list is sorted with a (0,0) "no thumbnail" entry
// first; confirm the JPEG_AVAILABLE_THUMBNAIL_SIZES ordering guarantee before relying on it.
Size[] sizes = c.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
m.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, (sizes.length > 1) ? sizes[1] : sizes[0]);
// TODO: map other request template values
return m;
}
Use of android.hardware.camera2.CameraCharacteristics in the project android_frameworks_base by ResurrectionRemix.
The getSupportedSizeForFormat method of the class CameraTestUtils.
/**
* Get the available output sizes for the user-defined {@code format}.
*
* <p>Queries the stream configuration map for the regular output sizes of the format,
* then appends any high-resolution sizes advertised for the same format.</p>
*
* <p>Note that implementation-defined/hidden formats are not supported.</p>
*/
public static Size[] getSupportedSizeForFormat(int format, String cameraId, CameraManager cameraManager) throws CameraAccessException {
    CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
    assertNotNull("Can't get camera characteristics!", characteristics);
    if (VERBOSE) {
        Log.v(TAG, "get camera characteristics for camera: " + cameraId);
    }
    StreamConfigurationMap config = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size[] regularSizes = config.getOutputSizes(format);
    assertArrayNotEmpty(regularSizes, "availableSizes should not be empty for format: " + format);
    Size[] highResSizes = config.getHighResolutionOutputSizes(format);
    Size[] result = regularSizes;
    if (highResSizes != null && highResSizes.length > 0) {
        // Concatenate the regular and high-resolution size lists.
        result = Arrays.copyOf(regularSizes, regularSizes.length + highResSizes.length);
        System.arraycopy(highResSizes, 0, result, regularSizes.length, highResSizes.length);
    }
    if (VERBOSE) {
        Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(result));
    }
    return result;
}
Use of android.hardware.camera2.CameraCharacteristics in the project android_frameworks_base by ResurrectionRemix.
The getSupportedSizeForClass method of the class CameraTestUtils.
/**
* Get the available output sizes for the given class.
*
* <p>Queries the stream configuration map for the class-specific output sizes, then
* appends any high-resolution sizes advertised for {@code ImageFormat.PRIVATE}.</p>
*/
public static Size[] getSupportedSizeForClass(Class klass, String cameraId, CameraManager cameraManager) throws CameraAccessException {
    CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
    assertNotNull("Can't get camera characteristics!", characteristics);
    if (VERBOSE) {
        Log.v(TAG, "get camera characteristics for camera: " + cameraId);
    }
    StreamConfigurationMap config = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size[] classSizes = config.getOutputSizes(klass);
    assertArrayNotEmpty(classSizes, "availableSizes should not be empty for class: " + klass);
    Size[] highResSizes = config.getHighResolutionOutputSizes(ImageFormat.PRIVATE);
    Size[] result = classSizes;
    if (highResSizes != null && highResSizes.length > 0) {
        // Concatenate the class-specific and high-resolution size lists.
        result = Arrays.copyOf(classSizes, classSizes.length + highResSizes.length);
        System.arraycopy(highResSizes, 0, result, classSizes.length, highResSizes.length);
    }
    if (VERBOSE) {
        Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(result));
    }
    return result;
}
Aggregations