Use of android.hardware.camera2.impl.CameraMetadataNative in project android_frameworks_base by AOSPA.
The class LegacyMetadataMapper, method mapScalerStreamConfigs:
private static void mapScalerStreamConfigs(CameraMetadataNative m, Camera.Parameters p) {
    ArrayList<StreamConfiguration> availableStreamConfigs = new ArrayList<>();
    /*
     * Implementation-defined (preview, recording, etc) -> use camera1 preview sizes
     * YUV_420_888 cpu callbacks -> use camera1 preview sizes
     * Other preview callbacks (CPU) -> use camera1 preview sizes
     * JPEG still capture -> use camera1 still capture sizes
     *
     * Use platform-internal format constants here, since StreamConfigurationMap does the
     * remapping to public format constants.
     */
    List<Camera.Size> previewSizes = p.getSupportedPreviewSizes();
    List<Camera.Size> jpegSizes = p.getSupportedPictureSizes();
    /*
     * Work-around for b/17589233:
     * - Some HALs' largest preview size aspect ratio does not match the largest JPEG size AR
     * - This causes a large number of problems with focus/metering because it's relative to
     *   preview, making the difference between the JPEG and preview viewport inaccessible
     * - This boils down to metering or focusing areas being "arbitrarily" cropped
     *   in the capture result.
     * - Work around the HAL limitations by removing all of the largest preview sizes
     *   until we get one with the same aspect ratio as the JPEG size.
     */
    {
        SizeAreaComparator areaComparator = new SizeAreaComparator();
        // Sort preview sizes ascending by area (min -> max)
        Collections.sort(previewSizes, areaComparator);
        Camera.Size maxJpegSize = SizeAreaComparator.findLargestByArea(jpegSizes);
        float jpegAspectRatio = maxJpegSize.width * 1.0f / maxJpegSize.height;
        if (DEBUG) {
            Log.v(TAG, String.format("mapScalerStreamConfigs - largest JPEG area %dx%d, AR=%f",
                    maxJpegSize.width, maxJpegSize.height, jpegAspectRatio));
        }
        // Now remove preview sizes from the end (largest -> smallest) until the aspect ratio matches
        while (!previewSizes.isEmpty()) {
            // The maximum is always at the end after sorting
            int index = previewSizes.size() - 1;
            Camera.Size size = previewSizes.get(index);
            float previewAspectRatio = size.width * 1.0f / size.height;
            if (Math.abs(jpegAspectRatio - previewAspectRatio) >= PREVIEW_ASPECT_RATIO_TOLERANCE) {
                // Removing from the end of an ArrayList is O(1)
                previewSizes.remove(index);
                if (DEBUG) {
                    Log.v(TAG, String.format(
                            "mapScalerStreamConfigs - removed preview size %dx%d, AR=%f was not the same",
                            size.width, size.height, previewAspectRatio));
                }
            } else {
                break;
            }
        }
        if (previewSizes.isEmpty()) {
            // Fall back to the original faulty behavior, but at least work
            Log.w(TAG, "mapScalerStreamConfigs - failed to find any preview size matching "
                    + "JPEG aspect ratio " + jpegAspectRatio);
            previewSizes = p.getSupportedPreviewSizes();
        }
        // Sort again, this time in descending order (max -> min)
        Collections.sort(previewSizes, Collections.reverseOrder(areaComparator));
    }
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, previewSizes);
    appendStreamConfig(availableStreamConfigs, ImageFormat.YUV_420_888, previewSizes);
    for (int format : p.getSupportedPreviewFormats()) {
        if (ImageFormat.isPublicFormat(format) && format != ImageFormat.NV21) {
            appendStreamConfig(availableStreamConfigs, format, previewSizes);
        } else if (DEBUG) {
            /*
             * Do not add any formats unknown to us
             * (since it would fail runtime checks in StreamConfigurationMap)
             */
            Log.v(TAG, String.format("mapScalerStreamConfigs - Skipping format %x", format));
        }
    }
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_BLOB, p.getSupportedPictureSizes());
    /*
     * scaler.availableStreamConfigurations
     */
    m.set(SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            availableStreamConfigs.toArray(new StreamConfiguration[0]));
    /*
     * scaler.availableMinFrameDurations
     */
    // No frame durations available
    m.set(SCALER_AVAILABLE_MIN_FRAME_DURATIONS, new StreamConfigurationDuration[0]);
    StreamConfigurationDuration[] jpegStalls = new StreamConfigurationDuration[jpegSizes.size()];
    int i = 0;
    long longestStallDuration = -1;
    for (Camera.Size s : jpegSizes) {
        long stallDuration = calculateJpegStallDuration(s);
        jpegStalls[i++] = new StreamConfigurationDuration(HAL_PIXEL_FORMAT_BLOB, s.width, s.height,
                stallDuration);
        if (longestStallDuration < stallDuration) {
            longestStallDuration = stallDuration;
        }
    }
    /*
     * scaler.availableStallDurations
     */
    // Set stall durations for JPEG; other formats use the default stall duration
    m.set(SCALER_AVAILABLE_STALL_DURATIONS, jpegStalls);
    /*
     * sensor.info.maxFrameDuration
     */
    m.set(SENSOR_INFO_MAX_FRAME_DURATION, longestStallDuration);
}
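The helper calculateJpegStallDuration is not shown in this listing. As a rough illustration of the kind of model such a helper implements (a fixed capture latency plus an encode cost proportional to pixel count), here is a hedged sketch; the method name, constant names, and values below are assumptions for illustration, not the actual AOSP constants:

// Hypothetical sketch of a JPEG stall-duration estimate like the one the loop
// above relies on. All constants here are illustrative assumptions.
private static final long NS_PER_MS = 1_000_000L;
private static final long APPROX_CAPTURE_DELAY_MS = 200;   // assumed shutter-to-buffer delay
private static final long APPROX_ENCODE_TIME_MS_8MP = 600; // assumed encode time at ~8 MP

private static long approximateJpegStallDuration(Camera.Size size) {
    long baseDurationNs = APPROX_CAPTURE_DELAY_MS * NS_PER_MS;
    long areaPx = size.width * (long) size.height;
    // Scale the assumed 8 MP encode time linearly with the actual pixel count
    long encodeNs = APPROX_ENCODE_TIME_MS_8MP * NS_PER_MS * areaPx / (8L * 1024 * 1024);
    return baseDurationNs + encodeNs;
}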
Use of android.hardware.camera2.impl.CameraMetadataNative in project android_frameworks_base by AOSPA.
The class LegacyMetadataMapper, method createCharacteristics:
/**
 * Create characteristics for a legacy device by mapping the {@code parameters}
 * and {@code info}.
 *
 * @param parameters a string parseable by {@link Camera.Parameters#unflatten}
 * @param info camera info with the camera facing direction and angle of orientation
 * @return static camera characteristics for a camera device
 *
 * @throws NullPointerException if any of the args were {@code null}
 */
public static CameraCharacteristics createCharacteristics(String parameters,
        android.hardware.CameraInfo info) {
    checkNotNull(parameters, "parameters must not be null");
    checkNotNull(info, "info must not be null");
    checkNotNull(info.info, "info.info must not be null");
    CameraMetadataNative m = new CameraMetadataNative();
    mapCharacteristicsFromInfo(m, info.info);
    Camera.Parameters params = Camera.getEmptyParameters();
    params.unflatten(parameters);
    mapCharacteristicsFromParameters(m, params);
    if (DEBUG) {
        Log.v(TAG, "createCharacteristics metadata:");
        Log.v(TAG, "--------------------------------------------------- (start)");
        m.dumpToLog();
        Log.v(TAG, "--------------------------------------------------- (end)");
    }
    return new CameraCharacteristics(m);
}
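A hedged usage sketch: android.hardware.CameraInfo is a framework-internal wrapper whose public info field carries the Camera.CameraInfo, so the wiring below is illustrative rather than the exact shim code path:

// Illustrative only: building characteristics for camera id 0 from the legacy API.
// Assumes framework-internal access to android.hardware.CameraInfo.
Camera camera = Camera.open(0);
String flattenedParams = camera.getParameters().flatten();
camera.release();

android.hardware.CameraInfo info = new android.hardware.CameraInfo();
info.info = new Camera.CameraInfo();
Camera.getCameraInfo(0, info.info);

CameraCharacteristics characteristics =
        LegacyMetadataMapper.createCharacteristics(flattenedParams, info);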
Use of android.hardware.camera2.impl.CameraMetadataNative in project android_frameworks_base by AOSPA.
The class LegacyResultMapper, method cachedConvertResultMetadata:
/**
 * Generate capture result metadata from the legacy camera request.
 *
 * <p>This method caches and reuses the result from the previous call to this method if
 * the {@code parameters} of the subsequent {@link LegacyRequest} passed to this method
 * have not changed.</p>
 *
 * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
 * @param timestamp the timestamp to use for this result, in nanoseconds
 *
 * @return a {@link CameraMetadataNative} object containing result metadata.
 */
public CameraMetadataNative cachedConvertResultMetadata(LegacyRequest legacyRequest,
        long timestamp) {
    CameraMetadataNative result;
    boolean cached;
    /*
     * Attempt to look up the result from the cache if the parameters haven't changed
     */
    if (mCachedRequest != null
            && legacyRequest.parameters.same(mCachedRequest.parameters)
            && legacyRequest.captureRequest.equals(mCachedRequest.captureRequest)) {
        result = new CameraMetadataNative(mCachedResult);
        cached = true;
    } else {
        result = convertResultMetadata(legacyRequest);
        cached = false;
        // Always cache a *copy* of the metadata result,
        // since api2's client side takes ownership of it after it receives a result
        mCachedRequest = legacyRequest;
        mCachedResult = new CameraMetadataNative(result);
    }
    /*
     * Unconditionally set fields that change in every single frame
     */
    {
        // sensor.timestamp
        result.set(SENSOR_TIMESTAMP, timestamp);
    }
    if (DEBUG) {
        Log.v(TAG, "cachedConvertResultMetadata - cached? " + cached
                + " timestamp = " + timestamp);
        Log.v(TAG, "----- beginning of result dump ------");
        result.dumpToLog();
        Log.v(TAG, "----- end of result dump ------");
    }
    return result;
}
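A minimal caller sketch of the caching contract, assuming a legacyRequest already in scope and that LegacyResultMapper is default-constructible as in the framework:

// Illustrative only: a second call with unchanged parameters takes the cached
// path, yet each call returns a distinct defensive copy, so a caller that
// consumes or mutates the result cannot corrupt the mapper's cache.
LegacyResultMapper mapper = new LegacyResultMapper();
CameraMetadataNative first = mapper.cachedConvertResultMetadata(legacyRequest, 1_000_000L);
CameraMetadataNative second = mapper.cachedConvertResultMetadata(legacyRequest, 2_000_000L);
// first and second are distinct instances; only SENSOR_TIMESTAMP differs.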
Use of android.hardware.camera2.impl.CameraMetadataNative in project android_frameworks_base by AOSPA.
The class LegacyMetadataMapper, method createRequestTemplate:
/**
 * Create a request template.
 *
 * @param c a non-{@code null} camera characteristics for this camera
 * @param templateId a non-negative template ID
 *
 * @return a non-{@code null} request template
 *
 * @throws IllegalArgumentException if {@code templateId} was invalid
 *
 * @see android.hardware.camera2.CameraDevice#TEMPLATE_MANUAL
 */
public static CameraMetadataNative createRequestTemplate(CameraCharacteristics c, int templateId) {
    if (!ArrayUtils.contains(sAllowedTemplates, templateId)) {
        throw new IllegalArgumentException("templateId out of range");
    }
    CameraMetadataNative m = new CameraMetadataNative();
    /*
     * NOTE: If adding new code here and it needs to query the static info,
     * query the camera characteristics, so we can reuse this for api2 code later
     * to create our own templates in the framework
     */
    /*
     * control.*
     */
    // control.awbMode
    m.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO);
    // AWB is always unconditionally available in API1 devices
    // control.aeAntibandingMode
    m.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, CONTROL_AE_ANTIBANDING_MODE_AUTO);
    // control.aeExposureCompensation
    m.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
    // control.aeLock
    m.set(CaptureRequest.CONTROL_AE_LOCK, false);
    // control.aePrecaptureTrigger
    m.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
    // control.afTrigger
    m.set(CaptureRequest.CONTROL_AF_TRIGGER, CONTROL_AF_TRIGGER_IDLE);
    // control.awbMode
    m.set(CaptureRequest.CONTROL_AWB_MODE, CONTROL_AWB_MODE_AUTO);
    // control.awbLock
    m.set(CaptureRequest.CONTROL_AWB_LOCK, false);
    // control.aeRegions, control.awbRegions, control.afRegions
    {
        Rect activeArray = c.get(SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        MeteringRectangle[] activeRegions = new MeteringRectangle[] {
                new MeteringRectangle(/*x*/ 0, /*y*/ 0,
                        /*width*/ activeArray.width() - 1,
                        /*height*/ activeArray.height() - 1,
                        /*weight*/ 0) };
        m.set(CaptureRequest.CONTROL_AE_REGIONS, activeRegions);
        m.set(CaptureRequest.CONTROL_AWB_REGIONS, activeRegions);
        m.set(CaptureRequest.CONTROL_AF_REGIONS, activeRegions);
    }
    // control.captureIntent
    {
        int captureIntent;
        switch (templateId) {
            case CameraDevice.TEMPLATE_PREVIEW:
                captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
                break;
            case CameraDevice.TEMPLATE_STILL_CAPTURE:
                captureIntent = CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
                break;
            case CameraDevice.TEMPLATE_RECORD:
                captureIntent = CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
                break;
            default:
                // Can't get anything else since it's guarded by the IAE check
                throw new AssertionError("Impossible; keep in sync with sAllowedTemplates");
        }
        m.set(CaptureRequest.CONTROL_CAPTURE_INTENT, captureIntent);
    }
    // control.aeMode
    m.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
    // AE is always unconditionally available in API1 devices
    // control.mode
    m.set(CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
    // control.afMode
    {
        Float minimumFocusDistance = c.get(LENS_INFO_MINIMUM_FOCUS_DISTANCE);
        int afMode;
        if (minimumFocusDistance != null
                && minimumFocusDistance == LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS) {
            // Cannot control auto-focus with fixed-focus cameras
            afMode = CameraMetadata.CONTROL_AF_MODE_OFF;
        } else {
            // If a minimum focus distance is reported, the camera must have AF
            afMode = CameraMetadata.CONTROL_AF_MODE_AUTO;
            if (templateId == CameraDevice.TEMPLATE_RECORD
                    || templateId == CameraDevice.TEMPLATE_VIDEO_SNAPSHOT) {
                if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES),
                        CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
                    afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                }
            } else if (templateId == CameraDevice.TEMPLATE_PREVIEW
                    || templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
                if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES),
                        CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
                    afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                }
            }
        }
        if (DEBUG) {
            Log.v(TAG, "createRequestTemplate (templateId=" + templateId + "),"
                    + " afMode=" + afMode + ", minimumFocusDistance=" + minimumFocusDistance);
        }
        m.set(CaptureRequest.CONTROL_AF_MODE, afMode);
    }
    {
        // control.aeTargetFpsRange
        Range<Integer>[] availableFpsRange =
                c.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
        // Pick the FPS range with the highest max value; tie-break on the higher min value.
        // Note: use equals() for the tie-break, since == on boxed Integers compares references.
        Range<Integer> bestRange = availableFpsRange[0];
        for (Range<Integer> r : availableFpsRange) {
            if (bestRange.getUpper() < r.getUpper()) {
                bestRange = r;
            } else if (bestRange.getUpper().equals(r.getUpper())
                    && bestRange.getLower() < r.getLower()) {
                bestRange = r;
            }
        }
        m.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, bestRange);
    }
    // control.sceneMode -- DISABLED is always available
    m.set(CaptureRequest.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
    /*
     * statistics.*
     */
    // statistics.faceDetectMode
    m.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, STATISTICS_FACE_DETECT_MODE_OFF);
    /*
     * flash.*
     */
    // flash.mode
    m.set(CaptureRequest.FLASH_MODE, FLASH_MODE_OFF);
    /*
     * noiseReduction.*
     */
    if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
        m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_HIGH_QUALITY);
    } else {
        m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_FAST);
    }
    /*
     * colorCorrection.*
     */
    if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
        m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
                COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY);
    } else {
        m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
                COLOR_CORRECTION_ABERRATION_MODE_FAST);
    }
    /*
     * lens.*
     */
    // lens.focalLength
    m.set(CaptureRequest.LENS_FOCAL_LENGTH,
            c.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)[0]);
    /*
     * jpeg.*
     */
    // jpeg.thumbnailSize - set the smallest non-zero size if possible
    Size[] sizes = c.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
    m.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, (sizes.length > 1) ? sizes[1] : sizes[0]);
    // TODO: map other request template values
    return m;
}
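A hedged usage sketch, assuming a characteristics object obtained via createCharacteristics(...) for the same camera:

// Illustrative only: build a preview template and read back two of the keys
// mapped above.
CameraMetadataNative template =
        LegacyMetadataMapper.createRequestTemplate(characteristics, CameraDevice.TEMPLATE_PREVIEW);
Integer intent = template.get(CaptureRequest.CONTROL_CAPTURE_INTENT);
// intent == CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW
Range<Integer> fps = template.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
// fps is the advertised range with the highest max (ties broken by the higher min)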
Use of android.hardware.camera2.impl.CameraMetadataNative in project android_frameworks_base by ResurrectionRemix.
The class LegacyResultMapper, method convertResultMetadata:
/**
 * Generate capture result metadata from the legacy camera request.
 *
 * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
 * @return a {@link CameraMetadataNative} object containing result metadata.
 */
private static CameraMetadataNative convertResultMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    CameraMetadataNative result = new CameraMetadataNative();
    Rect activeArraySize = characteristics.get(
            CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArraySize,
            request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
    /*
     * colorCorrection
     */
    // colorCorrection.aberrationMode
    {
        result.set(COLOR_CORRECTION_ABERRATION_MODE,
                request.get(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE));
    }
    /*
     * control
     */
    /*
     * control.ae*
     */
    mapAe(result, characteristics, request, activeArraySize, zoomData, /*out*/ params);
    /*
     * control.af*
     */
    mapAf(result, activeArraySize, zoomData, /*out*/ params);
    /*
     * control.awb*
     */
    mapAwb(result, /*out*/ params);
    /*
     * control.captureIntent
     */
    {
        int captureIntent = ParamsUtils.getOrDefault(request,
                CaptureRequest.CONTROL_CAPTURE_INTENT,
                /*defaultValue*/ CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
        captureIntent = LegacyRequestMapper.filterSupportedCaptureIntent(captureIntent);
        result.set(CONTROL_CAPTURE_INTENT, captureIntent);
    }
    /*
     * control.mode
     */
    {
        int controlMode = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_MODE,
                CONTROL_MODE_AUTO);
        if (controlMode == CaptureResult.CONTROL_MODE_USE_SCENE_MODE) {
            result.set(CONTROL_MODE, CONTROL_MODE_USE_SCENE_MODE);
        } else {
            result.set(CONTROL_MODE, CONTROL_MODE_AUTO);
        }
    }
    /*
     * control.sceneMode
     */
    {
        String legacySceneMode = params.getSceneMode();
        int mode = LegacyMetadataMapper.convertSceneModeFromLegacy(legacySceneMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_SCENE_MODE, mode);
            // In case of SCENE_MODE == FACE_PRIORITY, LegacyFaceDetectMapper will override
            // the result to say SCENE_MODE == FACE_PRIORITY.
        } else {
            Log.w(TAG, "Unknown scene mode " + legacySceneMode
                    + " returned by camera HAL, setting to disabled.");
            result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
        }
    }
    /*
     * control.effectMode
     */
    {
        String legacyEffectMode = params.getColorEffect();
        int mode = LegacyMetadataMapper.convertEffectModeFromLegacy(legacyEffectMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_EFFECT_MODE, mode);
        } else {
            Log.w(TAG, "Unknown effect mode " + legacyEffectMode
                    + " returned by camera HAL, setting to off.");
            result.set(CaptureResult.CONTROL_EFFECT_MODE, CONTROL_EFFECT_MODE_OFF);
        }
    }
    // control.videoStabilizationMode
    {
        int stabMode = (params.isVideoStabilizationSupported() && params.getVideoStabilization())
                ? CONTROL_VIDEO_STABILIZATION_MODE_ON
                : CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode);
    }
    /*
     * flash
     */
    {
        // flash.mode, flash.state mapped in mapAeAndFlashMode
    }
    /*
     * lens
     */
    // lens.focusDistance
    {
        if (Parameters.FOCUS_MODE_INFINITY.equals(params.getFocusMode())) {
            result.set(CaptureResult.LENS_FOCUS_DISTANCE, 0.0f);
        }
    }
    // lens.focalLength
    result.set(CaptureResult.LENS_FOCAL_LENGTH, params.getFocalLength());
    /*
     * request
     */
    // request.pipelineDepth
    result.set(REQUEST_PIPELINE_DEPTH,
            characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH));
    /*
     * scaler
     */
    mapScaler(result, zoomData, /*out*/ params);
    /*
     * sensor
     */
    // sensor.timestamp varies every frame; mapping is done in #cachedConvertResultMetadata
    {
        // Unconditionally no test patterns
        result.set(SENSOR_TEST_PATTERN_MODE, SENSOR_TEST_PATTERN_MODE_OFF);
    }
    /*
     * jpeg
     */
    // jpeg.gpsLocation
    result.set(JPEG_GPS_LOCATION, request.get(CaptureRequest.JPEG_GPS_LOCATION));
    // jpeg.orientation
    result.set(JPEG_ORIENTATION, request.get(CaptureRequest.JPEG_ORIENTATION));
    // jpeg.quality
    result.set(JPEG_QUALITY, (byte) params.getJpegQuality());
    // jpeg.thumbnailQuality
    result.set(JPEG_THUMBNAIL_QUALITY, (byte) params.getJpegThumbnailQuality());
    // jpeg.thumbnailSize
    Camera.Size s = params.getJpegThumbnailSize();
    if (s != null) {
        result.set(JPEG_THUMBNAIL_SIZE, ParameterUtils.convertSize(s));
    } else {
        Log.w(TAG, "Null thumbnail size received from parameters.");
    }
    /*
     * noiseReduction.*
     */
    // noiseReduction.mode
    result.set(NOISE_REDUCTION_MODE, request.get(CaptureRequest.NOISE_REDUCTION_MODE));
    return result;
}
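The method leans on ParamsUtils.getOrDefault for keys the app may never have set. Its assumed semantics are a plain null-coalescing read; a minimal equivalent sketch (not the actual framework helper):

// Assumed semantics of ParamsUtils.getOrDefault: return the request's value
// for the key, or the supplied default when the key was never set.
static <T> T getOrDefault(CaptureRequest request, CaptureRequest.Key<T> key, T defaultValue) {
    T value = request.get(key);
    return (value != null) ? value : defaultValue;
}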