use of android.hardware.camera2.utils.SizeAreaComparator in project android_frameworks_base by DirtyUnicorns.
the class LegacyMetadataMapper method mapScalerStreamConfigs.
/**
 * Populate the camera2 scaler stream-configuration metadata (available stream
 * configurations, min frame durations, stall durations) and
 * {@code sensor.info.maxFrameDuration} on {@code m} from the api1 parameters.
 *
 * @param m output metadata to populate (read-modify-write)
 * @param p api1 parameters (used for reading only)
 */
private static void mapScalerStreamConfigs(CameraMetadataNative m, Camera.Parameters p) {
    ArrayList<StreamConfiguration> availableStreamConfigs = new ArrayList<>();
    /*
     * Implementation-defined (preview, recording, etc) -> use camera1 preview sizes
     * YUV_420_888 cpu callbacks -> use camera1 preview sizes
     * Other preview callbacks (CPU) -> use camera1 preview sizes
     * JPEG still capture -> use camera1 still capture sizes
     *
     * Use platform-internal format constants here, since StreamConfigurationMap does the
     * remapping to public format constants.
     */
    List<Camera.Size> previewSizes = p.getSupportedPreviewSizes();
    List<Camera.Size> jpegSizes = p.getSupportedPictureSizes();
    /*
     * Work-around for b/17589233:
     * - Some HALs' largest preview size aspect ratio does not match the largest JPEG size AR
     * - This causes a large amount of problems with focus/metering because it's relative to
     *   preview, making the difference between the JPEG and preview viewport inaccessible
     * - This boils down to metering or focusing areas being "arbitrarily" cropped
     *   in the capture result.
     * - Work-around the HAL limitations by removing all of the largest preview sizes
     *   until we get one with the same aspect ratio as the jpeg size.
     */
    {
        SizeAreaComparator areaComparator = new SizeAreaComparator();
        // Sort preview to min->max
        Collections.sort(previewSizes, areaComparator);
        // NOTE(review): assumes the HAL reports at least one picture size; an empty
        // jpegSizes list would presumably make findLargestByArea fail — confirm upstream.
        Camera.Size maxJpegSize = SizeAreaComparator.findLargestByArea(jpegSizes);
        float jpegAspectRatio = maxJpegSize.width * 1.0f / maxJpegSize.height;
        if (DEBUG) {
            Log.v(TAG, String.format("mapScalerStreamConfigs - largest JPEG area %dx%d, AR=%f",
                    maxJpegSize.width, maxJpegSize.height, jpegAspectRatio));
        }
        // Now remove preview sizes from the end (largest->smallest) until aspect ratio matches
        while (!previewSizes.isEmpty()) {
            // max is always at the end
            int index = previewSizes.size() - 1;
            Camera.Size size = previewSizes.get(index);
            float previewAspectRatio = size.width * 1.0f / size.height;
            if (Math.abs(jpegAspectRatio - previewAspectRatio) >= PREVIEW_ASPECT_RATIO_TOLERANCE) {
                // Assume removing from end is O(1)
                previewSizes.remove(index);
                if (DEBUG) {
                    Log.v(TAG, String.format("mapScalerStreamConfigs - removed preview size %dx%d, AR=%f "
                            + "was not the same", size.width, size.height, previewAspectRatio));
                }
            } else {
                break;
            }
        }
        if (previewSizes.isEmpty()) {
            // Fall-back to the original faulty behavior, but at least work
            Log.w(TAG, "mapScalerStreamConfigs - failed to find any preview size matching "
                    + "JPEG aspect ratio " + jpegAspectRatio);
            previewSizes = p.getSupportedPreviewSizes();
        }
        // Sort again, this time in descending order max->min
        Collections.sort(previewSizes, Collections.reverseOrder(areaComparator));
    }
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, previewSizes);
    appendStreamConfig(availableStreamConfigs, ImageFormat.YUV_420_888, previewSizes);
    for (int format : p.getSupportedPreviewFormats()) {
        if (ImageFormat.isPublicFormat(format) && format != ImageFormat.NV21) {
            appendStreamConfig(availableStreamConfigs, format, previewSizes);
        } else if (DEBUG) {
            /*
             * Do not add any formats unknown to us
             * (since it would fail runtime checks in StreamConfigurationMap)
             */
            // Fixed log prefix: it previously said "mapStreamConfigs", which is not this
            // method's name and made the debug logs misleading to grep.
            Log.v(TAG, String.format("mapScalerStreamConfigs - Skipping format %x", format));
        }
    }
    // Reuse the list fetched above instead of a second getSupportedPictureSizes() call;
    // jpegSizes has not been modified since it was read.
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_BLOB, jpegSizes);
    /*
     * scaler.availableStreamConfigurations
     */
    m.set(SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            availableStreamConfigs.toArray(new StreamConfiguration[0]));
    /*
     * scaler.availableMinFrameDurations
     */
    // No frame durations available
    m.set(SCALER_AVAILABLE_MIN_FRAME_DURATIONS, new StreamConfigurationDuration[0]);
    StreamConfigurationDuration[] jpegStalls = new StreamConfigurationDuration[jpegSizes.size()];
    int i = 0;
    long longestStallDuration = -1;
    for (Camera.Size s : jpegSizes) {
        long stallDuration = calculateJpegStallDuration(s);
        jpegStalls[i++] = new StreamConfigurationDuration(HAL_PIXEL_FORMAT_BLOB, s.width, s.height,
                stallDuration);
        longestStallDuration = Math.max(longestStallDuration, stallDuration);
    }
    /*
     * scaler.availableStallDurations
     */
    // Set stall durations for jpeg, other formats use default stall duration
    m.set(SCALER_AVAILABLE_STALL_DURATIONS, jpegStalls);
    /*
     * sensor.info.maxFrameDuration
     */
    m.set(SENSOR_INFO_MAX_FRAME_DURATION, longestStallDuration);
}
use of android.hardware.camera2.utils.SizeAreaComparator in project android_frameworks_base by DirtyUnicorns.
the class RequestThreadManager method calculatePictureSize.
/**
 * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
 * than all of the configured {@code JPEG} outputs (by both width and height).
 *
 * <p>If multiple supported JPEG sizes are larger, select the smallest of them which
 * still satisfies the above constraint.</p>
 *
 * <p>As a result, the returned size is guaranteed to be usable without needing
 * to upscale any of the outputs. If only one {@code JPEG} surface is used,
 * then no scaling/cropping is necessary between the taken picture and
 * the {@code JPEG} output surface.</p>
 *
 * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
 * @param callbackSizes the configured output size for each surface in
 *                      {@code callbackOutputs} (same length, same order)
 * @param params api1 parameters (used for reading only)
 *
 * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
 *         {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
 *         surfaces.
 *
 * @throws IllegalStateException if {@code callbackOutputs} and {@code callbackSizes}
 *         differ in length
 * @throws AssertionError if no supported picture size can contain every configured JPEG size
 */
private Size calculatePictureSize(List<Surface> callbackOutputs, List<Size> callbackSizes, Camera.Parameters params) {
    /*
     * Find the largest JPEG size (if any), from the configured outputs:
     * - the api1 picture size should be set to the smallest legal size that's at least as large
     *   as the largest configured JPEG size
     */
    if (callbackOutputs.size() != callbackSizes.size()) {
        throw new IllegalStateException("Input collections must be same length");
    }
    List<Size> configuredJpegSizes = new ArrayList<>();
    Iterator<Size> sizeIterator = callbackSizes.iterator();
    for (Surface callbackSurface : callbackOutputs) {
        Size jpegSize = sizeIterator.next();
        if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) {
            // Ignore non-JPEG callback formats
            continue;
        }
        configuredJpegSizes.add(jpegSize);
    }
    if (configuredJpegSizes.isEmpty()) {
        // No JPEG surfaces were configured; nothing to size.
        return null;
    }
    /*
     * Find the largest configured JPEG width, and height, independently
     * of the rest.
     *
     * The rest of the JPEG streams can be cropped out of this smallest bounding
     * rectangle.
     */
    int maxConfiguredJpegWidth = -1;
    int maxConfiguredJpegHeight = -1;
    for (Size jpegSize : configuredJpegSizes) {
        // Math.max replaces the original hand-rolled ternaries; behavior is identical.
        maxConfiguredJpegWidth = Math.max(maxConfiguredJpegWidth, jpegSize.getWidth());
        maxConfiguredJpegHeight = Math.max(maxConfiguredJpegHeight, jpegSize.getHeight());
    }
    Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);
    List<Size> supportedJpegSizes = ParameterUtils.convertSizeList(params.getSupportedPictureSizes());
    /*
     * Find the smallest supported JPEG size that can fit the smallest bounding
     * rectangle for the configured JPEG sizes.
     */
    List<Size> candidateSupportedJpegSizes = new ArrayList<>();
    for (Size supportedJpegSize : supportedJpegSizes) {
        if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth
                && supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
            candidateSupportedJpegSizes.add(supportedJpegSize);
        }
    }
    if (candidateSupportedJpegSizes.isEmpty()) {
        throw new AssertionError("Could not find any supported JPEG sizes large enough to fit "
                + smallestBoundJpegSize);
    }
    Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes,
            new SizeAreaComparator());
    if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
        Log.w(TAG, String.format("configureOutputs - Will need to crop picture %s into "
                + "smallest bound size %s", smallestSupportedJpegSize, smallestBoundJpegSize));
    }
    return smallestSupportedJpegSize;
}
use of android.hardware.camera2.utils.SizeAreaComparator in project android_frameworks_base by AOSPA.
the class LegacyMetadataMapper method mapScalerStreamConfigs.
/**
 * Populate the camera2 scaler stream-configuration metadata (available stream
 * configurations, min frame durations, stall durations) and
 * {@code sensor.info.maxFrameDuration} on {@code m} from the api1 parameters.
 *
 * @param m output metadata to populate (read-modify-write)
 * @param p api1 parameters (used for reading only)
 */
private static void mapScalerStreamConfigs(CameraMetadataNative m, Camera.Parameters p) {
    ArrayList<StreamConfiguration> availableStreamConfigs = new ArrayList<>();
    /*
     * Implementation-defined (preview, recording, etc) -> use camera1 preview sizes
     * YUV_420_888 cpu callbacks -> use camera1 preview sizes
     * Other preview callbacks (CPU) -> use camera1 preview sizes
     * JPEG still capture -> use camera1 still capture sizes
     *
     * Use platform-internal format constants here, since StreamConfigurationMap does the
     * remapping to public format constants.
     */
    List<Camera.Size> previewSizes = p.getSupportedPreviewSizes();
    List<Camera.Size> jpegSizes = p.getSupportedPictureSizes();
    /*
     * Work-around for b/17589233:
     * - Some HALs' largest preview size aspect ratio does not match the largest JPEG size AR
     * - This causes a large amount of problems with focus/metering because it's relative to
     *   preview, making the difference between the JPEG and preview viewport inaccessible
     * - This boils down to metering or focusing areas being "arbitrarily" cropped
     *   in the capture result.
     * - Work-around the HAL limitations by removing all of the largest preview sizes
     *   until we get one with the same aspect ratio as the jpeg size.
     */
    {
        SizeAreaComparator areaComparator = new SizeAreaComparator();
        // Sort preview to min->max
        Collections.sort(previewSizes, areaComparator);
        // NOTE(review): assumes the HAL reports at least one picture size; an empty
        // jpegSizes list would presumably make findLargestByArea fail — confirm upstream.
        Camera.Size maxJpegSize = SizeAreaComparator.findLargestByArea(jpegSizes);
        float jpegAspectRatio = maxJpegSize.width * 1.0f / maxJpegSize.height;
        if (DEBUG) {
            Log.v(TAG, String.format("mapScalerStreamConfigs - largest JPEG area %dx%d, AR=%f",
                    maxJpegSize.width, maxJpegSize.height, jpegAspectRatio));
        }
        // Now remove preview sizes from the end (largest->smallest) until aspect ratio matches
        while (!previewSizes.isEmpty()) {
            // max is always at the end
            int index = previewSizes.size() - 1;
            Camera.Size size = previewSizes.get(index);
            float previewAspectRatio = size.width * 1.0f / size.height;
            if (Math.abs(jpegAspectRatio - previewAspectRatio) >= PREVIEW_ASPECT_RATIO_TOLERANCE) {
                // Assume removing from end is O(1)
                previewSizes.remove(index);
                if (DEBUG) {
                    Log.v(TAG, String.format("mapScalerStreamConfigs - removed preview size %dx%d, AR=%f "
                            + "was not the same", size.width, size.height, previewAspectRatio));
                }
            } else {
                break;
            }
        }
        if (previewSizes.isEmpty()) {
            // Fall-back to the original faulty behavior, but at least work
            Log.w(TAG, "mapScalerStreamConfigs - failed to find any preview size matching "
                    + "JPEG aspect ratio " + jpegAspectRatio);
            previewSizes = p.getSupportedPreviewSizes();
        }
        // Sort again, this time in descending order max->min
        Collections.sort(previewSizes, Collections.reverseOrder(areaComparator));
    }
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, previewSizes);
    appendStreamConfig(availableStreamConfigs, ImageFormat.YUV_420_888, previewSizes);
    for (int format : p.getSupportedPreviewFormats()) {
        if (ImageFormat.isPublicFormat(format) && format != ImageFormat.NV21) {
            appendStreamConfig(availableStreamConfigs, format, previewSizes);
        } else if (DEBUG) {
            /*
             * Do not add any formats unknown to us
             * (since it would fail runtime checks in StreamConfigurationMap)
             */
            // Fixed log prefix: it previously said "mapStreamConfigs", which is not this
            // method's name and made the debug logs misleading to grep.
            Log.v(TAG, String.format("mapScalerStreamConfigs - Skipping format %x", format));
        }
    }
    // Reuse the list fetched above instead of a second getSupportedPictureSizes() call;
    // jpegSizes has not been modified since it was read.
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_BLOB, jpegSizes);
    /*
     * scaler.availableStreamConfigurations
     */
    m.set(SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            availableStreamConfigs.toArray(new StreamConfiguration[0]));
    /*
     * scaler.availableMinFrameDurations
     */
    // No frame durations available
    m.set(SCALER_AVAILABLE_MIN_FRAME_DURATIONS, new StreamConfigurationDuration[0]);
    StreamConfigurationDuration[] jpegStalls = new StreamConfigurationDuration[jpegSizes.size()];
    int i = 0;
    long longestStallDuration = -1;
    for (Camera.Size s : jpegSizes) {
        long stallDuration = calculateJpegStallDuration(s);
        jpegStalls[i++] = new StreamConfigurationDuration(HAL_PIXEL_FORMAT_BLOB, s.width, s.height,
                stallDuration);
        longestStallDuration = Math.max(longestStallDuration, stallDuration);
    }
    /*
     * scaler.availableStallDurations
     */
    // Set stall durations for jpeg, other formats use default stall duration
    m.set(SCALER_AVAILABLE_STALL_DURATIONS, jpegStalls);
    /*
     * sensor.info.maxFrameDuration
     */
    m.set(SENSOR_INFO_MAX_FRAME_DURATION, longestStallDuration);
}
use of android.hardware.camera2.utils.SizeAreaComparator in project android_frameworks_base by AOSPA.
the class RequestThreadManager method calculatePictureSize.
/**
 * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
 * than all of the configured {@code JPEG} outputs (by both width and height).
 *
 * <p>If multiple supported JPEG sizes are larger, select the smallest of them which
 * still satisfies the above constraint.</p>
 *
 * <p>As a result, the returned size is guaranteed to be usable without needing
 * to upscale any of the outputs. If only one {@code JPEG} surface is used,
 * then no scaling/cropping is necessary between the taken picture and
 * the {@code JPEG} output surface.</p>
 *
 * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
 * @param callbackSizes the configured output size for each surface in
 *                      {@code callbackOutputs} (same length, same order)
 * @param params api1 parameters (used for reading only)
 *
 * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
 *         {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
 *         surfaces.
 *
 * @throws IllegalStateException if {@code callbackOutputs} and {@code callbackSizes}
 *         differ in length
 * @throws AssertionError if no supported picture size can contain every configured JPEG size
 */
private Size calculatePictureSize(List<Surface> callbackOutputs, List<Size> callbackSizes, Camera.Parameters params) {
    /*
     * Find the largest JPEG size (if any), from the configured outputs:
     * - the api1 picture size should be set to the smallest legal size that's at least as large
     *   as the largest configured JPEG size
     */
    if (callbackOutputs.size() != callbackSizes.size()) {
        throw new IllegalStateException("Input collections must be same length");
    }
    List<Size> configuredJpegSizes = new ArrayList<>();
    Iterator<Size> sizeIterator = callbackSizes.iterator();
    for (Surface callbackSurface : callbackOutputs) {
        Size jpegSize = sizeIterator.next();
        if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) {
            // Ignore non-JPEG callback formats
            continue;
        }
        configuredJpegSizes.add(jpegSize);
    }
    if (configuredJpegSizes.isEmpty()) {
        // No JPEG surfaces were configured; nothing to size.
        return null;
    }
    /*
     * Find the largest configured JPEG width, and height, independently
     * of the rest.
     *
     * The rest of the JPEG streams can be cropped out of this smallest bounding
     * rectangle.
     */
    int maxConfiguredJpegWidth = -1;
    int maxConfiguredJpegHeight = -1;
    for (Size jpegSize : configuredJpegSizes) {
        // Math.max replaces the original hand-rolled ternaries; behavior is identical.
        maxConfiguredJpegWidth = Math.max(maxConfiguredJpegWidth, jpegSize.getWidth());
        maxConfiguredJpegHeight = Math.max(maxConfiguredJpegHeight, jpegSize.getHeight());
    }
    Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);
    List<Size> supportedJpegSizes = ParameterUtils.convertSizeList(params.getSupportedPictureSizes());
    /*
     * Find the smallest supported JPEG size that can fit the smallest bounding
     * rectangle for the configured JPEG sizes.
     */
    List<Size> candidateSupportedJpegSizes = new ArrayList<>();
    for (Size supportedJpegSize : supportedJpegSizes) {
        if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth
                && supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
            candidateSupportedJpegSizes.add(supportedJpegSize);
        }
    }
    if (candidateSupportedJpegSizes.isEmpty()) {
        throw new AssertionError("Could not find any supported JPEG sizes large enough to fit "
                + smallestBoundJpegSize);
    }
    Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes,
            new SizeAreaComparator());
    if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
        Log.w(TAG, String.format("configureOutputs - Will need to crop picture %s into "
                + "smallest bound size %s", smallestSupportedJpegSize, smallestBoundJpegSize));
    }
    return smallestSupportedJpegSize;
}
use of android.hardware.camera2.utils.SizeAreaComparator in project android_frameworks_base by crdroidandroid.
the class LegacyMetadataMapper method mapScalerStreamConfigs.
/**
 * Populate the camera2 scaler stream-configuration metadata (available stream
 * configurations, min frame durations, stall durations) and
 * {@code sensor.info.maxFrameDuration} on {@code m} from the api1 parameters.
 *
 * @param m output metadata to populate (read-modify-write)
 * @param p api1 parameters (used for reading only)
 */
private static void mapScalerStreamConfigs(CameraMetadataNative m, Camera.Parameters p) {
    ArrayList<StreamConfiguration> availableStreamConfigs = new ArrayList<>();
    /*
     * Implementation-defined (preview, recording, etc) -> use camera1 preview sizes
     * YUV_420_888 cpu callbacks -> use camera1 preview sizes
     * Other preview callbacks (CPU) -> use camera1 preview sizes
     * JPEG still capture -> use camera1 still capture sizes
     *
     * Use platform-internal format constants here, since StreamConfigurationMap does the
     * remapping to public format constants.
     */
    List<Camera.Size> previewSizes = p.getSupportedPreviewSizes();
    List<Camera.Size> jpegSizes = p.getSupportedPictureSizes();
    /*
     * Work-around for b/17589233:
     * - Some HALs' largest preview size aspect ratio does not match the largest JPEG size AR
     * - This causes a large amount of problems with focus/metering because it's relative to
     *   preview, making the difference between the JPEG and preview viewport inaccessible
     * - This boils down to metering or focusing areas being "arbitrarily" cropped
     *   in the capture result.
     * - Work-around the HAL limitations by removing all of the largest preview sizes
     *   until we get one with the same aspect ratio as the jpeg size.
     */
    {
        SizeAreaComparator areaComparator = new SizeAreaComparator();
        // Sort preview to min->max
        Collections.sort(previewSizes, areaComparator);
        // NOTE(review): assumes the HAL reports at least one picture size; an empty
        // jpegSizes list would presumably make findLargestByArea fail — confirm upstream.
        Camera.Size maxJpegSize = SizeAreaComparator.findLargestByArea(jpegSizes);
        float jpegAspectRatio = maxJpegSize.width * 1.0f / maxJpegSize.height;
        if (DEBUG) {
            Log.v(TAG, String.format("mapScalerStreamConfigs - largest JPEG area %dx%d, AR=%f",
                    maxJpegSize.width, maxJpegSize.height, jpegAspectRatio));
        }
        // Now remove preview sizes from the end (largest->smallest) until aspect ratio matches
        while (!previewSizes.isEmpty()) {
            // max is always at the end
            int index = previewSizes.size() - 1;
            Camera.Size size = previewSizes.get(index);
            float previewAspectRatio = size.width * 1.0f / size.height;
            if (Math.abs(jpegAspectRatio - previewAspectRatio) >= PREVIEW_ASPECT_RATIO_TOLERANCE) {
                // Assume removing from end is O(1)
                previewSizes.remove(index);
                if (DEBUG) {
                    Log.v(TAG, String.format("mapScalerStreamConfigs - removed preview size %dx%d, AR=%f "
                            + "was not the same", size.width, size.height, previewAspectRatio));
                }
            } else {
                break;
            }
        }
        if (previewSizes.isEmpty()) {
            // Fall-back to the original faulty behavior, but at least work
            Log.w(TAG, "mapScalerStreamConfigs - failed to find any preview size matching "
                    + "JPEG aspect ratio " + jpegAspectRatio);
            previewSizes = p.getSupportedPreviewSizes();
        }
        // Sort again, this time in descending order max->min
        Collections.sort(previewSizes, Collections.reverseOrder(areaComparator));
    }
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, previewSizes);
    appendStreamConfig(availableStreamConfigs, ImageFormat.YUV_420_888, previewSizes);
    for (int format : p.getSupportedPreviewFormats()) {
        if (ImageFormat.isPublicFormat(format) && format != ImageFormat.NV21) {
            appendStreamConfig(availableStreamConfigs, format, previewSizes);
        } else if (DEBUG) {
            /*
             * Do not add any formats unknown to us
             * (since it would fail runtime checks in StreamConfigurationMap)
             */
            // Fixed log prefix: it previously said "mapStreamConfigs", which is not this
            // method's name and made the debug logs misleading to grep.
            Log.v(TAG, String.format("mapScalerStreamConfigs - Skipping format %x", format));
        }
    }
    // Reuse the list fetched above instead of a second getSupportedPictureSizes() call;
    // jpegSizes has not been modified since it was read.
    appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_BLOB, jpegSizes);
    /*
     * scaler.availableStreamConfigurations
     */
    m.set(SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            availableStreamConfigs.toArray(new StreamConfiguration[0]));
    /*
     * scaler.availableMinFrameDurations
     */
    // No frame durations available
    m.set(SCALER_AVAILABLE_MIN_FRAME_DURATIONS, new StreamConfigurationDuration[0]);
    StreamConfigurationDuration[] jpegStalls = new StreamConfigurationDuration[jpegSizes.size()];
    int i = 0;
    long longestStallDuration = -1;
    for (Camera.Size s : jpegSizes) {
        long stallDuration = calculateJpegStallDuration(s);
        jpegStalls[i++] = new StreamConfigurationDuration(HAL_PIXEL_FORMAT_BLOB, s.width, s.height,
                stallDuration);
        longestStallDuration = Math.max(longestStallDuration, stallDuration);
    }
    /*
     * scaler.availableStallDurations
     */
    // Set stall durations for jpeg, other formats use default stall duration
    m.set(SCALER_AVAILABLE_STALL_DURATIONS, jpegStalls);
    /*
     * sensor.info.maxFrameDuration
     */
    m.set(SENSOR_INFO_MAX_FRAME_DURATION, longestStallDuration);
}
Aggregations