Example use of android.hardware.camera2.params.StreamConfigurationMap from the project android_frameworks_base (AOSPA): class StaticMetadata, method isHighSpeedVideoSupported.
/**
 * Check if high speed video is supported (HIGH_SPEED_VIDEO scene mode is
 * supported, supported high speed fps ranges and sizes are valid).
 *
 * @return true if high speed video is supported.
 */
public boolean isHighSpeedVideoSupported() {
    List<Integer> sceneModes =
            Arrays.asList(CameraTestUtils.toObject(getAvailableSceneModesChecked()));
    // The HIGH_SPEED_VIDEO scene mode must be advertised at all.
    if (!sceneModes.contains(CameraCharacteristics.CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO)) {
        return false;
    }
    StreamConfigurationMap config =
            getValueFromKeyNonNull(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (config == null) {
        return false;
    }
    // There must be at least one high speed size, and every advertised size
    // must come with at least one fps range.
    Size[] highSpeedSizes = config.getHighSpeedVideoSizes();
    if (highSpeedSizes.length == 0) {
        return false;
    }
    for (Size highSpeedSize : highSpeedSizes) {
        if (config.getHighSpeedVideoFpsRangesFor(highSpeedSize).length == 0) {
            return false;
        }
    }
    return true;
}
Example use of android.hardware.camera2.params.StreamConfigurationMap from the project android_frameworks_base (AOSPA): class StaticMetadata, method getAvailableSizesForFormatChecked.
/**
 * Get available sizes for given format and direction, and whether to limit to slow or fast
 * resolutions.
 *
 * @param format The format for the requested size array.
 * @param direction The stream direction, input or output.
 * @param fastSizes whether to include getOutputSizes() sizes (generally faster)
 * @param slowSizes whether to include getHighResolutionOutputSizes() sizes (generally slower)
 * @return The sizes of the given format, empty array if no available size is found.
 */
public Size[] getAvailableSizesForFormatChecked(int format, StreamDirection direction, boolean fastSizes, boolean slowSizes) {
    Key<StreamConfigurationMap> key = CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
    StreamConfigurationMap config = getValueFromKeyNonNull(key);
    if (config == null) {
        return new Size[0];
    }
    // Input streams have no fast/slow distinction.
    if (direction == StreamDirection.Input) {
        Size[] inputSizes = config.getInputSizes(format);
        return (inputSizes != null) ? inputSizes : new Size[0];
    }
    if (direction != StreamDirection.Output) {
        throw new IllegalArgumentException("direction must be output or input");
    }
    // Output streams: gather the requested speed classes, then merge
    // fast sizes first, followed by the high resolution (slow) sizes.
    Size[] fastList = fastSizes ? config.getOutputSizes(format) : null;
    Size[] slowList = slowSizes ? config.getHighResolutionOutputSizes(format) : null;
    if (fastList == null) {
        return (slowList != null) ? slowList : new Size[0];
    }
    if (slowList == null) {
        return fastList;
    }
    Size[] merged = new Size[fastList.length + slowList.length];
    System.arraycopy(fastList, 0, merged, 0, fastList.length);
    System.arraycopy(slowList, 0, merged, fastList.length, slowList.length);
    return merged;
}
Example use of android.hardware.camera2.params.StreamConfigurationMap from the project android_frameworks_base (AOSPA): class CameraTestUtils, method getSupportedSizeForFormat.
/**
 * Get the available output sizes for the user-defined {@code format}.
 *
 * <p>Returns the regular output sizes followed by the high resolution (slow)
 * output sizes, when the latter are present.</p>
 *
 * <p>Note that implementation-defined/hidden formats are not supported.</p>
 *
 * @param format the image format to query
 * @param cameraId id of the camera whose characteristics are queried
 * @param cameraManager manager used to look up the camera characteristics
 * @return all supported output sizes for {@code format}; asserts (test failure) if none
 * @throws CameraAccessException if the camera characteristics cannot be queried
 */
public static Size[] getSupportedSizeForFormat(int format, String cameraId, CameraManager cameraManager) throws CameraAccessException {
    CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
    assertNotNull("Can't get camera characteristics!", properties);
    if (VERBOSE) {
        Log.v(TAG, "get camera characteristics for camera: " + cameraId);
    }
    StreamConfigurationMap configMap = properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    // Fail with a clear assertion message instead of an NPE on the next line
    // when the stream configuration map is missing from the characteristics.
    assertNotNull("Can't get stream configuration map for camera: " + cameraId, configMap);
    Size[] availableSizes = configMap.getOutputSizes(format);
    assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: " + format);
    Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(format);
    if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
        // Concatenate: regular sizes first, then the high resolution sizes.
        Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
        System.arraycopy(availableSizes, 0, allSizes, 0, availableSizes.length);
        System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length, highResAvailableSizes.length);
        availableSizes = allSizes;
    }
    if (VERBOSE) {
        Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
    }
    return availableSizes;
}
Example use of android.hardware.camera2.params.StreamConfigurationMap from the project android_frameworks_base (DirtyUnicorns): class CameraMetadataTest, method testOverrideStreamConfigurationMap.
/**
 * Set the raw native value of the available stream configurations; ensure that
 * the read-out managed value is consistent with what we write in.
 */
@SmallTest
public void testOverrideStreamConfigurationMap() {
/*
 * First, write all the raw values:
 * - availableStreamConfigurations
 * - availableMinFrameDurations
 * - availableStallDurations
 *
 * Then, read this out as a synthetic multi-key 'streamConfigurationMap'
 *
 * Finally, validate that the map was unmarshaled correctly
 * and is converting the internal formats to public formats properly.
 */
//
// android.scaler.availableStreamConfigurations (int x n x 4 array)
// Each 4-tuple is (format, width, height, direction). Formats are internal
// HAL codes (0x20 = RAW16, 0x21 = BLOB, 0x22 = IMPLEMENTATION_DEFINED,
// 0x23 = YCbCr_420_888).
//
final int OUTPUT = 0;
final int INPUT = 1;
int[] rawAvailableStreamConfigs = new int[] {
0x20, // RAW16: 3280x2464, output
3280,
2464,
OUTPUT,
0x23, // YCbCr_420_888: 3264x2448, output
3264,
2448,
OUTPUT,
0x23, // YCbCr_420_888: 3200x2400, output
3200,
2400,
OUTPUT,
0x21, // BLOB: 3264x2448, output
3264,
2448,
OUTPUT,
0x21, // BLOB: 3200x2400, output
3200,
2400,
OUTPUT,
0x21, // BLOB: 2592x1944, output
2592,
1944,
OUTPUT,
0x21, // BLOB: 2048x1536, output
2048,
1536,
OUTPUT,
0x21, // BLOB: 1920x1080, output
1920,
1080,
OUTPUT,
0x22, // IMPLEMENTATION_DEFINED: 640x480, output
640,
480,
OUTPUT,
0x20, // RAW16: 320x240, input
320,
240,
INPUT };
Key<StreamConfiguration[]> configKey = CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS.getNativeKey();
mMetadata.writeValues(configKey.getTag(), toByteArray(rawAvailableStreamConfigs));
//
// android.scaler.availableMinFrameDurations (int x n x 4 array)
// Each 4-tuple is (format, width, height, minFrameDuration). The expected
// array uses public format codes (0x100 = ImageFormat.JPEG) because the
// managed map converts internal BLOB (0x21) to JPEG on read-out.
//
long[] expectedAvailableMinDurations = new long[] {
0x20, // RAW16: 3280x2464
3280,
2464,
33333331,
0x23, // YCbCr_420_888: 3264x2448
3264,
2448,
33333332,
0x23, // YCbCr_420_888: 3200x2400
3200,
2400,
33333333,
0x100, // ImageFormat.JPEG: 3264x2448
3264,
2448,
33333334,
0x100, // ImageFormat.JPEG: 3200x2400
3200,
2400,
33333335,
0x100, // ImageFormat.JPEG: 2592x1944
2592,
1944,
33333336,
0x100, // ImageFormat.JPEG: 2048x1536
2048,
1536,
33333337,
0x100, // ImageFormat.JPEG: 1920x1080
1920,
1080,
33333338 };
long[] rawAvailableMinDurations = new long[] {
0x20, // RAW16: 3280x2464
3280,
2464,
33333331,
0x23, // YCbCr_420_888: 3264x2448
3264,
2448,
33333332,
0x23, // YCbCr_420_888: 3200x2400
3200,
2400,
33333333,
0x21, // BLOB: 3264x2448
3264,
2448,
33333334,
0x21, // BLOB: 3200x2400
3200,
2400,
33333335,
0x21, // BLOB: 2592x1944
2592,
1944,
33333336,
0x21, // BLOB: 2048x1536
2048,
1536,
33333337,
0x21, // BLOB: 1920x1080
1920,
1080,
33333338 };
Key<StreamConfigurationDuration[]> durationKey = CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS.getNativeKey();
mMetadata.writeValues(durationKey.getTag(), toByteArray(rawAvailableMinDurations));
//
// android.scaler.availableStallDurations (int x n x 4 array)
// Each 4-tuple is (format, width, height, stallDuration).
//
long[] expectedAvailableStallDurations = new long[] {
0x20, // RAW16: 3280x2464, no stall
3280,
2464,
0,
0x23, // YCbCr_420_888: 3264x2448, no stall
3264,
2448,
0,
0x23, // YCbCr_420_888: 3200x2400, no stall
3200,
2400,
0,
0x100, // ImageFormat.JPEG: 3264x2448
3264,
2448,
33333334,
0x100, // ImageFormat.JPEG: 3200x2400
3200,
2400,
33333335,
0x100, // ImageFormat.JPEG: 2592x1944
2592,
1944,
33333336,
0x100, // ImageFormat.JPEG: 2048x1536
2048,
1536,
33333337,
0x100, // ImageFormat.JPEG: 1920x1080
1920,
1080,
33333338 };
// Note: RAW16 and YUV_420_888 omitted intentionally; omitted values should default to 0
long[] rawAvailableStallDurations = new long[] {
0x21, // BLOB: 3264x2448
3264,
2448,
33333334,
0x21, // BLOB: 3200x2400
3200,
2400,
33333335,
0x21, // BLOB: 2592x1944
2592,
1944,
33333336,
0x21, // BLOB: 2048x1536
2048,
1536,
33333337,
0x21, // BLOB: 1920x1080
1920,
1080,
33333338 };
Key<StreamConfigurationDuration[]> stallDurationKey = CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS.getNativeKey();
mMetadata.writeValues(stallDurationKey.getTag(), toByteArray(rawAvailableStallDurations));
//
// android.scaler.streamConfigurationMap (synthetic as StreamConfigurationMap)
//
StreamConfigurationMap streamConfigMap = mMetadata.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// Inputs
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.RAW_SENSOR, 320, 240, /*output*/
false);
// Outputs
checkStreamConfigurationMapByFormatSize(streamConfigMap, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 1920, 1080, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 2048, 1536, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 2592, 1944, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 3200, 2400, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.YUV_420_888, 3200, 2400, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.YUV_420_888, 3264, 2448, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.RAW_SENSOR, 3280, 2464, /*output*/
true);
// Min Frame Durations
final int DURATION_TUPLE_SIZE = 4;
for (int i = 0; i < expectedAvailableMinDurations.length; i += DURATION_TUPLE_SIZE) {
checkStreamConfigurationMapDurationByFormatSize(streamConfigMap, (int) expectedAvailableMinDurations[i], (int) expectedAvailableMinDurations[i + 1], (int) expectedAvailableMinDurations[i + 2], Duration.MinFrame, expectedAvailableMinDurations[i + 3]);
}
// Stall Durations
for (int i = 0; i < expectedAvailableStallDurations.length; i += DURATION_TUPLE_SIZE) {
checkStreamConfigurationMapDurationByFormatSize(streamConfigMap, (int) expectedAvailableStallDurations[i], (int) expectedAvailableStallDurations[i + 1], (int) expectedAvailableStallDurations[i + 2], Duration.Stall, expectedAvailableStallDurations[i + 3]);
}
}
Example use of android.hardware.camera2.params.StreamConfigurationMap from the project android_frameworks_base (DirtyUnicorns): class CameraConstrainedHighSpeedCaptureSessionImpl, method createHighSpeedRequestList.
/**
 * Create the list of capture requests implementing one constrained high speed burst
 * for the given request: the request is duplicated maxFps/30 times so that repeated
 * submission of the list keeps the preview result rate near 30fps while recording
 * at the high speed fps range carried in the request.
 *
 * @param request the template request whose targets and controls are carried over
 * @return an unmodifiable list of capture requests derived from {@code request}
 * @throws CameraAccessException if the underlying camera operations fail
 * @throws IllegalArgumentException if {@code request} is null
 */
@Override
public List<CaptureRequest> createHighSpeedRequestList(CaptureRequest request) throws CameraAccessException {
if (request == null) {
throw new IllegalArgumentException("Input capture request must not be null");
}
Collection<Surface> outputSurfaces = request.getTargets();
Range<Integer> fpsRange = request.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
StreamConfigurationMap config = mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// NOTE(review): fpsRange is dereferenced below (getUpper()); presumably this
// validation rejects a null/invalid range — confirm in SurfaceUtils.
SurfaceUtils.checkConstrainedHighSpeedSurfaces(outputSurfaces, fpsRange, config);
// Request list size: to limit the preview to 30fps, need use maxFps/30; to maximize
// the preview frame rate, should use maxBatch size for that high speed stream
// configuration. We choose the former for now.
int requestListSize = fpsRange.getUpper() / 30;
List<CaptureRequest> requestList = new ArrayList<CaptureRequest>();
// Prepare the Request builders: need carry over the request controls.
// First, create a request builder that will only include preview or recording target.
CameraMetadataNative requestMetadata = new CameraMetadataNative(request.getNativeCopy());
// Note that after this step, the requestMetadata is mutated (swapped) and can not be used
// for next request builder creation.
CaptureRequest.Builder singleTargetRequestBuilder = new CaptureRequest.Builder(requestMetadata, /*reprocess*/
false, CameraCaptureSession.SESSION_ID_NONE);
// Overwrite the capture intent to make sure a good value is set.
Iterator<Surface> iterator = outputSurfaces.iterator();
Surface firstSurface = iterator.next();
Surface secondSurface = null;
// NOTE(review): PREVIEW intent for a lone HW-video-encoder surface looks inverted
// relative to the else-branch comment ("Video only") — confirm against upstream.
if (outputSurfaces.size() == 1 && SurfaceUtils.isSurfaceForHwVideoEncoder(firstSurface)) {
singleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
} else {
// Video only, or preview + video
singleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
}
singleTargetRequestBuilder.setPartOfCHSRequestList(/*partOfCHSList*/
true);
// Second, Create a request builder that will include both preview and recording targets.
CaptureRequest.Builder doubleTargetRequestBuilder = null;
if (outputSurfaces.size() == 2) {
// Have to create a new copy, the original one was mutated after a new
// CaptureRequest.Builder creation.
requestMetadata = new CameraMetadataNative(request.getNativeCopy());
doubleTargetRequestBuilder = new CaptureRequest.Builder(requestMetadata, /*reprocess*/
false, CameraCaptureSession.SESSION_ID_NONE);
doubleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
doubleTargetRequestBuilder.addTarget(firstSurface);
secondSurface = iterator.next();
doubleTargetRequestBuilder.addTarget(secondSurface);
doubleTargetRequestBuilder.setPartOfCHSRequestList(/*partOfCHSList*/
true);
// Make sure singleTargetRequestBuilder contains only recording surface for
// preview + recording case.
Surface recordingSurface = firstSurface;
if (!SurfaceUtils.isSurfaceForHwVideoEncoder(recordingSurface)) {
recordingSurface = secondSurface;
}
singleTargetRequestBuilder.addTarget(recordingSurface);
} else {
// Single output case: either recording or preview.
singleTargetRequestBuilder.addTarget(firstSurface);
}
// Generate the final request list.
for (int i = 0; i < requestListSize; i++) {
if (i == 0 && doubleTargetRequestBuilder != null) {
// First request should be recording + preview request
requestList.add(doubleTargetRequestBuilder.build());
} else {
requestList.add(singleTargetRequestBuilder.build());
}
}
return Collections.unmodifiableList(requestList);
}
Aggregations