Use of android.hardware.camera2.params.StreamConfigurationMap in project platform_frameworks_base by android.
From the class CameraMetadataTest, method testOverrideStreamConfigurationMap.
/**
* Set the raw native value of the available stream configurations; ensure that
* the read-out managed value is consistent with what we write in.
*/
@SmallTest
public void testOverrideStreamConfigurationMap() {
/*
* First, write all the raw values:
* - availableStreamConfigurations
* - availableMinFrameDurations
* - availableStallDurations
*
* Then, read this out as a synthetic multi-key 'streamConfigurationMap'
*
* Finally, validate that the map was unmarshaled correctly
* and is converting the internal formats to public formats properly.
*/
//
// android.scaler.availableStreamConfigurations (int x n x 4 array)
//
final int OUTPUT = 0;
final int INPUT = 1;
int[] rawAvailableStreamConfigs = new int[] {
        0x20, 3280, 2464, OUTPUT, // RAW16
        0x23, 3264, 2448, OUTPUT, // YCbCr_420_888
        0x23, 3200, 2400, OUTPUT, // YCbCr_420_888
        0x21, 3264, 2448, OUTPUT, // BLOB
        0x21, 3200, 2400, OUTPUT, // BLOB
        0x21, 2592, 1944, OUTPUT, // BLOB
        0x21, 2048, 1536, OUTPUT, // BLOB
        0x21, 1920, 1080, OUTPUT, // BLOB
        0x22, 640, 480, OUTPUT, // IMPLEMENTATION_DEFINED
        0x20, 320, 240, INPUT // RAW16
};
Key<StreamConfiguration[]> configKey = CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS.getNativeKey();
mMetadata.writeValues(configKey.getTag(), toByteArray(rawAvailableStreamConfigs));
//
// android.scaler.availableMinFrameDurations (int x n x 4 array)
//
long[] expectedAvailableMinDurations = new long[] {
        0x20, 3280, 2464, 33333331, // RAW16
        0x23, 3264, 2448, 33333332, // YCbCr_420_888
        0x23, 3200, 2400, 33333333, // YCbCr_420_888
        0x100, 3264, 2448, 33333334, // ImageFormat.JPEG
        0x100, 3200, 2400, 33333335, // ImageFormat.JPEG
        0x100, 2592, 1944, 33333336, // ImageFormat.JPEG
        0x100, 2048, 1536, 33333337, // ImageFormat.JPEG
        0x100, 1920, 1080, 33333338 // ImageFormat.JPEG
};
long[] rawAvailableMinDurations = new long[] {
        0x20, 3280, 2464, 33333331, // RAW16
        0x23, 3264, 2448, 33333332, // YCbCr_420_888
        0x23, 3200, 2400, 33333333, // YCbCr_420_888
        0x21, 3264, 2448, 33333334, // BLOB
        0x21, 3200, 2400, 33333335, // BLOB
        0x21, 2592, 1944, 33333336, // BLOB
        0x21, 2048, 1536, 33333337, // BLOB
        0x21, 1920, 1080, 33333338 // BLOB
};
Key<StreamConfigurationDuration[]> durationKey = CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS.getNativeKey();
mMetadata.writeValues(durationKey.getTag(), toByteArray(rawAvailableMinDurations));
//
// android.scaler.availableStallDurations (int x n x 4 array)
//
long[] expectedAvailableStallDurations = new long[] {
        0x20, 3280, 2464, 0, // RAW16
        0x23, 3264, 2448, 0, // YCbCr_420_888
        0x23, 3200, 2400, 0, // YCbCr_420_888
        0x100, 3264, 2448, 33333334, // ImageFormat.JPEG
        0x100, 3200, 2400, 33333335, // ImageFormat.JPEG
        0x100, 2592, 1944, 33333336, // ImageFormat.JPEG
        0x100, 2048, 1536, 33333337, // ImageFormat.JPEG
        0x100, 1920, 1080, 33333338 // ImageFormat.JPEG
};
// Note: RAW16 and YUV_420_888 omitted intentionally; omitted values should default to 0
long[] rawAvailableStallDurations = new long[] {
        0x21, 3264, 2448, 33333334, // BLOB
        0x21, 3200, 2400, 33333335, // BLOB
        0x21, 2592, 1944, 33333336, // BLOB
        0x21, 2048, 1536, 33333337, // BLOB
        0x21, 1920, 1080, 33333338 // BLOB
};
Key<StreamConfigurationDuration[]> stallDurationKey = CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS.getNativeKey();
mMetadata.writeValues(stallDurationKey.getTag(), toByteArray(rawAvailableStallDurations));
//
// android.scaler.streamConfigurationMap (synthetic as StreamConfigurationMap)
//
StreamConfigurationMap streamConfigMap = mMetadata.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// Inputs
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.RAW_SENSOR, 320, 240, /*output*/ false);
// Outputs
checkStreamConfigurationMapByFormatSize(streamConfigMap, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, /*output*/ true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 1920, 1080, /*output*/ true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 2048, 1536, /*output*/ true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 2592, 1944, /*output*/ true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 3200, 2400, /*output*/ true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.YUV_420_888, 3200, 2400, /*output*/ true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.YUV_420_888, 3264, 2448, /*output*/ true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.RAW_SENSOR, 3280, 2464, /*output*/ true);
// Min Frame Durations
final int DURATION_TUPLE_SIZE = 4;
for (int i = 0; i < expectedAvailableMinDurations.length; i += DURATION_TUPLE_SIZE) {
checkStreamConfigurationMapDurationByFormatSize(streamConfigMap,
        (int) expectedAvailableMinDurations[i],
        (int) expectedAvailableMinDurations[i + 1],
        (int) expectedAvailableMinDurations[i + 2],
        Duration.MinFrame,
        expectedAvailableMinDurations[i + 3]);
}
for (int i = 0; i < expectedAvailableStallDurations.length; i += DURATION_TUPLE_SIZE) {
checkStreamConfigurationMapDurationByFormatSize(streamConfigMap,
        (int) expectedAvailableStallDurations[i],
        (int) expectedAvailableStallDurations[i + 1],
        (int) expectedAvailableStallDurations[i + 2],
        Duration.Stall,
        expectedAvailableStallDurations[i + 3]);
}
}
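For context, here is a minimal sketch (not part of the test above) of how application code reads the same information back through the public API once the synthetic key is assembled. The CameraManager instance and camera id are assumed, as are the usual android.hardware.camera2, android.graphics, and android.util imports.
static void dumpJpegStreamConfigs(CameraManager manager, String cameraId) throws CameraAccessException {
    CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
    StreamConfigurationMap map = chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    // The synthetic key converts internal HAL formats (0x20 RAW16, 0x21 BLOB, 0x23 YCbCr_420_888)
    // to public ImageFormat constants, so queries use ImageFormat.* values.
    for (Size size : map.getOutputSizes(ImageFormat.JPEG)) {
        long minFrameNs = map.getOutputMinFrameDuration(ImageFormat.JPEG, size); // min frame duration, ns
        long stallNs = map.getOutputStallDuration(ImageFormat.JPEG, size); // stall duration, ns
        Log.d("StreamConfigDemo", "JPEG " + size + " minFrame=" + minFrameNs + "ns stall=" + stallNs + "ns");
    }
}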
Use of android.hardware.camera2.params.StreamConfigurationMap in project platform_frameworks_base by android.
From the class Camera2RecordingTest, method constrainedHighSpeedRecording.
private void constrainedHighSpeedRecording() throws Exception {
for (String id : mCameraIds) {
try {
Log.i(TAG, "Testing constrained high speed recording for camera " + id);
// Re-use the MediaRecorder object for the same camera device.
mMediaRecorder = new MediaRecorder();
openDevice(id);
if (!mStaticInfo.isConstrainedHighSpeedVideoSupported()) {
Log.i(TAG, "Camera " + id + " doesn't support high speed recording, skipping.");
continue;
}
// Test iteration starts...
for (int iteration = 0; iteration < getIterationCount(); ++iteration) {
Log.v(TAG, String.format("Constrained high speed recording: %d/%d", iteration + 1, getIterationCount()));
StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] highSpeedVideoSizes = config.getHighSpeedVideoSizes();
for (Size size : highSpeedVideoSizes) {
List<Range<Integer>> fixedFpsRanges = getHighSpeedFixedFpsRangeForSize(config, size);
mCollector.expectTrue("Unable to find the fixed frame rate fps range for " + "size " + size, fixedFpsRanges.size() > 0);
// Test recording for each FPS range
for (Range<Integer> fpsRange : fixedFpsRanges) {
int captureRate = fpsRange.getLower();
final int VIDEO_FRAME_RATE = 30;
// Skip this range if its upper bound exceeds the highest recording FPS supported by CamcorderProfile.
if (fpsRange.getUpper() > getFpsFromHighSpeedProfileForSize(size)) {
Log.w(TAG, "high speed recording " + size + "@" + captureRate + "fps" + " is not supported by CamcorderProfile");
continue;
}
mOutMediaFileName = VIDEO_FILE_PATH + "/test_cslowMo_video_" + captureRate + "fps_" + id + "_" + size.toString() + ".mp4";
prepareRecording(size, VIDEO_FRAME_RATE, captureRate);
// prepare preview surface by using video size.
updatePreviewSurfaceWithVideo(size, captureRate);
// Start recording
SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
startSlowMotionRecording(/*useMediaRecorder*/ true, VIDEO_FRAME_RATE, captureRate, fpsRange, resultListener, /*useHighSpeedSession*/ true);
// Record certain duration.
SystemClock.sleep(RECORDING_DURATION_MS);
// Stop recording and preview
stopRecording(/*useMediaRecorder*/ true);
// Convert the number of frames the camera produced into a duration in ms.
int durationMs = (int) (resultListener.getTotalNumFrames() * 1000.0f / VIDEO_FRAME_RATE);
// Validation.
validateRecording(size, durationMs);
}
getResultPrinter().printStatus(getIterationCount(), iteration + 1, id);
Thread.sleep(getTestWaitIntervalMs());
}
}
} finally {
closeDevice();
releaseRecorder();
}
}
}
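The recording helpers above (prepareRecording, startSlowMotionRecording, stopRecording) belong to the test harness. As a hedged sketch of the public-API side they build on, selecting a high-speed size and a fixed (lower == upper) FPS range from the StreamConfigurationMap looks roughly like this; the map variable is assumed to be the camera's StreamConfigurationMap, and the fixed-range filter is an assumption about what getHighSpeedFixedFpsRangeForSize does.
// Sketch: enumerate constrained high-speed sizes and their fixed FPS ranges.
for (Size size : map.getHighSpeedVideoSizes()) {
    for (Range<Integer> range : map.getHighSpeedVideoFpsRangesFor(size)) {
        if (range.getLower().equals(range.getUpper())) {
            // A fixed range such as [120, 120]; a recording session would target it via
            // CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE on a session created with
            // CameraDevice.createConstrainedHighSpeedCaptureSession(...).
            Log.d("HighSpeedDemo", size + " supports fixed FPS range " + range);
        }
    }
}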
Use of android.hardware.camera2.params.StreamConfigurationMap in project platform_frameworks_base by android.
From the class CameraTestUtils, method getSupportedSizeForFormat.
/**
* Get the available output sizes for the user-defined {@code format}.
*
* <p>Note that implementation-defined/hidden formats are not supported.</p>
*/
public static Size[] getSupportedSizeForFormat(int format, String cameraId, CameraManager cameraManager) throws CameraAccessException {
CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
assertNotNull("Can't get camera characteristics!", properties);
if (VERBOSE) {
Log.v(TAG, "get camera characteristics for camera: " + cameraId);
}
StreamConfigurationMap configMap = properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] availableSizes = configMap.getOutputSizes(format);
assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: " + format);
Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(format);
if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
System.arraycopy(availableSizes, 0, allSizes, 0, availableSizes.length);
System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length, highResAvailableSizes.length);
availableSizes = allSizes;
}
if (VERBOSE)
Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
return availableSizes;
}
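A typical caller then picks one of the returned sizes, for example the largest by pixel area; a short usage sketch, assuming a valid cameraId and cameraManager are in scope:
// Sketch: choose the largest available JPEG output size (regular or high-resolution).
Size[] jpegSizes = getSupportedSizeForFormat(ImageFormat.JPEG, cameraId, cameraManager);
Size largest = jpegSizes[0];
for (Size s : jpegSizes) {
    if ((long) s.getWidth() * s.getHeight() > (long) largest.getWidth() * largest.getHeight()) {
        largest = s; // keep the size with the greatest pixel count
    }
}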
Use of android.hardware.camera2.params.StreamConfigurationMap in project platform_frameworks_base by android.
From the class StaticMetadata, method getAvailableSizesForFormatChecked.
/**
* Get the available sizes for the given format and direction, optionally limited to the
* fast and/or slow (high-resolution) size lists.
*
* @param format The format for the requested size array.
* @param direction The stream direction, input or output.
* @param fastSizes whether to include getOutputSizes() sizes (generally faster)
* @param slowSizes whether to include getHighResolutionOutputSizes() sizes (generally slower)
* @return The sizes of the given format, empty array if no available size is found.
*/
public Size[] getAvailableSizesForFormatChecked(int format, StreamDirection direction, boolean fastSizes, boolean slowSizes) {
Key<StreamConfigurationMap> key = CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
StreamConfigurationMap config = getValueFromKeyNonNull(key);
if (config == null) {
return new Size[0];
}
Size[] sizes = null;
switch(direction) {
case Output:
Size[] fastSizeList = null;
Size[] slowSizeList = null;
if (fastSizes) {
fastSizeList = config.getOutputSizes(format);
}
if (slowSizes) {
slowSizeList = config.getHighResolutionOutputSizes(format);
}
if (fastSizeList != null && slowSizeList != null) {
sizes = new Size[slowSizeList.length + fastSizeList.length];
System.arraycopy(fastSizeList, 0, sizes, 0, fastSizeList.length);
System.arraycopy(slowSizeList, 0, sizes, fastSizeList.length, slowSizeList.length);
} else if (fastSizeList != null) {
sizes = fastSizeList;
} else if (slowSizeList != null) {
sizes = slowSizeList;
}
break;
case Input:
sizes = config.getInputSizes(format);
break;
default:
throw new IllegalArgumentException("direction must be output or input");
}
if (sizes == null) {
sizes = new Size[0];
}
return sizes;
}
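The two boolean flags let callers choose which size classes they want; a brief usage sketch, assuming a StaticMetadata instance named staticInfo:
// All output sizes (regular plus high-resolution) for YUV_420_888.
Size[] allOutput = staticInfo.getAvailableSizesForFormatChecked(
        ImageFormat.YUV_420_888, StreamDirection.Output, /*fastSizes*/ true, /*slowSizes*/ true);
// Only the "fast" sizes, i.e. those returned by getOutputSizes().
Size[] fastOnly = staticInfo.getAvailableSizesForFormatChecked(
        ImageFormat.YUV_420_888, StreamDirection.Output, /*fastSizes*/ true, /*slowSizes*/ false);
// Input (reprocessing) sizes; empty on devices without input stream support.
Size[] inputs = staticInfo.getAvailableSizesForFormatChecked(
        ImageFormat.YUV_420_888, StreamDirection.Input, /*fastSizes*/ true, /*slowSizes*/ true);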
Use of android.hardware.camera2.params.StreamConfigurationMap in project platform_frameworks_base by android.
From the class StaticMetadata, method isHighSpeedVideoSupported.
/**
* Check whether high speed video is supported: the HIGH_SPEED_VIDEO scene mode must be
* advertised, and the supported high speed sizes and fps ranges must be non-empty.
*
* @return true if high speed video is supported.
*/
public boolean isHighSpeedVideoSupported() {
List<Integer> sceneModes = Arrays.asList(CameraTestUtils.toObject(getAvailableSceneModesChecked()));
if (sceneModes.contains(CameraCharacteristics.CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO)) {
StreamConfigurationMap config = getValueFromKeyNonNull(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (config == null) {
return false;
}
Size[] availableSizes = config.getHighSpeedVideoSizes();
if (availableSizes.length == 0) {
return false;
}
for (Size size : availableSizes) {
Range<Integer>[] availableFpsRanges = config.getHighSpeedVideoFpsRangesFor(size);
if (availableFpsRanges.length == 0) {
return false;
}
}
return true;
} else {
return false;
}
}
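For comparison, the constrained variant checked by Camera2RecordingTest above (isConstrainedHighSpeedVideoSupported) is advertised as a request capability rather than a scene mode; a rough sketch of that check, assuming direct access to the CameraCharacteristics object:
// Sketch: constrained high-speed recording is flagged in REQUEST_AVAILABLE_CAPABILITIES.
int[] caps = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
boolean constrainedHighSpeed = false;
for (int cap : caps) {
    if (cap == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO) {
        constrainedHighSpeed = true;
        break;
    }
}
// When supported, the usable sizes and FPS ranges come from the same StreamConfigurationMap
// queries (getHighSpeedVideoSizes / getHighSpeedVideoFpsRangesFor) used above.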