Example usage of android.hardware.camera2.params.StreamConfigurationMap from the android_frameworks_base project (AOSPA): class StaticMetadata, method getValidOutputFormatsForInput.
/**
 * Look up the output formats that can be produced from the given input format.
 *
 * @param inputFormat The input format used to produce the output images.
 * @return The output formats reachable from {@code inputFormat}, or an empty
 *         array if the stream configuration map is unavailable or no format
 *         matches.
 */
public int[] getValidOutputFormatsForInput(int inputFormat) {
    Key<StreamConfigurationMap> configKey =
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
    StreamConfigurationMap configMap = getValueFromKeyNonNull(configKey);
    // Fall back to an empty result rather than propagating a null map.
    return (configMap == null)
            ? new int[0]
            : configMap.getValidOutputFormatsForInput(inputFormat);
}
Example usage of android.hardware.camera2.params.StreamConfigurationMap from the android_frameworks_base project (AOSPA): class StaticMetadata, method getAvailableFormats.
/**
 * Look up the available formats for a stream direction.
 *
 * @param direction The stream direction, input or output.
 * @return The formats of the given direction; an empty array if the stream
 *         configuration map is unavailable.
 * @throws IllegalArgumentException if {@code direction} is not a known value.
 */
public int[] getAvailableFormats(StreamDirection direction) {
    Key<StreamConfigurationMap> configKey =
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
    StreamConfigurationMap configMap = getValueFromKeyNonNull(configKey);
    if (configMap == null) {
        // Missing metadata: report no formats rather than failing.
        return new int[0];
    }
    if (direction == StreamDirection.Output) {
        return configMap.getOutputFormats();
    }
    if (direction == StreamDirection.Input) {
        return configMap.getInputFormats();
    }
    throw new IllegalArgumentException("direction must be output or input");
}
Example usage of android.hardware.camera2.params.StreamConfigurationMap from the android_frameworks_base project (AOSPA): class CameraMetadataTest, method testOverrideStreamConfigurationMap.
/**
 * Set the raw native value of the available stream configurations; ensure that
 * the read-out managed value is consistent with what we write in.
 */
@SmallTest
public void testOverrideStreamConfigurationMap() {
/*
 * First, write all the raw values:
 * - availableStreamConfigurations
 * - availableMinFrameDurations
 * - availableStallDurations
 *
 * Then, read this out as a synthetic multi-key 'streamConfigurationMap'
 *
 * Finally, validate that the map was unmarshaled correctly
 * and is converting the internal formats to public formats properly.
 */
//
// android.scaler.availableStreamConfigurations (int x n x 4 array)
// Each 4-tuple is (format, width, height, direction). Formats here are HAL
// pixel formats: 0x20 = RAW16, 0x21 = BLOB, 0x22 = IMPLEMENTATION_DEFINED,
// 0x23 = YCbCr_420_888.
//
final int OUTPUT = 0;
final int INPUT = 1;
int[] rawAvailableStreamConfigs = new int[] {
0x20, // RAW16, 3280x2464, output
3280,
2464,
OUTPUT,
0x23, // YCbCr_420_888, 3264x2448, output
3264,
2448,
OUTPUT,
0x23, // YCbCr_420_888, 3200x2400, output
3200,
2400,
OUTPUT,
0x21, // BLOB, 3264x2448, output
3264,
2448,
OUTPUT,
0x21, // BLOB, 3200x2400, output
3200,
2400,
OUTPUT,
0x21, // BLOB, 2592x1944, output
2592,
1944,
OUTPUT,
0x21, // BLOB, 2048x1536, output
2048,
1536,
OUTPUT,
0x21, // BLOB, 1920x1080, output
1920,
1080,
OUTPUT,
0x22, // IMPLEMENTATION_DEFINED, 640x480, output
640,
480,
OUTPUT,
0x20, // RAW16, 320x240, input
320,
240,
INPUT };
Key<StreamConfiguration[]> configKey = CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS.getNativeKey();
mMetadata.writeValues(configKey.getTag(), toByteArray(rawAvailableStreamConfigs));
//
// android.scaler.availableMinFrameDurations (long x n x 4 array)
// Each 4-tuple is (format, width, height, duration ns). The 'expected' array
// uses public formats (0x100 = ImageFormat.JPEG), while the 'raw' array uses
// the corresponding HAL format (0x21 = BLOB) — this checks the map's
// internal-to-public format conversion.
//
long[] expectedAvailableMinDurations = new long[] {
0x20, // RAW16, 3280x2464
3280,
2464,
33333331,
0x23, // YCbCr_420_888, 3264x2448
3264,
2448,
33333332,
0x23, // YCbCr_420_888, 3200x2400
3200,
2400,
33333333,
0x100, // ImageFormat.JPEG, 3264x2448
3264,
2448,
33333334,
0x100, // ImageFormat.JPEG, 3200x2400
3200,
2400,
33333335,
0x100, // ImageFormat.JPEG, 2592x1944
2592,
1944,
33333336,
0x100, // ImageFormat.JPEG, 2048x1536
2048,
1536,
33333337,
0x100, // ImageFormat.JPEG, 1920x1080
1920,
1080,
33333338 };
long[] rawAvailableMinDurations = new long[] {
0x20, // RAW16, 3280x2464
3280,
2464,
33333331,
0x23, // YCbCr_420_888, 3264x2448
3264,
2448,
33333332,
0x23, // YCbCr_420_888, 3200x2400
3200,
2400,
33333333,
0x21, // BLOB, 3264x2448
3264,
2448,
33333334,
0x21, // BLOB, 3200x2400
3200,
2400,
33333335,
0x21, // BLOB, 2592x1944
2592,
1944,
33333336,
0x21, // BLOB, 2048x1536
2048,
1536,
33333337,
0x21, // BLOB, 1920x1080
1920,
1080,
33333338 };
Key<StreamConfigurationDuration[]> durationKey = CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS.getNativeKey();
mMetadata.writeValues(durationKey.getTag(), toByteArray(rawAvailableMinDurations));
//
// android.scaler.availableStallDurations (long x n x 4 array)
// RAW16 and YUV entries are expected to report a 0 stall duration.
//
long[] expectedAvailableStallDurations = new long[] {
0x20, // RAW16, 3280x2464, no stall
3280,
2464,
0,
0x23, // YCbCr_420_888, 3264x2448, no stall
3264,
2448,
0,
0x23, // YCbCr_420_888, 3200x2400, no stall
3200,
2400,
0,
0x100, // ImageFormat.JPEG, 3264x2448
3264,
2448,
33333334,
0x100, // ImageFormat.JPEG, 3200x2400
3200,
2400,
33333335,
0x100, // ImageFormat.JPEG, 2592x1944
2592,
1944,
33333336,
0x100, // ImageFormat.JPEG, 2048x1536
2048,
1536,
33333337,
0x100, // ImageFormat.JPEG, 1920x1080
1920,
1080,
33333338 };
// Note: RAW16 and YUV_420_888 omitted intentionally; omitted values should default to 0
long[] rawAvailableStallDurations = new long[] {
0x21, // BLOB, 3264x2448
3264,
2448,
33333334,
0x21, // BLOB, 3200x2400
3200,
2400,
33333335,
0x21, // BLOB, 2592x1944
2592,
1944,
33333336,
0x21, // BLOB, 2048x1536
2048,
1536,
33333337,
0x21, // BLOB, 1920x1080
1920,
1080,
33333338 };
Key<StreamConfigurationDuration[]> stallDurationKey = CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS.getNativeKey();
mMetadata.writeValues(stallDurationKey.getTag(), toByteArray(rawAvailableStallDurations));
//
// android.scaler.streamConfigurationMap (synthetic as StreamConfigurationMap)
//
StreamConfigurationMap streamConfigMap = mMetadata.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// Inputs: the single INPUT entry (RAW16 320x240) must round-trip.
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.RAW_SENSOR, 320, 240, /*output*/
false);
// Outputs: every OUTPUT entry must round-trip, with HAL formats (BLOB)
// surfaced as public formats (JPEG).
checkStreamConfigurationMapByFormatSize(streamConfigMap, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 1920, 1080, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 2048, 1536, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 2592, 1944, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.JPEG, 3200, 2400, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.YUV_420_888, 3200, 2400, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.YUV_420_888, 3264, 2448, /*output*/
true);
checkStreamConfigurationMapByFormatSize(streamConfigMap, ImageFormat.RAW_SENSOR, 3280, 2464, /*output*/
true);
// Min frame durations and stall durations: walk the expected arrays in
// (format, width, height, duration) tuples and verify each against the map.
final int DURATION_TUPLE_SIZE = 4;
for (int i = 0; i < expectedAvailableMinDurations.length; i += DURATION_TUPLE_SIZE) {
checkStreamConfigurationMapDurationByFormatSize(streamConfigMap, (int) expectedAvailableMinDurations[i], (int) expectedAvailableMinDurations[i + 1], (int) expectedAvailableMinDurations[i + 2], Duration.MinFrame, expectedAvailableMinDurations[i + 3]);
}
for (int i = 0; i < expectedAvailableStallDurations.length; i += DURATION_TUPLE_SIZE) {
checkStreamConfigurationMapDurationByFormatSize(streamConfigMap, (int) expectedAvailableStallDurations[i], (int) expectedAvailableStallDurations[i + 1], (int) expectedAvailableStallDurations[i + 2], Duration.Stall, expectedAvailableStallDurations[i + 3]);
}
}
Example usage of android.hardware.camera2.params.StreamConfigurationMap from the android_frameworks_base project (ResurrectionRemix): class Camera2SurfaceViewTestCase, method openDevice.
/**
 * Open a camera device and get the StaticMetadata for a given camera id.
 *
 * <p>Populates mCamera, mStaticInfo and mSupportRAW10; when the device
 * supports color output, also populates the ordered preview/video/still
 * size lists (plus RAW10 and YUV_420_888 sizes) and the minimum
 * frame-duration map.</p>
 *
 * @param cameraId The id of the camera device to be opened.
 */
protected void openDevice(String cameraId) throws Exception {
mCamera = CameraTestUtils.openCamera(mCameraManager, cameraId, mCameraListener, mHandler);
mCollector.setCameraId(cameraId);
CameraCharacteristics properties = mCameraManager.getCameraCharacteristics(cameraId);
// CheckLevel.ASSERT: metadata inconsistencies fail immediately, so no
// failure collector is needed.
mStaticInfo = new StaticMetadata(properties, CheckLevel.ASSERT, /*collector*/
null);
// NOTE(review): configMap is dereferenced without a null check; this assumes
// SCALER_STREAM_CONFIGURATION_MAP is always present — confirm for all devices.
StreamConfigurationMap configMap = properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
mSupportRAW10 = configMap.isOutputSupportedFor(ImageFormat.RAW10);
if (mStaticInfo.isColorOutputSupported()) {
mOrderedPreviewSizes = getSupportedPreviewSizes(cameraId, mCameraManager, getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
mOrderedVideoSizes = getSupportedVideoSizes(cameraId, mCameraManager, PREVIEW_SIZE_BOUND);
mOrderedStillSizes = getSupportedStillSizes(cameraId, mCameraManager, null);
if (mSupportRAW10) {
mOrderedRAW10Sizes = getSortedSizesForFormat(cameraId, mCameraManager, ImageFormat.RAW10, null);
}
mOrderedYUV420888Sizes = getSortedSizesForFormat(cameraId, mCameraManager, ImageFormat.YUV_420_888, null);
// Use ImageFormat.YUV_420_888 for now. TODO: need figure out what's format for preview
// in public API side.
mMinPreviewFrameDurationMap = mStaticInfo.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.YUV_420_888);
}
}
Example usage of android.hardware.camera2.params.StreamConfigurationMap from the android_frameworks_base project (ResurrectionRemix): class LegacyCameraDevice, method configureOutputs.
/**
 * Configure the device with a set of output surfaces.
 *
 * <p>Using empty or {@code null} {@code outputs} is the same as unconfiguring.</p>
 *
 * <p>Every surface in {@code outputs} must be non-{@code null}.</p>
 *
 * @param outputs a list of surfaces to set. LegacyCameraDevice will take ownership of this
 * list; it must not be modified by the caller once it's passed in.
 * @return an error code for this binder operation, or {@link NO_ERROR}
 * on success.
 */
public int configureOutputs(SparseArray<Surface> outputs) {
// Surfaces paired with the size each will actually be configured at
// (possibly a closest-match size for flexible consumers).
List<Pair<Surface, Size>> sizedSurfaces = new ArrayList<>();
if (outputs != null) {
int count = outputs.size();
for (int i = 0; i < count; i++) {
Surface output = outputs.valueAt(i);
if (output == null) {
Log.e(TAG, "configureOutputs - null outputs are not allowed");
return BAD_VALUE;
}
if (!output.isValid()) {
Log.e(TAG, "configureOutputs - invalid output surfaces are not allowed");
return BAD_VALUE;
}
StreamConfigurationMap streamConfigurations = mStaticCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// Validate surface size and format.
try {
Size s = getSurfaceSize(output);
int surfaceType = detectSurfaceType(output);
boolean flexibleConsumer = isFlexibleConsumer(output);
Size[] sizes = streamConfigurations.getOutputSizes(surfaceType);
if (sizes == null) {
// The detected HAL format has no public size list; map it to the
// public format whose sizes apply.
// WAR: Override default format to IMPLEMENTATION_DEFINED for b/9487482
if ((surfaceType >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 && surfaceType <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
// YUV_420_888 is always present in LEGACY for all
// IMPLEMENTATION_DEFINED output sizes, and is publicly visible in the
// API (i.e. {@code #getOutputSizes} works here).
sizes = streamConfigurations.getOutputSizes(ImageFormat.YUV_420_888);
} else if (surfaceType == LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB) {
sizes = streamConfigurations.getOutputSizes(ImageFormat.JPEG);
}
}
if (!ArrayUtils.contains(sizes, s)) {
// Exact size unsupported: flexible consumers may be resized to the
// closest supported size; otherwise the configuration is rejected.
if (flexibleConsumer && (s = findClosestSize(s, sizes)) != null) {
sizedSurfaces.add(new Pair<>(output, s));
} else {
String reason = (sizes == null) ? "format is invalid." : ("size not in valid set: " + Arrays.toString(sizes));
Log.e(TAG, String.format("Surface with size (w=%d, h=%d) and format " + "0x%x is not valid, %s", s.getWidth(), s.getHeight(), surfaceType, reason));
return BAD_VALUE;
}
} else {
sizedSurfaces.add(new Pair<>(output, s));
}
// Lock down the size before configuration
setSurfaceDimens(output, s.getWidth(), s.getHeight());
} catch (BufferQueueAbandonedException e) {
Log.e(TAG, "Surface bufferqueue is abandoned, cannot configure as output: ", e);
return BAD_VALUE;
}
}
}
// Transition the device state machine: CONFIGURING -> IDLE. Only commit the
// new surface set if both transitions succeed.
boolean success = false;
if (mDeviceState.setConfiguring()) {
mRequestThreadManager.configure(sizedSurfaces);
success = mDeviceState.setIdle();
}
if (success) {
mConfiguredSurfaces = outputs;
} else {
return LegacyExceptionUtils.INVALID_OPERATION;
}
return LegacyExceptionUtils.NO_ERROR;
}
Aggregations