Use of android.hardware.camera2.params.ReprocessFormatsMap in the project platform_frameworks_base by android.
From the class CameraMetadataTest, method testReadWriteReprocessFormatsMap:
@SmallTest
public void testReadWriteReprocessFormatsMap() {
    // final int RAW_OPAQUE = 0x24; // TODO: add RAW_OPAQUE to ImageFormat
    final int RAW16 = ImageFormat.RAW_SENSOR;
    final int YUV_420_888 = ImageFormat.YUV_420_888;
    final int BLOB = 0x21;

    // TODO: also test HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED as an output
    int[] contents = new int[] {
            YUV_420_888, 3, YUV_420_888, ImageFormat.NV21, BLOB,
            RAW16, 2, YUV_420_888, BLOB
    };

    // int32 x n
    Key<ReprocessFormatsMap> key = new Key<ReprocessFormatsMap>(
            "android.scaler.availableInputOutputFormatsMap", ReprocessFormatsMap.class);
    mMetadata.writeValues(key.getTag(), toByteArray(contents));

    ReprocessFormatsMap map = mMetadata.get(key);

    /*
     * Make sure the inputs/outputs were what we expected.
     * - Use public image format constants here.
     */
    int[] expectedInputs = new int[] { YUV_420_888, RAW16 };
    assertArrayEquals(expectedInputs, map.getInputs());

    int[] expectedYuvOutputs = new int[] { YUV_420_888, ImageFormat.NV21, ImageFormat.JPEG };
    assertArrayEquals(expectedYuvOutputs, map.getOutputs(ImageFormat.YUV_420_888));

    int[] expectedRaw16Outputs = new int[] { YUV_420_888, ImageFormat.JPEG };
    assertArrayEquals(expectedRaw16Outputs, map.getOutputs(ImageFormat.RAW_SENSOR));

    // Finally, do a round-trip check as a sanity check
    checkKeyMarshal("android.scaler.availableInputOutputFormatsMap",
            new ReprocessFormatsMap(contents), toByteArray(contents));
}
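The contents array above follows the flattened layout that the test's assertions exercise: an input format code, a count n, then n output format codes, repeated for each input (here YUV_420_888 maps to three outputs and RAW16 to two). As a rough illustration of that layout only, not the framework's own parsing code inside ReprocessFormatsMap, a minimal standalone decoder might look like this (the class and method names ReprocessMapDecoder/decode are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

/** Minimal sketch: decode the flattened input/output formats layout shown in the test. */
public final class ReprocessMapDecoder {

    /**
     * Decodes entries of the form: inputFormat, outputCount, output_1 ... output_n.
     * Returns a map from each input format code to its output format codes.
     */
    public static Map<Integer, int[]> decode(int[] flattened) {
        Map<Integer, int[]> result = new LinkedHashMap<>();
        int i = 0;
        while (i < flattened.length) {
            int inputFormat = flattened[i++];
            int outputCount = flattened[i++];
            int[] outputs = new int[outputCount];
            System.arraycopy(flattened, i, outputs, 0, outputCount);
            i += outputCount;
            result.put(inputFormat, outputs);
        }
        return result;
    }

    public static void main(String[] args) {
        // Same layout as the test's "contents", written with the raw format codes
        // (YUV_420_888 = 0x23, NV21 = 0x11, BLOB = 0x21, RAW16 = 0x20) so the
        // example runs without Android classes on the classpath.
        int[] contents = { 0x23, 3, 0x23, 0x11, 0x21, 0x20, 2, 0x23, 0x21 };
        decode(contents).forEach((in, outs) ->
                System.out.printf("input 0x%x -> %d outputs%n", in, outs.length));
    }
}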
Use of android.hardware.camera2.params.ReprocessFormatsMap in the project platform_frameworks_base by android.
From the class CameraMetadataNative, method getStreamConfigurationMap:
private StreamConfigurationMap getStreamConfigurationMap() {
    StreamConfiguration[] configurations = getBase(
            CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
    StreamConfigurationDuration[] minFrameDurations = getBase(
            CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
    StreamConfigurationDuration[] stallDurations = getBase(
            CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS);
    StreamConfiguration[] depthConfigurations = getBase(
            CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
    StreamConfigurationDuration[] depthMinFrameDurations = getBase(
            CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS);
    StreamConfigurationDuration[] depthStallDurations = getBase(
            CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
    HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(
            CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
    ReprocessFormatsMap inputOutputFormatsMap = getBase(
            CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP);
    int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);

    boolean listHighResolution = false;
    for (int capability : capabilities) {
        if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE) {
            listHighResolution = true;
            break;
        }
    }

    return new StreamConfigurationMap(configurations, minFrameDurations, stallDurations,
            depthConfigurations, depthMinFrameDurations, depthStallDurations,
            highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution);
}
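For context, applications never touch the ReprocessFormatsMap that this method feeds into the StreamConfigurationMap constructor; they see it through the public StreamConfigurationMap accessors getInputFormats() and getValidOutputFormatsForInput(). A minimal app-side sketch of querying those accessors might look like the following (the class name ReprocessFormatQuery and the method dumpReprocessFormats are illustrative, not part of the framework):

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Log;

/** Sketch: how an app sees the reprocess format map through the public camera2 API. */
public final class ReprocessFormatQuery {

    private static final String TAG = "ReprocessFormatQuery";

    /** Logs each supported reprocess input format and how many valid output formats it has. */
    public static void dumpReprocessFormats(Context context, String cameraId)
            throws CameraAccessException {
        CameraManager manager =
                (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap map =
                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
            return; // No stream configuration map reported for this camera.
        }
        // These accessors are backed by the data that getStreamConfigurationMap() above
        // assembles, including the SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP entry.
        for (int inputFormat : map.getInputFormats()) {
            int[] outputFormats = map.getValidOutputFormatsForInput(inputFormat);
            Log.d(TAG, "input 0x" + Integer.toHexString(inputFormat)
                    + " -> " + outputFormats.length + " output formats");
        }
    }
}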
Use of android.hardware.camera2.params.ReprocessFormatsMap in the project android_frameworks_base by DirtyUnicorns.
From the class CameraMetadataNative, method getStreamConfigurationMap. The code is identical to the platform_frameworks_base example above.
Use of android.hardware.camera2.params.ReprocessFormatsMap in the project android_frameworks_base by AOSPA.
From the class CameraMetadataTest, method testReadWriteReprocessFormatsMap. The code is identical to the platform_frameworks_base test above.
Use of android.hardware.camera2.params.ReprocessFormatsMap in the project android_frameworks_base by AOSPA.
From the class CameraMetadataNative, method getStreamConfigurationMap. The code is identical to the platform_frameworks_base example above.