Usage of android.hardware.camera2.params.StreamConfigurationMap in the android_frameworks_base project (AOSPA): class StaticMetadata, method getAvailableFormats.
/**
 * Returns the image formats available for the requested stream direction.
 *
 * @param direction whether input or output formats are being queried.
 * @return the formats supported in that direction, or an empty array when the
 *         stream configuration map is not available.
 */
public int[] getAvailableFormats(StreamDirection direction) {
    Key<StreamConfigurationMap> configKey =
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
    StreamConfigurationMap configMap = getValueFromKeyNonNull(configKey);
    // No configuration map published for this camera: report no formats.
    if (configMap == null) {
        return new int[0];
    }
    if (direction == StreamDirection.Output) {
        return configMap.getOutputFormats();
    }
    if (direction == StreamDirection.Input) {
        return configMap.getInputFormats();
    }
    throw new IllegalArgumentException("direction must be output or input");
}
Usage of android.hardware.camera2.params.StreamConfigurationMap in the android_frameworks_base project (AOSPA): class Camera2RecordingTest, method constrainedHighSpeedRecording.
/**
 * Exercises constrained high-speed (slow-motion) video recording on every camera
 * in {@code mCameraIds} that advertises support for it. For each supported
 * high-speed size and each fixed FPS range, a clip is recorded, then validated
 * for duration. The camera device and recorder are always released in the
 * finally block, even if an assertion or recording step fails.
 */
private void constrainedHighSpeedRecording() throws Exception {
for (String id : mCameraIds) {
try {
Log.i(TAG, "Testing constrained high speed recording for camera " + id);
// Re-use the MediaRecorder object for the same camera device.
mMediaRecorder = new MediaRecorder();
openDevice(id);
// Skip (not fail) cameras without the constrained high-speed capability.
if (!mStaticInfo.isConstrainedHighSpeedVideoSupported()) {
Log.i(TAG, "Camera " + id + " doesn't support high speed recording, skipping.");
continue;
}
// Test iteration starts...
for (int iteration = 0; iteration < getIterationCount(); ++iteration) {
Log.v(TAG, String.format("Constrained high speed recording: %d/%d", iteration + 1, getIterationCount()));
// The configuration map must exist once high-speed support is confirmed.
StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] highSpeedVideoSizes = config.getHighSpeedVideoSizes();
for (Size size : highSpeedVideoSizes) {
// Only fixed ranges (lower == upper) give a constant capture rate.
List<Range<Integer>> fixedFpsRanges = getHighSpeedFixedFpsRangeForSize(config, size);
mCollector.expectTrue("Unable to find the fixed frame rate fps range for " + "size " + size, fixedFpsRanges.size() > 0);
// Test recording for each FPS range
for (Range<Integer> fpsRange : fixedFpsRanges) {
int captureRate = fpsRange.getLower();
// Playback rate: capture at high speed, encode the file at 30fps.
final int VIDEO_FRAME_RATE = 30;
// Skip the test if the highest recording FPS supported by CamcorderProfile
if (fpsRange.getUpper() > getFpsFromHighSpeedProfileForSize(size)) {
Log.w(TAG, "high speed recording " + size + "@" + captureRate + "fps" + " is not supported by CamcorderProfile");
continue;
}
mOutMediaFileName = VIDEO_FILE_PATH + "/test_cslowMo_video_" + captureRate + "fps_" + id + "_" + size.toString() + ".mp4";
prepareRecording(size, VIDEO_FRAME_RATE, captureRate);
// prepare preview surface by using video size.
updatePreviewSurfaceWithVideo(size, captureRate);
// Start recording
SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
startSlowMotionRecording(/*useMediaRecorder*/
true, VIDEO_FRAME_RATE, captureRate, fpsRange, resultListener, /*useHighSpeedSession*/
true);
// Record certain duration.
SystemClock.sleep(RECORDING_DURATION_MS);
// Stop recording and preview
stopRecording(/*useMediaRecorder*/
true);
// Convert number of frames camera produced into the duration in unit of ms.
// Duration is computed against the encoded (playback) frame rate, not the
// capture rate, because that is what the resulting file plays back at.
int durationMs = (int) (resultListener.getTotalNumFrames() * 1000.0f / VIDEO_FRAME_RATE);
// Validation.
validateRecording(size, durationMs);
}
// NOTE(review): with this brace placement, the per-iteration status print and
// wait run once per size rather than once per iteration — confirm this matches
// the intended upstream structure.
getResultPrinter().printStatus(getIterationCount(), iteration + 1, id);
Thread.sleep(getTestWaitIntervalMs());
}
}
} finally {
// Always release the camera and recorder, even on failure mid-iteration.
closeDevice();
releaseRecorder();
}
}
}
Usage of android.hardware.camera2.params.StreamConfigurationMap in the android_frameworks_base project (AOSPA): class CameraDeviceImpl, method createConstrainedHighSpeedCaptureSession.
/**
 * Creates a constrained high-speed capture session over one or two output
 * surfaces, after validating them against this device's stream configuration.
 *
 * @param outputs one or two surfaces to stream high-speed output to.
 * @param callback receives the session lifecycle events.
 * @param handler the handler the callback is invoked on.
 * @throws IllegalArgumentException if the surface list is null, empty, or has
 *         more than two entries.
 * @throws CameraAccessException if the camera device cannot be accessed.
 */
@Override
public void createConstrainedHighSpeedCaptureSession(List<Surface> outputs, android.hardware.camera2.CameraCaptureSession.StateCallback callback, Handler handler) throws CameraAccessException {
    // A constrained high-speed session accepts exactly one or two surfaces.
    final boolean invalidCount = (outputs == null) || outputs.isEmpty() || outputs.size() > 2;
    if (invalidCount) {
        throw new IllegalArgumentException("Output surface list must not be null and the size must be no more than 2");
    }
    // Verify each surface's size/format is legal for high-speed streaming.
    StreamConfigurationMap streamConfigMap = getCharacteristics().get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    SurfaceUtils.checkConstrainedHighSpeedSurfaces(outputs, /*fpsRange*/ null, streamConfigMap);
    // Wrap each surface in an OutputConfiguration for the internal API.
    List<OutputConfiguration> outputConfigs = new ArrayList<>(outputs.size());
    for (Surface output : outputs) {
        outputConfigs.add(new OutputConfiguration(output));
    }
    createCaptureSessionInternal(null, outputConfigs, callback, handler, /*isConstrainedHighSpeed*/ true);
}
Usage of android.hardware.camera2.params.StreamConfigurationMap in the android_frameworks_base project (AOSPA): class CameraMetadataNative, method getStreamConfigurationMap.
/**
 * Assembles a {@link StreamConfigurationMap} from the individual static-metadata
 * arrays stored in this instance: the scaler stream configurations plus their
 * min-frame/stall durations, the depth equivalents, high-speed video
 * configurations, and the reprocess input/output format map.
 *
 * @return a new StreamConfigurationMap built from this camera's metadata.
 */
private StreamConfigurationMap getStreamConfigurationMap() {
    StreamConfiguration[] configurations = getBase(CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
    StreamConfigurationDuration[] minFrameDurations = getBase(CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
    StreamConfigurationDuration[] stallDurations = getBase(CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS);
    StreamConfiguration[] depthConfigurations = getBase(CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
    StreamConfigurationDuration[] depthMinFrameDurations = getBase(CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS);
    StreamConfigurationDuration[] depthStallDurations = getBase(CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
    HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
    ReprocessFormatsMap inputOutputFormatsMap = getBase(CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP);
    int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
    // BURST_CAPTURE devices must additionally list their maximum-resolution
    // (non-burst) sizes; pass that through to the map.
    boolean listHighResolution = false;
    // Fix: getBase may return null for an absent key (all sibling lookups above
    // are treated as nullable); iterating a null array here threw an NPE.
    if (capabilities != null) {
        for (int capability : capabilities) {
            if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE) {
                listHighResolution = true;
                break;
            }
        }
    }
    return new StreamConfigurationMap(configurations, minFrameDurations, stallDurations, depthConfigurations, depthMinFrameDurations, depthStallDurations, highSpeedVideoConfigurations, inputOutputFormatsMap, listHighResolution);
}
Usage of android.hardware.camera2.params.StreamConfigurationMap in the android_frameworks_base project (AOSPA): class LegacyMetadataMapper, method mapScalerStreamConfigs.
/**
 * Populates the scaler stream-configuration static metadata ({@code
 * SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, {@code ..._MIN_FRAME_DURATIONS},
 * {@code ..._STALL_DURATIONS}) on {@code m} from legacy Camera1 parameters
 * {@code p}, including the b/17589233 aspect-ratio work-around for preview
 * sizes. Also sets {@code SENSOR_INFO_MAX_FRAME_DURATION} to the longest
 * computed JPEG stall duration.
 */
private static void mapScalerStreamConfigs(CameraMetadataNative m, Camera.Parameters p) {
ArrayList<StreamConfiguration> availableStreamConfigs = new ArrayList<>();
/*
 * Implementation-defined (preview, recording, etc) -> use camera1 preview sizes
 * YUV_420_888 cpu callbacks -> use camera1 preview sizes
 * Other preview callbacks (CPU) -> use camera1 preview sizes
 * JPEG still capture -> use camera1 still capture sizes
 *
 * Use platform-internal format constants here, since StreamConfigurationMap does the
 * remapping to public format constants.
 */
List<Camera.Size> previewSizes = p.getSupportedPreviewSizes();
List<Camera.Size> jpegSizes = p.getSupportedPictureSizes();
/*
 * Work-around for b/17589233:
 * - Some HALs's largest preview size aspect ratio does not match the largest JPEG size AR
 * - This causes a large amount of problems with focus/metering because it's relative to
 * preview, making the difference between the JPEG and preview viewport inaccessible
 * - This boils down to metering or focusing areas being "arbitrarily" cropped
 * in the capture result.
 * - Work-around the HAL limitations by removing all of the largest preview sizes
 * until we get one with the same aspect ratio as the jpeg size.
 */
{
SizeAreaComparator areaComparator = new SizeAreaComparator();
// Sort preview to min->max
Collections.sort(previewSizes, areaComparator);
Camera.Size maxJpegSize = SizeAreaComparator.findLargestByArea(jpegSizes);
float jpegAspectRatio = maxJpegSize.width * 1.0f / maxJpegSize.height;
if (DEBUG) {
Log.v(TAG, String.format("mapScalerStreamConfigs - largest JPEG area %dx%d, AR=%f", maxJpegSize.width, maxJpegSize.height, jpegAspectRatio));
}
// Now remove preview sizes from the end (largest->smallest) until aspect ratio matches
while (!previewSizes.isEmpty()) {
// max is always at the end
int index = previewSizes.size() - 1;
Camera.Size size = previewSizes.get(index);
float previewAspectRatio = size.width * 1.0f / size.height;
if (Math.abs(jpegAspectRatio - previewAspectRatio) >= PREVIEW_ASPECT_RATIO_TOLERANCE) {
// Assume removing from end is O(1)
previewSizes.remove(index);
if (DEBUG) {
Log.v(TAG, String.format("mapScalerStreamConfigs - removed preview size %dx%d, AR=%f " + "was not the same", size.width, size.height, previewAspectRatio));
}
} else {
// First (largest remaining) size within tolerance: stop trimming.
break;
}
}
if (previewSizes.isEmpty()) {
// Fall-back to the original faulty behavior, but at least work
Log.w(TAG, "mapScalerStreamConfigs - failed to find any preview size matching " + "JPEG aspect ratio " + jpegAspectRatio);
previewSizes = p.getSupportedPreviewSizes();
}
// Sort again, this time in descending order max->min
Collections.sort(previewSizes, Collections.reverseOrder(areaComparator));
}
// Implementation-defined and YUV outputs both advertise the (trimmed) preview sizes.
appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, previewSizes);
appendStreamConfig(availableStreamConfigs, ImageFormat.YUV_420_888, previewSizes);
// NV21 is excluded: it is remapped/handled via YUV_420_888 above.
for (int format : p.getSupportedPreviewFormats()) {
if (ImageFormat.isPublicFormat(format) && format != ImageFormat.NV21) {
appendStreamConfig(availableStreamConfigs, format, previewSizes);
} else if (DEBUG) {
/*
 * Do not add any formats unknown to us
 * (since it would fail runtime checks in StreamConfigurationMap)
 */
Log.v(TAG, String.format("mapStreamConfigs - Skipping format %x", format));
}
}
// JPEG (BLOB) uses the untrimmed picture sizes, not the preview work-around list.
appendStreamConfig(availableStreamConfigs, HAL_PIXEL_FORMAT_BLOB, p.getSupportedPictureSizes());
/*
 * scaler.availableStreamConfigurations
 */
m.set(SCALER_AVAILABLE_STREAM_CONFIGURATIONS, availableStreamConfigs.toArray(new StreamConfiguration[0]));
/*
 * scaler.availableMinFrameDurations
 */
// No frame durations available
m.set(SCALER_AVAILABLE_MIN_FRAME_DURATIONS, new StreamConfigurationDuration[0]);
// Compute a per-size JPEG stall duration and remember the longest one seen.
StreamConfigurationDuration[] jpegStalls = new StreamConfigurationDuration[jpegSizes.size()];
int i = 0;
long longestStallDuration = -1;
for (Camera.Size s : jpegSizes) {
long stallDuration = calculateJpegStallDuration(s);
jpegStalls[i++] = new StreamConfigurationDuration(HAL_PIXEL_FORMAT_BLOB, s.width, s.height, stallDuration);
if (longestStallDuration < stallDuration) {
longestStallDuration = stallDuration;
}
}
/*
 * scaler.availableStallDurations
 */
// Set stall durations for jpeg, other formats use default stall duration
m.set(SCALER_AVAILABLE_STALL_DURATIONS, jpegStalls);
/*
 * sensor.info.maxFrameDuration
 */
// Longest JPEG stall doubles as the device's maximum frame duration.
m.set(SENSOR_INFO_MAX_FRAME_DURATION, longestStallDuration);
}
Aggregations