Example usage of android.hardware.camera2.params.StreamConfigurationMap from the project material-camera by afollestad.
Taken from the method openCamera of the class Camera2Fragment.
/**
 * Opens the currently-selected camera device: discovers front/back camera ids,
 * resolves the desired camera position, chooses preview/capture sizes, configures
 * the preview transform, and finally calls {@link CameraManager#openCamera}.
 * Errors are routed through {@code throwError} or an {@code ErrorDialog}.
 */
@Override
public void openCamera() {
    // Current TextureView dimensions drive preview-size selection below.
    final int width = mTextureView.getWidth();
    final int height = mTextureView.getHeight();
    final Activity activity = getActivity();
    // Bail out if the fragment is detached or the host activity is going away.
    if (null == activity || activity.isFinishing())
        return;
    final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        // Serialize open/close against each other; give up after 2.5 seconds.
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throwError(new Exception("Time out waiting to lock camera opening."));
            return;
        }
        // Lazily discover and cache the front/back camera ids on first use.
        if (mInterface.getFrontCamera() == null || mInterface.getBackCamera() == null) {
            for (String cameraId : manager.getCameraIdList()) {
                if (cameraId == null)
                    continue;
                // Stop scanning once both ids are known.
                if (mInterface.getFrontCamera() != null && mInterface.getBackCamera() != null)
                    break;
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                //noinspection ConstantConditions
                int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing == CameraCharacteristics.LENS_FACING_FRONT)
                    mInterface.setFrontCamera(cameraId);
                else if (facing == CameraCharacteristics.LENS_FACING_BACK)
                    mInterface.setBackCamera(cameraId);
            }
        }
        // Resolve the camera position. Note the "facing" button icon is set to the
        // OPPOSITE side — it shows what tapping the button would switch to.
        switch(mInterface.getCurrentCameraPosition()) {
            case CAMERA_POSITION_FRONT:
                setImageRes(mButtonFacing, mInterface.iconRearCamera());
                break;
            case CAMERA_POSITION_BACK:
                setImageRes(mButtonFacing, mInterface.iconFrontCamera());
                break;
            case CAMERA_POSITION_UNKNOWN:
            default:
                // No position chosen yet: honor DEFAULT_TO_FRONT_FACING, falling back
                // to whichever camera actually exists on this device.
                if (getArguments().getBoolean(CameraIntentKey.DEFAULT_TO_FRONT_FACING, false)) {
                    // Check front facing first
                    if (mInterface.getFrontCamera() != null) {
                        setImageRes(mButtonFacing, mInterface.iconRearCamera());
                        mInterface.setCameraPosition(CAMERA_POSITION_FRONT);
                    } else {
                        setImageRes(mButtonFacing, mInterface.iconFrontCamera());
                        if (mInterface.getBackCamera() != null)
                            mInterface.setCameraPosition(CAMERA_POSITION_BACK);
                        else
                            mInterface.setCameraPosition(CAMERA_POSITION_UNKNOWN);
                    }
                } else {
                    // Check back facing first
                    if (mInterface.getBackCamera() != null) {
                        setImageRes(mButtonFacing, mInterface.iconFrontCamera());
                        mInterface.setCameraPosition(CAMERA_POSITION_BACK);
                    } else {
                        setImageRes(mButtonFacing, mInterface.iconRearCamera());
                        if (mInterface.getFrontCamera() != null)
                            mInterface.setCameraPosition(CAMERA_POSITION_FRONT);
                        else
                            mInterface.setCameraPosition(CAMERA_POSITION_UNKNOWN);
                    }
                }
                break;
        }
        // Choose the sizes for camera preview and video recording
        CameraCharacteristics characteristics = manager.getCameraCharacteristics((String) mInterface.getCurrentCameraId());
        StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        assert map != null;
        // For still image captures, we use the largest available size.
        Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
        // Find out if we need to swap dimension to get the preview size relative to sensor
        // coordinate.
        int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        //noinspection ConstantConditions,ResourceType
        @Degrees.DegreeUnits final int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        @Degrees.DegreeUnits int deviceRotation = Degrees.getDisplayRotation(getActivity());
        mDisplayOrientation = Degrees.getDisplayOrientation(sensorOrientation, deviceRotation, getCurrentCameraPosition() == CAMERA_POSITION_FRONT);
        Log.d("Camera2Fragment", String.format("Orientations: Sensor = %d˚, Device = %d˚, Display = %d˚", sensorOrientation, deviceRotation, mDisplayOrientation));
        if (mInterface.useStillshot()) {
            // Sensor and display axes differ by 90° on some rotations; in that case the
            // preview width/height (and the display bounds) must be swapped.
            boolean swappedDimensions = false;
            switch(displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (sensorOrientation == Degrees.DEGREES_90 || sensorOrientation == Degrees.DEGREES_270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (sensorOrientation == Degrees.DEGREES_0 || sensorOrientation == Degrees.DEGREES_180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e("stillshot", "Display rotation is invalid: " + displayRotation);
            }
            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;
            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }
            // Clamp to the caps regardless of the display size.
            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }
            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);
            // maxImages = 2: one being processed, one being acquired.
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, 2);
            mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
                // Runs on mBackgroundHandler: dumps the JPEG bytes to disk, then
                // notifies the host interface with the resulting file URI.
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image image = reader.acquireNextImage();
                    // JPEG images carry their payload in a single plane.
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    final byte[] bytes = new byte[buffer.remaining()];
                    buffer.get(bytes);
                    final File outputPic = getOutputPictureFile();
                    FileOutputStream output = null;
                    try {
                        output = new FileOutputStream(outputPic);
                        output.write(bytes);
                    } catch (IOException e) {
                        // NOTE(review): write failure is only logged; mOutputUri is still
                        // published below — confirm this best-effort behavior is intended.
                        e.printStackTrace();
                    } finally {
                        // Always release the Image so the reader can reuse its buffer.
                        image.close();
                        if (null != output) {
                            try {
                                output.close();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                    Log.d("stillshot", "picture saved to disk - jpeg, size: " + bytes.length);
                    mOutputUri = Uri.fromFile(outputPic).toString();
                    mInterface.onShowStillshot(mOutputUri);
                }
            }, mBackgroundHandler);
        } else {
            // Video path: pick the recording size first, then a preview size matching it.
            mMediaRecorder = new MediaRecorder();
            mVideoSize = chooseVideoSize((BaseCaptureInterface) activity, map.getOutputSizes(MediaRecorder.class));
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, mVideoSize);
        }
        // Fit the TextureView's aspect ratio to the chosen preview size, swapping
        // width/height when the screen is in a portrait orientation.
        int orientation = VideoStreamView.getScreenOrientation(activity);
        if (orientation == ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE || orientation == ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE) {
            mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }
        // Autofocus is considered available if any AF mode other than 0 (OFF) is listed.
        mAfAvailable = false;
        int[] afModes = characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
        if (afModes != null) {
            for (int i : afModes) {
                if (i != 0) {
                    mAfAvailable = true;
                    break;
                }
            }
        }
        configureTransform(width, height);
        mInterface.setFlashModes(CameraUtil.getSupportedFlashModes(getActivity(), characteristics));
        onFlashModesLoaded();
        // noinspection ResourceType
        // NOTE(review): null handler — per the camera2 API this delivers mStateCallback
        // on the calling thread's looper; confirm that is the intended thread.
        manager.openCamera((String) mInterface.getCurrentCameraId(), mStateCallback, null);
    } catch (CameraAccessException e) {
        throwError(new Exception("Cannot access the camera.", e));
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        new ErrorDialog().show(getFragmentManager(), "dialog");
    } catch (InterruptedException e) {
        throwError(new Exception("Interrupted while trying to lock camera opening.", e));
    }
}
Example usage of android.hardware.camera2.params.StreamConfigurationMap from the project platform_frameworks_base by android.
Taken from the method openDevice of the class Camera2SurfaceViewTestCase.
/**
 * Opens the camera device with the given id and caches its static metadata,
 * stream-configuration capabilities, and the ordered size lists used by tests.
 *
 * @param cameraId The id of the camera device to be opened.
 */
protected void openDevice(String cameraId) throws Exception {
    mCamera = CameraTestUtils.openCamera(mCameraManager, cameraId, mCameraListener, mHandler);
    mCollector.setCameraId(cameraId);

    CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraId);
    mStaticInfo = new StaticMetadata(characteristics, CheckLevel.ASSERT, /*collector*/
    null);

    StreamConfigurationMap streamConfigs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    mSupportRAW10 = streamConfigs.isOutputSupportedFor(ImageFormat.RAW10);

    // Size lists below are only meaningful for devices with color output support.
    if (!mStaticInfo.isColorOutputSupported()) {
        return;
    }
    mOrderedPreviewSizes = getSupportedPreviewSizes(cameraId, mCameraManager, getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
    mOrderedVideoSizes = getSupportedVideoSizes(cameraId, mCameraManager, PREVIEW_SIZE_BOUND);
    mOrderedStillSizes = getSupportedStillSizes(cameraId, mCameraManager, null);
    if (mSupportRAW10) {
        mOrderedRAW10Sizes = getSortedSizesForFormat(cameraId, mCameraManager, ImageFormat.RAW10, null);
    }
    mOrderedYUV420888Sizes = getSortedSizesForFormat(cameraId, mCameraManager, ImageFormat.YUV_420_888, null);
    // Use ImageFormat.YUV_420_888 for now. TODO: need figure out what's format for preview
    // in public API side.
    mMinPreviewFrameDurationMap = mStaticInfo.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.YUV_420_888);
}
Example usage of android.hardware.camera2.params.StreamConfigurationMap from the project platform_frameworks_base by android.
Taken from the method getAvailableFormats of the class StaticMetadata.
/**
* Get available formats for a given direction.
*
* @param direction The stream direction, input or output.
* @return The formats of the given direction, empty array if no available format is found.
*/
/**
 * Get available formats for a given direction.
 *
 * @param direction The stream direction, input or output.
 * @return The formats of the given direction, empty array if no available format is found.
 */
public int[] getAvailableFormats(StreamDirection direction) {
    StreamConfigurationMap config = getValueFromKeyNonNull(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    // No configuration map reported: nothing is available in either direction.
    if (config == null) {
        return new int[0];
    }
    if (direction == StreamDirection.Output) {
        return config.getOutputFormats();
    }
    if (direction == StreamDirection.Input) {
        return config.getInputFormats();
    }
    throw new IllegalArgumentException("direction must be output or input");
}
Example usage of android.hardware.camera2.params.StreamConfigurationMap from the project platform_frameworks_base by android.
Taken from the method getSupportedSizeForClass of the class CameraTestUtils.
/**
* Get the available output sizes for the given class.
*
*/
public static Size[] getSupportedSizeForClass(Class klass, String cameraId, CameraManager cameraManager) throws CameraAccessException {
CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
assertNotNull("Can't get camera characteristics!", properties);
if (VERBOSE) {
Log.v(TAG, "get camera characteristics for camera: " + cameraId);
}
StreamConfigurationMap configMap = properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] availableSizes = configMap.getOutputSizes(klass);
assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for class: " + klass);
Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(ImageFormat.PRIVATE);
if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
System.arraycopy(availableSizes, 0, allSizes, 0, availableSizes.length);
System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length, highResAvailableSizes.length);
availableSizes = allSizes;
}
if (VERBOSE)
Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
return availableSizes;
}
Example usage of android.hardware.camera2.params.StreamConfigurationMap from the project platform_frameworks_base by android.
Taken from the method configureOutputs of the class LegacyCameraDevice.
/**
* Configure the device with a set of output surfaces.
*
* <p>Using empty or {@code null} {@code outputs} is the same as unconfiguring.</p>
*
* <p>Every surface in {@code outputs} must be non-{@code null}.</p>
*
* @param outputs a list of surfaces to set. LegacyCameraDevice will take ownership of this
* list; it must not be modified by the caller once it's passed in.
* @return an error code for this binder operation, or {@link NO_ERROR}
* on success.
*/
/**
 * Configure the device with a set of output surfaces.
 *
 * <p>Using empty or {@code null} {@code outputs} is the same as unconfiguring.</p>
 *
 * <p>Every surface in {@code outputs} must be non-{@code null}.</p>
 *
 * @param outputs a list of surfaces to set. LegacyCameraDevice will take ownership of this
 * list; it must not be modified by the caller once it's passed in.
 * @return an error code for this binder operation, or {@link NO_ERROR}
 * on success.
 */
public int configureOutputs(SparseArray<Surface> outputs) {
    // Each surface paired with the (possibly adjusted) size it will be configured at.
    List<Pair<Surface, Size>> sizedSurfaces = new ArrayList<>();
    if (outputs != null) {
        int count = outputs.size();
        for (int i = 0; i < count; i++) {
            Surface output = outputs.valueAt(i);
            // Reject null or already-released surfaces up front.
            if (output == null) {
                Log.e(TAG, "configureOutputs - null outputs are not allowed");
                return BAD_VALUE;
            }
            if (!output.isValid()) {
                Log.e(TAG, "configureOutputs - invalid output surfaces are not allowed");
                return BAD_VALUE;
            }
            StreamConfigurationMap streamConfigurations = mStaticCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            // Validate surface size and format.
            try {
                Size s = getSurfaceSize(output);
                int surfaceType = detectSurfaceType(output);
                boolean flexibleConsumer = isFlexibleConsumer(output);
                Size[] sizes = streamConfigurations.getOutputSizes(surfaceType);
                if (sizes == null) {
                    // WAR: Override default format to IMPLEMENTATION_DEFINED for b/9487482
                    if ((surfaceType >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 && surfaceType <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
                        // YUV_420_888 is always present in LEGACY for all
                        // IMPLEMENTATION_DEFINED output sizes, and is publicly visible in the
                        // API (i.e. {@code #getOutputSizes} works here).
                        sizes = streamConfigurations.getOutputSizes(ImageFormat.YUV_420_888);
                    } else if (surfaceType == LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB) {
                        sizes = streamConfigurations.getOutputSizes(ImageFormat.JPEG);
                    }
                }
                if (!ArrayUtils.contains(sizes, s)) {
                    // Size not directly supported: a flexible consumer may be snapped to the
                    // closest valid size; otherwise the configuration is rejected.
                    if (flexibleConsumer && (s = findClosestSize(s, sizes)) != null) {
                        sizedSurfaces.add(new Pair<>(output, s));
                    } else {
                        String reason = (sizes == null) ? "format is invalid." : ("size not in valid set: " + Arrays.toString(sizes));
                        Log.e(TAG, String.format("Surface with size (w=%d, h=%d) and format " + "0x%x is not valid, %s", s.getWidth(), s.getHeight(), surfaceType, reason));
                        return BAD_VALUE;
                    }
                } else {
                    sizedSurfaces.add(new Pair<>(output, s));
                }
                // Lock down the size before configuration
                setSurfaceDimens(output, s.getWidth(), s.getHeight());
            } catch (BufferQueueAbandonedException e) {
                Log.e(TAG, "Surface bufferqueue is abandoned, cannot configure as output: ", e);
                return BAD_VALUE;
            }
        }
    }
    // Transition the device state machine: CONFIGURING -> configure -> IDLE.
    boolean success = false;
    if (mDeviceState.setConfiguring()) {
        mRequestThreadManager.configure(sizedSurfaces);
        success = mDeviceState.setIdle();
    }
    if (success) {
        mConfiguredSurfaces = outputs;
    } else {
        return LegacyExceptionUtils.INVALID_OPERATION;
    }
    return LegacyExceptionUtils.NO_ERROR;
}
Aggregations