Use of android.hardware.camera2.CameraAccessException in the project material-camera by afollestad.
The following is the startPreview method of the Camera2Fragment class.
/**
 * Starts the camera preview by creating a capture session that targets the
 * preview surface plus either the still-shot ImageReader surface or the
 * MediaRecorder surface, depending on {@code mInterface.useStillshot()}.
 *
 * <p>No-ops if the camera device, the texture view, or the preview size is not
 * ready yet, or if the media recorder fails to set up for video mode.</p>
 */
private void startPreview() {
    if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize)
        return;
    try {
        if (!mInterface.useStillshot()) {
            // Video mode: the recorder must be configured before its surface is used.
            if (!setUpMediaRecorder()) {
                return;
            }
        }
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        assert texture != null;
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        List<Surface> surfaces = new ArrayList<>();
        Surface previewSurface = new Surface(texture);
        surfaces.add(previewSurface);
        if (mInterface.useStillshot()) {
            // Still-shot mode: preview template plus the ImageReader output surface.
            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewBuilder.addTarget(previewSurface);
            surfaces.add(mImageReader.getSurface());
        } else {
            // Recording mode: record template plus the MediaRecorder output surface.
            mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
            mPreviewBuilder.addTarget(previewSurface);
            Surface recorderSurface = mMediaRecorder.getSurface();
            surfaces.add(recorderSurface);
            mPreviewBuilder.addTarget(recorderSurface);
        }
        mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                // The device may have been closed while the session was configuring.
                if (mCameraDevice == null) {
                    return;
                }
                mPreviewSession = cameraCaptureSession;
                updatePreview();
            }
            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                throwError(new Exception("Camera configuration failed"));
            }
        }, mBackgroundHandler);
    } catch (CameraAccessException e) {
        // Surface the failure through the fragment's error path (consistent with
        // onConfigureFailed above) instead of silently printing the stack trace.
        throwError(e);
    }
}
Use of android.hardware.camera2.CameraAccessException in the project platform_frameworks_base by android.
The following is the getCameraId method of the FlashlightController class.
/**
 * Finds the id of the first camera that is back-facing and has a flash unit.
 *
 * @return the matching camera id, or {@code null} if no such camera exists
 * @throws CameraAccessException if the camera service cannot be queried
 */
private String getCameraId() throws CameraAccessException {
    for (String cameraId : mCameraManager.getCameraIdList()) {
        CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraId);
        Boolean hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
        if (hasFlash == null || !hasFlash) {
            continue;
        }
        Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
            return cameraId;
        }
    }
    return null;
}
Use of android.hardware.camera2.CameraAccessException in the project platform_frameworks_base by android.
The following is the getSupportedSizeForFormat method of the CameraTestUtils class.
/**
 * Get the available output sizes for the user-defined {@code format}.
 *
 * <p>Regular output sizes are combined with any high-resolution output sizes
 * reported for the format. Note that implementation-defined/hidden formats are
 * not supported.</p>
 *
 * @param format the image format to query
 * @param cameraId id of the camera whose characteristics are inspected
 * @param cameraManager manager used to look up the camera characteristics
 * @return all supported output sizes for {@code format}; never empty
 * @throws CameraAccessException if the camera characteristics cannot be read
 */
public static Size[] getSupportedSizeForFormat(int format, String cameraId, CameraManager cameraManager) throws CameraAccessException {
    CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
    assertNotNull("Can't get camera characteristics!", properties);
    if (VERBOSE) {
        Log.v(TAG, "get camera characteristics for camera: " + cameraId);
    }
    StreamConfigurationMap configMap = properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    // Fail with a descriptive assertion rather than a bare NPE if the stream
    // configuration map is missing (CameraCharacteristics.get may return null).
    assertNotNull("Can't get stream configuration map!", configMap);
    Size[] availableSizes = configMap.getOutputSizes(format);
    assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: " + format);
    Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(format);
    if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
        // Append the high-resolution sizes after the regular ones.
        Size[] allSizes = Arrays.copyOf(availableSizes, availableSizes.length + highResAvailableSizes.length);
        System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length, highResAvailableSizes.length);
        availableSizes = allSizes;
    }
    if (VERBOSE) {
        Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
    }
    return availableSizes;
}
Use of android.hardware.camera2.CameraAccessException in the project platform_frameworks_base by android.
The following is the configureReprocessableCameraSession method of the CameraTestUtils class.
/**
 * Creates a reprocessable capture session and blocks until it is configured.
 *
 * <p>Waits for the session to reach either the ready or the configure-failed
 * state within {@code SESSION_CONFIGURE_TIMEOUT_MS}, asserting that it became
 * ready and that the resulting session reports itself as reprocessable.</p>
 *
 * @param camera the opened camera device
 * @param inputConfiguration input configuration for reprocessing
 * @param outputSurfaces output surfaces for the session
 * @param listener caller-supplied session state callback (wrapped internally)
 * @param handler handler on which callbacks are delivered
 * @return the configured, reprocessable capture session
 * @throws CameraAccessException if session creation fails at the camera layer
 */
public static CameraCaptureSession configureReprocessableCameraSession(CameraDevice camera, InputConfiguration inputConfiguration, List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener, Handler handler) throws CameraAccessException {
    BlockingSessionCallback sessionCallback = new BlockingSessionCallback(listener);
    camera.createReprocessableCaptureSession(inputConfiguration, outputSurfaces, sessionCallback, handler);
    // Block until the session either becomes ready or fails to configure.
    List<Integer> terminalStates = Arrays.asList(BlockingSessionCallback.SESSION_READY, BlockingSessionCallback.SESSION_CONFIGURE_FAILED);
    int reachedState = sessionCallback.getStateWaiter().waitForAnyOfStates(terminalStates, SESSION_CONFIGURE_TIMEOUT_MS);
    assertTrue("Creating a reprocessable session failed.", reachedState == BlockingSessionCallback.SESSION_READY);
    CameraCaptureSession session = sessionCallback.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
    assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
    return session;
}
Use of android.hardware.camera2.CameraAccessException in the project platform_frameworks_base by android.
The following is the onOpen method of the Camera2Source class.
/**
 * Opens the camera with id "0", builds a RenderScript pipeline that converts
 * the camera's YUV_420_888 frames into an ARGB_8888 bitmap, and starts a
 * repeating preview capture request on a dedicated looper thread.
 *
 * <p>Any failure — starting the looper thread, opening the camera, creating the
 * capture session, starting the repeating request, or reading the camera
 * characteristics — is rethrown as a {@link RuntimeException} with the original
 * exception preserved as its cause.</p>
 */
@Override
protected void onOpen() {
    // Dedicated looper thread that receives camera and session callbacks.
    mLooperThread = new CameraTestThread();
    Handler handler;
    try {
        handler = mLooperThread.start();
    } catch (Exception e) {
        // Cause is preserved in the rethrow; no need to printStackTrace first.
        throw new RuntimeException("Failed to start camera looper thread", e);
    }
    try {
        // NOTE(review): assumes camera id "0" is the back camera — TODO confirm.
        String backCameraId = "0";
        BlockingCameraManager blkManager = new BlockingCameraManager(mCameraManager);
        mCamera = blkManager.openCamera(backCameraId, /*listener*/
        null, handler);
    } catch (CameraAccessException | BlockingOpenException e) {
        throw new RuntimeException("Failed to open camera", e);
    }
    // RenderScript YUV -> RGB conversion pipeline fed by the camera's output
    // surface (USAGE_IO_INPUT allocation acts as the camera's frame sink).
    Element ele = Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV);
    rgbConverter = ScriptIntrinsicYuvToRGB.create(mRS, ele);
    Type.Builder yuvBuilder = new Type.Builder(mRS, ele);
    yuvBuilder.setYuvFormat(ImageFormat.YUV_420_888);
    yuvBuilder.setX(mWidth);
    yuvBuilder.setY(mHeight);
    mAllocationIn = Allocation.createTyped(mRS, yuvBuilder.create(), Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_INPUT);
    mSurface = mAllocationIn.getSurface();
    mAllocationIn.setOnBufferAvailableListener(this);
    rgbConverter.setInput(mAllocationIn);
    mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    mAllocationOut = Allocation.createFromBitmap(mRS, mBitmap);
    Log.v(TAG, "mcamera: " + mCamera);
    List<Surface> surfaces = new ArrayList<Surface>();
    surfaces.add(mSurface);
    CaptureRequest.Builder captureRequest = null;
    try {
        BlockingSessionCallback blkSession = new BlockingSessionCallback();
        mCamera.createCaptureSession(surfaces, blkSession, handler);
        captureRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        captureRequest.addTarget(mSurface);
        mCameraSession = blkSession.waitAndGetSession(SESSION_TIMEOUT_MS);
    } catch (CameraAccessException e) {
        throw new RuntimeException("Failed to create capture session", e);
    }
    try {
        mCameraSession.setRepeatingRequest(captureRequest.build(), new MyCaptureCallback(), handler);
    } catch (CameraAccessException e) {
        throw new RuntimeException("Failed to start repeating preview request", e);
    }
    mProperties = null;
    try {
        mProperties = mCameraManager.getCameraCharacteristics(mCamera.getId());
    } catch (CameraAccessException e) {
        throw new RuntimeException("Failed to read camera characteristics", e);
    }
}
Aggregations