Use of com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException in project platform_frameworks_base by android.
From the class Camera2Source, method onOpen:
@Override
protected void onOpen() {
    // Run the camera callbacks on a dedicated looper thread.
    mLooperThread = new CameraTestThread();
    Handler mHandler;
    try {
        mHandler = mLooperThread.start();
    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Open the back camera synchronously; BlockingOpenException signals a failed or timed-out open.
    try {
        String backCameraId = "0";
        BlockingCameraManager blkManager = new BlockingCameraManager(mCameraManager);
        mCamera = blkManager.openCamera(backCameraId, /*listener*/ null, mHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    } catch (BlockingOpenException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Set up the RenderScript YUV_420_888 -> RGBA conversion path.
    Element ele = Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV);
    rgbConverter = ScriptIntrinsicYuvToRGB.create(mRS, ele);
    Type.Builder yuvBuilder = new Type.Builder(mRS, ele);
    yuvBuilder.setYuvFormat(ImageFormat.YUV_420_888);
    yuvBuilder.setX(mWidth);
    yuvBuilder.setY(mHeight);
    mAllocationIn = Allocation.createTyped(mRS, yuvBuilder.create(),
            Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_INPUT);
    mSurface = mAllocationIn.getSurface();
    mAllocationIn.setOnBufferAvailableListener(this);
    rgbConverter.setInput(mAllocationIn);
    mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    mAllocationOut = Allocation.createFromBitmap(mRS, mBitmap);
    Log.v(TAG, "mcamera: " + mCamera);
    // Create a capture session that targets the input allocation's surface.
    List<Surface> surfaces = new ArrayList<Surface>();
    surfaces.add(mSurface);
    CaptureRequest.Builder mCaptureRequest = null;
    try {
        BlockingSessionCallback blkSession = new BlockingSessionCallback();
        mCamera.createCaptureSession(surfaces, blkSession, mHandler);
        mCaptureRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mCaptureRequest.addTarget(mSurface);
        mCameraSession = blkSession.waitAndGetSession(SESSION_TIMEOUT_MS);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Start streaming preview frames into the allocation.
    try {
        mCameraSession.setRepeatingRequest(mCaptureRequest.build(), new MyCaptureCallback(), mHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Cache the camera characteristics for later use.
    mProperties = null;
    try {
        mProperties = mCameraManager.getCameraCharacteristics(mCamera.getId());
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
}
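Note that the snippet hardcodes the back camera as id "0". A more robust variant queries CameraCharacteristics for the first LENS_FACING_BACK device before calling BlockingCameraManager.openCamera(). The helper below is a minimal sketch, not part of Camera2Source; the name findBackCameraId is hypothetical, and it assumes the same CameraManager instance used above (classes from android.hardware.camera2).

// Hypothetical helper: resolve the back-facing camera id instead of assuming "0".
private static String findBackCameraId(CameraManager manager) throws CameraAccessException {
    for (String id : manager.getCameraIdList()) {
        Integer facing = manager.getCameraCharacteristics(id)
                .get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraMetadata.LENS_FACING_BACK) {
            return id;
        }
    }
    throw new IllegalStateException("No back-facing camera available");
}

The result would then replace the literal backCameraId = "0" passed to blkManager.openCamera().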
Use of com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException in project android_frameworks_base by DirtyUnicorns.
From the class Camera2Source, method onOpen:
@Override
protected void onOpen() {
    // Run the camera callbacks on a dedicated looper thread.
    mLooperThread = new CameraTestThread();
    Handler mHandler;
    try {
        mHandler = mLooperThread.start();
    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Open the back camera synchronously; BlockingOpenException signals a failed or timed-out open.
    try {
        String backCameraId = "0";
        BlockingCameraManager blkManager = new BlockingCameraManager(mCameraManager);
        mCamera = blkManager.openCamera(backCameraId, /*listener*/ null, mHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    } catch (BlockingOpenException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Set up the RenderScript YUV_420_888 -> RGBA conversion path.
    Element ele = Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV);
    rgbConverter = ScriptIntrinsicYuvToRGB.create(mRS, ele);
    Type.Builder yuvBuilder = new Type.Builder(mRS, ele);
    yuvBuilder.setYuvFormat(ImageFormat.YUV_420_888);
    yuvBuilder.setX(mWidth);
    yuvBuilder.setY(mHeight);
    mAllocationIn = Allocation.createTyped(mRS, yuvBuilder.create(),
            Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_INPUT);
    mSurface = mAllocationIn.getSurface();
    mAllocationIn.setOnBufferAvailableListener(this);
    rgbConverter.setInput(mAllocationIn);
    mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    mAllocationOut = Allocation.createFromBitmap(mRS, mBitmap);
    Log.v(TAG, "mcamera: " + mCamera);
    // Create a capture session that targets the input allocation's surface.
    List<Surface> surfaces = new ArrayList<Surface>();
    surfaces.add(mSurface);
    CaptureRequest.Builder mCaptureRequest = null;
    try {
        BlockingSessionCallback blkSession = new BlockingSessionCallback();
        mCamera.createCaptureSession(surfaces, blkSession, mHandler);
        mCaptureRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mCaptureRequest.addTarget(mSurface);
        mCameraSession = blkSession.waitAndGetSession(SESSION_TIMEOUT_MS);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Start streaming preview frames into the allocation.
    try {
        mCameraSession.setRepeatingRequest(mCaptureRequest.build(), new MyCaptureCallback(), mHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Cache the camera characteristics for later use.
    mProperties = null;
    try {
        mProperties = mCameraManager.getCameraCharacteristics(mCamera.getId());
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
}
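Because onOpen() registers the source itself as the input allocation's OnBufferAvailableListener and wires rgbConverter from mAllocationIn to mAllocationOut, the per-frame YUV-to-RGB conversion happens in the buffer callback, which these snippets do not show. A minimal sketch of such a callback, assuming the listener only latches the newest buffer and converts it into mBitmap, looks like this:

@Override
public void onBufferAvailable(Allocation a) {
    // Sketch only; the actual Camera2Source callback may do more (frame pushing, synchronization).
    mAllocationIn.ioReceive();            // latch the newest camera buffer into the input allocation
    rgbConverter.forEach(mAllocationOut); // convert YUV_420_888 to RGBA
    mAllocationOut.copyTo(mBitmap);       // copy the converted frame into the ARGB_8888 bitmap
}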
Use of com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException in project android_frameworks_base by ResurrectionRemix.
From the class Camera2Source, method onOpen:
@Override
protected void onOpen() {
    // Run the camera callbacks on a dedicated looper thread.
    mLooperThread = new CameraTestThread();
    Handler mHandler;
    try {
        mHandler = mLooperThread.start();
    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Open the back camera synchronously; BlockingOpenException signals a failed or timed-out open.
    try {
        String backCameraId = "0";
        BlockingCameraManager blkManager = new BlockingCameraManager(mCameraManager);
        mCamera = blkManager.openCamera(backCameraId, /*listener*/ null, mHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    } catch (BlockingOpenException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Set up the RenderScript YUV_420_888 -> RGBA conversion path.
    Element ele = Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV);
    rgbConverter = ScriptIntrinsicYuvToRGB.create(mRS, ele);
    Type.Builder yuvBuilder = new Type.Builder(mRS, ele);
    yuvBuilder.setYuvFormat(ImageFormat.YUV_420_888);
    yuvBuilder.setX(mWidth);
    yuvBuilder.setY(mHeight);
    mAllocationIn = Allocation.createTyped(mRS, yuvBuilder.create(),
            Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_INPUT);
    mSurface = mAllocationIn.getSurface();
    mAllocationIn.setOnBufferAvailableListener(this);
    rgbConverter.setInput(mAllocationIn);
    mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    mAllocationOut = Allocation.createFromBitmap(mRS, mBitmap);
    Log.v(TAG, "mcamera: " + mCamera);
    // Create a capture session that targets the input allocation's surface.
    List<Surface> surfaces = new ArrayList<Surface>();
    surfaces.add(mSurface);
    CaptureRequest.Builder mCaptureRequest = null;
    try {
        BlockingSessionCallback blkSession = new BlockingSessionCallback();
        mCamera.createCaptureSession(surfaces, blkSession, mHandler);
        mCaptureRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mCaptureRequest.addTarget(mSurface);
        mCameraSession = blkSession.waitAndGetSession(SESSION_TIMEOUT_MS);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Start streaming preview frames into the allocation.
    try {
        mCameraSession.setRepeatingRequest(mCaptureRequest.build(), new MyCaptureCallback(), mHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Cache the camera characteristics for later use.
    mProperties = null;
    try {
        mProperties = mCameraManager.getCameraCharacteristics(mCamera.getId());
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
}
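For completeness, the teardown mirroring this setup would stop the repeating request, close the session and device, and shut down the looper thread. The following onClose() is only a sketch, not code from these projects: the method name and the CameraTestThread.close() call are assumptions.

protected void onClose() {
    try {
        mCameraSession.stopRepeating(); // stop the preview started in onOpen()
    } catch (CameraAccessException e) {
        throw new RuntimeException(e);
    }
    mCameraSession.close();
    mCamera.close();
    try {
        mLooperThread.close();          // assumed shutdown method on the test looper thread; API may differ
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}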
Use of com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException in project android_frameworks_base by crdroidandroid.
From the class Camera2Source, method onOpen:
@Override
protected void onOpen() {
    // Run the camera callbacks on a dedicated looper thread.
    mLooperThread = new CameraTestThread();
    Handler mHandler;
    try {
        mHandler = mLooperThread.start();
    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Open the back camera synchronously; BlockingOpenException signals a failed or timed-out open.
    try {
        String backCameraId = "0";
        BlockingCameraManager blkManager = new BlockingCameraManager(mCameraManager);
        mCamera = blkManager.openCamera(backCameraId, /*listener*/ null, mHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    } catch (BlockingOpenException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Set up the RenderScript YUV_420_888 -> RGBA conversion path.
    Element ele = Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV);
    rgbConverter = ScriptIntrinsicYuvToRGB.create(mRS, ele);
    Type.Builder yuvBuilder = new Type.Builder(mRS, ele);
    yuvBuilder.setYuvFormat(ImageFormat.YUV_420_888);
    yuvBuilder.setX(mWidth);
    yuvBuilder.setY(mHeight);
    mAllocationIn = Allocation.createTyped(mRS, yuvBuilder.create(),
            Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_INPUT);
    mSurface = mAllocationIn.getSurface();
    mAllocationIn.setOnBufferAvailableListener(this);
    rgbConverter.setInput(mAllocationIn);
    mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    mAllocationOut = Allocation.createFromBitmap(mRS, mBitmap);
    Log.v(TAG, "mcamera: " + mCamera);
    // Create a capture session that targets the input allocation's surface.
    List<Surface> surfaces = new ArrayList<Surface>();
    surfaces.add(mSurface);
    CaptureRequest.Builder mCaptureRequest = null;
    try {
        BlockingSessionCallback blkSession = new BlockingSessionCallback();
        mCamera.createCaptureSession(surfaces, blkSession, mHandler);
        mCaptureRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mCaptureRequest.addTarget(mSurface);
        mCameraSession = blkSession.waitAndGetSession(SESSION_TIMEOUT_MS);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Start streaming preview frames into the allocation.
    try {
        mCameraSession.setRepeatingRequest(mCaptureRequest.build(), new MyCaptureCallback(), mHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    // Cache the camera characteristics for later use.
    mProperties = null;
    try {
        mProperties = mCameraManager.getCameraCharacteristics(mCamera.getId());
    } catch (CameraAccessException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
}