use of android.media.ImageReader in project android_frameworks_base by crdroidandroid.
the class Camera2SurfaceViewTestCase method createImageReader.
/**
 * Create an {@link ImageReader} object and get its surface.
 *
 * @param size The size of the ImageReader to be created.
 * @param format The format of the ImageReader to be created.
 * @param maxNumImages The max number of images that can be acquired simultaneously.
 * @param listener The listener used by this ImageReader to notify callbacks.
 */
protected void createImageReader(Size size, int format, int maxNumImages, ImageReader.OnImageAvailableListener listener) throws Exception {
    closeImageReader();
    ImageReader r = makeImageReader(size, format, maxNumImages, listener, mHandler);
    mReader = r;
    mReaderSurface = r.getSurface();
}
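For orientation, here is a minimal sketch of how a test built on this helper might capture one still frame through the reader's surface. The listener class, constants, and camera/session fields (SimpleImageReaderListener, MAX_READER_IMAGES, CAPTURE_IMAGE_TIMEOUT_MS, mOrderedStillSizes, mCamera, mSession, mHandler) are assumed to come from the surrounding test harness, not from the snippet above.
// Hypothetical usage inside a Camera2SurfaceViewTestCase subclass; harness names are assumed.
SimpleImageReaderListener readerListener = new SimpleImageReaderListener();
createImageReader(mOrderedStillSizes.get(0), ImageFormat.JPEG, MAX_READER_IMAGES, readerListener);
CaptureRequest.Builder stillBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
// mReaderSurface was populated by createImageReader() above.
stillBuilder.addTarget(mReaderSurface);
mSession.capture(stillBuilder.build(), /*listener*/ null, mHandler);
Image image = readerListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
// Always close acquired images so the ImageReader can reuse the buffer.
image.close();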
use of android.media.ImageReader in project android_frameworks_base by crdroidandroid.
the class CameraTestUtils method makeImageReader.
/**
 * Create an {@link ImageReader} object for the given size and format, and register the
 * listener on the given handler.
 *
 * @param size The size of the ImageReader to be created.
 * @param format The format of the ImageReader to be created.
 * @param maxNumImages The max number of images that can be acquired simultaneously.
 * @param listener The listener used by this ImageReader to notify callbacks.
 * @param handler The handler to use for any listener callbacks.
 * @return The newly created ImageReader.
 */
public static ImageReader makeImageReader(Size size, int format, int maxNumImages, ImageReader.OnImageAvailableListener listener, Handler handler) {
    ImageReader reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format, maxNumImages);
    reader.setOnImageAvailableListener(listener, handler);
    if (VERBOSE) {
        Log.v(TAG, "Created ImageReader size " + size);
    }
    return reader;
}
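As a standalone usage sketch (not taken from the projects above), the returned reader is typically driven by a listener that acquires and closes each image promptly; the background thread, size, and YUV format below are illustrative assumptions.
// Illustrative only: drain images on a dedicated background handler. Failing to close()
// acquired images eventually stalls the camera once maxNumImages buffers are outstanding.
HandlerThread listenerThread = new HandlerThread("ImageListener");
listenerThread.start();
Handler listenerHandler = new Handler(listenerThread.getLooper());
ImageReader reader = makeImageReader(new Size(1920, 1080), ImageFormat.YUV_420_888, /*maxNumImages*/ 3, new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader r) {
        Image image = r.acquireNextImage();
        if (image != null) {
            // ... inspect planes / timestamps here ...
            image.close(); // return the buffer to the reader
        }
    }
}, listenerHandler);
// The reader's surface can now be added as an output of a capture session.
Surface readerSurface = reader.getSurface();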
use of android.media.ImageReader in project android_frameworks_base by DirtyUnicorns.
the class Camera2StillCaptureTest method fullRawCaptureTestByCamera.
private void fullRawCaptureTestByCamera() throws Exception {
    Size maxPreviewSz = mOrderedPreviewSizes.get(0);
    Size maxStillSz = mOrderedStillSizes.get(0);
    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    SimpleImageReaderListener jpegListener = new SimpleImageReaderListener();
    SimpleImageReaderListener rawListener = new SimpleImageReaderListener();
    Size size = mStaticInfo.getRawDimensChecked();
    if (VERBOSE) {
        Log.v(TAG, "Testing multi capture with size " + size.toString() + ", preview size " + maxPreviewSz);
    }
    // Prepare raw capture and start preview.
    CaptureRequest.Builder previewBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    CaptureRequest.Builder multiBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    ImageReader rawReader = null;
    ImageReader jpegReader = null;
    try {
        // Create ImageReaders.
        rawReader = makeImageReader(size, ImageFormat.RAW_SENSOR, MAX_READER_IMAGES, rawListener, mHandler);
        jpegReader = makeImageReader(maxStillSz, ImageFormat.JPEG, MAX_READER_IMAGES, jpegListener, mHandler);
        updatePreviewSurface(maxPreviewSz);
        // Configure output streams with preview, raw and jpeg streams.
        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(rawReader.getSurface());
        outputSurfaces.add(jpegReader.getSurface());
        outputSurfaces.add(mPreviewSurface);
        mSessionListener = new BlockingSessionCallback();
        mSession = configureCameraSession(mCamera, outputSurfaces, mSessionListener, mHandler);
        // Configure the requests.
        previewBuilder.addTarget(mPreviewSurface);
        multiBuilder.addTarget(mPreviewSurface);
        multiBuilder.addTarget(rawReader.getSurface());
        multiBuilder.addTarget(jpegReader.getSurface());
        // Start preview.
        mSession.setRepeatingRequest(previewBuilder.build(), null, mHandler);
        // Poor man's 3A: wait 3 seconds for AE/AF (if any) to settle.
        // TODO: Do proper 3A trigger and lock (see testTakePictureTest).
        Thread.sleep(3000);
        multiBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
        CaptureRequest multiRequest = multiBuilder.build();
        mSession.capture(multiRequest, resultListener, mHandler);
        CaptureResult result = resultListener.getCaptureResultForRequest(multiRequest, NUM_RESULTS_WAIT_TIMEOUT);
        Image jpegImage = jpegListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
        basicValidateJpegImage(jpegImage, maxStillSz);
        Image rawImage = rawListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
        validateRaw16Image(rawImage, size);
        verifyRawCaptureResult(multiRequest, result);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        try (DngCreator dngCreator = new DngCreator(mStaticInfo.getCharacteristics(), result)) {
            dngCreator.writeImage(outputStream, rawImage);
        }
        if (DEBUG) {
            byte[] rawBuffer = outputStream.toByteArray();
            String rawFileName = DEBUG_FILE_NAME_BASE + "/raw16_" + TAG + size.toString() + "_cam_" + mCamera.getId() + ".dng";
            Log.d(TAG, "Dump raw file into " + rawFileName);
            dumpFile(rawFileName, rawBuffer);
            byte[] jpegBuffer = getDataFromImage(jpegImage);
            String jpegFileName = DEBUG_FILE_NAME_BASE + "/jpeg_" + TAG + size.toString() + "_cam_" + mCamera.getId() + ".jpg";
            Log.d(TAG, "Dump jpeg file into " + jpegFileName);
            dumpFile(jpegFileName, jpegBuffer);
        }
        stopPreview();
    } finally {
        CameraTestUtils.closeImageReader(rawReader);
        CameraTestUtils.closeImageReader(jpegReader);
        rawReader = null;
        jpegReader = null;
    }
}
use of android.media.ImageReader in project material-camera by afollestad.
the class Camera2Fragment method openCamera.
@Override
public void openCamera() {
    final int width = mTextureView.getWidth();
    final int height = mTextureView.getHeight();
    final Activity activity = getActivity();
    if (null == activity || activity.isFinishing())
        return;
    final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throwError(new Exception("Time out waiting to lock camera opening."));
            return;
        }
        if (mInterface.getFrontCamera() == null || mInterface.getBackCamera() == null) {
            for (String cameraId : manager.getCameraIdList()) {
                if (cameraId == null)
                    continue;
                if (mInterface.getFrontCamera() != null && mInterface.getBackCamera() != null)
                    break;
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                //noinspection ConstantConditions
                int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing == CameraCharacteristics.LENS_FACING_FRONT)
                    mInterface.setFrontCamera(cameraId);
                else if (facing == CameraCharacteristics.LENS_FACING_BACK)
                    mInterface.setBackCamera(cameraId);
            }
        }
        switch (mInterface.getCurrentCameraPosition()) {
            case CAMERA_POSITION_FRONT:
                setImageRes(mButtonFacing, mInterface.iconRearCamera());
                break;
            case CAMERA_POSITION_BACK:
                setImageRes(mButtonFacing, mInterface.iconFrontCamera());
                break;
            case CAMERA_POSITION_UNKNOWN:
            default:
                if (getArguments().getBoolean(CameraIntentKey.DEFAULT_TO_FRONT_FACING, false)) {
                    // Check front facing first
                    if (mInterface.getFrontCamera() != null) {
                        setImageRes(mButtonFacing, mInterface.iconRearCamera());
                        mInterface.setCameraPosition(CAMERA_POSITION_FRONT);
                    } else {
                        setImageRes(mButtonFacing, mInterface.iconFrontCamera());
                        if (mInterface.getBackCamera() != null)
                            mInterface.setCameraPosition(CAMERA_POSITION_BACK);
                        else
                            mInterface.setCameraPosition(CAMERA_POSITION_UNKNOWN);
                    }
                } else {
                    // Check back facing first
                    if (mInterface.getBackCamera() != null) {
                        setImageRes(mButtonFacing, mInterface.iconFrontCamera());
                        mInterface.setCameraPosition(CAMERA_POSITION_BACK);
                    } else {
                        setImageRes(mButtonFacing, mInterface.iconRearCamera());
                        if (mInterface.getFrontCamera() != null)
                            mInterface.setCameraPosition(CAMERA_POSITION_FRONT);
                        else
                            mInterface.setCameraPosition(CAMERA_POSITION_UNKNOWN);
                    }
                }
                break;
        }
        // Choose the sizes for camera preview and video recording
        CameraCharacteristics characteristics = manager.getCameraCharacteristics((String) mInterface.getCurrentCameraId());
        StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        assert map != null;
        // For still image captures, we use the largest available size.
        Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
        // Find out if we need to swap dimension to get the preview size relative to sensor
        // coordinate.
        int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        //noinspection ConstantConditions,ResourceType
        @Degrees.DegreeUnits final int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        @Degrees.DegreeUnits int deviceRotation = Degrees.getDisplayRotation(getActivity());
        mDisplayOrientation = Degrees.getDisplayOrientation(sensorOrientation, deviceRotation, getCurrentCameraPosition() == CAMERA_POSITION_FRONT);
        Log.d("Camera2Fragment", String.format("Orientations: Sensor = %d˚, Device = %d˚, Display = %d˚", sensorOrientation, deviceRotation, mDisplayOrientation));
        if (mInterface.useStillshot()) {
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (sensorOrientation == Degrees.DEGREES_90 || sensorOrientation == Degrees.DEGREES_270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (sensorOrientation == Degrees.DEGREES_0 || sensorOrientation == Degrees.DEGREES_180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e("stillshot", "Display rotation is invalid: " + displayRotation);
            }
            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;
            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }
            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }
            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, 2);
            mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image image = reader.acquireNextImage();
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    final byte[] bytes = new byte[buffer.remaining()];
                    buffer.get(bytes);
                    final File outputPic = getOutputPictureFile();
                    FileOutputStream output = null;
                    try {
                        output = new FileOutputStream(outputPic);
                        output.write(bytes);
                    } catch (IOException e) {
                        e.printStackTrace();
                    } finally {
                        image.close();
                        if (null != output) {
                            try {
                                output.close();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                    Log.d("stillshot", "picture saved to disk - jpeg, size: " + bytes.length);
                    mOutputUri = Uri.fromFile(outputPic).toString();
                    mInterface.onShowStillshot(mOutputUri);
                }
            }, mBackgroundHandler);
        } else {
            mMediaRecorder = new MediaRecorder();
            mVideoSize = chooseVideoSize((BaseCaptureInterface) activity, map.getOutputSizes(MediaRecorder.class));
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, mVideoSize);
        }
        int orientation = VideoStreamView.getScreenOrientation(activity);
        if (orientation == ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE || orientation == ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE) {
            mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }
        mAfAvailable = false;
        int[] afModes = characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
        if (afModes != null) {
            for (int i : afModes) {
                if (i != 0) {
                    mAfAvailable = true;
                    break;
                }
            }
        }
        configureTransform(width, height);
        mInterface.setFlashModes(CameraUtil.getSupportedFlashModes(getActivity(), characteristics));
        onFlashModesLoaded();
        //noinspection ResourceType
        manager.openCamera((String) mInterface.getCurrentCameraId(), mStateCallback, null);
    } catch (CameraAccessException e) {
        throwError(new Exception("Cannot access the camera.", e));
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2 API is used but not supported on the
        // device this code runs on.
        new ErrorDialog().show(getFragmentManager(), "dialog");
    } catch (InterruptedException e) {
        throwError(new Exception("Interrupted while trying to lock camera opening.", e));
    }
}
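openCamera() delegates preview-size selection to a chooseOptimalSize() helper that is not shown above. As a reference point, here is a sketch of that selection logic in the style of Google's Camera2Basic sample; material-camera's actual helper may differ in its details.
// Sketch of a chooseOptimalSize() in the Camera2Basic style (assumed, not the project's code).
static Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
    // Collect supported sizes that match the target aspect ratio and fit under the display cap,
    // split by whether they are at least as large as the view.
    List<Size> bigEnough = new ArrayList<>();
    List<Size> notBigEnough = new ArrayList<>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight && option.getHeight() == option.getWidth() * h / w) {
            if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
                bigEnough.add(option);
            } else {
                notBigEnough.add(option);
            }
        }
    }
    // Prefer the smallest size that is big enough; otherwise fall back to the largest of the rest.
    if (!bigEnough.isEmpty()) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    } else if (!notBigEnough.isEmpty()) {
        return Collections.max(notBigEnough, new CompareSizesByArea());
    } else {
        return choices[0];
    }
}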