Use of android.util.Size in project android_frameworks_base by AOSPA: class StreamConfigurationMap, method appendOutputsString.
private void appendOutputsString(StringBuilder sb) {
    sb.append("Outputs(");
    int[] formats = getOutputFormats();
    for (int format : formats) {
        Size[] sizes = getOutputSizes(format);
        for (Size size : sizes) {
            long minFrameDuration = getOutputMinFrameDuration(format, size);
            long stallDuration = getOutputStallDuration(format, size);
            sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, "
                    + "stall:%d], ", size.getWidth(), size.getHeight(),
                    formatToString(format), format, minFrameDuration, stallDuration));
        }
    }
    // Remove the pending ", "
    if (sb.charAt(sb.length() - 1) == ' ') {
        sb.delete(sb.length() - 2, sb.length());
    }
    sb.append(")");
}
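For context, the same Size accessors (getWidth() and getHeight()) can be used from application code to build a similar summary via the public API. The sketch below assumes a CameraCharacteristics instance is already available; describeJpegOutputs is a hypothetical helper name, not part of the framework.

import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Size;

// Hypothetical helper: summarize the JPEG output sizes a camera reports, using the same
// Size accessors that appendOutputsString relies on.
static String describeJpegOutputs(CameraCharacteristics characteristics) {
    StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    StringBuilder sb = new StringBuilder("JPEG outputs: ");
    for (Size size : map.getOutputSizes(ImageFormat.JPEG)) {
        sb.append(size.getWidth()).append('x').append(size.getHeight()).append(", ");
    }
    // Remove the trailing ", ", mirroring appendOutputsString above.
    if (sb.charAt(sb.length() - 1) == ' ') {
        sb.delete(sb.length() - 2, sb.length());
    }
    return sb.toString();
}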
Use of android.util.Size in project android_frameworks_base by AOSPA: class StreamConfigurationMap, method getInternalFormatSizes.
private Size[] getInternalFormatSizes(int format, int dataspace, boolean output, boolean highRes) {
    // All depth formats are non-high-res.
    if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
        return new Size[0];
    }
    SparseIntArray formatsMap =
            !output ? mInputFormats :
            dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
            highRes ? mHighResOutputFormats :
            mOutputFormats;
    int sizesCount = formatsMap.get(format);
    if (((!output || dataspace == HAL_DATASPACE_DEPTH) && sizesCount == 0)
            || (output && dataspace != HAL_DATASPACE_DEPTH && mAllOutputFormats.get(format) == 0)) {
        // Only throw if this is really not supported at all
        throw new IllegalArgumentException("format not available");
    }
    Size[] sizes = new Size[sizesCount];
    int sizeIndex = 0;
    StreamConfiguration[] configurations = (dataspace == HAL_DATASPACE_DEPTH)
            ? mDepthConfigurations : mConfigurations;
    StreamConfigurationDuration[] minFrameDurations = (dataspace == HAL_DATASPACE_DEPTH)
            ? mDepthMinFrameDurations : mMinFrameDurations;
    for (StreamConfiguration config : configurations) {
        int fmt = config.getFormat();
        if (fmt == format && config.isOutput() == output) {
            if (output && mListHighResolution) {
                // Filter slow high-res output formats; include for
                // highRes, remove for !highRes
                long duration = 0;
                for (int i = 0; i < minFrameDurations.length; i++) {
                    StreamConfigurationDuration d = minFrameDurations[i];
                    if (d.getFormat() == fmt
                            && d.getWidth() == config.getSize().getWidth()
                            && d.getHeight() == config.getSize().getHeight()) {
                        duration = d.getDuration();
                        break;
                    }
                }
                if (dataspace != HAL_DATASPACE_DEPTH
                        && highRes != (duration > DURATION_20FPS_NS)) {
                    continue;
                }
            }
            sizes[sizeIndex++] = config.getSize();
        }
    }
    if (sizeIndex != sizesCount) {
        throw new AssertionError("Too few sizes (expected " + sizesCount + ", actual "
                + sizeIndex + ")");
    }
    return sizes;
}
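The array returned above is not guaranteed to be sorted. A caller that needs, for example, the largest supported resolution typically compares sizes by pixel area, as in this sketch (largestByArea is a hypothetical helper; long arithmetic avoids int overflow for very large sensors):

import android.util.Size;
import java.util.Arrays;
import java.util.Comparator;

// Hypothetical helper: pick the Size with the largest pixel area from an array such as
// the one returned by getOutputSizes(format).
static Size largestByArea(Size[] sizes) {
    return Arrays.stream(sizes)
            .max(Comparator.comparingLong(s -> (long) s.getWidth() * s.getHeight()))
            .orElseThrow(() -> new IllegalArgumentException("empty size array"));
}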
Use of android.util.Size in project android_frameworks_base by AOSPA: class Camera2CaptureRequestTest, method testAeModeAndLock.
/**
* Test AE mode and lock.
*
* <p>
* For AE lock, when it is locked, exposure parameters shouldn't be changed.
* For AE modes, each mode should satisfy the per frame controls defined in
* API specifications.
* </p>
*/
public void testAeModeAndLock() throws Exception {
    for (int i = 0; i < mCameraIds.length; i++) {
        try {
            openDevice(mCameraIds[i]);
            if (!mStaticInfo.isColorOutputSupported()) {
                Log.i(TAG, "Camera " + mCameraIds[i]
                        + " does not support color outputs, skipping");
                continue;
            }
            // Max preview size.
            Size maxPreviewSz = mOrderedPreviewSizes.get(0);
            // Update preview surface with given size for all sub-tests.
            updatePreviewSurface(maxPreviewSz);
            // Test iteration starts...
            for (int iteration = 0; iteration < getIterationCount(); ++iteration) {
                Log.v(TAG, String.format("AE mode and lock: %d/%d", iteration + 1,
                        getIterationCount()));
                // Test aeMode and lock
                int[] aeModes = mStaticInfo.getAeAvailableModesChecked();
                for (int mode : aeModes) {
                    aeModeAndLockTestByMode(mode);
                }
                getResultPrinter().printStatus(getIterationCount(), iteration + 1,
                        mCameraIds[i]);
                Thread.sleep(getTestWaitIntervalMs());
            }
        } finally {
            closeDevice();
        }
    }
}
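mOrderedPreviewSizes.get(0) above relies on the test infrastructure keeping preview sizes ordered largest-first. A rough sketch of how such a list could be built from a camera's output sizes is shown below; orderPreviewSizes and the 1080p cap are assumptions for illustration, not the actual test utility.

import android.util.Size;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

// Hypothetical sketch: collect sizes no larger than 1080p (an assumed bound) and sort
// them by descending pixel area, so index 0 is the maximum preview size.
static List<Size> orderPreviewSizes(Size[] outputSizes) {
    List<Size> ordered = new ArrayList<>();
    for (Size s : outputSizes) {
        if (s.getWidth() <= 1920 && s.getHeight() <= 1080) {
            ordered.add(s);
        }
    }
    ordered.sort(Comparator.comparingLong(
            (Size s) -> (long) s.getWidth() * s.getHeight()).reversed());
    return ordered;
}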
Use of android.util.Size in project android_frameworks_base by crdroidandroid: class ImageUtils, method imageCopy.
/**
* <p>
* Copy source image data to destination Image.
* </p>
* <p>
 * Only copies between two non-{@link ImageFormat#PRIVATE PRIVATE} format
 * images with the same properties (format, size, etc.) are supported. The data from the
 * source image will be copied into the byteBuffers of the destination Image
 * starting from position zero, and the destination image will be rewound to
 * position zero after the copy is done.
* </p>
*
* @param src The source image to be copied from.
* @param dst The destination image to be copied to.
 * @throws IllegalArgumentException If the source and destination images
 * have different formats, or one of the images is not copyable.
*/
public static void imageCopy(Image src, Image dst) {
    if (src == null || dst == null) {
        throw new IllegalArgumentException("Images should be non-null");
    }
    if (src.getFormat() != dst.getFormat()) {
        throw new IllegalArgumentException("Src and dst images should have the same format");
    }
    if (src.getFormat() == ImageFormat.PRIVATE || dst.getFormat() == ImageFormat.PRIVATE) {
        throw new IllegalArgumentException("PRIVATE format images are not copyable");
    }
    if (src.getFormat() == ImageFormat.RAW_PRIVATE) {
        throw new IllegalArgumentException(
                "Copy of RAW_OPAQUE format has not been implemented");
    }
    if (!(dst.getOwner() instanceof ImageWriter)) {
        throw new IllegalArgumentException("Destination image is not from ImageWriter. Only"
                + " the images from ImageWriter are writable");
    }
    Size srcSize = new Size(src.getWidth(), src.getHeight());
    Size dstSize = new Size(dst.getWidth(), dst.getHeight());
    if (!srcSize.equals(dstSize)) {
        throw new IllegalArgumentException("source image size " + srcSize + " is different"
                + " with destination image size " + dstSize);
    }
    Plane[] srcPlanes = src.getPlanes();
    Plane[] dstPlanes = dst.getPlanes();
    ByteBuffer srcBuffer = null;
    ByteBuffer dstBuffer = null;
    for (int i = 0; i < srcPlanes.length; i++) {
        int srcRowStride = srcPlanes[i].getRowStride();
        int dstRowStride = dstPlanes[i].getRowStride();
        srcBuffer = srcPlanes[i].getBuffer();
        dstBuffer = dstPlanes[i].getBuffer();
        if (!(srcBuffer.isDirect() && dstBuffer.isDirect())) {
            throw new IllegalArgumentException("Source and destination ByteBuffers must be"
                    + " direct byteBuffer!");
        }
        if (srcPlanes[i].getPixelStride() != dstPlanes[i].getPixelStride()) {
            throw new IllegalArgumentException("Source plane image pixel stride "
                    + srcPlanes[i].getPixelStride()
                    + " must be same as destination image pixel stride "
                    + dstPlanes[i].getPixelStride());
        }
        int srcPos = srcBuffer.position();
        srcBuffer.rewind();
        dstBuffer.rewind();
        if (srcRowStride == dstRowStride) {
            // Fast path, just copy the content of the byteBuffer all together.
            dstBuffer.put(srcBuffer);
        } else {
            // Source and destination images may have different alignment requirements,
            // therefore may have different strides. Copy row by row for such case.
            int srcOffset = srcBuffer.position();
            int dstOffset = dstBuffer.position();
            Size effectivePlaneSize = getEffectivePlaneSizeForImage(src, i);
            int srcByteCount = effectivePlaneSize.getWidth() * srcPlanes[i].getPixelStride();
            for (int row = 0; row < effectivePlaneSize.getHeight(); row++) {
                if (row == effectivePlaneSize.getHeight() - 1) {
                    // Special case for NV21 backed YUV420_888: need to handle the last row
                    // carefully to avoid memory corruption. Check if we have enough bytes
                    // to copy.
                    int remainingBytes = srcBuffer.remaining() - srcOffset;
                    if (srcByteCount > remainingBytes) {
                        srcByteCount = remainingBytes;
                    }
                }
                directByteBufferCopy(srcBuffer, srcOffset, dstBuffer, dstOffset, srcByteCount);
                srcOffset += srcRowStride;
                dstOffset += dstRowStride;
            }
        }
        srcBuffer.position(srcPos);
        dstBuffer.rewind();
    }
}
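The row-by-row branch depends on getEffectivePlaneSizeForImage, which accounts for chroma subsampling. A sketch of what such a helper typically computes for YUV_420_888 is given below; effectivePlaneSize is a hypothetical stand-in, and treating all other formats as full size is an assumption, not the framework's actual implementation.

import android.graphics.ImageFormat;
import android.media.Image;
import android.util.Size;

// Hypothetical sketch: for YUV_420_888 the luma plane (index 0) is full resolution and
// the chroma planes are subsampled by two in each dimension; other formats are treated
// as full size here (an assumption for illustration only).
static Size effectivePlaneSize(Image image, int planeIndex) {
    if (image.getFormat() == ImageFormat.YUV_420_888 && planeIndex > 0) {
        return new Size(image.getWidth() / 2, image.getHeight() / 2);
    }
    return new Size(image.getWidth(), image.getHeight());
}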
Use of android.util.Size in project android_frameworks_base by crdroidandroid: class Camera2StillCaptureTest, method fullRawCaptureTestByCamera.
private void fullRawCaptureTestByCamera() throws Exception {
    Size maxPreviewSz = mOrderedPreviewSizes.get(0);
    Size maxStillSz = mOrderedStillSizes.get(0);
    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    SimpleImageReaderListener jpegListener = new SimpleImageReaderListener();
    SimpleImageReaderListener rawListener = new SimpleImageReaderListener();
    Size size = mStaticInfo.getRawDimensChecked();
    if (VERBOSE) {
        Log.v(TAG, "Testing multi capture with size " + size.toString()
                + ", preview size " + maxPreviewSz);
    }
    // Prepare raw capture and start preview.
    CaptureRequest.Builder previewBuilder =
            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    CaptureRequest.Builder multiBuilder =
            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    ImageReader rawReader = null;
    ImageReader jpegReader = null;
    try {
        // Create ImageReaders.
        rawReader = makeImageReader(size, ImageFormat.RAW_SENSOR, MAX_READER_IMAGES,
                rawListener, mHandler);
        jpegReader = makeImageReader(maxStillSz, ImageFormat.JPEG, MAX_READER_IMAGES,
                jpegListener, mHandler);
        updatePreviewSurface(maxPreviewSz);
        // Configure output streams with preview and jpeg streams.
        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(rawReader.getSurface());
        outputSurfaces.add(jpegReader.getSurface());
        outputSurfaces.add(mPreviewSurface);
        mSessionListener = new BlockingSessionCallback();
        mSession = configureCameraSession(mCamera, outputSurfaces, mSessionListener, mHandler);
        // Configure the requests.
        previewBuilder.addTarget(mPreviewSurface);
        multiBuilder.addTarget(mPreviewSurface);
        multiBuilder.addTarget(rawReader.getSurface());
        multiBuilder.addTarget(jpegReader.getSurface());
        // Start preview.
        mSession.setRepeatingRequest(previewBuilder.build(), null, mHandler);
        // Poor man's 3A: wait 3 seconds for AE/AF (if any) to settle.
        // TODO: Do proper 3A trigger and lock (see testTakePictureTest).
        Thread.sleep(3000);
        multiBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
        CaptureRequest multiRequest = multiBuilder.build();
        mSession.capture(multiRequest, resultListener, mHandler);
        CaptureResult result = resultListener.getCaptureResultForRequest(multiRequest,
                NUM_RESULTS_WAIT_TIMEOUT);
        Image jpegImage = jpegListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
        basicValidateJpegImage(jpegImage, maxStillSz);
        Image rawImage = rawListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
        validateRaw16Image(rawImage, size);
        verifyRawCaptureResult(multiRequest, result);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        try (DngCreator dngCreator = new DngCreator(mStaticInfo.getCharacteristics(), result)) {
            dngCreator.writeImage(outputStream, rawImage);
        }
        if (DEBUG) {
            byte[] rawBuffer = outputStream.toByteArray();
            String rawFileName = DEBUG_FILE_NAME_BASE + "/raw16_" + TAG + size.toString()
                    + "_cam_" + mCamera.getId() + ".dng";
            Log.d(TAG, "Dump raw file into " + rawFileName);
            dumpFile(rawFileName, rawBuffer);
            byte[] jpegBuffer = getDataFromImage(jpegImage);
            String jpegFileName = DEBUG_FILE_NAME_BASE + "/jpeg_" + TAG + size.toString()
                    + "_cam_" + mCamera.getId() + ".jpg";
            Log.d(TAG, "Dump jpeg file into " + jpegFileName);
            dumpFile(jpegFileName, jpegBuffer);
        }
        stopPreview();
    } finally {
        CameraTestUtils.closeImageReader(rawReader);
        CameraTestUtils.closeImageReader(jpegReader);
        rawReader = null;
        jpegReader = null;
    }
}
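makeImageReader above is a test utility; in terms of the public API, the Size simply supplies the buffer dimensions for ImageReader.newInstance. The sketch below shows one plausible shape for such a helper (the name and parameters are assumptions mirroring the call sites above, not the actual CameraTestUtils code):

import android.media.ImageReader;
import android.os.Handler;
import android.util.Size;

// Hypothetical sketch of a makeImageReader-style helper: the Size provides the width and
// height for the reader's buffers, and the listener is delivered on the given handler.
static ImageReader makeImageReader(Size size, int format, int maxImages,
        ImageReader.OnImageAvailableListener listener, Handler handler) {
    ImageReader reader = ImageReader.newInstance(
            size.getWidth(), size.getHeight(), format, maxImages);
    reader.setOnImageAvailableListener(listener, handler);
    return reader;
}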