use of android.util.Size in project android_frameworks_base by ResurrectionRemix.
the class CameraMetadataTest method testReadWriteSize.
@SmallTest
public void testReadWriteSize() {
    // int32 x n
    checkKeyGetAndSet("android.jpeg.thumbnailSize", Size.class, new Size(123, 456));

    // int32 x 2 x n
    checkKeyGetAndSet("android.scaler.availableJpegSizes", Size[].class,
            new Size[] { new Size(123, 456), new Size(0xDEAD, 0xF00D), new Size(0xF00, 0xB00) });
}
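These metadata keys store android.util.Size values natively as pairs of int32 (width, height), which is what the int32 x n comments refer to. A minimal standalone sketch of that round trip (not the framework marshaler; the class and methods below are hypothetical):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import android.util.Size;

class SizeMarshalSketch {
    // Pack a Size as two native-order int32 values, mirroring the int32 x 2 layout.
    static byte[] marshal(Size s) {
        ByteBuffer buf = ByteBuffer.allocate(8).order(ByteOrder.nativeOrder());
        buf.putInt(s.getWidth()).putInt(s.getHeight());
        return buf.array();
    }

    // Rebuild the Size from the same two int32 values.
    static Size unmarshal(byte[] raw) {
        ByteBuffer buf = ByteBuffer.wrap(raw).order(ByteOrder.nativeOrder());
        return new Size(buf.getInt(), buf.getInt());
    }
}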
use of android.util.Size in project android_frameworks_base by ResurrectionRemix.
the class RequestThreadManager method calculatePictureSize.
/**
 * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
 * than all of the configured {@code JPEG} outputs (by both width and height).
 *
 * <p>If multiple supported JPEG sizes are larger, select the smallest of them which
 * still satisfies the above constraint.</p>
 *
 * <p>As a result, the returned size is guaranteed to be usable without needing
 * to upscale any of the outputs. If only one {@code JPEG} surface is used,
 * then no scaling/cropping is necessary between the taken picture and
 * the {@code JPEG} output surface.</p>
 *
 * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
 * @param callbackSizes the configured size for each surface in {@code callbackOutputs},
 *                      in the same order
 * @param params api1 parameters (used for reading only)
 *
 * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
 *         {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
 *         surfaces.
 */
private Size calculatePictureSize(List<Surface> callbackOutputs, List<Size> callbackSizes,
        Camera.Parameters params) {
    /*
     * Find the largest JPEG size (if any), from the configured outputs:
     * - the api1 picture size should be set to the smallest legal size that's at least as large
     *   as the largest configured JPEG size
     */
    if (callbackOutputs.size() != callbackSizes.size()) {
        throw new IllegalStateException("Input collections must be same length");
    }
    List<Size> configuredJpegSizes = new ArrayList<>();
    Iterator<Size> sizeIterator = callbackSizes.iterator();
    for (Surface callbackSurface : callbackOutputs) {
        Size jpegSize = sizeIterator.next();
        if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) {
            // Ignore non-JPEG callback formats
            continue;
        }
        configuredJpegSizes.add(jpegSize);
    }
    if (!configuredJpegSizes.isEmpty()) {
        /*
         * Find the largest configured JPEG width, and height, independently of the rest.
         *
         * The rest of the JPEG streams can be cropped out of this smallest bounding
         * rectangle.
         */
        int maxConfiguredJpegWidth = -1;
        int maxConfiguredJpegHeight = -1;
        for (Size jpegSize : configuredJpegSizes) {
            maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ?
                    jpegSize.getWidth() : maxConfiguredJpegWidth;
            maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ?
                    jpegSize.getHeight() : maxConfiguredJpegHeight;
        }
        Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);
        List<Size> supportedJpegSizes =
                ParameterUtils.convertSizeList(params.getSupportedPictureSizes());
        /*
         * Find the smallest supported JPEG size that can fit the smallest bounding
         * rectangle for the configured JPEG sizes.
         */
        List<Size> candidateSupportedJpegSizes = new ArrayList<>();
        for (Size supportedJpegSize : supportedJpegSizes) {
            if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth
                    && supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
                candidateSupportedJpegSizes.add(supportedJpegSize);
            }
        }
        if (candidateSupportedJpegSizes.isEmpty()) {
            throw new AssertionError("Could not find any supported JPEG sizes large enough to fit "
                    + smallestBoundJpegSize);
        }
        Size smallestSupportedJpegSize =
                Collections.min(candidateSupportedJpegSizes, new SizeAreaComparator());
        if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
            Log.w(TAG, String.format("configureOutputs - Will need to crop picture %s into "
                    + "smallest bound size %s", smallestSupportedJpegSize, smallestBoundJpegSize));
        }
        return smallestSupportedJpegSize;
    }
    return null;
}
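The selection above reduces to a simple rule: bound every configured JPEG size by the maximum width and maximum height, then pick the smallest supported size (by area) that still covers that bound. A standalone sketch of just that rule, with a hypothetical helper class and without the Camera.Parameters plumbing:

import java.util.List;
import android.util.Size;

class PictureSizeSketch {
    // Returns the smallest supported size (by area) that is at least as wide and as tall
    // as every configured size, or null if nothing fits (the AssertionError case above).
    static Size pickSmallestBoundingSize(List<Size> configured, List<Size> supported) {
        int maxW = 0;
        int maxH = 0;
        for (Size s : configured) {
            maxW = Math.max(maxW, s.getWidth());
            maxH = Math.max(maxH, s.getHeight());
        }
        Size best = null;
        for (Size s : supported) {
            if (s.getWidth() < maxW || s.getHeight() < maxH) {
                continue;
            }
            if (best == null
                    || (long) s.getWidth() * s.getHeight()
                            < (long) best.getWidth() * best.getHeight()) {
                best = s;
            }
        }
        return best;
    }
}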
use of android.util.Size in project android_frameworks_base by ResurrectionRemix.
the class OutputConfiguration method setDeferredSurface.
/**
 * Set the deferred surface to this OutputConfiguration.
 *
 * <p>
 * The deferred surface must be obtained either from {@link android.view.SurfaceView} by
 * calling {@link android.view.SurfaceHolder#getSurface}, or from
 * {@link android.graphics.SurfaceTexture} via
 * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}. After the deferred
 * surface is set, the application must finish the deferred surface configuration via
 * {@link CameraCaptureSession#finishDeferredConfiguration} before submitting a request with
 * this surface target.
 * </p>
 *
 * @param surface The deferred surface to be set.
 * @throws IllegalArgumentException if the Surface is invalid.
 * @throws IllegalStateException if a Surface was already set to this deferred
 *         OutputConfiguration.
 * @hide
 */
public void setDeferredSurface(@NonNull Surface surface) {
    checkNotNull(surface, "Surface must not be null");
    if (mSurface != null) {
        throw new IllegalStateException("Deferred surface is already set!");
    }
    // This will throw IAE if the surface was abandoned.
    Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
    if (!surfaceSize.equals(mConfiguredSize)) {
        Log.w(TAG, "Deferred surface size " + surfaceSize
                + " is different from the pre-configured size " + mConfiguredSize
                + ", the pre-configured size will be used.");
    }
    mSurface = surface;
}
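A hedged usage sketch following the Javadoc above: obtain the deferred Surface from a SurfaceTexture and attach it. The helper class, the 1920x1080 size, and the assumption that the caller has framework access (setDeferredSurface is @hide) are all illustrative:

import android.graphics.SurfaceTexture;
import android.hardware.camera2.params.OutputConfiguration;
import android.view.Surface;

class DeferredSurfaceSketch {
    // `config` is assumed to have been created with a pre-configured size of 1920x1080
    // but no Surface attached yet.
    static Surface attach(OutputConfiguration config, SurfaceTexture texture) {
        // Match the pre-configured size so the size-mismatch warning above is not logged.
        texture.setDefaultBufferSize(1920, 1080);
        Surface deferred = new Surface(texture);
        config.setDeferredSurface(deferred);
        return deferred;
    }
}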
use of android.util.Size in project android_frameworks_base by ResurrectionRemix.
the class SurfaceTextureRenderer method configureSurfaces.
/**
 * Set a collection of output {@link Surface}s that can be drawn to.
 *
 * @param surfaces a {@link Collection} of {@link Pair}s of surfaces and their configured sizes.
 */
public void configureSurfaces(Collection<Pair<Surface, Size>> surfaces) {
    releaseEGLContext();
    if (surfaces == null || surfaces.size() == 0) {
        Log.w(TAG, "No output surfaces configured for GL drawing.");
        return;
    }
    for (Pair<Surface, Size> p : surfaces) {
        Surface s = p.first;
        Size surfaceSize = p.second;
        // If pixel conversions aren't handled by egl, use a pbuffer
        try {
            EGLSurfaceHolder holder = new EGLSurfaceHolder();
            holder.surface = s;
            holder.width = surfaceSize.getWidth();
            holder.height = surfaceSize.getHeight();
            if (LegacyCameraDevice.needsConversion(s)) {
                mConversionSurfaces.add(holder);
                // LegacyCameraDevice is the producer of surfaces if it's not handled by EGL,
                // so LegacyCameraDevice needs to connect to the surfaces.
                LegacyCameraDevice.connectSurface(s);
            } else {
                mSurfaces.add(holder);
            }
        } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
            Log.w(TAG, "Surface abandoned, skipping configuration... ", e);
        }
    }
    // Set up egl display
    configureEGLContext();
    // Set up regular egl surfaces if needed
    if (mSurfaces.size() > 0) {
        configureEGLOutputSurfaces(mSurfaces);
    }
    // Set up pbuffer surface if needed
    if (mConversionSurfaces.size() > 0) {
        configureEGLPbufferSurfaces(mConversionSurfaces);
    }
    makeCurrent((mSurfaces.size() > 0) ? mSurfaces.get(0).eglSurface
            : mConversionSurfaces.get(0).eglSurface);
    initializeGLState();
    mSurfaceTexture = new SurfaceTexture(getTextureId());
    // Set up performance tracking if enabled
    if (SystemProperties.getBoolean(LEGACY_PERF_PROPERTY, false)) {
        setupGlTiming();
    }
}
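Callers hand this method each output Surface paired with its configured Size. A small illustrative sketch (the class name and sizes are placeholders) of building that collection:

import java.util.ArrayList;
import java.util.Collection;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;

class ConfigureSurfacesSketch {
    // Pair each output Surface with the Size it was configured for before passing the
    // collection to SurfaceTextureRenderer.configureSurfaces(...).
    static Collection<Pair<Surface, Size>> buildInput(Surface preview, Surface callback) {
        Collection<Pair<Surface, Size>> surfaces = new ArrayList<>();
        surfaces.add(Pair.create(preview, new Size(1280, 720)));
        surfaces.add(Pair.create(callback, new Size(640, 480)));
        return surfaces;
    }
}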
use of android.util.Size in project android_frameworks_base by ResurrectionRemix.
the class SurfaceTextureRenderer method drawFrame.
private void drawFrame(SurfaceTexture st, int width, int height, int flipType) {
    checkGlError("onDrawFrame start");
    st.getTransformMatrix(mSTMatrix);
    Matrix.setIdentityM(mMVPMatrix, /*smOffset*/0);
    // Find intermediate buffer dimensions
    Size dimens;
    try {
        dimens = LegacyCameraDevice.getTextureSize(st);
    } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
        // Should never hit this.
        throw new IllegalStateException("Surface abandoned, skipping drawFrame...", e);
    }
    float texWidth = dimens.getWidth();
    float texHeight = dimens.getHeight();
    if (texWidth <= 0 || texHeight <= 0) {
        throw new IllegalStateException("Illegal intermediate texture with dimension of 0");
    }
    // Letterbox or pillar-box output dimensions into intermediate dimensions.
    RectF intermediate = new RectF(/*left*/0, /*top*/0, /*right*/texWidth, /*bottom*/texHeight);
    RectF output = new RectF(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
    android.graphics.Matrix boxingXform = new android.graphics.Matrix();
    boxingXform.setRectToRect(output, intermediate, android.graphics.Matrix.ScaleToFit.CENTER);
    boxingXform.mapRect(output);
    // Find scaling factor from pillar-boxed/letter-boxed output dimensions to intermediate
    // buffer dimensions.
    float scaleX = intermediate.width() / output.width();
    float scaleY = intermediate.height() / output.height();
    // Intermediate texture is implicitly scaled to 'fill' the output dimensions in clip space
    // coordinates in the shader. To avoid stretching, we need to scale the larger dimension
    // of the intermediate buffer so that the output buffer is actually letter-boxed
    // or pillar-boxed into the intermediate buffer after clipping.
    Matrix.scaleM(mMVPMatrix, /*offset*/0, /*x*/scaleX, /*y*/scaleY, /*z*/1);
    if (DEBUG) {
        Log.d(TAG, "Scaling factors (S_x = " + scaleX + ",S_y = " + scaleY + ") used for "
                + width + "x" + height + " surface, intermediate buffer size is "
                + texWidth + "x" + texHeight);
    }
    // Set viewport to be output buffer dimensions
    GLES20.glViewport(0, 0, width, height);
    if (DEBUG) {
        GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
    }
    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
    FloatBuffer triangleVertices;
    switch (flipType) {
        case FLIP_TYPE_HORIZONTAL:
            triangleVertices = mHorizontalFlipTriangleVertices;
            break;
        case FLIP_TYPE_VERTICAL:
            triangleVertices = mVerticalFlipTriangleVertices;
            break;
        case FLIP_TYPE_BOTH:
            triangleVertices = mBothFlipTriangleVertices;
            break;
        default:
            triangleVertices = mRegularTriangleVertices;
            break;
    }
    triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, VERTEX_POS_SIZE, GLES20.GL_FLOAT,
            /*normalized*/false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");
    triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    GLES20.glVertexAttribPointer(maTextureHandle, VERTEX_UV_SIZE, GLES20.GL_FLOAT,
            /*normalized*/false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, /*count*/1, /*transpose*/false, mMVPMatrix,
            /*offset*/0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, /*count*/1, /*transpose*/false, mSTMatrix,
            /*offset*/0);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /*offset*/0, /*count*/4);
    checkGlError("glDrawArrays");
}
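The scale factors computed above come from fitting the output rectangle inside the intermediate texture rectangle while preserving aspect ratio, then measuring how much each axis of the intermediate buffer exceeds the fitted output. A standalone sketch of that arithmetic (the helper class is hypothetical, not framework code):

import android.util.Size;

class LetterboxSketch {
    // Returns { scaleX, scaleY } such that scaling the intermediate texture by these
    // factors letter-boxes or pillar-boxes the output inside it, matching the values
    // produced by setRectToRect(..., ScaleToFit.CENTER) and mapRect above.
    static float[] boxingScale(Size texture, Size output) {
        float fit = Math.min((float) texture.getWidth() / output.getWidth(),
                (float) texture.getHeight() / output.getHeight());
        float fittedWidth = output.getWidth() * fit;
        float fittedHeight = output.getHeight() * fit;
        return new float[] {
                texture.getWidth() / fittedWidth,
                texture.getHeight() / fittedHeight
        };
    }
}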