Search in sources:

Example 46 with Size

use of android.util.Size in project android_frameworks_base by ResurrectionRemix.

In the class SurfaceTextureRenderer, the method configureSurfaces:

/**
     * Set a collection of output {@link Surface}s that can be drawn to.
     *
     * @param surfaces a {@link Collection} of surfaces.
     */
public void configureSurfaces(Collection<Pair<Surface, Size>> surfaces) {
    // Tear down any previous EGL state before reconfiguring.
    releaseEGLContext();
    if (surfaces == null || surfaces.size() == 0) {
        Log.w(TAG, "No output surfaces configured for GL drawing.");
        return;
    }
    for (Pair<Surface, Size> p : surfaces) {
        Surface s = p.first;
        Size surfaceSize = p.second;
        // If pixel conversions aren't handled by egl, use a pbuffer
        try {
            EGLSurfaceHolder holder = new EGLSurfaceHolder();
            holder.surface = s;
            holder.width = surfaceSize.getWidth();
            holder.height = surfaceSize.getHeight();
            if (LegacyCameraDevice.needsConversion(s)) {
                mConversionSurfaces.add(holder);
                // LegacyCameraDevice is the producer of surfaces if it's not handled by EGL,
                // so LegacyCameraDevice needs to connect to the surfaces.
                LegacyCameraDevice.connectSurface(s);
            } else {
                mSurfaces.add(holder);
            }
        } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
            // Best-effort: skip abandoned surfaces rather than failing the whole config.
            Log.w(TAG, "Surface abandoned, skipping configuration... ", e);
        }
    }
    // FIX: if every surface was abandoned above, both lists are empty and the
    // makeCurrent(...) call below would throw IndexOutOfBoundsException on
    // mConversionSurfaces.get(0). Bail out instead.
    if (mSurfaces.isEmpty() && mConversionSurfaces.isEmpty()) {
        Log.w(TAG, "All output surfaces were abandoned, skipping GL configuration.");
        return;
    }
    // Set up egl display
    configureEGLContext();
    // Set up regular egl surfaces if needed
    if (mSurfaces.size() > 0) {
        configureEGLOutputSurfaces(mSurfaces);
    }
    // Set up pbuffer surface if needed
    if (mConversionSurfaces.size() > 0) {
        configureEGLPbufferSurfaces(mConversionSurfaces);
    }
    // Make one of the configured EGL surfaces current so GL state/texture setup is valid.
    makeCurrent((mSurfaces.size() > 0) ? mSurfaces.get(0).eglSurface : mConversionSurfaces.get(0).eglSurface);
    initializeGLState();
    mSurfaceTexture = new SurfaceTexture(getTextureId());
    // Set up performance tracking if enabled
    if (SystemProperties.getBoolean(LEGACY_PERF_PROPERTY, false)) {
        setupGlTiming();
    }
}
Also used : SurfaceTexture(android.graphics.SurfaceTexture) Size(android.util.Size) EGLSurface(android.opengl.EGLSurface) Surface(android.view.Surface)

Example 47 with Size

use of android.util.Size in project android_frameworks_base by ResurrectionRemix.

In the class SurfaceTextureRenderer, the method drawFrame:

/**
 * Draws the current frame of {@code st} into the currently-bound output surface,
 * letter-/pillar-boxing the intermediate texture into the given output dimensions
 * and applying the requested flip.
 *
 * @param st the {@link SurfaceTexture} supplying the external OES texture to draw.
 * @param width output buffer width in pixels.
 * @param height output buffer height in pixels.
 * @param flipType one of the FLIP_TYPE_* constants selecting a pre-built vertex set.
 * @throws IllegalStateException if the texture size cannot be queried or is degenerate.
 */
private void drawFrame(SurfaceTexture st, int width, int height, int flipType) {
    checkGlError("onDrawFrame start");
    // Texture-coordinate transform supplied by the SurfaceTexture for this frame.
    st.getTransformMatrix(mSTMatrix);
    Matrix.setIdentityM(mMVPMatrix, /*smOffset*/
    0);
    // Find intermediate buffer dimensions
    Size dimens;
    try {
        dimens = LegacyCameraDevice.getTextureSize(st);
    } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
        // Should never hit this.
        throw new IllegalStateException("Surface abandoned, skipping drawFrame...", e);
    }
    float texWidth = dimens.getWidth();
    float texHeight = dimens.getHeight();
    if (texWidth <= 0 || texHeight <= 0) {
        throw new IllegalStateException("Illegal intermediate texture with dimension of 0");
    }
    // Letterbox or pillar-box output dimensions into intermediate dimensions.
    RectF intermediate = new RectF(/*left*/
    0, /*top*/
    0, /*right*/
    texWidth, /*bottom*/
    texHeight);
    RectF output = new RectF(/*left*/
    0, /*top*/
    0, /*right*/
    width, /*bottom*/
    height);
    android.graphics.Matrix boxingXform = new android.graphics.Matrix();
    // CENTER fit scales 'output' uniformly to fit inside 'intermediate', centered.
    boxingXform.setRectToRect(output, intermediate, android.graphics.Matrix.ScaleToFit.CENTER);
    boxingXform.mapRect(output);
    // Find scaling factor from pillar-boxed/letter-boxed output dimensions to intermediate
    // buffer dimensions.
    float scaleX = intermediate.width() / output.width();
    float scaleY = intermediate.height() / output.height();
    // Intermediate texture is implicitly scaled to 'fill' the output dimensions in clip space
    // coordinates in the shader.  To avoid stretching, we need to scale the larger dimension
    // of the intermediate buffer so that the output buffer is actually letter-boxed
    // or pillar-boxed into the intermediate buffer after clipping.
    Matrix.scaleM(mMVPMatrix, /*offset*/
    0, /*x*/
    scaleX, /*y*/
    scaleY, /*z*/
    1);
    if (DEBUG) {
        Log.d(TAG, "Scaling factors (S_x = " + scaleX + ",S_y = " + scaleY + ") used for " + width + "x" + height + " surface, intermediate buffer size is " + texWidth + "x" + texHeight);
    }
    // Set viewport to be output buffer dimensions
    GLES20.glViewport(0, 0, width, height);
    if (DEBUG) {
        // Red clear makes un-drawn regions obvious during debugging.
        GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
    }
    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    // Camera frames arrive as an external OES texture, not GL_TEXTURE_2D.
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
    // Select the pre-built vertex buffer whose UVs encode the requested mirroring.
    FloatBuffer triangleVertices;
    switch(flipType) {
        case FLIP_TYPE_HORIZONTAL:
            triangleVertices = mHorizontalFlipTriangleVertices;
            break;
        case FLIP_TYPE_VERTICAL:
            triangleVertices = mVerticalFlipTriangleVertices;
            break;
        case FLIP_TYPE_BOTH:
            triangleVertices = mBothFlipTriangleVertices;
            break;
        default:
            triangleVertices = mRegularTriangleVertices;
            break;
    }
    // Wire up position attribute from the interleaved vertex buffer.
    triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, VERTEX_POS_SIZE, GLES20.GL_FLOAT, /*normalized*/
    false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");
    // Wire up texture-coordinate attribute from the same interleaved buffer.
    triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    GLES20.glVertexAttribPointer(maTextureHandle, VERTEX_UV_SIZE, GLES20.GL_FLOAT, /*normalized*/
    false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");
    // Upload the model-view-projection and surface-texture transform uniforms.
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, /*count*/
    1, /*transpose*/
    false, mMVPMatrix, /*offset*/
    0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, /*count*/
    1, /*transpose*/
    false, mSTMatrix, /*offset*/
    0);
    // Full-screen quad as a 4-vertex triangle strip.
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /*offset*/
    0, /*count*/
    4);
    checkGlError("glDrawArrays");
}
Also used : RectF(android.graphics.RectF) Matrix(android.opengl.Matrix) Size(android.util.Size) FloatBuffer(java.nio.FloatBuffer)

Example 48 with Size

use of android.util.Size in project android_frameworks_base by ResurrectionRemix.

In the class RequestThreadManager, the method configureOutputs:

/**
 * Reconfigures the legacy camera pipeline for a new set of output surfaces.
 *
 * Stops any running preview, quiesces the GL thread, disconnects old callback
 * surfaces, then sorts the new outputs into JPEG-callback vs. preview surfaces,
 * selects an intermediate preview buffer size and a picture size, and restarts
 * the GL thread with the new preview configuration.
 *
 * @param outputs pairs of (Surface, requested Size); may be null to configure
 *                no outputs.
 */
private void configureOutputs(Collection<Pair<Surface, Size>> outputs) {
    if (DEBUG) {
        String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces");
        Log.d(TAG, "configureOutputs with " + outputsStr);
    }
    try {
        stopPreview();
    } catch (RuntimeException e) {
        // Device-level failure: report to the framework and abort configuration.
        Log.e(TAG, "Received device exception in configure call: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        return;
    }
    /*
         * Try to release the previous preview's surface texture earlier if we end up
         * using a different one; this also reduces the likelihood of getting into a deadlock
         * when disconnecting from the old previous texture at a later time.
         */
    try {
        mCamera.setPreviewTexture(/*surfaceTexture*/
        null);
    } catch (IOException e) {
        // Non-fatal: continue, but the old texture may be released late.
        Log.w(TAG, "Failed to clear prior SurfaceTexture, may cause GL deadlock: ", e);
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception in configure call: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        return;
    }
    // Quiesce the GL thread before tearing down surfaces it may be drawing to.
    if (mGLThreadManager != null) {
        mGLThreadManager.waitUntilStarted();
        mGLThreadManager.ignoreNewFrames();
        mGLThreadManager.waitUntilIdle();
    }
    resetJpegSurfaceFormats(mCallbackOutputs);
    // Disconnect previously-connected callback surfaces (best-effort).
    for (Surface s : mCallbackOutputs) {
        try {
            LegacyCameraDevice.disconnectSurface(s);
        } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
            Log.w(TAG, "Surface abandoned, skipping...", e);
        }
    }
    // Reset all output bookkeeping before classifying the new surfaces.
    mPreviewOutputs.clear();
    mCallbackOutputs.clear();
    mJpegSurfaceIds.clear();
    mPreviewTexture = null;
    List<Size> previewOutputSizes = new ArrayList<>();
    List<Size> callbackOutputSizes = new ArrayList<>();
    int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING);
    int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    if (outputs != null) {
        // Sort each output into either the JPEG-callback path or the preview path,
        // keeping the size lists index-aligned with the surface lists.
        for (Pair<Surface, Size> outPair : outputs) {
            Surface s = outPair.first;
            Size outSize = outPair.second;
            try {
                int format = LegacyCameraDevice.detectSurfaceType(s);
                LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation);
                switch(format) {
                    case CameraMetadataNative.NATIVE_JPEG_FORMAT:
                        if (USE_BLOB_FORMAT_OVERRIDE) {
                            // Override to RGBA_8888 format.
                            LegacyCameraDevice.setSurfaceFormat(s, LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);
                        }
                        mJpegSurfaceIds.add(LegacyCameraDevice.getSurfaceId(s));
                        mCallbackOutputs.add(s);
                        callbackOutputSizes.add(outSize);
                        // LegacyCameraDevice is the producer of JPEG output surfaces
                        // so LegacyCameraDevice needs to connect to the surfaces.
                        LegacyCameraDevice.connectSurface(s);
                        break;
                    default:
                        LegacyCameraDevice.setScalingMode(s, LegacyCameraDevice.NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
                        mPreviewOutputs.add(s);
                        previewOutputSizes.add(outSize);
                        break;
                }
            } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.w(TAG, "Surface abandoned, skipping...", e);
            }
        }
    }
    try {
        mParams = mCamera.getParameters();
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        return;
    }
    List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange();
    int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
    if (DEBUG) {
        Log.d(TAG, "doPreviewCapture - Selected range [" + bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," + bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
    }
    mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
    Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs, callbackOutputSizes, mParams);
    if (previewOutputSizes.size() > 0) {
        Size largestOutput = SizeAreaComparator.findLargestByArea(previewOutputSizes);
        // Find largest jpeg dimension - assume to have the same aspect ratio as sensor.
        Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams);
        Size chosenJpegDimen = (smallestSupportedJpegSize != null) ? smallestSupportedJpegSize : largestJpegDimen;
        List<Size> supportedPreviewSizes = ParameterUtils.convertSizeList(mParams.getSupportedPreviewSizes());
        // Use smallest preview dimension with same aspect ratio as sensor that is >= than all
        // of the configured output dimensions.  If none exists, fall back to using the largest
        // supported preview size.
        long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
        Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes);
        for (Size s : supportedPreviewSizes) {
            // NOTE(review): currArea/bestArea are computed with int multiplication
            // (unlike largestOutputArea above) and could overflow for extremely
            // large dimensions — confirm whether preview sizes can ever exceed
            // ~46k x 46k before relying on these values.
            long currArea = s.getWidth() * s.getHeight();
            long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
            if (checkAspectRatiosMatch(chosenJpegDimen, s) && (currArea < bestArea && currArea >= largestOutputArea)) {
                bestPreviewDimen = s;
            }
        }
        mIntermediateBufferSize = bestPreviewDimen;
        mParams.setPreviewSize(mIntermediateBufferSize.getWidth(), mIntermediateBufferSize.getHeight());
        if (DEBUG) {
            Log.d(TAG, "Intermediate buffer selected with dimens: " + bestPreviewDimen.toString());
        }
    } else {
        mIntermediateBufferSize = null;
        if (DEBUG) {
            Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured");
        }
    }
    if (smallestSupportedJpegSize != null) {
        /*
             * Set takePicture size to the smallest supported JPEG size large enough
             * to scale/crop out of for the bounding rectangle of the configured JPEG sizes.
             */
        Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize);
        mParams.setPictureSize(smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight());
    }
    // TODO: Detect and optimize single-output paths here to skip stream teeing.
    if (mGLThreadManager == null) {
        mGLThreadManager = new GLThreadManager(mCameraId, facing, mDeviceState);
        mGLThreadManager.start();
    }
    mGLThreadManager.waitUntilStarted();
    // Re-pair preview surfaces with their requested sizes (lists are index-aligned).
    List<Pair<Surface, Size>> previews = new ArrayList<>();
    Iterator<Size> previewSizeIter = previewOutputSizes.iterator();
    for (Surface p : mPreviewOutputs) {
        previews.add(new Pair<>(p, previewSizeIter.next()));
    }
    mGLThreadManager.setConfigurationAndWait(previews, mCaptureCollector);
    mGLThreadManager.allowNewFrames();
    mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
    if (mPreviewTexture != null) {
        mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
    }
    try {
        mCamera.setParameters(mParams);
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception while configuring: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
    }
}
Also used : Size(android.util.Size) ArrayList(java.util.ArrayList) IOException(java.io.IOException) Surface(android.view.Surface) Pair(android.util.Pair)

Example 49 with Size

use of android.util.Size in project android_frameworks_base by ResurrectionRemix.

In the class ParameterUtils, the method convertSizeListToArray:

/**
     * Convert a camera API1 list of sizes into an array of sizes
     */
/**
 * Convert a camera API1 list of sizes into an array of {@link Size} objects.
 *
 * @param sizeList the non-null list of {@link Camera.Size} to convert.
 * @return a new array containing the converted sizes, in the same order.
 */
public static Size[] convertSizeListToArray(List<Camera.Size> sizeList) {
    checkNotNull(sizeList, "sizeList must not be null");
    final Size[] converted = new Size[sizeList.size()];
    int index = 0;
    for (Camera.Size apiSize : sizeList) {
        converted[index] = new Size(apiSize.width, apiSize.height);
        index++;
    }
    return converted;
}
Also used : Size(android.util.Size) Camera(android.hardware.Camera) Point(android.graphics.Point)

Example 50 with Size

use of android.util.Size in project android_frameworks_base by ResurrectionRemix.

In the class ParameterUtils, the method convertSizeList:

/**
     * Convert a camera API1 list of sizes into a util list of sizes
     */
/**
 * Convert a camera API1 list of sizes into a util list of {@link Size} objects.
 *
 * @param sizeList the non-null list of {@link Camera.Size} to convert.
 * @return a new mutable list containing the converted sizes, in the same order.
 */
public static List<Size> convertSizeList(List<Camera.Size> sizeList) {
    checkNotNull(sizeList, "sizeList must not be null");
    final int count = sizeList.size();
    final List<Size> converted = new ArrayList<>(count);
    for (int i = 0; i < count; i++) {
        Camera.Size apiSize = sizeList.get(i);
        converted.add(new Size(apiSize.width, apiSize.height));
    }
    return converted;
}
Also used : Size(android.util.Size) ArrayList(java.util.ArrayList) Camera(android.hardware.Camera)

Aggregations

Size (android.util.Size)320 ArrayList (java.util.ArrayList)66 StreamConfigurationMap (android.hardware.camera2.params.StreamConfigurationMap)41 Rect (android.graphics.Rect)40 CaptureRequest (android.hardware.camera2.CaptureRequest)40 Range (android.util.Range)40 Surface (android.view.Surface)35 SimpleCaptureCallback (com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback)35 Camera (android.hardware.Camera)30 CameraCharacteristics (android.hardware.camera2.CameraCharacteristics)26 Point (android.graphics.Point)22 Image (android.media.Image)21 SimpleImageReaderListener (com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageReaderListener)20 MeteringRectangle (android.hardware.camera2.params.MeteringRectangle)15 CamcorderProfile (android.media.CamcorderProfile)15 Pair (android.util.Pair)15 CameraTestUtils.getDataFromImage (com.android.mediaframeworktest.helpers.CameraTestUtils.getDataFromImage)15 SurfaceTexture (android.graphics.SurfaceTexture)10 Parameters (android.hardware.Camera.Parameters)10 CaptureResult (android.hardware.camera2.CaptureResult)10