Example 71 with Surface

Use of android.view.Surface in the project Resurrection_packages_apps_Settings by ResurrectionRemix.

From the class FingerprintLocationAnimationVideoView, method onFinishInflate:

@Override
protected void onFinishInflate() {
    super.onFinishInflate();
    setSurfaceTextureListener(new SurfaceTextureListener() {

        private SurfaceTexture mTextureToDestroy = null;

        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
            setVisibility(View.INVISIBLE);
            Uri videoUri = getFingerprintLocationAnimation();
            if (mMediaPlayer != null) {
                mMediaPlayer.release();
            }
            if (mTextureToDestroy != null) {
                mTextureToDestroy.release();
                mTextureToDestroy = null;
            }
            mMediaPlayer = MediaPlayer.create(mContext, videoUri);
            mMediaPlayer.setSurface(new Surface(surfaceTexture));
            mMediaPlayer.setOnPreparedListener(new OnPreparedListener() {

                @Override
                public void onPrepared(MediaPlayer mediaPlayer) {
                    mediaPlayer.setLooping(true);
                }
            });
            mMediaPlayer.setOnInfoListener(new OnInfoListener() {

                @Override
                public boolean onInfo(MediaPlayer mediaPlayer, int what, int extra) {
                    if (what == MediaPlayer.MEDIA_INFO_VIDEO_RENDERING_START) {
                        // The view was kept hidden until the video started rendering; show it now
                        setVisibility(View.VISIBLE);
                    }
                    return false;
                }
            });
            mAspect = (float) mMediaPlayer.getVideoHeight() / mMediaPlayer.getVideoWidth();
            requestLayout();
            startAnimation();
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
            mTextureToDestroy = surfaceTexture;
            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
        }
    });
}
Also used: OnInfoListener (android.media.MediaPlayer.OnInfoListener), SurfaceTexture (android.graphics.SurfaceTexture), OnPreparedListener (android.media.MediaPlayer.OnPreparedListener), Uri (android.net.Uri), Surface (android.view.Surface), MediaPlayer (android.media.MediaPlayer)
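
Example 71 assumes the enclosing class extends TextureView (it calls setSurfaceTextureListener() directly) and that getFingerprintLocationAnimation() returns the video Uri. As a hedged, self-contained sketch of the same pattern, here is a minimal looping-video view; the class name and the way the Uri is supplied are hypothetical, and MediaPlayer.create() is used as in the example, so no explicit prepare() call is needed:

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.net.Uri;
import android.view.Surface;
import android.view.TextureView;

public class LoopingVideoView extends TextureView implements TextureView.SurfaceTextureListener {

    private MediaPlayer mMediaPlayer;
    private final Uri mVideoUri; // hypothetical: supplied by the caller

    public LoopingVideoView(Context context, Uri videoUri) {
        super(context);
        mVideoUri = videoUri;
        setSurfaceTextureListener(this);
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        // Wrap the TextureView's SurfaceTexture in a Surface and hand it to MediaPlayer.
        mMediaPlayer = MediaPlayer.create(getContext(), mVideoUri);
        mMediaPlayer.setSurface(new Surface(surfaceTexture));
        mMediaPlayer.setLooping(true);
        mMediaPlayer.start();
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        // Release the player; returning true lets the TextureView release the SurfaceTexture itself.
        if (mMediaPlayer != null) {
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
    }
}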

Example 72 with Surface

Use of android.view.Surface in the project android_frameworks_base by ResurrectionRemix.

From the class RequestThreadManager, method createDummySurface:

/**
 * Fake preview for jpeg captures when there is no active preview
 */
private void createDummySurface() {
    if (mDummyTexture == null || mDummySurface == null) {
        mDummyTexture = new SurfaceTexture(/*ignored*/ 0);
        // TODO: use smallest default sizes
        mDummyTexture.setDefaultBufferSize(640, 480);
        mDummySurface = new Surface(mDummyTexture);
    }
}
Also used: SurfaceTexture (android.graphics.SurfaceTexture), Surface (android.view.Surface)
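
The dummy Surface only ever acts as a placeholder producer target, so the matching teardown is just a pair of release() calls. A hedged sketch of such a counterpart (this helper is not shown in the project code; the field names follow the snippet above):

private void releaseDummySurface() {
    // Hypothetical counterpart to createDummySurface(): release the Surface first,
    // then the SurfaceTexture backing it, and clear both so they can be recreated later.
    if (mDummySurface != null) {
        mDummySurface.release();
        mDummySurface = null;
    }
    if (mDummyTexture != null) {
        mDummyTexture.release();
        mDummyTexture = null;
    }
}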

Example 73 with Surface

Use of android.view.Surface in the project android_frameworks_base by ResurrectionRemix.

From the class RequestThreadManager, method calculatePictureSize:

/**
 * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
 * than all of the configured {@code JPEG} outputs (by both width and height).
 *
 * <p>If multiple supported JPEG sizes are larger, select the smallest of them which
 * still satisfies the above constraint.</p>
 *
 * <p>As a result, the returned size is guaranteed to be usable without needing
 * to upscale any of the outputs. If only one {@code JPEG} surface is used,
 * then no scaling/cropping is necessary between the taken picture and
 * the {@code JPEG} output surface.</p>
 *
 * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
 * @param callbackSizes the configured output sizes, one per entry in {@code callbackOutputs}
 * @param params api1 parameters (used for reading only)
 *
 * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
 *         {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
 *         surfaces.
 */
private Size calculatePictureSize(List<Surface> callbackOutputs, List<Size> callbackSizes, Camera.Parameters params) {
    /*
     * Find the largest JPEG size (if any), from the configured outputs:
     * - the api1 picture size should be set to the smallest legal size that's at least as large
     *   as the largest configured JPEG size
     */
    if (callbackOutputs.size() != callbackSizes.size()) {
        throw new IllegalStateException("Input collections must be same length");
    }
    List<Size> configuredJpegSizes = new ArrayList<>();
    Iterator<Size> sizeIterator = callbackSizes.iterator();
    for (Surface callbackSurface : callbackOutputs) {
        Size jpegSize = sizeIterator.next();
        if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) {
            // Ignore non-JPEG callback formats
            continue;
        }
        configuredJpegSizes.add(jpegSize);
    }
    if (!configuredJpegSizes.isEmpty()) {
        /*
         * Find the largest configured JPEG width, and height, independently
         * of the rest.
         *
         * The rest of the JPEG streams can be cropped out of this smallest bounding
         * rectangle.
         */
        int maxConfiguredJpegWidth = -1;
        int maxConfiguredJpegHeight = -1;
        for (Size jpegSize : configuredJpegSizes) {
            maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ? jpegSize.getWidth() : maxConfiguredJpegWidth;
            maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ? jpegSize.getHeight() : maxConfiguredJpegHeight;
        }
        Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);
        List<Size> supportedJpegSizes = ParameterUtils.convertSizeList(params.getSupportedPictureSizes());
        /*
         * Find the smallest supported JPEG size that can fit the smallest bounding
         * rectangle for the configured JPEG sizes.
         */
        List<Size> candidateSupportedJpegSizes = new ArrayList<>();
        for (Size supportedJpegSize : supportedJpegSizes) {
            if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth && supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
                candidateSupportedJpegSizes.add(supportedJpegSize);
            }
        }
        if (candidateSupportedJpegSizes.isEmpty()) {
            throw new AssertionError("Could not find any supported JPEG sizes large enough to fit " + smallestBoundJpegSize);
        }
        Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes, new SizeAreaComparator());
        if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
            Log.w(TAG, String.format("configureOutputs - Will need to crop picture %s into " + "smallest bound size %s", smallestSupportedJpegSize, smallestBoundJpegSize));
        }
        return smallestSupportedJpegSize;
    }
    return null;
}
Also used: Size (android.util.Size), ArrayList (java.util.ArrayList), SizeAreaComparator (android.hardware.camera2.utils.SizeAreaComparator), Surface (android.view.Surface)
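
The selection rule from the javadoc (take the bounding box of all configured JPEG sizes, then pick the smallest supported size that covers it) can be distilled into a standalone helper. The sketch below is an illustration only, with made-up class and method names, not the framework code:

import android.util.Size;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

final class JpegSizeSelector {

    /**
     * Returns the smallest supported size whose width and height both cover every
     * requested size, or null if no supported size is large enough.
     */
    static Size smallestCoveringSize(List<Size> requested, List<Size> supported) {
        // Bounding box of all requested sizes.
        int maxWidth = 0;
        int maxHeight = 0;
        for (Size s : requested) {
            maxWidth = Math.max(maxWidth, s.getWidth());
            maxHeight = Math.max(maxHeight, s.getHeight());
        }
        // Keep only supported sizes that cover the bounding box.
        List<Size> candidates = new ArrayList<>();
        for (Size s : supported) {
            if (s.getWidth() >= maxWidth && s.getHeight() >= maxHeight) {
                candidates.add(s);
            }
        }
        // Smallest candidate by area, mirroring the SizeAreaComparator used in the example above.
        return candidates.isEmpty()
                ? null
                : Collections.min(candidates,
                        Comparator.comparingLong((Size s) -> (long) s.getWidth() * s.getHeight()));
    }
}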

Example 74 with Surface

Use of android.view.Surface in the project android_frameworks_base by ResurrectionRemix.

From the class SurfaceTextureRenderer, method configureSurfaces:

/**
 * Set a collection of output {@link Surface}s that can be drawn to.
 *
 * @param surfaces a {@link Collection} of surfaces.
 */
public void configureSurfaces(Collection<Pair<Surface, Size>> surfaces) {
    releaseEGLContext();
    if (surfaces == null || surfaces.size() == 0) {
        Log.w(TAG, "No output surfaces configured for GL drawing.");
        return;
    }
    for (Pair<Surface, Size> p : surfaces) {
        Surface s = p.first;
        Size surfaceSize = p.second;
        // If pixel conversions aren't handled by egl, use a pbuffer
        try {
            EGLSurfaceHolder holder = new EGLSurfaceHolder();
            holder.surface = s;
            holder.width = surfaceSize.getWidth();
            holder.height = surfaceSize.getHeight();
            if (LegacyCameraDevice.needsConversion(s)) {
                mConversionSurfaces.add(holder);
                // LegacyCameraDevice is the producer of surfaces if it's not handled by EGL,
                // so LegacyCameraDevice needs to connect to the surfaces.
                LegacyCameraDevice.connectSurface(s);
            } else {
                mSurfaces.add(holder);
            }
        } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
            Log.w(TAG, "Surface abandoned, skipping configuration... ", e);
        }
    }
    // Set up egl display
    configureEGLContext();
    // Set up regular egl surfaces if needed
    if (mSurfaces.size() > 0) {
        configureEGLOutputSurfaces(mSurfaces);
    }
    // Set up pbuffer surface if needed
    if (mConversionSurfaces.size() > 0) {
        configureEGLPbufferSurfaces(mConversionSurfaces);
    }
    makeCurrent((mSurfaces.size() > 0) ? mSurfaces.get(0).eglSurface : mConversionSurfaces.get(0).eglSurface);
    initializeGLState();
    mSurfaceTexture = new SurfaceTexture(getTextureId());
    // Set up performance tracking if enabled
    if (SystemProperties.getBoolean(LEGACY_PERF_PROPERTY, false)) {
        setupGlTiming();
    }
}
Also used: SurfaceTexture (android.graphics.SurfaceTexture), Size (android.util.Size), EGLSurface (android.opengl.EGLSurface), Surface (android.view.Surface)
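
configureEGLOutputSurfaces() is not shown in this snippet. In EGL14 terms, turning each android.view.Surface into a window EGLSurface usually amounts to a single eglCreateWindowSurface call; the sketch below is a hedged illustration of that step (not the framework implementation), assuming the display and config were already set up by configureEGLContext():

import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.view.Surface;

final class EglWindowSurfaces {

    /** Wraps a producer Surface into an EGL window surface on the given display/config. */
    static EGLSurface createWindowSurface(EGLDisplay display, EGLConfig config, Surface surface) {
        int[] surfaceAttribs = { EGL14.EGL_NONE };
        EGLSurface eglSurface = EGL14.eglCreateWindowSurface(
                display, config, surface, surfaceAttribs, /*offset*/ 0);
        if (eglSurface == null || eglSurface == EGL14.EGL_NO_SURFACE) {
            throw new IllegalStateException(
                    "eglCreateWindowSurface failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
        }
        return eglSurface;
    }
}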

Example 75 with Surface

Use of android.view.Surface in the project android_frameworks_base by ResurrectionRemix.

From the class RequestThreadManager, method configureOutputs:

private void configureOutputs(Collection<Pair<Surface, Size>> outputs) {
    if (DEBUG) {
        String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces");
        Log.d(TAG, "configureOutputs with " + outputsStr);
    }
    try {
        stopPreview();
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception in configure call: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        return;
    }
    /*
     * Try to release the previous preview's surface texture earlier if we end up
     * using a different one; this also reduces the likelihood of getting into a deadlock
     * when disconnecting from the old previous texture at a later time.
     */
    try {
        mCamera.setPreviewTexture(/*surfaceTexture*/ null);
    } catch (IOException e) {
        Log.w(TAG, "Failed to clear prior SurfaceTexture, may cause GL deadlock: ", e);
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception in configure call: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        return;
    }
    if (mGLThreadManager != null) {
        mGLThreadManager.waitUntilStarted();
        mGLThreadManager.ignoreNewFrames();
        mGLThreadManager.waitUntilIdle();
    }
    resetJpegSurfaceFormats(mCallbackOutputs);
    for (Surface s : mCallbackOutputs) {
        try {
            LegacyCameraDevice.disconnectSurface(s);
        } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
            Log.w(TAG, "Surface abandoned, skipping...", e);
        }
    }
    mPreviewOutputs.clear();
    mCallbackOutputs.clear();
    mJpegSurfaceIds.clear();
    mPreviewTexture = null;
    List<Size> previewOutputSizes = new ArrayList<>();
    List<Size> callbackOutputSizes = new ArrayList<>();
    int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING);
    int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    if (outputs != null) {
        for (Pair<Surface, Size> outPair : outputs) {
            Surface s = outPair.first;
            Size outSize = outPair.second;
            try {
                int format = LegacyCameraDevice.detectSurfaceType(s);
                LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation);
                switch(format) {
                    case CameraMetadataNative.NATIVE_JPEG_FORMAT:
                        if (USE_BLOB_FORMAT_OVERRIDE) {
                            // Override to RGBA_8888 format.
                            LegacyCameraDevice.setSurfaceFormat(s, LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);
                        }
                        mJpegSurfaceIds.add(LegacyCameraDevice.getSurfaceId(s));
                        mCallbackOutputs.add(s);
                        callbackOutputSizes.add(outSize);
                        // LegacyCameraDevice is the producer of JPEG output surfaces
                        // so LegacyCameraDevice needs to connect to the surfaces.
                        LegacyCameraDevice.connectSurface(s);
                        break;
                    default:
                        LegacyCameraDevice.setScalingMode(s, LegacyCameraDevice.NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
                        mPreviewOutputs.add(s);
                        previewOutputSizes.add(outSize);
                        break;
                }
            } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.w(TAG, "Surface abandoned, skipping...", e);
            }
        }
    }
    try {
        mParams = mCamera.getParameters();
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        return;
    }
    List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange();
    int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
    if (DEBUG) {
        Log.d(TAG, "doPreviewCapture - Selected range [" + bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," + bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
    }
    mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
    Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs, callbackOutputSizes, mParams);
    if (previewOutputSizes.size() > 0) {
        Size largestOutput = SizeAreaComparator.findLargestByArea(previewOutputSizes);
        // Find largest jpeg dimension - assume to have the same aspect ratio as sensor.
        Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams);
        Size chosenJpegDimen = (smallestSupportedJpegSize != null) ? smallestSupportedJpegSize : largestJpegDimen;
        List<Size> supportedPreviewSizes = ParameterUtils.convertSizeList(mParams.getSupportedPreviewSizes());
        // Use smallest preview dimension with same aspect ratio as sensor that is >= than all
        // of the configured output dimensions.  If none exists, fall back to using the largest
        // supported preview size.
        long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
        Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes);
        for (Size s : supportedPreviewSizes) {
            long currArea = s.getWidth() * s.getHeight();
            long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
            if (checkAspectRatiosMatch(chosenJpegDimen, s) && (currArea < bestArea && currArea >= largestOutputArea)) {
                bestPreviewDimen = s;
            }
        }
        mIntermediateBufferSize = bestPreviewDimen;
        mParams.setPreviewSize(mIntermediateBufferSize.getWidth(), mIntermediateBufferSize.getHeight());
        if (DEBUG) {
            Log.d(TAG, "Intermediate buffer selected with dimens: " + bestPreviewDimen.toString());
        }
    } else {
        mIntermediateBufferSize = null;
        if (DEBUG) {
            Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured");
        }
    }
    if (smallestSupportedJpegSize != null) {
        /*
         * Set takePicture size to the smallest supported JPEG size large enough
         * to scale/crop out of for the bounding rectangle of the configured JPEG sizes.
         */
        Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize);
        mParams.setPictureSize(smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight());
    }
    // TODO: Detect and optimize single-output paths here to skip stream teeing.
    if (mGLThreadManager == null) {
        mGLThreadManager = new GLThreadManager(mCameraId, facing, mDeviceState);
        mGLThreadManager.start();
    }
    mGLThreadManager.waitUntilStarted();
    List<Pair<Surface, Size>> previews = new ArrayList<>();
    Iterator<Size> previewSizeIter = previewOutputSizes.iterator();
    for (Surface p : mPreviewOutputs) {
        previews.add(new Pair<>(p, previewSizeIter.next()));
    }
    mGLThreadManager.setConfigurationAndWait(previews, mCaptureCollector);
    mGLThreadManager.allowNewFrames();
    mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
    if (mPreviewTexture != null) {
        mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
    }
    try {
        mCamera.setParameters(mParams);
    } catch (RuntimeException e) {
        Log.e(TAG, "Received device exception while configuring: ", e);
        mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
    }
}
Also used: Size (android.util.Size), ArrayList (java.util.ArrayList), IOException (java.io.IOException), Surface (android.view.Surface), Pair (android.util.Pair)
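
checkAspectRatiosMatch() is referenced in the preview-size loop above but not shown. A minimal version of such a check, with an assumed tolerance value (the actual framework constant may differ), could look like this:

import android.util.Size;

final class AspectRatioUtil {

    // Assumed tolerance for treating two aspect ratios as equal; illustrative only.
    private static final float ASPECT_RATIO_TOLERANCE = 0.01f;

    static boolean aspectRatiosMatch(Size a, Size b) {
        float ratioA = (float) a.getWidth() / a.getHeight();
        float ratioB = (float) b.getWidth() / b.getHeight();
        return Math.abs(ratioA - ratioB) < ASPECT_RATIO_TOLERANCE;
    }
}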

Aggregations

Surface (android.view.Surface): 300 usages
ArrayList (java.util.ArrayList): 100 usages
SurfaceTexture (android.graphics.SurfaceTexture): 49 usages
BlockingSessionCallback (com.android.ex.camera2.blocking.BlockingSessionCallback): 44 usages
Size (android.util.Size): 35 usages
CaptureRequest (android.hardware.camera2.CaptureRequest): 34 usages
OutputConfiguration (android.hardware.camera2.params.OutputConfiguration): 30 usages
IOException (java.io.IOException): 27 usages
EGLSurface (android.opengl.EGLSurface): 19 usages
Paint (android.graphics.Paint): 16 usages
StreamConfigurationMap (android.hardware.camera2.params.StreamConfigurationMap): 15 usages
SurfaceHolder (android.view.SurfaceHolder): 15 usages
Rect (android.graphics.Rect): 13 usages
SurfaceView (android.view.SurfaceView): 13 usages
Canvas (android.graphics.Canvas): 12 usages
CameraAccessException (android.hardware.camera2.CameraAccessException): 12 usages
WifiDisplay (android.hardware.display.WifiDisplay): 12 usages
MediaRecorder (android.media.MediaRecorder): 11 usages
SurfaceControl (android.view.SurfaceControl): 11 usages
Image (android.media.Image): 10 usages