Example 26 with CaptureRequest

Use of android.hardware.camera2.CaptureRequest in project android_frameworks_base by DirtyUnicorns.

The class LegacyFaceDetectMapper, method mapResultFaces.

/**
 * Update the {@code result} camera metadata map with the new value for the
 * {@code statistics.faces} and {@code statistics.faceDetectMode}.
 *
 * <p>Face detect callbacks are processed in the background, and each call to
 * {@link #mapResultFaces} will have the latest faces as reflected by the camera1 callbacks.</p>
 *
 * <p>If the scene mode was set to {@code FACE_PRIORITY} but face detection is disabled,
 * the camera will still run face detection in the background, but no faces will be reported
 * in the capture result.</p>
 *
 * @param result a non-{@code null} result
 * @param legacyRequest a non-{@code null} request (read-only)
 */
public void mapResultFaces(CameraMetadataNative result, LegacyRequest legacyRequest) {
    checkNotNull(result, "result must not be null");
    checkNotNull(legacyRequest, "legacyRequest must not be null");
    Camera.Face[] faces, previousFaces;
    int fdMode;
    boolean fdScenePriority;
    synchronized (mLock) {
        fdMode = mFaceDetectReporting ? STATISTICS_FACE_DETECT_MODE_SIMPLE : STATISTICS_FACE_DETECT_MODE_OFF;
        if (mFaceDetectReporting) {
            faces = mFaces;
        } else {
            faces = null;
        }
        fdScenePriority = mFaceDetectScenePriority;
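        // Remember the faces reported with the previous result so the DEBUG log further
        // down only fires when the face list actually changes.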
        previousFaces = mFacesPrev;
        mFacesPrev = faces;
    }
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArray, request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
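    // zoomData describes how the camera1 preview/crop rectangle maps onto the camera2 active
    // array, so the legacy face rectangles can be converted into active array coordinates below.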
    List<Face> convertedFaces = new ArrayList<>();
    if (faces != null) {
        for (Camera.Face face : faces) {
            if (face != null) {
                convertedFaces.add(ParameterUtils.convertFaceFromLegacy(face, activeArray, zoomData));
            } else {
                Log.w(TAG, "mapResultFaces - read NULL face from camera1 device");
            }
        }
    }
    if (DEBUG && previousFaces != faces) {
        // Log only in verbose mode, and only if the faces changed
        Log.v(TAG, "mapResultFaces - changed to " + ListUtils.listToString(convertedFaces));
    }
    result.set(CaptureResult.STATISTICS_FACES, convertedFaces.toArray(new Face[0]));
    result.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, fdMode);
    // Override scene mode with FACE_PRIORITY if the request was using FACE_PRIORITY
    if (fdScenePriority) {
        result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_FACE_PRIORITY);
    }
}
Also used : Rect(android.graphics.Rect) Size(android.util.Size) ArrayList(java.util.ArrayList) CameraCharacteristics(android.hardware.camera2.CameraCharacteristics) ZoomData(android.hardware.camera2.legacy.ParameterUtils.ZoomData) CaptureRequest(android.hardware.camera2.CaptureRequest) Camera(android.hardware.Camera) Face(android.hardware.camera2.params.Face)
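For context, the faces and mode written above surface on the public API as CaptureResult.STATISTICS_FACES and CaptureResult.STATISTICS_FACE_DETECT_MODE. A minimal app-side sketch of consuming them follows; the callback class name is hypothetical and is not part of the mapper above.

import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.Face;
import android.util.Log;

class FaceResultCallback extends CameraCaptureSession.CaptureCallback {
    private static final String TAG = "FaceResultCallback";

    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
            TotalCaptureResult result) {
        // These keys are populated by LegacyFaceDetectMapper.mapResultFaces on legacy devices.
        Integer mode = result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
        Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
        if (mode != null && mode != CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF && faces != null) {
            for (Face face : faces) {
                // Face bounds are reported in the sensor active array coordinate system.
                Log.d(TAG, "Face bounds: " + face.getBounds() + ", score: " + face.getScore());
            }
        }
    }
}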

Example 27 with CaptureRequest

Use of android.hardware.camera2.CaptureRequest in project android_frameworks_base by DirtyUnicorns.

The class LegacyResultMapper, method convertResultMetadata.

/**
 * Generate capture result metadata from the legacy camera request.
 *
 * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
 * @return a {@link CameraMetadataNative} object containing result metadata.
 */
private static CameraMetadataNative convertResultMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    CameraMetadataNative result = new CameraMetadataNative();
    Rect activeArraySize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArraySize, request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
    /*
     * colorCorrection
     */
    // colorCorrection.aberrationMode
    {
        result.set(COLOR_CORRECTION_ABERRATION_MODE, request.get(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE));
    }
    /*
     * control
     */
    /*
     * control.ae*
     */
    mapAe(result, characteristics, request, activeArraySize, zoomData, /*out*/params);
    /*
     * control.af*
     */
    mapAf(result, activeArraySize, zoomData, /*out*/params);
    /*
     * control.awb*
     */
    mapAwb(result, /*out*/params);
    /*
     * control.captureIntent
     */
    {
        int captureIntent = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_CAPTURE_INTENT, /*defaultValue*/CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
        captureIntent = LegacyRequestMapper.filterSupportedCaptureIntent(captureIntent);
        result.set(CONTROL_CAPTURE_INTENT, captureIntent);
    }
    /*
     * control.mode
     */
    {
        int controlMode = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
        if (controlMode == CaptureResult.CONTROL_MODE_USE_SCENE_MODE) {
            result.set(CONTROL_MODE, CONTROL_MODE_USE_SCENE_MODE);
        } else {
            result.set(CONTROL_MODE, CONTROL_MODE_AUTO);
        }
    }
    /*
     * control.sceneMode
     */
    {
        String legacySceneMode = params.getSceneMode();
        int mode = LegacyMetadataMapper.convertSceneModeFromLegacy(legacySceneMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_SCENE_MODE, mode);
            // In case of SCENE_MODE == FACE_PRIORITY, LegacyFaceDetectMapper will override
            // the result to say SCENE_MODE == FACE_PRIORITY.
        } else {
            Log.w(TAG, "Unknown scene mode " + legacySceneMode + " returned by camera HAL, setting to disabled.");
            result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
        }
    }
    /*
     * control.effectMode
     */
    {
        String legacyEffectMode = params.getColorEffect();
        int mode = LegacyMetadataMapper.convertEffectModeFromLegacy(legacyEffectMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_EFFECT_MODE, mode);
        } else {
            Log.w(TAG, "Unknown effect mode " + legacyEffectMode + " returned by camera HAL, setting to off.");
            result.set(CaptureResult.CONTROL_EFFECT_MODE, CONTROL_EFFECT_MODE_OFF);
        }
    }
    // control.videoStabilizationMode
    {
        int stabMode = (params.isVideoStabilizationSupported() && params.getVideoStabilization()) ? CONTROL_VIDEO_STABILIZATION_MODE_ON : CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode);
    }
    /*
     * flash
     */
    {
        // flash.mode, flash.state mapped in mapAeAndFlashMode
    }
    /*
     * lens
     */
    // lens.focusDistance
    {
        if (Parameters.FOCUS_MODE_INFINITY.equals(params.getFocusMode())) {
            result.set(CaptureResult.LENS_FOCUS_DISTANCE, 0.0f);
        }
    }
    // lens.focalLength
    result.set(CaptureResult.LENS_FOCAL_LENGTH, params.getFocalLength());
    /*
     * request
     */
    // request.pipelineDepth
    result.set(REQUEST_PIPELINE_DEPTH, characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH));
    /*
     * scaler
     */
    mapScaler(result, zoomData, /*out*/params);
    /*
     * sensor
     */
    // sensor.timestamp varies every frame; mapping is done in #cachedConvertResultMetadata
    {
        // Unconditionally no test patterns
        result.set(SENSOR_TEST_PATTERN_MODE, SENSOR_TEST_PATTERN_MODE_OFF);
    }
    /*
     * jpeg
     */
    // jpeg.gpsLocation
    result.set(JPEG_GPS_LOCATION, request.get(CaptureRequest.JPEG_GPS_LOCATION));
    // jpeg.orientation
    result.set(JPEG_ORIENTATION, request.get(CaptureRequest.JPEG_ORIENTATION));
    // jpeg.quality
    result.set(JPEG_QUALITY, (byte) params.getJpegQuality());
    // jpeg.thumbnailQuality
    result.set(JPEG_THUMBNAIL_QUALITY, (byte) params.getJpegThumbnailQuality());
    // jpeg.thumbnailSize
    Camera.Size s = params.getJpegThumbnailSize();
    if (s != null) {
        result.set(JPEG_THUMBNAIL_SIZE, ParameterUtils.convertSize(s));
    } else {
        Log.w(TAG, "Null thumbnail size received from parameters.");
    }
    /*
     * noiseReduction.*
     */
    // noiseReduction.mode
    result.set(NOISE_REDUCTION_MODE, request.get(CaptureRequest.NOISE_REDUCTION_MODE));
    return result;
}
Also used : Parameters(android.hardware.Camera.Parameters) Rect(android.graphics.Rect) Size(android.util.Size) CameraMetadataNative(android.hardware.camera2.impl.CameraMetadataNative) CameraCharacteristics(android.hardware.camera2.CameraCharacteristics) ZoomData(android.hardware.camera2.legacy.ParameterUtils.ZoomData) CaptureRequest(android.hardware.camera2.CaptureRequest) Camera(android.hardware.Camera)
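The ParamsUtils.getOrDefault(...) calls above are framework-internal; the pattern is simply a null-safe read of a request key with a fallback. A minimal stand-in sketch, assuming only the public CaptureRequest API (the helper class and method names here are hypothetical, not the hidden implementation):

import android.hardware.camera2.CaptureRequest;

final class RequestDefaults {
    // Hypothetical equivalent of the hidden ParamsUtils.getOrDefault(...) used above:
    // return the request's value for the key, or the supplied default if the key is unset.
    static <T> T getOrDefault(CaptureRequest request, CaptureRequest.Key<T> key, T defaultValue) {
        T value = request.get(key);
        return (value != null) ? value : defaultValue;
    }

    private RequestDefaults() {}
}

For example, getOrDefault(request, CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW) mirrors the control.captureIntent mapping shown above.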

Example 28 with CaptureRequest

Use of android.hardware.camera2.CaptureRequest in project android_frameworks_base by DirtyUnicorns.

The class Camera2StillCaptureTest, method fullRawCaptureTestByCamera.

private void fullRawCaptureTestByCamera() throws Exception {
    Size maxPreviewSz = mOrderedPreviewSizes.get(0);
    Size maxStillSz = mOrderedStillSizes.get(0);
    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    SimpleImageReaderListener jpegListener = new SimpleImageReaderListener();
    SimpleImageReaderListener rawListener = new SimpleImageReaderListener();
    Size size = mStaticInfo.getRawDimensChecked();
    if (VERBOSE) {
        Log.v(TAG, "Testing multi capture with size " + size.toString() + ", preview size " + maxPreviewSz);
    }
    // Prepare raw capture and start preview.
    CaptureRequest.Builder previewBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    CaptureRequest.Builder multiBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    ImageReader rawReader = null;
    ImageReader jpegReader = null;
    try {
        // Create ImageReaders.
        rawReader = makeImageReader(size, ImageFormat.RAW_SENSOR, MAX_READER_IMAGES, rawListener, mHandler);
        jpegReader = makeImageReader(maxStillSz, ImageFormat.JPEG, MAX_READER_IMAGES, jpegListener, mHandler);
        updatePreviewSurface(maxPreviewSz);
        // Configure output streams with preview and jpeg streams.
        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(rawReader.getSurface());
        outputSurfaces.add(jpegReader.getSurface());
        outputSurfaces.add(mPreviewSurface);
        mSessionListener = new BlockingSessionCallback();
        mSession = configureCameraSession(mCamera, outputSurfaces, mSessionListener, mHandler);
        // Configure the requests.
        previewBuilder.addTarget(mPreviewSurface);
        multiBuilder.addTarget(mPreviewSurface);
        multiBuilder.addTarget(rawReader.getSurface());
        multiBuilder.addTarget(jpegReader.getSurface());
        // Start preview.
        mSession.setRepeatingRequest(previewBuilder.build(), null, mHandler);
        // Poor man's 3A, wait 3 seconds for AE/AF (if any) to settle.
        // TODO: Do proper 3A trigger and lock (see testTakePictureTest).
        Thread.sleep(3000);
        multiBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
        CaptureRequest multiRequest = multiBuilder.build();
        mSession.capture(multiRequest, resultListener, mHandler);
        CaptureResult result = resultListener.getCaptureResultForRequest(multiRequest, NUM_RESULTS_WAIT_TIMEOUT);
        Image jpegImage = jpegListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
        basicValidateJpegImage(jpegImage, maxStillSz);
        Image rawImage = rawListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
        validateRaw16Image(rawImage, size);
        verifyRawCaptureResult(multiRequest, result);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        try (DngCreator dngCreator = new DngCreator(mStaticInfo.getCharacteristics(), result)) {
            dngCreator.writeImage(outputStream, rawImage);
        }
        if (DEBUG) {
            byte[] rawBuffer = outputStream.toByteArray();
            String rawFileName = DEBUG_FILE_NAME_BASE + "/raw16_" + TAG + size.toString() + "_cam_" + mCamera.getId() + ".dng";
            Log.d(TAG, "Dump raw file into " + rawFileName);
            dumpFile(rawFileName, rawBuffer);
            byte[] jpegBuffer = getDataFromImage(jpegImage);
            String jpegFileName = DEBUG_FILE_NAME_BASE + "/jpeg_" + TAG + size.toString() + "_cam_" + mCamera.getId() + ".jpg";
            Log.d(TAG, "Dump jpeg file into " + rawFileName);
            dumpFile(jpegFileName, jpegBuffer);
        }
        stopPreview();
    } finally {
        CameraTestUtils.closeImageReader(rawReader);
        CameraTestUtils.closeImageReader(jpegReader);
        rawReader = null;
        jpegReader = null;
    }
}
Also used : BlockingSessionCallback(com.android.ex.camera2.blocking.BlockingSessionCallback) Size(android.util.Size) DngCreator(android.hardware.camera2.DngCreator) CaptureResult(android.hardware.camera2.CaptureResult) ArrayList(java.util.ArrayList) SimpleImageReaderListener(com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageReaderListener) ByteArrayOutputStream(java.io.ByteArrayOutputStream) CameraTestUtils.basicValidateJpegImage(com.android.mediaframeworktest.helpers.CameraTestUtils.basicValidateJpegImage) CameraTestUtils.getDataFromImage(com.android.mediaframeworktest.helpers.CameraTestUtils.getDataFromImage) Image(android.media.Image) Surface(android.view.Surface) CaptureRequest(android.hardware.camera2.CaptureRequest) SimpleCaptureCallback(com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback) CameraTestUtils.makeImageReader(com.android.mediaframeworktest.helpers.CameraTestUtils.makeImageReader) ImageReader(android.media.ImageReader)
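CameraTestUtils.makeImageReader(...) used above is a media-framework test helper; presumably it wraps the public ImageReader API roughly as follows. A sketch under that assumption, with a hypothetical class name:

import android.media.ImageReader;
import android.os.Handler;
import android.util.Size;

final class ReaderFactory {
    // Hypothetical stand-in for CameraTestUtils.makeImageReader(...): create a reader for the
    // given size/format with a bounded image queue and deliver callbacks on the given handler.
    static ImageReader makeImageReader(Size size, int format, int maxImages,
            ImageReader.OnImageAvailableListener listener, Handler handler) {
        ImageReader reader = ImageReader.newInstance(
                size.getWidth(), size.getHeight(), format, maxImages);
        reader.setOnImageAvailableListener(listener, handler);
        return reader;
    }

    private ReaderFactory() {}
}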

Example 29 with CaptureRequest

Use of android.hardware.camera2.CaptureRequest in project android_frameworks_base by DirtyUnicorns.

The class CameraCaptureSessionImpl, method captureBurst.

@Override
public synchronized int captureBurst(List<CaptureRequest> requests, CaptureCallback callback, Handler handler) throws CameraAccessException {
    if (requests == null) {
        throw new IllegalArgumentException("Requests must not be null");
    } else if (requests.isEmpty()) {
        throw new IllegalArgumentException("Requests must have at least one element");
    }
    for (CaptureRequest request : requests) {
        if (request.isReprocess()) {
            if (!isReprocessable()) {
                throw new IllegalArgumentException("This capture session cannot handle " + "reprocess requests");
            } else if (request.getReprocessableSessionId() != mId) {
                throw new IllegalArgumentException("Capture request was created for another " + "session");
            }
        }
    }
    checkNotClosed();
    handler = checkHandler(handler, callback);
    if (DEBUG) {
        CaptureRequest[] requestArray = requests.toArray(new CaptureRequest[0]);
        Log.v(TAG, mIdString + "captureBurst - requests " + Arrays.toString(requestArray) + ", callback " + callback + " handler " + handler);
    }
    return addPendingSequence(mDeviceImpl.captureBurst(requests, createCaptureCallbackProxy(handler, callback), mDeviceHandler));
}
Also used : CaptureRequest(android.hardware.camera2.CaptureRequest)
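For comparison, the public-API side of this call builds one or more requests and hands the list to CameraCaptureSession.captureBurst. A minimal sketch, assuming an already open CameraDevice, a configured session, and a target Surface; the wrapper class and method names are illustrative only.

import java.util.ArrayList;
import java.util.List;

import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.os.Handler;
import android.view.Surface;

final class BurstExample {
    // Submit a small still-capture burst and return its sequence id.
    static int captureThreeShotBurst(CameraDevice camera, CameraCaptureSession session,
            Surface target, Handler handler) throws CameraAccessException {
        CaptureRequest.Builder builder =
                camera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        builder.addTarget(target);
        List<CaptureRequest> burst = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            burst.add(builder.build());
        }
        return session.captureBurst(burst, new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession completedSession,
                    CaptureRequest request, TotalCaptureResult result) {
                // One completion callback per request in the burst, in submission order.
            }
        }, handler);
    }

    private BurstExample() {}
}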

Example 30 with CaptureRequest

Use of android.hardware.camera2.CaptureRequest in project android_frameworks_base by DirtyUnicorns.

The class CameraConstrainedHighSpeedCaptureSessionImpl, method createHighSpeedRequestList.

@Override
public List<CaptureRequest> createHighSpeedRequestList(CaptureRequest request) throws CameraAccessException {
    if (request == null) {
        throw new IllegalArgumentException("Input capture request must not be null");
    }
    Collection<Surface> outputSurfaces = request.getTargets();
    Range<Integer> fpsRange = request.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
    StreamConfigurationMap config = mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    SurfaceUtils.checkConstrainedHighSpeedSurfaces(outputSurfaces, fpsRange, config);
    // Request list size: to limit the preview to 30fps, we need to use maxFps / 30; to maximize
    // the preview frame rate, we should use the max batch size for that high speed stream
    // configuration. We choose the former for now.
    int requestListSize = fpsRange.getUpper() / 30;
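    // For example, a [30, 120] fps range gives 120 / 30 = 4 requests per batch; only the first
    // request in each batch targets the preview (see below), so the preview stays near 30fps
    // while the recording surface receives every frame.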
    List<CaptureRequest> requestList = new ArrayList<CaptureRequest>();
    // Prepare the request builders: we need to carry over the request controls.
    // First, create a request builder that will only include the preview or recording target.
    CameraMetadataNative requestMetadata = new CameraMetadataNative(request.getNativeCopy());
    // Note that after this step, requestMetadata is mutated (swapped) and cannot be used
    // for the next request builder creation.
    CaptureRequest.Builder singleTargetRequestBuilder = new CaptureRequest.Builder(requestMetadata, /*reprocess*/false, CameraCaptureSession.SESSION_ID_NONE);
    // Overwrite the capture intent to make sure a good value is set.
    Iterator<Surface> iterator = outputSurfaces.iterator();
    Surface firstSurface = iterator.next();
    Surface secondSurface = null;
    if (outputSurfaces.size() == 1 && SurfaceUtils.isSurfaceForHwVideoEncoder(firstSurface)) {
        singleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
    } else {
        // Video only, or preview + video
        singleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
    }
    singleTargetRequestBuilder.setPartOfCHSRequestList(/*partOfCHSList*/true);
    // Second, Create a request builder that will include both preview and recording targets.
    CaptureRequest.Builder doubleTargetRequestBuilder = null;
    if (outputSurfaces.size() == 2) {
        // We have to create a new copy; the original one was mutated when the previous
        // CaptureRequest.Builder was created.
        requestMetadata = new CameraMetadataNative(request.getNativeCopy());
        doubleTargetRequestBuilder = new CaptureRequest.Builder(requestMetadata, /*reprocess*/false, CameraCaptureSession.SESSION_ID_NONE);
        doubleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
        doubleTargetRequestBuilder.addTarget(firstSurface);
        secondSurface = iterator.next();
        doubleTargetRequestBuilder.addTarget(secondSurface);
        doubleTargetRequestBuilder.setPartOfCHSRequestList(/*partOfCHSList*/true);
        // Make sure singleTargetRequestBuilder contains only recording surface for
        // preview + recording case.
        Surface recordingSurface = firstSurface;
        if (!SurfaceUtils.isSurfaceForHwVideoEncoder(recordingSurface)) {
            recordingSurface = secondSurface;
        }
        singleTargetRequestBuilder.addTarget(recordingSurface);
    } else {
        // Single output case: either recording or preview.
        singleTargetRequestBuilder.addTarget(firstSurface);
    }
    // Generate the final request list.
    for (int i = 0; i < requestListSize; i++) {
        if (i == 0 && doubleTargetRequestBuilder != null) {
            // First request should be recording + preview request
            requestList.add(doubleTargetRequestBuilder.build());
        } else {
            requestList.add(singleTargetRequestBuilder.build());
        }
    }
    return Collections.unmodifiableList(requestList);
}
Also used : ArrayList(java.util.ArrayList) StreamConfigurationMap(android.hardware.camera2.params.StreamConfigurationMap) Surface(android.view.Surface) CaptureRequest(android.hardware.camera2.CaptureRequest)
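Typical public-API usage of the method above: set a supported high speed fps range on a builder that already targets the recording (and optionally preview) surfaces, expand it into a batch, and set the batch repeating. A minimal sketch with a hypothetical wrapper class; the fps value is only an example and must match one of the ranges the device advertises for the chosen output size.

import java.util.List;

import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.util.Range;

final class HighSpeedExample {
    // Expand a single request into a high speed batch and set it repeating.
    static void startHighSpeedRecording(CameraConstrainedHighSpeedCaptureSession session,
            CaptureRequest.Builder builder) throws CameraAccessException {
        // The fps range must be one reported by
        // StreamConfigurationMap.getHighSpeedVideoFpsRangesFor(...) for the configured size.
        builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(120, 120));
        List<CaptureRequest> batch = session.createHighSpeedRequestList(builder.build());
        session.setRepeatingBurst(batch, /*listener*/ null, /*handler*/ null);
    }

    private HighSpeedExample() {}
}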

Aggregations

CaptureRequest (android.hardware.camera2.CaptureRequest): 126 uses
ArrayList (java.util.ArrayList): 60 uses
SubmitInfo (android.hardware.camera2.utils.SubmitInfo): 50 uses
SimpleCaptureCallback (com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback): 45 uses
SmallTest (android.test.suitebuilder.annotation.SmallTest): 35 uses
Size (android.util.Size): 35 uses
TotalCaptureResult (android.hardware.camera2.TotalCaptureResult): 26 uses
Image (android.media.Image): 25 uses
Surface (android.view.Surface): 25 uses
CameraTestUtils.getDataFromImage (com.android.mediaframeworktest.helpers.CameraTestUtils.getDataFromImage): 20 uses
CameraCharacteristics (android.hardware.camera2.CameraCharacteristics): 16 uses
Rect (android.graphics.Rect): 15 uses
Camera (android.hardware.Camera): 15 uses
CaptureResult (android.hardware.camera2.CaptureResult): 15 uses
ServiceSpecificException (android.os.ServiceSpecificException): 15 uses
SimpleImageReaderListener (com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageReaderListener): 15 uses
Parameters (android.hardware.Camera.Parameters): 10 uses
CaptureResultExtras (android.hardware.camera2.impl.CaptureResultExtras): 10 uses
ZoomData (android.hardware.camera2.legacy.ParameterUtils.ZoomData): 10 uses
MeteringRectangle (android.hardware.camera2.params.MeteringRectangle): 10 uses