Example 56 with CaptureRequest

use of android.hardware.camera2.CaptureRequest in project android_frameworks_base by crdroidandroid.

the class LegacyResultMapper method convertResultMetadata.

/**
 * Generate capture result metadata from the legacy camera request.
 *
 * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
 * @return a {@link CameraMetadataNative} object containing result metadata.
 */
private static CameraMetadataNative convertResultMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    CameraMetadataNative result = new CameraMetadataNative();
    Rect activeArraySize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArraySize, request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
    /*
     * colorCorrection
     */
    // colorCorrection.aberrationMode
    {
        result.set(COLOR_CORRECTION_ABERRATION_MODE, request.get(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE));
    }
    /*
     * control
     */
    /*
     * control.ae*
     */
    mapAe(result, characteristics, request, activeArraySize, zoomData, /*out*/params);
    /*
     * control.af*
     */
    mapAf(result, activeArraySize, zoomData, /*out*/params);
    /*
     * control.awb*
     */
    mapAwb(result, /*out*/params);
    /*
     * control.captureIntent
     */
    {
        int captureIntent = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_CAPTURE_INTENT,
                /*defaultValue*/ CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
        captureIntent = LegacyRequestMapper.filterSupportedCaptureIntent(captureIntent);
        result.set(CONTROL_CAPTURE_INTENT, captureIntent);
    }
    /*
     * control.mode
     */
    {
        int controlMode = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
        if (controlMode == CaptureResult.CONTROL_MODE_USE_SCENE_MODE) {
            result.set(CONTROL_MODE, CONTROL_MODE_USE_SCENE_MODE);
        } else {
            result.set(CONTROL_MODE, CONTROL_MODE_AUTO);
        }
    }
    /*
     * control.sceneMode
     */
    {
        String legacySceneMode = params.getSceneMode();
        int mode = LegacyMetadataMapper.convertSceneModeFromLegacy(legacySceneMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            // In case of SCENE_MODE == FACE_PRIORITY, LegacyFaceDetectMapper will override
            // the result to say SCENE_MODE == FACE_PRIORITY.
            result.set(CaptureResult.CONTROL_SCENE_MODE, mode);
        } else {
            Log.w(TAG, "Unknown scene mode " + legacySceneMode + " returned by camera HAL, setting to disabled.");
            result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
        }
    }
    /*
     * control.effectMode
     */
    {
        String legacyEffectMode = params.getColorEffect();
        int mode = LegacyMetadataMapper.convertEffectModeFromLegacy(legacyEffectMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_EFFECT_MODE, mode);
        } else {
            Log.w(TAG, "Unknown effect mode " + legacyEffectMode + " returned by camera HAL, setting to off.");
            result.set(CaptureResult.CONTROL_EFFECT_MODE, CONTROL_EFFECT_MODE_OFF);
        }
    }
    // control.videoStabilizationMode
    {
        int stabMode = (params.isVideoStabilizationSupported() && params.getVideoStabilization()) ? CONTROL_VIDEO_STABILIZATION_MODE_ON : CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode);
    }
    /*
     * flash
     */
    {
        // flash.mode, flash.state mapped in mapAeAndFlashMode
    }
    /*
     * lens
     */
    // lens.focusDistance
    {
        if (Parameters.FOCUS_MODE_INFINITY.equals(params.getFocusMode())) {
            result.set(CaptureResult.LENS_FOCUS_DISTANCE, 0.0f);
        }
    }
    // lens.focalLength
    result.set(CaptureResult.LENS_FOCAL_LENGTH, params.getFocalLength());
    /*
     * request
     */
    // request.pipelineDepth
    result.set(REQUEST_PIPELINE_DEPTH, characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH));
    /*
     * scaler
     */
    mapScaler(result, zoomData, /*out*/params);
    /*
     * sensor
     */
    // sensor.timestamp varies every frame; mapping is done in #cachedConvertResultMetadata
    {
        // Unconditionally no test patterns
        result.set(SENSOR_TEST_PATTERN_MODE, SENSOR_TEST_PATTERN_MODE_OFF);
    }
    /*
     * jpeg
     */
    // jpeg.gpsLocation
    result.set(JPEG_GPS_LOCATION, request.get(CaptureRequest.JPEG_GPS_LOCATION));
    // jpeg.orientation
    result.set(JPEG_ORIENTATION, request.get(CaptureRequest.JPEG_ORIENTATION));
    // jpeg.quality
    result.set(JPEG_QUALITY, (byte) params.getJpegQuality());
    // jpeg.thumbnailQuality
    result.set(JPEG_THUMBNAIL_QUALITY, (byte) params.getJpegThumbnailQuality());
    // jpeg.thumbnailSize
    Camera.Size s = params.getJpegThumbnailSize();
    if (s != null) {
        result.set(JPEG_THUMBNAIL_SIZE, ParameterUtils.convertSize(s));
    } else {
        Log.w(TAG, "Null thumbnail size received from parameters.");
    }
    /*
     * noiseReduction.*
     */
    // noiseReduction.mode
    result.set(NOISE_REDUCTION_MODE, request.get(CaptureRequest.NOISE_REDUCTION_MODE));
    return result;
}
Also used : Parameters(android.hardware.Camera.Parameters) Rect(android.graphics.Rect) Size(android.util.Size) CameraMetadataNative(android.hardware.camera2.impl.CameraMetadataNative) CameraCharacteristics(android.hardware.camera2.CameraCharacteristics) ZoomData(android.hardware.camera2.legacy.ParameterUtils.ZoomData) CaptureRequest(android.hardware.camera2.CaptureRequest) Camera(android.hardware.Camera)
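
For context, the keys written by this mapper show up in applications as ordinary CaptureResult fields. A minimal app-side sketch (not part of the mapper itself) that reads back a few of the mapped keys from a capture callback; the "ResultReadback" log tag is a placeholder:

// Sketch only: reading keys this mapper fills in, using the public android.hardware.camera2 API.
CameraCaptureSession.CaptureCallback readbackCallback = new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
            TotalCaptureResult result) {
        Integer sceneMode = result.get(CaptureResult.CONTROL_SCENE_MODE);
        Float focalLength = result.get(CaptureResult.LENS_FOCAL_LENGTH);
        Byte jpegQuality = result.get(CaptureResult.JPEG_QUALITY);
        Log.d("ResultReadback", "sceneMode=" + sceneMode + " focalLength=" + focalLength
                + " jpegQuality=" + jpegQuality);
    }
};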

Example 57 with CaptureRequest

use of android.hardware.camera2.CaptureRequest in project android_frameworks_base by crdroidandroid.

the class CameraCaptureSessionImpl method setRepeatingBurst.

@Override
public synchronized int setRepeatingBurst(List<CaptureRequest> requests, CaptureCallback callback, Handler handler) throws CameraAccessException {
    if (requests == null) {
        throw new IllegalArgumentException("requests must not be null");
    } else if (requests.isEmpty()) {
        throw new IllegalArgumentException("requests must have at least one element");
    }
    for (CaptureRequest r : requests) {
        if (r.isReprocess()) {
            throw new IllegalArgumentException("repeating reprocess burst requests are not " + "supported");
        }
    }
    checkNotClosed();
    handler = checkHandler(handler, callback);
    if (DEBUG) {
        CaptureRequest[] requestArray = requests.toArray(new CaptureRequest[0]);
        Log.v(TAG, mIdString + "setRepeatingBurst - requests " + Arrays.toString(requestArray) + ", callback " + callback + " handler " + handler);
    }
    return addPendingSequence(mDeviceImpl.setRepeatingBurst(requests, createCaptureCallbackProxy(handler, callback), mDeviceHandler));
}
Also used : CaptureRequest(android.hardware.camera2.CaptureRequest)
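
From the application side, the public CameraCaptureSession.setRepeatingBurst entry point takes the same kind of request list this implementation validates. A minimal sketch, assuming an open CameraDevice (device), a configured session, two target Surfaces (previewSurface, analysisSurface), a CaptureCallback and a background Handler are already available:

// Sketch only: building and submitting a two-request repeating burst.
// device, session, previewSurface, analysisSurface, captureCallback and backgroundHandler
// are assumed to exist; none of them come from the example above.
List<CaptureRequest> burst = new ArrayList<>();
CaptureRequest.Builder builder = device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
builder.addTarget(previewSurface);
burst.add(builder.build());
builder.removeTarget(previewSurface);
builder.addTarget(analysisSurface);
burst.add(builder.build());
// None of the requests may be reprocess requests, matching the check in the method above.
int sequenceId = session.setRepeatingBurst(burst, captureCallback, backgroundHandler);

Each build() snapshots the builder's current settings, so the two requests share parameters but alternate between the two output targets as the burst repeats.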

Example 58 with CaptureRequest

use of android.hardware.camera2.CaptureRequest in project android_frameworks_base by crdroidandroid.

the class Camera2CaptureRequestTest method autoAeMultipleCapturesThenTestLock.

/**
 * Issue multiple auto AE captures, then lock AE, and validate the AE lock against
 * the first capture result received after the lock. The expected AE lock behavior is:
 * once locked, AE locks to the current exposure value, and all subsequent
 * requests with the lock ON keep that same locked exposure value.
 */
private void autoAeMultipleCapturesThenTestLock(CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock) throws Exception {
    if (numCapturesDuringLock < 1) {
        throw new IllegalArgumentException("numCapturesBeforeLock must be no less than 1");
    }
    if (VERBOSE) {
        Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode " + aeMode + " with " + numCapturesDuringLock + " captures before lock");
    }
    final int NUM_CAPTURES_BEFORE_LOCK = 2;
    SimpleCaptureCallback listener = new SimpleCaptureCallback();
    CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock];
    boolean canSetAeLock = mStaticInfo.isAeLockSupported();
    // Reset the AE lock to OFF, since we are reusing this builder many times
    if (canSetAeLock) {
        requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
    }
    // Just send several captures with auto AE, lock off.
    CaptureRequest request = requestBuilder.build();
    for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) {
        mSession.capture(request, listener, mHandler);
    }
    waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK);
    if (!canSetAeLock) {
        // Without AE lock, the remaining tests items won't work
        return;
    }
    // Then fire several capture to lock the AE.
    requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    int requestCount = captureRequestsSynchronized(requestBuilder.build(), numCapturesDuringLock, listener, mHandler);
    int[] sensitivities = new int[numCapturesDuringLock];
    long[] expTimes = new long[numCapturesDuringLock];
    Arrays.fill(sensitivities, -1);
    Arrays.fill(expTimes, -1L);
    // Get the AE lock on result and validate the exposure values.
    waitForNumResults(listener, requestCount - numCapturesDuringLock);
    for (int i = 0; i < resultsDuringLock.length; i++) {
        resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
    }
    for (int i = 0; i < numCapturesDuringLock; i++) {
        mCollector.expectKeyValueEquals(resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true);
    }
    // Can't read manual sensor/exposure settings without manual sensor
    if (mStaticInfo.isCapabilitySupported(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
        int sensitivityLocked = getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY);
        long expTimeLocked = getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME);
        for (int i = 1; i < resultsDuringLock.length; i++) {
            mCollector.expectKeyValueEquals(resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked);
            mCollector.expectKeyValueEquals(resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked);
        }
    }
}
Also used : CaptureResult(android.hardware.camera2.CaptureResult) CaptureRequest(android.hardware.camera2.CaptureRequest) SimpleCaptureCallback(com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback)
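
The lock contract this test checks is the same one applications rely on: once CONTROL_AE_LOCK is set to true, later results should keep reporting the exposure time and sensitivity that were in effect when the lock took hold. A minimal app-side sketch of that pattern, assuming a configured session, a reusable requestBuilder and a background Handler:

// Sketch only: lock AE on a repeating request and read the locked exposure values back.
// session, requestBuilder and backgroundHandler are assumed placeholders.
requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
session.setRepeatingRequest(requestBuilder.build(), new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
            TotalCaptureResult result) {
        // While the lock is reported as active, exposure time and sensitivity are
        // expected to stay constant from frame to frame.
        Boolean aeLocked = result.get(CaptureResult.CONTROL_AE_LOCK);
        Long exposureNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
        Integer sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
        Log.d("AeLock", "locked=" + aeLocked + " exposureNs=" + exposureNs
                + " sensitivity=" + sensitivity);
    }
}, backgroundHandler);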

Example 59 with CaptureRequest

use of android.hardware.camera2.CaptureRequest in project android_frameworks_base by crdroidandroid.

the class CameraDeviceBinderTest method testFlush.

@SmallTest
public void testFlush() throws Exception {
    int status;
    // Initial flush should work
    long lastFrameNumber = mCameraUser.flush();
    // Then set up a stream
    CaptureRequest request = createDefaultBuilder(/* needStream */ true).build();
    // Flush should still be a no-op, really
    lastFrameNumber = mCameraUser.flush();
    // Submit a few capture requests
    SubmitInfo requestInfo1 = submitCameraRequest(request, /* streaming */ false);
    SubmitInfo requestInfo2 = submitCameraRequest(request, /* streaming */ false);
    SubmitInfo requestInfo3 = submitCameraRequest(request, /* streaming */ false);
    SubmitInfo requestInfo4 = submitCameraRequest(request, /* streaming */ false);
    SubmitInfo requestInfo5 = submitCameraRequest(request, /* streaming */ false);
    // Then flush and wait for idle
    lastFrameNumber = mCameraUser.flush();
    verify(mMockCb, timeout(WAIT_FOR_FLUSH_TIMEOUT_MS).times(1)).onDeviceIdle();
    // Now a streaming request
    SubmitInfo streamingInfo = submitCameraRequest(request, /* streaming */ true);
    // Wait a bit to fill up the queue
    SystemClock.sleep(WAIT_FOR_WORK_MS);
    // Then flush and wait for the idle callback
    lastFrameNumber = mCameraUser.flush();
    verify(mMockCb, timeout(WAIT_FOR_FLUSH_TIMEOUT_MS).times(2)).onDeviceIdle();
    // TODO: When errors are hooked up, count that errors + successful
    // requests equal to 5.
}
Also used : SubmitInfo(android.hardware.camera2.utils.SubmitInfo) CaptureRequest(android.hardware.camera2.CaptureRequest) SmallTest(android.test.suitebuilder.annotation.SmallTest)
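
The flush() exercised here is the binder-level call on the camera device; the closest public-API analogue is aborting work on the capture session. A minimal sketch, assuming a configured CameraCaptureSession (session) with a repeating request in flight:

// Sketch only: the public-API counterpart of a device flush.
try {
    // Cancel the repeating request, then discard in-flight captures as quickly as possible.
    session.stopRepeating();
    session.abortCaptures();
} catch (CameraAccessException e) {
    Log.e("Flush", "Failed to abort captures", e);
}
// The session's StateCallback#onReady(CameraCaptureSession) fires once the queue has
// drained and new requests can be submitted again.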

Example 60 with CaptureRequest

use of android.hardware.camera2.CaptureRequest in project android_frameworks_base by crdroidandroid.

the class CameraDeviceBinderTest method testSubmitGoodRequest.

@SmallTest
public void testSubmitGoodRequest() throws Exception {
    CaptureRequest.Builder builder = createDefaultBuilder(/* needStream */ true);
    CaptureRequest request = builder.build();
    // Submit valid request twice.
    SubmitInfo requestInfo1 = submitCameraRequest(request, /* streaming */
    false);
    SubmitInfo requestInfo2 = submitCameraRequest(request, /* streaming */
    false);
    assertNotSame("Request IDs should be unique for multiple requests", requestInfo1.getRequestId(), requestInfo2.getRequestId());
}
Also used : SubmitInfo(android.hardware.camera2.utils.SubmitInfo) CaptureRequest(android.hardware.camera2.CaptureRequest) SmallTest(android.test.suitebuilder.annotation.SmallTest)
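
At the public API the same uniqueness shows up as distinct capture sequence IDs returned by the session. A minimal sketch, assuming a configured session, a built request, a CaptureCallback and a background Handler:

// Sketch only: submitting the same CaptureRequest twice through the public session API.
// session, request, captureCallback and backgroundHandler are assumed placeholders.
int firstSequenceId = session.capture(request, captureCallback, backgroundHandler);
int secondSequenceId = session.capture(request, captureCallback, backgroundHandler);
// Each submission gets its own sequence ID, mirroring the unique request IDs asserted above.
Log.d("Submit", "sequence ids: " + firstSequenceId + ", " + secondSequenceId);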

Aggregations

CaptureRequest (android.hardware.camera2.CaptureRequest)126 ArrayList (java.util.ArrayList)60 SubmitInfo (android.hardware.camera2.utils.SubmitInfo)50 SimpleCaptureCallback (com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback)45 SmallTest (android.test.suitebuilder.annotation.SmallTest)35 Size (android.util.Size)35 TotalCaptureResult (android.hardware.camera2.TotalCaptureResult)26 Image (android.media.Image)25 Surface (android.view.Surface)25 CameraTestUtils.getDataFromImage (com.android.mediaframeworktest.helpers.CameraTestUtils.getDataFromImage)20 CameraCharacteristics (android.hardware.camera2.CameraCharacteristics)16 Rect (android.graphics.Rect)15 Camera (android.hardware.Camera)15 CaptureResult (android.hardware.camera2.CaptureResult)15 ServiceSpecificException (android.os.ServiceSpecificException)15 SimpleImageReaderListener (com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageReaderListener)15 Parameters (android.hardware.Camera.Parameters)10 CaptureResultExtras (android.hardware.camera2.impl.CaptureResultExtras)10 ZoomData (android.hardware.camera2.legacy.ParameterUtils.ZoomData)10 MeteringRectangle (android.hardware.camera2.params.MeteringRectangle)10