Example 11 with MeteringRectangle

use of android.hardware.camera2.params.MeteringRectangle in project android_frameworks_base by crdroidandroid.

the class CameraTestUtils method getExpectedOutputRegion.

/**
     * Calculate the output 3A regions from the intersection of the input 3A regions
     * and the crop region.
     *
     * @param requestRegions The input 3A regions
     * @param cropRect The crop region
     * @return The expected 3A regions reported in the capture result
     */
public static MeteringRectangle[] getExpectedOutputRegion(MeteringRectangle[] requestRegions, Rect cropRect) {
    MeteringRectangle[] resultRegions = new MeteringRectangle[requestRegions.length];
    for (int i = 0; i < requestRegions.length; i++) {
        Rect requestRect = requestRegions[i].getRect();
        Rect resultRect = new Rect();
        assertTrue("Input 3A region must intersect cropped region", resultRect.setIntersect(requestRect, cropRect));
        resultRegions[i] = new MeteringRectangle(resultRect, requestRegions[i].getMeteringWeight());
    }
    return resultRegions;
}
Also used : Rect(android.graphics.Rect) MeteringRectangle(android.hardware.camera2.params.MeteringRectangle)
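
A minimal usage sketch (not part of the original example) of how getExpectedOutputRegion might be applied in a test: the helper name checkAeRegionsClippedToCrop is hypothetical, the request is assumed to have explicitly set CONTROL_AE_REGIONS, and assertArrayEquals is assumed to come from JUnit.

public static void checkAeRegionsClippedToCrop(CaptureRequest request, CaptureResult result) {
    // Crop region actually applied by the device and the AE regions that were requested.
    Rect cropRect = result.get(CaptureResult.SCALER_CROP_REGION);
    MeteringRectangle[] requestRegions = request.get(CaptureRequest.CONTROL_AE_REGIONS);
    // Expected output: each requested region clipped to the crop region, weight preserved.
    MeteringRectangle[] expected = getExpectedOutputRegion(requestRegions, cropRect);
    MeteringRectangle[] actual = result.get(CaptureResult.CONTROL_AE_REGIONS);
    assertArrayEquals("AE regions should be clipped to the crop region", expected, actual);
}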

Example 12 with MeteringRectangle

use of android.hardware.camera2.params.MeteringRectangle in project android_frameworks_base by AOSPA.

the class CameraTestUtils method getExpectedOutputRegion.

/**
     * Calculate the output 3A regions from the intersection of the input 3A regions
     * and the crop region.
     *
     * @param requestRegions The input 3A regions
     * @param cropRect The crop region
     * @return The expected 3A regions reported in the capture result
     */
public static MeteringRectangle[] getExpectedOutputRegion(MeteringRectangle[] requestRegions, Rect cropRect) {
    MeteringRectangle[] resultRegions = new MeteringRectangle[requestRegions.length];
    for (int i = 0; i < requestRegions.length; i++) {
        Rect requestRect = requestRegions[i].getRect();
        Rect resultRect = new Rect();
        assertTrue("Input 3A region must intersect cropped region", resultRect.setIntersect(requestRect, cropRect));
        resultRegions[i] = new MeteringRectangle(resultRect, requestRegions[i].getMeteringWeight());
    }
    return resultRegions;
}
Also used : Rect(android.graphics.Rect) MeteringRectangle(android.hardware.camera2.params.MeteringRectangle)

Example 13 with MeteringRectangle

use of android.hardware.camera2.params.MeteringRectangle in project android_frameworks_base by AOSPA.

the class LegacyRequestMapper method convertMeteringRegionsToLegacy.

private static List<Camera.Area> convertMeteringRegionsToLegacy(Rect activeArray, ParameterUtils.ZoomData zoomData, MeteringRectangle[] meteringRegions, int maxNumMeteringAreas, String regionName) {
    if (meteringRegions == null || maxNumMeteringAreas <= 0) {
        if (maxNumMeteringAreas > 0) {
            return Arrays.asList(ParameterUtils.CAMERA_AREA_DEFAULT);
        } else {
            return null;
        }
    }
    // Add all non-zero weight regions to the list
    List<MeteringRectangle> meteringRectangleList = new ArrayList<>();
    for (MeteringRectangle rect : meteringRegions) {
        if (rect.getMeteringWeight() != MeteringRectangle.METERING_WEIGHT_DONT_CARE) {
            meteringRectangleList.add(rect);
        }
    }
    if (meteringRectangleList.size() == 0) {
        Log.w(TAG, "Only received metering rectangles with weight 0.");
        return Arrays.asList(ParameterUtils.CAMERA_AREA_DEFAULT);
    }
    // Ignore any regions beyond our maximum supported count
    int countMeteringAreas = Math.min(maxNumMeteringAreas, meteringRectangleList.size());
    List<Camera.Area> meteringAreaList = new ArrayList<>(countMeteringAreas);
    for (int i = 0; i < countMeteringAreas; ++i) {
        MeteringRectangle rect = meteringRectangleList.get(i);
        ParameterUtils.MeteringData meteringData = ParameterUtils.convertMeteringRectangleToLegacy(activeArray, rect, zoomData);
        meteringAreaList.add(meteringData.meteringArea);
    }
    if (maxNumMeteringAreas < meteringRectangleList.size()) {
        Log.w(TAG, "convertMeteringRegionsToLegacy - Too many requested " + regionName + " regions, ignoring all beyond the first " + maxNumMeteringAreas);
    }
    if (DEBUG) {
        Log.v(TAG, "convertMeteringRegionsToLegacy - " + regionName + " areas = " + ParameterUtils.stringFromAreaList(meteringAreaList));
    }
    return meteringAreaList;
}
Also used : ArrayList(java.util.ArrayList) MeteringRectangle(android.hardware.camera2.params.MeteringRectangle)
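
For context, and only as an illustrative sketch rather than the actual AOSP ParameterUtils code, the conversion the helper above delegates to amounts to mapping a MeteringRectangle given in active-array pixel coordinates into camera1's [-1000, 1000] Camera.Area coordinate space; zoom/crop handling (ZoomData) is deliberately omitted here, and the weight is clamped to the legacy [1, 1000] range.

// Minimal sketch: map an active-array MeteringRectangle to a legacy Camera.Area,
// ignoring the zoom/crop transformation that ParameterUtils applies in the real code.
private static Camera.Area toLegacyAreaIgnoringZoom(Rect activeArray, MeteringRectangle region) {
    Rect r = region.getRect();
    int left   = -1000 + 2000 * (r.left   - activeArray.left) / activeArray.width();
    int top    = -1000 + 2000 * (r.top    - activeArray.top)  / activeArray.height();
    int right  = -1000 + 2000 * (r.right  - activeArray.left) / activeArray.width();
    int bottom = -1000 + 2000 * (r.bottom - activeArray.top)  / activeArray.height();
    // Legacy weights must be in [1, 1000]; camera2 allows 0 (METERING_WEIGHT_DONT_CARE).
    int weight = Math.max(1, Math.min(1000, region.getMeteringWeight()));
    return new Camera.Area(new Rect(left, top, right, bottom), weight);
}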

Example 14 with MeteringRectangle

use of android.hardware.camera2.params.MeteringRectangle in project android_frameworks_base by AOSPA.

the class LegacyRequestMapper method convertRequestMetadata.

/**
     * Set the legacy parameters using the {@link LegacyRequest legacy request}.
     *
     * <p>The legacy request's parameters are changed as a side effect of calling this
     * method.</p>
     *
     * @param legacyRequest a non-{@code null} legacy request
     */
public static void convertRequestMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    /*
         * scaler.cropRegion
         */
    ParameterUtils.ZoomData zoomData;
    {
        zoomData = ParameterUtils.convertScalerCropRegion(activeArray, request.get(SCALER_CROP_REGION), previewSize, params);
        if (params.isZoomSupported()) {
            params.setZoom(zoomData.zoomIndex);
        } else if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - zoom is not supported");
        }
    }
    /*
         * colorCorrection.*
         */
    // colorCorrection.aberrationMode
    {
        int aberrationMode = ParamsUtils.getOrDefault(request, COLOR_CORRECTION_ABERRATION_MODE, /*defaultValue*/
        COLOR_CORRECTION_ABERRATION_MODE_FAST);
        if (aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_FAST && aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported " + "colorCorrection.aberrationMode = " + aberrationMode);
        }
    }
    /*
         * control.ae*
         */
    // control.aeAntibandingMode
    {
        String legacyMode;
        Integer antiBandingMode = request.get(CONTROL_AE_ANTIBANDING_MODE);
        if (antiBandingMode != null) {
            legacyMode = convertAeAntiBandingModeToLegacy(antiBandingMode);
        } else {
            legacyMode = ListUtils.listSelectFirstFrom(params.getSupportedAntibanding(), new String[] { Parameters.ANTIBANDING_AUTO, Parameters.ANTIBANDING_OFF, Parameters.ANTIBANDING_50HZ, Parameters.ANTIBANDING_60HZ });
        }
        if (legacyMode != null) {
            params.setAntibanding(legacyMode);
        }
    }
    /*
         * control.aeRegions, afRegions
         */
    {
        // aeRegions
        {
            // Use aeRegions if available; awbRegions are not supported and are ignored with a warning
            MeteringRectangle[] aeRegions = request.get(CONTROL_AE_REGIONS);
            if (request.get(CONTROL_AWB_REGIONS) != null) {
                Log.w(TAG, "convertRequestMetadata - control.awbRegions setting is not " + "supported, ignoring value");
            }
            int maxNumMeteringAreas = params.getMaxNumMeteringAreas();
            List<Camera.Area> meteringAreaList = convertMeteringRegionsToLegacy(activeArray, zoomData, aeRegions, maxNumMeteringAreas, /*regionName*/
            "AE");
            // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
            if (maxNumMeteringAreas > 0) {
                params.setMeteringAreas(meteringAreaList);
            }
        }
        // afRegions
        {
            MeteringRectangle[] afRegions = request.get(CONTROL_AF_REGIONS);
            int maxNumFocusAreas = params.getMaxNumFocusAreas();
            List<Camera.Area> focusAreaList = convertMeteringRegionsToLegacy(activeArray, zoomData, afRegions, maxNumFocusAreas, /*regionName*/
            "AF");
            // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
            if (maxNumFocusAreas > 0) {
                params.setFocusAreas(focusAreaList);
            }
        }
    }
    // control.aeTargetFpsRange
    Range<Integer> aeFpsRange = request.get(CONTROL_AE_TARGET_FPS_RANGE);
    if (aeFpsRange != null) {
        int[] legacyFps = convertAeFpsRangeToLegacy(aeFpsRange);
        int[] rangeToApply = null;
        for (int[] range : params.getSupportedPreviewFpsRange()) {
            // Round range up/down to integer FPS value
            int intRangeLow = (int) Math.floor(range[0] / 1000.0) * 1000;
            int intRangeHigh = (int) Math.ceil(range[1] / 1000.0) * 1000;
            if (legacyFps[0] == intRangeLow && legacyFps[1] == intRangeHigh) {
                rangeToApply = range;
                break;
            }
        }
        if (rangeToApply != null) {
            params.setPreviewFpsRange(rangeToApply[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], rangeToApply[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        } else {
            Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]");
        }
    }
    /*
         * control
         */
    // control.aeExposureCompensation
    {
        Range<Integer> compensationRange = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
        int compensation = ParamsUtils.getOrDefault(request, CONTROL_AE_EXPOSURE_COMPENSATION, /*defaultValue*/
        0);
        if (!compensationRange.contains(compensation)) {
            Log.w(TAG, "convertRequestMetadata - control.aeExposureCompensation " + "is out of range, ignoring value");
            compensation = 0;
        }
        params.setExposureCompensation(compensation);
    }
    // control.aeLock
    {
        Boolean aeLock = getIfSupported(request, CONTROL_AE_LOCK, /*defaultValue*/
        false, params.isAutoExposureLockSupported(), /*allowedValue*/
        false);
        if (aeLock != null) {
            params.setAutoExposureLock(aeLock);
        }
        if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - control.aeLock set to " + aeLock);
        }
    // TODO: Don't add control.aeLock to availableRequestKeys if it's not supported
    }
    // control.aeMode, flash.mode
    mapAeAndFlashMode(request, /*out*/
    params);
    // control.afMode
    {
        int afMode = ParamsUtils.getOrDefault(request, CONTROL_AF_MODE, /*defaultValue*/
        CONTROL_AF_MODE_OFF);
        String focusMode = LegacyMetadataMapper.convertAfModeToLegacy(afMode, params.getSupportedFocusModes());
        if (focusMode != null) {
            params.setFocusMode(focusMode);
        }
        if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - control.afMode " + afMode + " mapped to " + focusMode);
        }
    }
    // control.awbMode
    {
        Integer awbMode = getIfSupported(request, CONTROL_AWB_MODE, /*defaultValue*/
        CONTROL_AWB_MODE_AUTO, params.getSupportedWhiteBalance() != null, /*allowedValue*/
        CONTROL_AWB_MODE_AUTO);
        String whiteBalanceMode = null;
        if (awbMode != null) {
            // null iff AWB is not supported by camera1 api
            whiteBalanceMode = convertAwbModeToLegacy(awbMode);
            params.setWhiteBalance(whiteBalanceMode);
        }
        if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - control.awbMode " + awbMode + " mapped to " + whiteBalanceMode);
        }
    }
    // control.awbLock
    {
        Boolean awbLock = getIfSupported(request, CONTROL_AWB_LOCK, /*defaultValue*/
        false, params.isAutoWhiteBalanceLockSupported(), /*allowedValue*/
        false);
        if (awbLock != null) {
            params.setAutoWhiteBalanceLock(awbLock);
        }
    // TODO: Don't add control.awbLock to availableRequestKeys if it's not supported
    }
    // control.captureIntent
    {
        int captureIntent = ParamsUtils.getOrDefault(request, CONTROL_CAPTURE_INTENT, /*defaultValue*/
        CONTROL_CAPTURE_INTENT_PREVIEW);
        captureIntent = filterSupportedCaptureIntent(captureIntent);
        params.setRecordingHint(captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_RECORD || captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
    }
    // control.videoStabilizationMode
    {
        Integer stabMode = getIfSupported(request, CONTROL_VIDEO_STABILIZATION_MODE, /*defaultValue*/
        CONTROL_VIDEO_STABILIZATION_MODE_OFF, params.isVideoStabilizationSupported(), /*allowedValue*/
        CONTROL_VIDEO_STABILIZATION_MODE_OFF);
        if (stabMode != null) {
            params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON);
        }
    }
    // lens.focusDistance
    {
        boolean infinityFocusSupported = ListUtils.listContains(params.getSupportedFocusModes(), Parameters.FOCUS_MODE_INFINITY);
        Float focusDistance = getIfSupported(request, LENS_FOCUS_DISTANCE, /*defaultValue*/
        0f, infinityFocusSupported, /*allowedValue*/
        0f);
        if (focusDistance == null || focusDistance != 0f) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring android.lens.focusDistance " + infinityFocusSupported + ", only 0.0f is supported");
        }
    }
    // control.sceneMode, control.mode
    {
        if (params.getSupportedSceneModes() != null) {
            int controlMode = ParamsUtils.getOrDefault(request, CONTROL_MODE, /*defaultValue*/
            CONTROL_MODE_AUTO);
            String modeToSet;
            switch(controlMode) {
                case CONTROL_MODE_USE_SCENE_MODE:
                    {
                        int sceneMode = ParamsUtils.getOrDefault(request, CONTROL_SCENE_MODE, /*defaultValue*/
                        CONTROL_SCENE_MODE_DISABLED);
                        String legacySceneMode = LegacyMetadataMapper.convertSceneModeToLegacy(sceneMode);
                        if (legacySceneMode != null) {
                            modeToSet = legacySceneMode;
                        } else {
                            modeToSet = Parameters.SCENE_MODE_AUTO;
                            Log.w(TAG, "Skipping unknown requested scene mode: " + sceneMode);
                        }
                        break;
                    }
                case CONTROL_MODE_AUTO:
                    {
                        modeToSet = Parameters.SCENE_MODE_AUTO;
                        break;
                    }
                default:
                    {
                        Log.w(TAG, "Control mode " + controlMode + " is unsupported, defaulting to AUTO");
                        modeToSet = Parameters.SCENE_MODE_AUTO;
                    }
            }
            params.setSceneMode(modeToSet);
        }
    }
    // control.effectMode
    {
        if (params.getSupportedColorEffects() != null) {
            int effectMode = ParamsUtils.getOrDefault(request, CONTROL_EFFECT_MODE, /*defaultValue*/
            CONTROL_EFFECT_MODE_OFF);
            String legacyEffectMode = LegacyMetadataMapper.convertEffectModeToLegacy(effectMode);
            if (legacyEffectMode != null) {
                params.setColorEffect(legacyEffectMode);
            } else {
                params.setColorEffect(Parameters.EFFECT_NONE);
                Log.w(TAG, "Skipping unknown requested effect mode: " + effectMode);
            }
        }
    }
    /*
         * sensor
         */
    // sensor.testPattern
    {
        int testPatternMode = ParamsUtils.getOrDefault(request, SENSOR_TEST_PATTERN_MODE, /*defaultValue*/
        SENSOR_TEST_PATTERN_MODE_OFF);
        if (testPatternMode != SENSOR_TEST_PATTERN_MODE_OFF) {
            Log.w(TAG, "convertRequestToMetadata - ignoring sensor.testPatternMode " + testPatternMode + "; only OFF is supported");
        }
    }
    /*
         * jpeg.*
         */
    // jpeg.gpsLocation
    {
        Location location = request.get(JPEG_GPS_LOCATION);
        if (location != null) {
            if (checkForCompleteGpsData(location)) {
                params.setGpsAltitude(location.getAltitude());
                params.setGpsLatitude(location.getLatitude());
                params.setGpsLongitude(location.getLongitude());
                params.setGpsProcessingMethod(location.getProvider().toUpperCase());
                params.setGpsTimestamp(location.getTime());
            } else {
                Log.w(TAG, "Incomplete GPS parameters provided in location " + location);
            }
        } else {
            params.removeGpsData();
        }
    }
    // jpeg.orientation
    {
        Integer orientation = request.get(CaptureRequest.JPEG_ORIENTATION);
        params.setRotation(ParamsUtils.getOrDefault(request, JPEG_ORIENTATION, (orientation == null) ? 0 : orientation));
    }
    // jpeg.quality
    {
        params.setJpegQuality(0xFF & ParamsUtils.getOrDefault(request, JPEG_QUALITY, DEFAULT_JPEG_QUALITY));
    }
    // jpeg.thumbnailQuality
    {
        params.setJpegThumbnailQuality(0xFF & ParamsUtils.getOrDefault(request, JPEG_THUMBNAIL_QUALITY, DEFAULT_JPEG_QUALITY));
    }
    // jpeg.thumbnailSize
    {
        List<Camera.Size> sizes = params.getSupportedJpegThumbnailSizes();
        if (sizes != null && sizes.size() > 0) {
            Size s = request.get(JPEG_THUMBNAIL_SIZE);
            boolean invalidSize = (s == null) ? false : !ParameterUtils.containsSize(sizes, s.getWidth(), s.getHeight());
            if (invalidSize) {
                Log.w(TAG, "Invalid JPEG thumbnail size set " + s + ", skipping thumbnail...");
            }
            if (s == null || invalidSize) {
                // (0,0) = "no thumbnail" in Camera API 1
                params.setJpegThumbnailSize(/*width*/
                0, /*height*/
                0);
            } else {
                params.setJpegThumbnailSize(s.getWidth(), s.getHeight());
            }
        }
    }
    /*
         * noiseReduction.*
         */
    // noiseReduction.mode
    {
        int mode = ParamsUtils.getOrDefault(request, NOISE_REDUCTION_MODE, /*defaultValue*/
        NOISE_REDUCTION_MODE_FAST);
        if (mode != NOISE_REDUCTION_MODE_FAST && mode != NOISE_REDUCTION_MODE_HIGH_QUALITY) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported " + "noiseReduction.mode = " + mode);
        }
    }
}
Also used : Parameters(android.hardware.Camera.Parameters) Rect(android.graphics.Rect) Size(android.util.Size) MeteringRectangle(android.hardware.camera2.params.MeteringRectangle) Range(android.util.Range) CameraCharacteristics(android.hardware.camera2.CameraCharacteristics) ArrayList(java.util.ArrayList) List(java.util.List) CaptureRequest(android.hardware.camera2.CaptureRequest) Camera(android.hardware.Camera) Location(android.location.Location)
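
As a hedged, producer-side sketch (not taken from the original page), this is the kind of camera2 CaptureRequest whose keys convertRequestMetadata() maps onto Camera.Parameters; the CameraDevice device and the Surface previewSurface are assumed to already exist.

CaptureRequest.Builder builder = device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
builder.addTarget(previewSurface);
// 3A region and AE controls that the legacy mapper translates into metering areas,
// preview FPS range, and exposure compensation.
builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] {
        new MeteringRectangle(new Rect(200, 200, 600, 600), /*meteringWeight*/ 1) });
builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(15, 30));
builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// JPEG controls handled by the jpeg.* section above.
builder.set(CaptureRequest.JPEG_QUALITY, (byte) 90);
CaptureRequest request = builder.build();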

Example 15 with MeteringRectangle

use of android.hardware.camera2.params.MeteringRectangle in project platform_frameworks_base by android.

the class Camera2StillCaptureTest method takePictureTestByCamera.

/**
     * Take a picture for a given set of 3A regions for a particular camera.
     * <p>
     * Before taking a still capture, this triggers an auto-focus run and locks AF,
     * then waits for AWB to converge and locks it, then triggers an AE precapture
     * metering sequence and waits for AE to converge. After the capture is received,
     * the capture result and image are validated. If {@code addAeTriggerCancel} is
     * true, a precapture trigger cancel is inserted between two adjacent triggers,
     * which should effectively cancel the first trigger.
     * </p>
     *
     * @param aeRegions AE regions for this capture
     * @param awbRegions AWB regions for this capture
     * @param afRegions AF regions for this capture
     * @param addAeTriggerCancel Whether an AE precapture trigger cancel is sent after the trigger.
     */
private void takePictureTestByCamera(MeteringRectangle[] aeRegions, MeteringRectangle[] awbRegions, MeteringRectangle[] afRegions, boolean addAeTriggerCancel) throws Exception {
    boolean hasFocuser = mStaticInfo.hasFocuser();
    Size maxStillSz = mOrderedStillSizes.get(0);
    Size maxPreviewSz = mOrderedPreviewSizes.get(0);
    CaptureResult result;
    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    SimpleImageReaderListener imageListener = new SimpleImageReaderListener();
    CaptureRequest.Builder previewRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    CaptureRequest.Builder stillRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    prepareStillCaptureAndStartPreview(previewRequest, stillRequest, maxPreviewSz, maxStillSz, resultListener, imageListener);
    // Set AE mode to ON_AUTO_FLASH if flash is available.
    if (mStaticInfo.hasFlash()) {
        previewRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
        stillRequest.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
    }
    Camera2Focuser focuser = null;
    /**
         * Step 1: trigger an auto focus run, and wait for AF locked.
         */
    boolean canSetAfRegion = hasFocuser && (afRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AF_INDEX);
    if (hasFocuser) {
        SimpleAutoFocusListener afListener = new SimpleAutoFocusListener();
        focuser = new Camera2Focuser(mCamera, mSession, mPreviewSurface, afListener, mStaticInfo.getCharacteristics(), mHandler);
        if (canSetAfRegion) {
            stillRequest.set(CaptureRequest.CONTROL_AF_REGIONS, afRegions);
        }
        focuser.startAutoFocus(afRegions);
        afListener.waitForAutoFocusDone(WAIT_FOR_FOCUS_DONE_TIMEOUT_MS);
    }
    /**
         * The current AF mode must be reused for the other 3A repeating requests;
         * otherwise the new AF mode in the AE/AWB requests could differ from the
         * repeating requests being sent by the focuser and unlock AF too early.
         * Likewise, for the still capture the AF mode must match the one in the
         * current repeating request; otherwise the still capture itself would
         * trigger an AF mode change and the AF lock would be lost for this capture.
         */
    int currentAfMode = CaptureRequest.CONTROL_AF_MODE_OFF;
    if (hasFocuser) {
        currentAfMode = focuser.getCurrentAfMode();
    }
    previewRequest.set(CaptureRequest.CONTROL_AF_MODE, currentAfMode);
    stillRequest.set(CaptureRequest.CONTROL_AF_MODE, currentAfMode);
    /**
         * Step 2: AF is already locked, wait for AWB converged, then lock it.
         */
    resultListener = new SimpleCaptureCallback();
    boolean canSetAwbRegion = (awbRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AWB_INDEX);
    if (canSetAwbRegion) {
        previewRequest.set(CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
        stillRequest.set(CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
    }
    mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
    if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
        waitForResultValue(resultListener, CaptureResult.CONTROL_AWB_STATE, CaptureResult.CONTROL_AWB_STATE_CONVERGED, NUM_RESULTS_WAIT_TIMEOUT);
    } else {
        // LEGACY devices don't have AWB_STATE reported in results, so just wait
        waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
    }
    boolean canSetAwbLock = mStaticInfo.isAwbLockSupported();
    if (canSetAwbLock) {
        previewRequest.set(CaptureRequest.CONTROL_AWB_LOCK, true);
    }
    mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
    // Validate the next result immediately for region and mode.
    result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
    mCollector.expectEquals("AWB mode in result and request should be same", previewRequest.get(CaptureRequest.CONTROL_AWB_MODE), result.get(CaptureResult.CONTROL_AWB_MODE));
    if (canSetAwbRegion) {
        MeteringRectangle[] resultAwbRegions = getValueNotNull(result, CaptureResult.CONTROL_AWB_REGIONS);
        mCollector.expectEquals("AWB regions in result and request should be same", awbRegions, resultAwbRegions);
    }
    /**
         * Step 3: trigger an AE precapture metering sequence and wait for AE converged.
         */
    resultListener = new SimpleCaptureCallback();
    boolean canSetAeRegion = (aeRegions != null) && isRegionsSupportedFor3A(MAX_REGIONS_AE_INDEX);
    if (canSetAeRegion) {
        previewRequest.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
        stillRequest.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
    }
    mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
    previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
    mSession.capture(previewRequest.build(), resultListener, mHandler);
    if (addAeTriggerCancel) {
        // Cancel the current precapture trigger, then send another trigger.
        // The camera device should behave as if the first trigger is not sent.
        // Wait one request to make the trigger start doing something before cancel.
        waitForNumResults(resultListener, /*numResultsWait*/
        1);
        previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL);
        mSession.capture(previewRequest.build(), resultListener, mHandler);
        waitForResultValue(resultListener, CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
        // Issue another trigger
        previewRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
        mSession.capture(previewRequest.build(), resultListener, mHandler);
    }
    waitForAeStable(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
    // Validate the next result immediately for region and mode.
    result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
    mCollector.expectEquals("AE mode in result and request should be same", previewRequest.get(CaptureRequest.CONTROL_AE_MODE), result.get(CaptureResult.CONTROL_AE_MODE));
    if (canSetAeRegion) {
        MeteringRectangle[] resultAeRegions = getValueNotNull(result, CaptureResult.CONTROL_AE_REGIONS);
        mCollector.expectMeteringRegionsAreSimilar("AE regions in result and request should be similar", aeRegions, resultAeRegions, METERING_REGION_ERROR_PERCENT_DELTA);
    }
    /**
         * Step 4: take a picture when all 3A are in good state.
         */
    resultListener = new SimpleCaptureCallback();
    CaptureRequest request = stillRequest.build();
    mSession.capture(request, resultListener, mHandler);
    // Validate the next result immediately for region and mode.
    result = resultListener.getCaptureResultForRequest(request, WAIT_FOR_RESULT_TIMEOUT_MS);
    mCollector.expectEquals("AF mode in result and request should be same", stillRequest.get(CaptureRequest.CONTROL_AF_MODE), result.get(CaptureResult.CONTROL_AF_MODE));
    if (canSetAfRegion) {
        MeteringRectangle[] resultAfRegions = getValueNotNull(result, CaptureResult.CONTROL_AF_REGIONS);
        mCollector.expectMeteringRegionsAreSimilar("AF regions in result and request should be similar", afRegions, resultAfRegions, METERING_REGION_ERROR_PERCENT_DELTA);
    }
    if (hasFocuser) {
        // Unlock auto focus.
        focuser.cancelAutoFocus();
    }
    // validate image
    Image image = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
    validateJpegCapture(image, maxStillSz);
    // Free image resources
    image.close();
    stopPreview();
}
Also used : Size(android.util.Size) CaptureResult(android.hardware.camera2.CaptureResult) MeteringRectangle(android.hardware.camera2.params.MeteringRectangle) SimpleImageReaderListener(com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageReaderListener) Camera2Focuser(com.android.mediaframeworktest.helpers.Camera2Focuser) CameraTestUtils.basicValidateJpegImage(com.android.mediaframeworktest.helpers.CameraTestUtils.basicValidateJpegImage) CameraTestUtils.getDataFromImage(com.android.mediaframeworktest.helpers.CameraTestUtils.getDataFromImage) Image(android.media.Image) Point(android.graphics.Point) CaptureRequest(android.hardware.camera2.CaptureRequest) SimpleCaptureCallback(com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback)
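
A hypothetical sketch of how the 3A region arrays for this test might be built: a single region covering the center quarter of the active array (in active-array coordinates) with weight 1, reused for AE, AWB, and AF. The mStaticInfo field mirrors the one used in the test above.

Rect activeArray = mStaticInfo.getCharacteristics()
        .get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
int w = activeArray.width();
int h = activeArray.height();
// Center quarter of the active array, metering weight 1.
MeteringRectangle[] regions = new MeteringRectangle[] {
        new MeteringRectangle(new Rect(w / 4, h / 4, 3 * w / 4, 3 * h / 4), /*meteringWeight*/ 1) };
takePictureTestByCamera(regions, regions, regions, /*addAeTriggerCancel*/ false);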

Aggregations

MeteringRectangle (android.hardware.camera2.params.MeteringRectangle)36 Rect (android.graphics.Rect)16 Size (android.util.Size)15 ArrayList (java.util.ArrayList)15 Camera (android.hardware.Camera)10 CaptureRequest (android.hardware.camera2.CaptureRequest)10 Range (android.util.Range)10 Point (android.graphics.Point)5 Parameters (android.hardware.Camera.Parameters)5 CameraCharacteristics (android.hardware.camera2.CameraCharacteristics)5 CaptureResult (android.hardware.camera2.CaptureResult)5 CameraMetadataNative (android.hardware.camera2.impl.CameraMetadataNative)5 WeightedRectangle (android.hardware.camera2.legacy.ParameterUtils.WeightedRectangle)5 Location (android.location.Location)5 Image (android.media.Image)5 SmallTest (android.test.suitebuilder.annotation.SmallTest)5 Camera2Focuser (com.android.mediaframeworktest.helpers.Camera2Focuser)5 SimpleCaptureCallback (com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleCaptureCallback)5 SimpleImageReaderListener (com.android.mediaframeworktest.helpers.CameraTestUtils.SimpleImageReaderListener)5 CameraTestUtils.basicValidateJpegImage (com.android.mediaframeworktest.helpers.CameraTestUtils.basicValidateJpegImage)5