Example 66 with CameraCharacteristics

Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by crdroidandroid.

Class CameraTestUtils, method getSupportedSizeForFormat.

/**
 * Get the available output sizes for the user-defined {@code format}.
 *
 * <p>Note that implementation-defined/hidden formats are not supported.</p>
 */
public static Size[] getSupportedSizeForFormat(int format, String cameraId, CameraManager cameraManager) throws CameraAccessException {
    CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
    assertNotNull("Can't get camera characteristics!", properties);
    if (VERBOSE) {
        Log.v(TAG, "get camera characteristics for camera: " + cameraId);
    }
    StreamConfigurationMap configMap = properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size[] availableSizes = configMap.getOutputSizes(format);
    assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: " + format);
    Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(format);
    if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
        Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
        System.arraycopy(availableSizes, 0, allSizes, 0, availableSizes.length);
        System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length, highResAvailableSizes.length);
        availableSizes = allSizes;
    }
    if (VERBOSE)
        Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
    return availableSizes;
}
Also used: Size (android.util.Size), CameraCharacteristics (android.hardware.camera2.CameraCharacteristics), StreamConfigurationMap (android.hardware.camera2.params.StreamConfigurationMap)
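
A minimal usage sketch for the helper above (the surrounding class and log tag are illustrative, not part of the project): it enumerates the JPEG output sizes reported for every camera on the device, assuming CameraTestUtils is visible on the classpath.

import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraManager;
import android.util.Log;
import android.util.Size;

public final class JpegSizeLister {

    private static final String TAG = "JpegSizeLister";

    /** Logs every supported JPEG output size for every camera on the device. */
    public static void listJpegSizes(Context context) throws CameraAccessException {
        CameraManager manager =
                (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        for (String cameraId : manager.getCameraIdList()) {
            // getSupportedSizeForFormat is the test helper shown in Example 66.
            Size[] sizes = CameraTestUtils.getSupportedSizeForFormat(
                    ImageFormat.JPEG, cameraId, manager);
            for (Size size : sizes) {
                Log.d(TAG, "Camera " + cameraId + " supports JPEG " + size);
            }
        }
    }
}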

Example 67 with CameraCharacteristics

Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by crdroidandroid.

Class CameraUtils, method isLegacyHAL.

/**
 * Returns {@code true} if this device only supports {@code LEGACY} mode operation in the
 * Camera2 API for the given camera ID.
 *
 * @param context {@link Context} to access the {@link CameraManager} in.
 * @param cameraId the ID of the camera device to check.
 * @return {@code true} if this device only supports {@code LEGACY} mode.
 */
public static boolean isLegacyHAL(Context context, int cameraId) throws Exception {
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    CameraCharacteristics characteristics = manager.getCameraCharacteristics(Integer.toString(cameraId));
    return characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
}
Also used: CameraCharacteristics (android.hardware.camera2.CameraCharacteristics), CameraManager (android.hardware.camera2.CameraManager)
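
A hedged usage sketch (the class and method names below are hypothetical): the helper above can gate whether an app uses the camera2 pipeline or falls back to the old android.hardware.Camera API on LEGACY-only devices.

import android.content.Context;
import android.util.Log;

public final class CameraApiSelector {

    private static final String TAG = "CameraApiSelector";

    /**
     * Returns true if the camera2 pipeline should be preferred for the given camera,
     * i.e. the device reports more than LEGACY-level support.
     */
    public static boolean shouldUseCamera2(Context context, int cameraId) {
        try {
            // CameraUtils.isLegacyHAL is the helper shown in Example 67.
            return !CameraUtils.isLegacyHAL(context, cameraId);
        } catch (Exception e) {
            Log.w(TAG, "Could not query the supported hardware level, assuming LEGACY", e);
            return false;
        }
    }
}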

Example 68 with CameraCharacteristics

Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by crdroidandroid.

Class LegacyMetadataMapper, method createRequestTemplate.

/**
 * Create a request template
 *
 * @param c a non-{@code null} camera characteristics for this camera
 * @param templateId a non-negative template ID
 *
 * @return a non-{@code null} request template
 *
 * @throws IllegalArgumentException if {@code templateId} was invalid
 *
 * @see android.hardware.camera2.CameraDevice#TEMPLATE_MANUAL
 */
public static CameraMetadataNative createRequestTemplate(CameraCharacteristics c, int templateId) {
    if (!ArrayUtils.contains(sAllowedTemplates, templateId)) {
        throw new IllegalArgumentException("templateId out of range");
    }
    CameraMetadataNative m = new CameraMetadataNative();
    /*
         * NOTE: If adding new code here and it needs to query the static info,
         * query the camera characteristics, so we can reuse this for api2 code later
         * to create our own templates in the framework
         */
    /*
         * control.*
         */
    // control.awbMode
    m.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO);
    // AWB is always unconditionally available in API1 devices
    // control.aeAntibandingMode
    m.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, CONTROL_AE_ANTIBANDING_MODE_AUTO);
    // control.aeExposureCompensation
    m.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
    // control.aeLock
    m.set(CaptureRequest.CONTROL_AE_LOCK, false);
    // control.aePrecaptureTrigger
    m.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
    // control.afTrigger
    m.set(CaptureRequest.CONTROL_AF_TRIGGER, CONTROL_AF_TRIGGER_IDLE);
    // control.awbMode
    m.set(CaptureRequest.CONTROL_AWB_MODE, CONTROL_AWB_MODE_AUTO);
    // control.awbLock
    m.set(CaptureRequest.CONTROL_AWB_LOCK, false);
    // control.aeRegions, control.awbRegions, control.afRegions
    {
        Rect activeArray = c.get(SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        MeteringRectangle[] activeRegions = new MeteringRectangle[] {
                new MeteringRectangle(/*x*/ 0, /*y*/ 0,
                        /*width*/ activeArray.width() - 1,
                        /*height*/ activeArray.height() - 1,
                        /*weight*/ 0)
        };
        m.set(CaptureRequest.CONTROL_AE_REGIONS, activeRegions);
        m.set(CaptureRequest.CONTROL_AWB_REGIONS, activeRegions);
        m.set(CaptureRequest.CONTROL_AF_REGIONS, activeRegions);
    }
    // control.captureIntent
    {
        int captureIntent;
        switch(templateId) {
            case CameraDevice.TEMPLATE_PREVIEW:
                captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
                break;
            case CameraDevice.TEMPLATE_STILL_CAPTURE:
                captureIntent = CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
                break;
            case CameraDevice.TEMPLATE_RECORD:
                captureIntent = CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
                break;
            default:
                // Can't get anything else since it's guarded by the IAE check
                throw new AssertionError("Impossible; keep in sync with sAllowedTemplates");
        }
        m.set(CaptureRequest.CONTROL_CAPTURE_INTENT, captureIntent);
    }
    // control.aeMode
    m.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
    // AE is always unconditionally available in API1 devices
    // control.mode
    m.set(CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
    // control.afMode
    {
        Float minimumFocusDistance = c.get(LENS_INFO_MINIMUM_FOCUS_DISTANCE);
        int afMode;
        if (minimumFocusDistance != null && minimumFocusDistance == LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS) {
            // Cannot control auto-focus with fixed-focus cameras
            afMode = CameraMetadata.CONTROL_AF_MODE_OFF;
        } else {
            // If a minimum focus distance is reported; the camera must have AF
            afMode = CameraMetadata.CONTROL_AF_MODE_AUTO;
            if (templateId == CameraDevice.TEMPLATE_RECORD || templateId == CameraDevice.TEMPLATE_VIDEO_SNAPSHOT) {
                if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES), CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
                    afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                }
            } else if (templateId == CameraDevice.TEMPLATE_PREVIEW || templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
                if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES), CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
                    afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                }
            }
        }
        if (DEBUG) {
            Log.v(TAG, "createRequestTemplate (templateId=" + templateId + ")," + " afMode=" + afMode + ", minimumFocusDistance=" + minimumFocusDistance);
        }
        m.set(CaptureRequest.CONTROL_AF_MODE, afMode);
    }
    {
        // control.aeTargetFpsRange
        Range<Integer>[] availableFpsRange = c.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
        // Pick FPS range with highest max value, tiebreak on higher min value
        Range<Integer> bestRange = availableFpsRange[0];
        for (Range<Integer> r : availableFpsRange) {
            if (bestRange.getUpper() < r.getUpper()) {
                bestRange = r;
            } else if (bestRange.getUpper() == r.getUpper() && bestRange.getLower() < r.getLower()) {
                bestRange = r;
            }
        }
        m.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, bestRange);
    }
    // control.sceneMode -- DISABLED is always available
    m.set(CaptureRequest.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
    /*
         * statistics.*
         */
    // statistics.faceDetectMode
    m.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, STATISTICS_FACE_DETECT_MODE_OFF);
    /*
         * flash.*
         */
    // flash.mode
    m.set(CaptureRequest.FLASH_MODE, FLASH_MODE_OFF);
    /*
         * noiseReduction.*
         */
    if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
        m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_HIGH_QUALITY);
    } else {
        m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_FAST);
    }
    /*
        * colorCorrection.*
        */
    if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
        m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY);
    } else {
        m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, COLOR_CORRECTION_ABERRATION_MODE_FAST);
    }
    /*
         * lens.*
         */
    // lens.focalLength
    m.set(CaptureRequest.LENS_FOCAL_LENGTH, c.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)[0]);
    /*
         * jpeg.*
         */
    // jpeg.thumbnailSize - set smallest non-zero size if possible
    Size[] sizes = c.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
    m.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, (sizes.length > 1) ? sizes[1] : sizes[0]);
    // TODO: map other request template values
    return m;
}
Also used: Rect (android.graphics.Rect), Size (android.util.Size), CameraMetadataNative (android.hardware.camera2.impl.CameraMetadataNative), MeteringRectangle (android.hardware.camera2.params.MeteringRectangle), Range (android.util.Range)
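
The FPS-range selection in the template above (prefer the highest maximum, break ties on the higher minimum) can also be applied in application code against the public camera2 API. A sketch under that assumption follows; the class and method names are illustrative.

import android.hardware.camera2.CameraCharacteristics;
import android.util.Range;

public final class FpsRangePicker {

    /**
     * Picks an AE target FPS range using the same rule as the template mapper:
     * highest upper bound first, then the higher lower bound as a tiebreaker.
     */
    public static Range<Integer> pickBestFpsRange(CameraCharacteristics characteristics) {
        Range<Integer>[] ranges =
                characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
        if (ranges == null || ranges.length == 0) {
            return null; // the key is mandatory, but stay defensive in a sketch
        }
        Range<Integer> best = ranges[0];
        for (Range<Integer> r : ranges) {
            if (r.getUpper() > best.getUpper()
                    || (r.getUpper().equals(best.getUpper()) && r.getLower() > best.getLower())) {
                best = r;
            }
        }
        return best;
    }
}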

Example 69 with CameraCharacteristics

Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by crdroidandroid.

Class LegacyRequestMapper, method convertRequestMetadata.

/**
 * Set the legacy parameters using the {@link LegacyRequest legacy request}.
 *
 * <p>The legacy request's parameters are changed as a side effect of calling this
 * method.</p>
 *
 * @param legacyRequest a non-{@code null} legacy request
 */
public static void convertRequestMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    /*
         * scaler.cropRegion
         */
    ParameterUtils.ZoomData zoomData;
    {
        zoomData = ParameterUtils.convertScalerCropRegion(activeArray, request.get(SCALER_CROP_REGION), previewSize, params);
        if (params.isZoomSupported()) {
            params.setZoom(zoomData.zoomIndex);
        } else if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - zoom is not supported");
        }
    }
    /*
         * colorCorrection.*
         */
    // colorCorrection.aberrationMode
    {
        int aberrationMode = ParamsUtils.getOrDefault(request, COLOR_CORRECTION_ABERRATION_MODE,
                /*defaultValue*/ COLOR_CORRECTION_ABERRATION_MODE_FAST);
        if (aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_FAST && aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported " + "colorCorrection.aberrationMode = " + aberrationMode);
        }
    }
    /*
         * control.ae*
         */
    // control.aeAntibandingMode
    {
        String legacyMode;
        Integer antiBandingMode = request.get(CONTROL_AE_ANTIBANDING_MODE);
        if (antiBandingMode != null) {
            legacyMode = convertAeAntiBandingModeToLegacy(antiBandingMode);
        } else {
            legacyMode = ListUtils.listSelectFirstFrom(params.getSupportedAntibanding(), new String[] { Parameters.ANTIBANDING_AUTO, Parameters.ANTIBANDING_OFF, Parameters.ANTIBANDING_50HZ, Parameters.ANTIBANDING_60HZ });
        }
        if (legacyMode != null) {
            params.setAntibanding(legacyMode);
        }
    }
    /*
         * control.aeRegions, afRegions
         */
    {
        // aeRegions
        {
            // Use aeRegions if available, fall back to using awbRegions if present
            MeteringRectangle[] aeRegions = request.get(CONTROL_AE_REGIONS);
            if (request.get(CONTROL_AWB_REGIONS) != null) {
                Log.w(TAG, "convertRequestMetadata - control.awbRegions setting is not " + "supported, ignoring value");
            }
            int maxNumMeteringAreas = params.getMaxNumMeteringAreas();
            List<Camera.Area> meteringAreaList = convertMeteringRegionsToLegacy(
                    activeArray, zoomData, aeRegions, maxNumMeteringAreas, /*regionName*/ "AE");
            // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
            if (maxNumMeteringAreas > 0) {
                params.setMeteringAreas(meteringAreaList);
            }
        }
        // afRegions
        {
            MeteringRectangle[] afRegions = request.get(CONTROL_AF_REGIONS);
            int maxNumFocusAreas = params.getMaxNumFocusAreas();
            List<Camera.Area> focusAreaList = convertMeteringRegionsToLegacy(
                    activeArray, zoomData, afRegions, maxNumFocusAreas, /*regionName*/ "AF");
            // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
            if (maxNumFocusAreas > 0) {
                params.setFocusAreas(focusAreaList);
            }
        }
    }
    // control.aeTargetFpsRange
    Range<Integer> aeFpsRange = request.get(CONTROL_AE_TARGET_FPS_RANGE);
    if (aeFpsRange != null) {
        int[] legacyFps = convertAeFpsRangeToLegacy(aeFpsRange);
        int[] rangeToApply = null;
        for (int[] range : params.getSupportedPreviewFpsRange()) {
            // Round range up/down to integer FPS value
            int intRangeLow = (int) Math.floor(range[0] / 1000.0) * 1000;
            int intRangeHigh = (int) Math.ceil(range[1] / 1000.0) * 1000;
            if (legacyFps[0] == intRangeLow && legacyFps[1] == intRangeHigh) {
                rangeToApply = range;
                break;
            }
        }
        if (rangeToApply != null) {
            params.setPreviewFpsRange(rangeToApply[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], rangeToApply[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        } else {
            Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]");
        }
    }
    /*
         * control
         */
    // control.aeExposureCompensation
    {
        Range<Integer> compensationRange = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
        int compensation = ParamsUtils.getOrDefault(request, CONTROL_AE_EXPOSURE_COMPENSATION,
                /*defaultValue*/ 0);
        if (!compensationRange.contains(compensation)) {
            Log.w(TAG, "convertRequestMetadata - control.aeExposureCompensation " + "is out of range, ignoring value");
            compensation = 0;
        }
        params.setExposureCompensation(compensation);
    }
    // control.aeLock
    {
        Boolean aeLock = getIfSupported(request, CONTROL_AE_LOCK, /*defaultValue*/ false,
                params.isAutoExposureLockSupported(), /*allowedValue*/ false);
        if (aeLock != null) {
            params.setAutoExposureLock(aeLock);
        }
        if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - control.aeLock set to " + aeLock);
        }
    // TODO: Don't add control.aeLock to availableRequestKeys if it's not supported
    }
    // control.aeMode, flash.mode
    mapAeAndFlashMode(request, /*out*/ params);
    // control.afMode
    {
        int afMode = ParamsUtils.getOrDefault(request, CONTROL_AF_MODE,
                /*defaultValue*/ CONTROL_AF_MODE_OFF);
        String focusMode = LegacyMetadataMapper.convertAfModeToLegacy(afMode, params.getSupportedFocusModes());
        if (focusMode != null) {
            params.setFocusMode(focusMode);
        }
        if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - control.afMode " + afMode + " mapped to " + focusMode);
        }
    }
    // control.awbMode
    {
        Integer awbMode = getIfSupported(request, CONTROL_AWB_MODE, /*defaultValue*/ CONTROL_AWB_MODE_AUTO,
                params.getSupportedWhiteBalance() != null, /*allowedValue*/ CONTROL_AWB_MODE_AUTO);
        String whiteBalanceMode = null;
        if (awbMode != null) {
            // null iff AWB is not supported by camera1 api
            whiteBalanceMode = convertAwbModeToLegacy(awbMode);
            params.setWhiteBalance(whiteBalanceMode);
        }
        if (DEBUG) {
            Log.v(TAG, "convertRequestToMetadata - control.awbMode " + awbMode + " mapped to " + whiteBalanceMode);
        }
    }
    // control.awbLock
    {
        Boolean awbLock = getIfSupported(request, CONTROL_AWB_LOCK, /*defaultValue*/ false,
                params.isAutoWhiteBalanceLockSupported(), /*allowedValue*/ false);
        if (awbLock != null) {
            params.setAutoWhiteBalanceLock(awbLock);
        }
    // TODO: Don't add control.awbLock to availableRequestKeys if it's not supported
    }
    // control.captureIntent
    {
        int captureIntent = ParamsUtils.getOrDefault(request, CONTROL_CAPTURE_INTENT,
                /*defaultValue*/ CONTROL_CAPTURE_INTENT_PREVIEW);
        captureIntent = filterSupportedCaptureIntent(captureIntent);
        params.setRecordingHint(captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_RECORD || captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
    }
    // control.videoStabilizationMode
    {
        Integer stabMode = getIfSupported(request, CONTROL_VIDEO_STABILIZATION_MODE,
                /*defaultValue*/ CONTROL_VIDEO_STABILIZATION_MODE_OFF,
                params.isVideoStabilizationSupported(),
                /*allowedValue*/ CONTROL_VIDEO_STABILIZATION_MODE_OFF);
        if (stabMode != null) {
            params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON);
        }
    }
    // lens.focusDistance
    {
        boolean infinityFocusSupported = ListUtils.listContains(params.getSupportedFocusModes(), Parameters.FOCUS_MODE_INFINITY);
        Float focusDistance = getIfSupported(request, LENS_FOCUS_DISTANCE, /*defaultValue*/ 0f,
                infinityFocusSupported, /*allowedValue*/ 0f);
        if (focusDistance == null || focusDistance != 0f) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring android.lens.focusDistance " + infinityFocusSupported + ", only 0.0f is supported");
        }
    }
    // control.sceneMode, control.mode
    {
        if (params.getSupportedSceneModes() != null) {
            int controlMode = ParamsUtils.getOrDefault(request, CONTROL_MODE,
                    /*defaultValue*/ CONTROL_MODE_AUTO);
            String modeToSet;
            switch(controlMode) {
                case CONTROL_MODE_USE_SCENE_MODE:
                    {
                        int sceneMode = ParamsUtils.getOrDefault(request, CONTROL_SCENE_MODE,
                                /*defaultValue*/ CONTROL_SCENE_MODE_DISABLED);
                        String legacySceneMode = LegacyMetadataMapper.convertSceneModeToLegacy(sceneMode);
                        if (legacySceneMode != null) {
                            modeToSet = legacySceneMode;
                        } else {
                            modeToSet = Parameters.SCENE_MODE_AUTO;
                            Log.w(TAG, "Skipping unknown requested scene mode: " + sceneMode);
                        }
                        break;
                    }
                case CONTROL_MODE_AUTO:
                    {
                        modeToSet = Parameters.SCENE_MODE_AUTO;
                        break;
                    }
                default:
                    {
                        Log.w(TAG, "Control mode " + controlMode + " is unsupported, defaulting to AUTO");
                        modeToSet = Parameters.SCENE_MODE_AUTO;
                    }
            }
            params.setSceneMode(modeToSet);
        }
    }
    // control.effectMode
    {
        if (params.getSupportedColorEffects() != null) {
            int effectMode = ParamsUtils.getOrDefault(request, CONTROL_EFFECT_MODE,
                    /*defaultValue*/ CONTROL_EFFECT_MODE_OFF);
            String legacyEffectMode = LegacyMetadataMapper.convertEffectModeToLegacy(effectMode);
            if (legacyEffectMode != null) {
                params.setColorEffect(legacyEffectMode);
            } else {
                params.setColorEffect(Parameters.EFFECT_NONE);
                Log.w(TAG, "Skipping unknown requested effect mode: " + effectMode);
            }
        }
    }
    /*
         * sensor
         */
    // sensor.testPattern
    {
        int testPatternMode = ParamsUtils.getOrDefault(request, SENSOR_TEST_PATTERN_MODE,
                /*defaultValue*/ SENSOR_TEST_PATTERN_MODE_OFF);
        if (testPatternMode != SENSOR_TEST_PATTERN_MODE_OFF) {
            Log.w(TAG, "convertRequestToMetadata - ignoring sensor.testPatternMode " + testPatternMode + "; only OFF is supported");
        }
    }
    /*
         * jpeg.*
         */
    // jpeg.gpsLocation
    {
        Location location = request.get(JPEG_GPS_LOCATION);
        if (location != null) {
            if (checkForCompleteGpsData(location)) {
                params.setGpsAltitude(location.getAltitude());
                params.setGpsLatitude(location.getLatitude());
                params.setGpsLongitude(location.getLongitude());
                params.setGpsProcessingMethod(location.getProvider().toUpperCase());
                params.setGpsTimestamp(location.getTime());
            } else {
                Log.w(TAG, "Incomplete GPS parameters provided in location " + location);
            }
        } else {
            params.removeGpsData();
        }
    }
    // jpeg.orientation
    {
        Integer orientation = request.get(CaptureRequest.JPEG_ORIENTATION);
        params.setRotation(ParamsUtils.getOrDefault(request, JPEG_ORIENTATION, (orientation == null) ? 0 : orientation));
    }
    // jpeg.quality
    {
        params.setJpegQuality(0xFF & ParamsUtils.getOrDefault(request, JPEG_QUALITY, DEFAULT_JPEG_QUALITY));
    }
    // jpeg.thumbnailQuality
    {
        params.setJpegThumbnailQuality(0xFF & ParamsUtils.getOrDefault(request, JPEG_THUMBNAIL_QUALITY, DEFAULT_JPEG_QUALITY));
    }
    // jpeg.thumbnailSize
    {
        List<Camera.Size> sizes = params.getSupportedJpegThumbnailSizes();
        if (sizes != null && sizes.size() > 0) {
            Size s = request.get(JPEG_THUMBNAIL_SIZE);
            boolean invalidSize = (s == null) ? false : !ParameterUtils.containsSize(sizes, s.getWidth(), s.getHeight());
            if (invalidSize) {
                Log.w(TAG, "Invalid JPEG thumbnail size set " + s + ", skipping thumbnail...");
            }
            if (s == null || invalidSize) {
                // (0,0) = "no thumbnail" in Camera API 1
                params.setJpegThumbnailSize(/*width*/ 0, /*height*/ 0);
            } else {
                params.setJpegThumbnailSize(s.getWidth(), s.getHeight());
            }
        }
    }
    /*
         * noiseReduction.*
         */
    // noiseReduction.mode
    {
        int mode = ParamsUtils.getOrDefault(request, NOISE_REDUCTION_MODE,
                /*defaultValue*/ NOISE_REDUCTION_MODE_FAST);
        if (mode != NOISE_REDUCTION_MODE_FAST && mode != NOISE_REDUCTION_MODE_HIGH_QUALITY) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported " + "noiseReduction.mode = " + mode);
        }
    }
}
Also used: Parameters (android.hardware.Camera.Parameters), Rect (android.graphics.Rect), Size (android.util.Size), MeteringRectangle (android.hardware.camera2.params.MeteringRectangle), Range (android.util.Range), CameraCharacteristics (android.hardware.camera2.CameraCharacteristics), ArrayList (java.util.ArrayList), List (java.util.List), CaptureRequest (android.hardware.camera2.CaptureRequest), Camera (android.hardware.Camera), Location (android.location.Location)
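
The aeTargetFpsRange block above matches a camera2 range (whole frames per second) against camera1 preview ranges (scaled by 1000). A standalone sketch of that matching follows; the 1000x scaling stands in for the private convertAeFpsRangeToLegacy helper, which is not shown in the snippet, so treat it as an assumption, and the class name is illustrative.

import android.hardware.Camera;
import android.util.Range;

import java.util.List;

public final class LegacyFpsMatcher {

    /**
     * Finds a supported camera1 preview FPS range that matches a camera2 AE target range,
     * or returns null if none matches. Camera1 preview ranges are expressed in fps * 1000.
     */
    public static int[] findMatchingPreviewFpsRange(
            Camera.Parameters params, Range<Integer> aeFpsRange) {
        int targetLow = aeFpsRange.getLower() * 1000;   // assumed camera2 -> camera1 scaling
        int targetHigh = aeFpsRange.getUpper() * 1000;
        List<int[]> supported = params.getSupportedPreviewFpsRange();
        for (int[] range : supported) {
            // Round the supported range outward to whole-fps boundaries, as the mapper does.
            int low = (int) Math.floor(range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] / 1000.0) * 1000;
            int high = (int) Math.ceil(range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] / 1000.0) * 1000;
            if (low == targetLow && high == targetHigh) {
                return range;
            }
        }
        return null;
    }
}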

Example 70 with CameraCharacteristics

Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by crdroidandroid.

Class LegacyFaceDetectMapper, method mapResultFaces.

/**
 * Update the {@code result} camera metadata map with the new value for the
 * {@code statistics.faces} and {@code statistics.faceDetectMode}.
 *
 * <p>Face detect callbacks are processed in the background, and each call to
 * {@link #mapResultFaces} will have the latest faces as reflected by the camera1 callbacks.</p>
 *
 * <p>If the scene mode was set to {@code FACE_PRIORITY} but face detection is disabled,
 * the camera will still run face detection in the background, but no faces will be reported
 * in the capture result.</p>
 *
 * @param result a non-{@code null} result
 * @param legacyRequest a non-{@code null} request (read-only)
 */
public void mapResultFaces(CameraMetadataNative result, LegacyRequest legacyRequest) {
    checkNotNull(result, "result must not be null");
    checkNotNull(legacyRequest, "legacyRequest must not be null");
    Camera.Face[] faces, previousFaces;
    int fdMode;
    boolean fdScenePriority;
    synchronized (mLock) {
        fdMode = mFaceDetectReporting ? STATISTICS_FACE_DETECT_MODE_SIMPLE : STATISTICS_FACE_DETECT_MODE_OFF;
        if (mFaceDetectReporting) {
            faces = mFaces;
        } else {
            faces = null;
        }
        fdScenePriority = mFaceDetectScenePriority;
        previousFaces = mFacesPrev;
        mFacesPrev = faces;
    }
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArray, request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
    List<Face> convertedFaces = new ArrayList<>();
    if (faces != null) {
        for (Camera.Face face : faces) {
            if (face != null) {
                convertedFaces.add(ParameterUtils.convertFaceFromLegacy(face, activeArray, zoomData));
            } else {
                Log.w(TAG, "mapResultFaces - read NULL face from camera1 device");
            }
        }
    }
    if (DEBUG && previousFaces != faces) {
        // Log only in verbose and IF the faces changed
        Log.v(TAG, "mapResultFaces - changed to " + ListUtils.listToString(convertedFaces));
    }
    result.set(CaptureResult.STATISTICS_FACES, convertedFaces.toArray(new Face[0]));
    result.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, fdMode);
    // Override scene mode with FACE_PRIORITY if the request was using FACE_PRIORITY
    if (fdScenePriority) {
        result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_FACE_PRIORITY);
    }
}
Also used: Rect (android.graphics.Rect), Size (android.util.Size), ArrayList (java.util.ArrayList), CameraCharacteristics (android.hardware.camera2.CameraCharacteristics), ZoomData (android.hardware.camera2.legacy.ParameterUtils.ZoomData), CaptureRequest (android.hardware.camera2.CaptureRequest), Camera (android.hardware.Camera), Face (android.hardware.camera2.params.Face)
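
On the application side, the values this mapper writes end up in the capture result and can be read through the public API. A brief sketch (the callback class name is illustrative):

import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.Face;
import android.util.Log;

public class FaceLoggingCaptureCallback extends CameraCaptureSession.CaptureCallback {

    private static final String TAG = "FaceLoggingCallback";

    @Override
    public void onCaptureCompleted(CameraCaptureSession session,
            CaptureRequest request, TotalCaptureResult result) {
        Integer mode = result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
        Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
        if (mode == null || mode == CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF || faces == null) {
            return; // face detection disabled or no face data in this result
        }
        for (Face face : faces) {
            Log.d(TAG, "Face at " + face.getBounds() + ", score=" + face.getScore());
        }
    }
}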

Aggregations

CameraCharacteristics (android.hardware.camera2.CameraCharacteristics): 61 usages
Size (android.util.Size): 31 usages
Camera (android.hardware.Camera): 25 usages
Rect (android.graphics.Rect): 20 usages
CameraMetadataNative (android.hardware.camera2.impl.CameraMetadataNative): 20 usages
CaptureRequest (android.hardware.camera2.CaptureRequest): 16 usages
StreamConfigurationMap (android.hardware.camera2.params.StreamConfigurationMap): 16 usages
Parameters (android.hardware.Camera.Parameters): 15 usages
CameraManager (android.hardware.camera2.CameraManager): 12 usages
ZoomData (android.hardware.camera2.legacy.ParameterUtils.ZoomData): 10 usages
MeteringRectangle (android.hardware.camera2.params.MeteringRectangle): 10 usages
ServiceSpecificException (android.os.ServiceSpecificException): 10 usages
Range (android.util.Range): 10 usages
ArrayList (java.util.ArrayList): 10 usages
CameraAccessException (android.hardware.camera2.CameraAccessException): 8 usages
NonNull (android.annotation.NonNull): 5 usages
CameraInfo (android.hardware.Camera.CameraInfo): 5 usages
CameraInfo (android.hardware.CameraInfo): 5 usages
ICameraService (android.hardware.ICameraService): 5 usages
Face (android.hardware.camera2.params.Face): 5 usages