
Example 51 with CameraCharacteristics

Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by AOSPA.

The class CameraDeviceUserShim, method connectBinderShim:

public static CameraDeviceUserShim connectBinderShim(ICameraDeviceCallbacks callbacks, int cameraId) {
    if (DEBUG) {
        Log.d(TAG, "Opening shim Camera device");
    }
    /*
     * Put the camera open on a separate thread with its own looper; otherwise
     * if the main thread is used then the callbacks might never get delivered
     * (e.g. in CTS, which runs its own default looper only after tests)
     */
    CameraLooper init = new CameraLooper(cameraId);
    CameraCallbackThread threadCallbacks = new CameraCallbackThread(callbacks);
    // TODO: Make this async instead of blocking
    int initErrors = init.waitForOpen(OPEN_CAMERA_TIMEOUT_MS);
    Camera legacyCamera = init.getCamera();
    // Check for errors from old HAL initialization
    LegacyExceptionUtils.throwOnServiceError(initErrors);
    // Disable shutter sounds (this will work unconditionally) for api2 clients
    legacyCamera.disableShutterSound();
    CameraInfo info = new CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    Camera.Parameters legacyParameters = null;
    try {
        legacyParameters = legacyCamera.getParameters();
    } catch (RuntimeException e) {
        throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, "Unable to get initial parameters: " + e.getMessage());
    }
    CameraCharacteristics characteristics = LegacyMetadataMapper.createCharacteristics(legacyParameters, info);
    LegacyCameraDevice device = new LegacyCameraDevice(cameraId, legacyCamera, characteristics, threadCallbacks);
    return new CameraDeviceUserShim(cameraId, device, characteristics, init, threadCallbacks);
}
Also used: ServiceSpecificException (android.os.ServiceSpecificException), CameraCharacteristics (android.hardware.camera2.CameraCharacteristics), Camera (android.hardware.Camera), CameraInfo (android.hardware.Camera.CameraInfo)
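Example 51 builds the CameraCharacteristics inside the framework's legacy shim; applications never call connectBinderShim themselves. For orientation, here is a minimal public-API sketch (the class and method names are my own, not part of the framework) that checks whether a given camera id is served by this LEGACY path, by reading INFO_SUPPORTED_HARDWARE_LEVEL from the CameraCharacteristics the shim ultimately backs:

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;

public class LegacyLevelCheck {
    // Hypothetical helper: returns true if the camera reports the LEGACY hardware
    // level, i.e. it is backed by the API1 shim shown in Example 51.
    public static boolean isLegacyDevice(Context context, String cameraId)
            throws CameraAccessException {
        CameraManager manager =
                (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        CameraCharacteristics c = manager.getCameraCharacteristics(cameraId);
        Integer level = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
        return level != null
                && level == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
    }
}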

Example 52 with CameraCharacteristics

Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by AOSPA.

The class LegacyMetadataMapper, method createCharacteristics:

/**
 * Create characteristics for a legacy device by mapping the {@code parameters}
 * and {@code info}
 *
 * @param parameters A string parseable by {@link Camera.Parameters#unflatten}
 * @param info Camera info with camera facing direction and angle of orientation
 * @return static camera characteristics for a camera device
 *
 * @throws NullPointerException if any of the args were {@code null}
 */
public static CameraCharacteristics createCharacteristics(String parameters, android.hardware.CameraInfo info) {
    checkNotNull(parameters, "parameters must not be null");
    checkNotNull(info, "info must not be null");
    checkNotNull(info.info, "info.info must not be null");
    CameraMetadataNative m = new CameraMetadataNative();
    mapCharacteristicsFromInfo(m, info.info);
    Camera.Parameters params = Camera.getEmptyParameters();
    params.unflatten(parameters);
    mapCharacteristicsFromParameters(m, params);
    if (DEBUG) {
        Log.v(TAG, "createCharacteristics metadata:");
        Log.v(TAG, "--------------------------------------------------- (start)");
        m.dumpToLog();
        Log.v(TAG, "--------------------------------------------------- (end)");
    }
    return new CameraCharacteristics(m);
}
Also used: Parameters (android.hardware.Camera.Parameters), CameraMetadataNative (android.hardware.camera2.impl.CameraMetadataNative), CameraCharacteristics (android.hardware.camera2.CameraCharacteristics), Camera (android.hardware.Camera)
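The heavy lifting in createCharacteristics happens in mapCharacteristicsFromInfo and mapCharacteristicsFromParameters, which are not shown here. As a rough illustration of the kind of translation involved, the sketch below mirrors the facing/orientation portion of that mapping using only public types (the helper class is hypothetical; the real mapper writes into CameraMetadataNative, a hidden API):

import android.hardware.Camera;
import android.hardware.camera2.CameraMetadata;

public class FacingMapSketch {
    // Assumed mapping: API1 CameraInfo.facing corresponds to camera2 LENS_FACING.
    public static int toLensFacing(Camera.CameraInfo legacyInfo) {
        return (legacyInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
                ? CameraMetadata.LENS_FACING_FRONT
                : CameraMetadata.LENS_FACING_BACK;
    }

    // Assumed mapping: API1 CameraInfo.orientation carries over to SENSOR_ORIENTATION
    // unchanged (clockwise degrees: 0, 90, 180 or 270).
    public static int toSensorOrientation(Camera.CameraInfo legacyInfo) {
        return legacyInfo.orientation;
    }
}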

Example 53 with CameraCharacteristics

Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by AOSPA.

The class LegacyMetadataMapper, method createRequestTemplate:

/**
 * Create a request template
 *
 * @param c a non-{@code null} camera characteristics for this camera
 * @param templateId a non-negative template ID
 *
 * @return a non-{@code null} request template
 *
 * @throws IllegalArgumentException if {@code templateId} was invalid
 *
 * @see android.hardware.camera2.CameraDevice#TEMPLATE_MANUAL
 */
public static CameraMetadataNative createRequestTemplate(CameraCharacteristics c, int templateId) {
    if (!ArrayUtils.contains(sAllowedTemplates, templateId)) {
        throw new IllegalArgumentException("templateId out of range");
    }
    CameraMetadataNative m = new CameraMetadataNative();
    /*
     * NOTE: If adding new code here and it needs to query the static info,
     * query the camera characteristics, so we can reuse this for api2 code later
     * to create our own templates in the framework
     */
    /*
     * control.*
     */
    // control.awbMode
    m.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO);
    // AWB is always unconditionally available in API1 devices
    // control.aeAntibandingMode
    m.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, CONTROL_AE_ANTIBANDING_MODE_AUTO);
    // control.aeExposureCompensation
    m.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
    // control.aeLock
    m.set(CaptureRequest.CONTROL_AE_LOCK, false);
    // control.aePrecaptureTrigger
    m.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
    // control.afTrigger
    m.set(CaptureRequest.CONTROL_AF_TRIGGER, CONTROL_AF_TRIGGER_IDLE);
    // control.awbMode
    m.set(CaptureRequest.CONTROL_AWB_MODE, CONTROL_AWB_MODE_AUTO);
    // control.awbLock
    m.set(CaptureRequest.CONTROL_AWB_LOCK, false);
    // control.aeRegions, control.awbRegions, control.afRegions
    {
        Rect activeArray = c.get(SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        MeteringRectangle[] activeRegions = new MeteringRectangle[] {
                new MeteringRectangle(
                        /*x*/ 0, /*y*/ 0,
                        /*width*/ activeArray.width() - 1,
                        /*height*/ activeArray.height() - 1,
                        /*weight*/ 0)
        };
        m.set(CaptureRequest.CONTROL_AE_REGIONS, activeRegions);
        m.set(CaptureRequest.CONTROL_AWB_REGIONS, activeRegions);
        m.set(CaptureRequest.CONTROL_AF_REGIONS, activeRegions);
    }
    // control.captureIntent
    {
        int captureIntent;
        switch(templateId) {
            case CameraDevice.TEMPLATE_PREVIEW:
                captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
                break;
            case CameraDevice.TEMPLATE_STILL_CAPTURE:
                captureIntent = CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
                break;
            case CameraDevice.TEMPLATE_RECORD:
                captureIntent = CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
                break;
            default:
                // Can't get anything else since it's guarded by the IAE check
                throw new AssertionError("Impossible; keep in sync with sAllowedTemplates");
        }
        m.set(CaptureRequest.CONTROL_CAPTURE_INTENT, captureIntent);
    }
    // control.aeMode
    m.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
    // AE is always unconditionally available in API1 devices
    // control.mode
    m.set(CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
    // control.afMode
    {
        Float minimumFocusDistance = c.get(LENS_INFO_MINIMUM_FOCUS_DISTANCE);
        int afMode;
        if (minimumFocusDistance != null && minimumFocusDistance == LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS) {
            // Cannot control auto-focus with fixed-focus cameras
            afMode = CameraMetadata.CONTROL_AF_MODE_OFF;
        } else {
            // If a minimum focus distance is reported; the camera must have AF
            afMode = CameraMetadata.CONTROL_AF_MODE_AUTO;
            if (templateId == CameraDevice.TEMPLATE_RECORD || templateId == CameraDevice.TEMPLATE_VIDEO_SNAPSHOT) {
                if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES), CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
                    afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                }
            } else if (templateId == CameraDevice.TEMPLATE_PREVIEW || templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
                if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES), CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
                    afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                }
            }
        }
        if (DEBUG) {
            Log.v(TAG, "createRequestTemplate (templateId=" + templateId + ")," + " afMode=" + afMode + ", minimumFocusDistance=" + minimumFocusDistance);
        }
        m.set(CaptureRequest.CONTROL_AF_MODE, afMode);
    }
    {
        // control.aeTargetFpsRange
        Range<Integer>[] availableFpsRange = c.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
        // Pick FPS range with highest max value, tiebreak on higher min value
        Range<Integer> bestRange = availableFpsRange[0];
        for (Range<Integer> r : availableFpsRange) {
            if (bestRange.getUpper() < r.getUpper()) {
                bestRange = r;
            } else if (bestRange.getUpper() == r.getUpper() && bestRange.getLower() < r.getLower()) {
                bestRange = r;
            }
        }
        m.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, bestRange);
    }
    // control.sceneMode -- DISABLED is always available
    m.set(CaptureRequest.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
    /*
     * statistics.*
     */
    // statistics.faceDetectMode
    m.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, STATISTICS_FACE_DETECT_MODE_OFF);
    /*
     * flash.*
     */
    // flash.mode
    m.set(CaptureRequest.FLASH_MODE, FLASH_MODE_OFF);
    /*
     * noiseReduction.*
     */
    if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
        m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_HIGH_QUALITY);
    } else {
        m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_FAST);
    }
    /*
     * colorCorrection.*
     */
    if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
        m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY);
    } else {
        m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, COLOR_CORRECTION_ABERRATION_MODE_FAST);
    }
    /*
     * lens.*
     */
    // lens.focalLength
    m.set(CaptureRequest.LENS_FOCAL_LENGTH, c.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)[0]);
    /*
     * jpeg.*
     */
    // jpeg.thumbnailSize - set smallest non-zero size if possible
    Size[] sizes = c.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
    m.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, (sizes.length > 1) ? sizes[1] : sizes[0]);
    // TODO: map other request template values
    return m;
}
Also used: Rect (android.graphics.Rect), Size (android.util.Size), CameraMetadataNative (android.hardware.camera2.impl.CameraMetadataNative), MeteringRectangle (android.hardware.camera2.params.MeteringRectangle), Range (android.util.Range)
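createRequestTemplate seeds the per-template defaults that a LEGACY device hands back to clients. From an application's point of view those defaults surface through CameraDevice.createCaptureRequest; a minimal sketch (class and method names are mine) that builds a preview request on top of the TEMPLATE_PREVIEW defaults:

import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.view.Surface;

public class TemplateRequestSketch {
    // Builds a preview request from the TEMPLATE_PREVIEW defaults and targets the
    // given surface. Overriding CONTROL_AF_MODE is optional: the template has
    // already chosen an AF mode, as Example 53 shows.
    public static CaptureRequest buildPreviewRequest(CameraDevice device, Surface previewSurface)
            throws CameraAccessException {
        CaptureRequest.Builder builder =
                device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        builder.addTarget(previewSurface);
        builder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        return builder.build();
    }
}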

Example 54 with CameraCharacteristics

Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by AOSPA.

The class Camera2SurfaceViewTestCase, method openDevice:

/**
 * Open a camera device and get the StaticMetadata for a given camera id.
 *
 * @param cameraId The id of the camera device to be opened.
 */
protected void openDevice(String cameraId) throws Exception {
    mCamera = CameraTestUtils.openCamera(mCameraManager, cameraId, mCameraListener, mHandler);
    mCollector.setCameraId(cameraId);
    CameraCharacteristics properties = mCameraManager.getCameraCharacteristics(cameraId);
    mStaticInfo = new StaticMetadata(properties, CheckLevel.ASSERT, /*collector*/ null);
    StreamConfigurationMap configMap = properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    mSupportRAW10 = configMap.isOutputSupportedFor(ImageFormat.RAW10);
    if (mStaticInfo.isColorOutputSupported()) {
        mOrderedPreviewSizes = getSupportedPreviewSizes(cameraId, mCameraManager, getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
        mOrderedVideoSizes = getSupportedVideoSizes(cameraId, mCameraManager, PREVIEW_SIZE_BOUND);
        mOrderedStillSizes = getSupportedStillSizes(cameraId, mCameraManager, null);
        if (mSupportRAW10) {
            mOrderedRAW10Sizes = getSortedSizesForFormat(cameraId, mCameraManager, ImageFormat.RAW10, null);
        }
        mOrderedYUV420888Sizes = getSortedSizesForFormat(cameraId, mCameraManager, ImageFormat.YUV_420_888, null);
        // Use ImageFormat.YUV_420_888 for now.
        // TODO: need to figure out which format preview uses on the public API side.
        mMinPreviewFrameDurationMap = mStaticInfo.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.YUV_420_888);
    }
}
Also used: CameraCharacteristics (android.hardware.camera2.CameraCharacteristics), StreamConfigurationMap (android.hardware.camera2.params.StreamConfigurationMap), StaticMetadata (com.android.mediaframeworktest.helpers.StaticMetadata)
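openDevice reads SCALER_STREAM_CONFIGURATION_MAP from the characteristics and uses isOutputSupportedFor to decide whether RAW10 streams are worth testing. The same check works outside the test harness with public API only; a small sketch (helper name is mine):

import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;

public class FormatSupportSketch {
    // Returns true if the camera advertises RAW10 output, mirroring the
    // mSupportRAW10 check in Example 54.
    public static boolean supportsRaw10(CameraManager manager, String cameraId)
            throws CameraAccessException {
        CameraCharacteristics c = manager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap map =
                c.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        return map != null && map.isOutputSupportedFor(ImageFormat.RAW10);
    }
}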

Example 55 with CameraCharacteristics

Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by AOSPA.

The class CameraTestUtils, method getSupportedSizeForClass:

/**
 * Get the available output sizes for the given class.
 */
public static Size[] getSupportedSizeForClass(Class klass, String cameraId, CameraManager cameraManager) throws CameraAccessException {
    CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
    assertNotNull("Can't get camera characteristics!", properties);
    if (VERBOSE) {
        Log.v(TAG, "get camera characteristics for camera: " + cameraId);
    }
    StreamConfigurationMap configMap = properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size[] availableSizes = configMap.getOutputSizes(klass);
    assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for class: " + klass);
    Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(ImageFormat.PRIVATE);
    if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
        Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
        System.arraycopy(availableSizes, 0, allSizes, 0, availableSizes.length);
        System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length, highResAvailableSizes.length);
        availableSizes = allSizes;
    }
    if (VERBOSE)
        Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
    return availableSizes;
}
Also used: Size (android.util.Size), CameraCharacteristics (android.hardware.camera2.CameraCharacteristics), StreamConfigurationMap (android.hardware.camera2.params.StreamConfigurationMap)
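getSupportedSizeForClass wraps StreamConfigurationMap.getOutputSizes(Class) and, where available, the high-resolution output sizes. Stripped of the test assertions, the class-based lookup looks like the sketch below for SurfaceTexture targets (helper name is mine):

import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Size;

public class OutputSizeSketch {
    // Lists the output sizes a SurfaceTexture target may use for this camera.
    public static Size[] previewSizes(CameraManager manager, String cameraId)
            throws CameraAccessException {
        CameraCharacteristics c = manager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap map =
                c.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        return (map == null) ? new Size[0] : map.getOutputSizes(SurfaceTexture.class);
    }
}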

Aggregations

CameraCharacteristics (android.hardware.camera2.CameraCharacteristics): 61
Size (android.util.Size): 31
Camera (android.hardware.Camera): 25
Rect (android.graphics.Rect): 20
CameraMetadataNative (android.hardware.camera2.impl.CameraMetadataNative): 20
CaptureRequest (android.hardware.camera2.CaptureRequest): 16
StreamConfigurationMap (android.hardware.camera2.params.StreamConfigurationMap): 16
Parameters (android.hardware.Camera.Parameters): 15
CameraManager (android.hardware.camera2.CameraManager): 12
ZoomData (android.hardware.camera2.legacy.ParameterUtils.ZoomData): 10
MeteringRectangle (android.hardware.camera2.params.MeteringRectangle): 10
ServiceSpecificException (android.os.ServiceSpecificException): 10
Range (android.util.Range): 10
ArrayList (java.util.ArrayList): 10
CameraAccessException (android.hardware.camera2.CameraAccessException): 8
NonNull (android.annotation.NonNull): 5
CameraInfo (android.hardware.Camera.CameraInfo): 5
CameraInfo (android.hardware.CameraInfo): 5
ICameraService (android.hardware.ICameraService): 5
Face (android.hardware.camera2.params.Face): 5