Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by DirtyUnicorns.
The class CameraUtils, method isLegacyHAL.
/**
 * Returns {@code true} if this device only supports {@code LEGACY} mode operation in the
 * Camera2 API for the given camera ID.
 *
 * @param context {@link Context} to access the {@link CameraManager} in.
 * @param cameraId the ID of the camera device to check.
 * @return {@code true} if this device only supports {@code LEGACY} mode.
 */
public static boolean isLegacyHAL(Context context, int cameraId) throws Exception {
    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    CameraCharacteristics characteristics =
            manager.getCameraCharacteristics(Integer.toString(cameraId));
    return characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
            == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
}
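For context, a minimal caller sketch (not part of the project; the helper name and the use of the legacy camera-count API are assumptions):

// Hypothetical helper built on CameraUtils.isLegacyHAL(): true only if every
// camera on the device is limited to LEGACY-mode operation.
public static boolean allCamerasAreLegacy(Context context) throws Exception {
    int numCameras = android.hardware.Camera.getNumberOfCameras();
    for (int cameraId = 0; cameraId < numCameras; cameraId++) {
        if (!CameraUtils.isLegacyHAL(context, cameraId)) {
            return false;
        }
    }
    return numCameras > 0;
}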
Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by DirtyUnicorns.
The class PhoneWindowManager, method getCameraId.
private String getCameraId() throws CameraAccessException {
    String[] ids = mCameraManager.getCameraIdList();
    int length = ids.length;
    for (int i = 0; i < length; i += 1) {
        String id = ids[i];
        CameraCharacteristics c = mCameraManager.getCameraCharacteristics(id);
        Boolean flashAvailable = c.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
        Integer lensFacing = c.get(CameraCharacteristics.LENS_FACING);
        if (flashAvailable != null && flashAvailable
                && lensFacing != null && lensFacing == CameraCharacteristics.LENS_FACING_BACK) {
            return id;
        }
    }
    return null;
}
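A minimal usage sketch, assuming a torch-toggle caller; the toggleTorch name and the mTorchEnabled field are hypothetical, while CameraManager#setTorchMode is the public API such a caller would use:

// Hypothetical torch toggle using the id returned by getCameraId().
private void toggleTorch() {
    try {
        String cameraId = getCameraId();
        if (cameraId != null) {
            mTorchEnabled = !mTorchEnabled;               // hypothetical boolean field
            mCameraManager.setTorchMode(cameraId, mTorchEnabled);
        }
    } catch (CameraAccessException e) {
        Log.w(TAG, "Unable to toggle torch", e);
    }
}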
Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by crdroidandroid.
The class LegacyMetadataMapper, method createCharacteristics.
/**
 * Create characteristics for a legacy device by mapping the {@code parameters}
 * and {@code info}
 *
 * @param parameters A string parseable by {@link Camera.Parameters#unflatten}
 * @param info Camera info with camera facing direction and angle of orientation
 * @return static camera characteristics for a camera device
 *
 * @throws NullPointerException if any of the args were {@code null}
 */
public static CameraCharacteristics createCharacteristics(String parameters,
        android.hardware.CameraInfo info) {
    checkNotNull(parameters, "parameters must not be null");
    checkNotNull(info, "info must not be null");
    checkNotNull(info.info, "info.info must not be null");
    CameraMetadataNative m = new CameraMetadataNative();
    mapCharacteristicsFromInfo(m, info.info);
    Camera.Parameters params = Camera.getEmptyParameters();
    params.unflatten(parameters);
    mapCharacteristicsFromParameters(m, params);
    if (DEBUG) {
        Log.v(TAG, "createCharacteristics metadata:");
        Log.v(TAG, "--------------------------------------------------- (start)");
        m.dumpToLog();
        Log.v(TAG, "--------------------------------------------------- (end)");
    }
    return new CameraCharacteristics(m);
}
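The parameters argument is the flattened key=value string produced by the legacy API; a short sketch of where such a string comes from (hypothetical caller, public android.hardware.Camera API only):

// Hypothetical: obtain the flattened parameter string that
// createCharacteristics(String, ...) expects.
Camera camera = Camera.open(cameraId);
String flattened = camera.getParameters().flatten(); // "key1=value1;key2=value2;..."
camera.release();
// createCharacteristics later re-hydrates this string with Camera.Parameters#unflatten,
// as described in the @param documentation above.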
Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by crdroidandroid.
The class LegacyResultMapper, method convertResultMetadata.
/**
 * Generate capture result metadata from the legacy camera request.
 *
 * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
 * @return a {@link CameraMetadataNative} object containing result metadata.
 */
private static CameraMetadataNative convertResultMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    CameraMetadataNative result = new CameraMetadataNative();
    Rect activeArraySize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArraySize,
            request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
    /*
     * colorCorrection
     */
    // colorCorrection.aberrationMode
    {
        result.set(COLOR_CORRECTION_ABERRATION_MODE,
                request.get(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE));
    }
    /*
     * control
     */
    /*
     * control.ae*
     */
    mapAe(result, characteristics, request, activeArraySize, zoomData, /*out*/params);
    /*
     * control.af*
     */
    mapAf(result, activeArraySize, zoomData, /*out*/params);
    /*
     * control.awb*
     */
    mapAwb(result, /*out*/params);
    /*
     * control.captureIntent
     */
    {
        int captureIntent = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_CAPTURE_INTENT,
                /*defaultValue*/CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
        captureIntent = LegacyRequestMapper.filterSupportedCaptureIntent(captureIntent);
        result.set(CONTROL_CAPTURE_INTENT, captureIntent);
    }
    /*
     * control.mode
     */
    {
        int controlMode = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
        if (controlMode == CaptureResult.CONTROL_MODE_USE_SCENE_MODE) {
            result.set(CONTROL_MODE, CONTROL_MODE_USE_SCENE_MODE);
        } else {
            result.set(CONTROL_MODE, CONTROL_MODE_AUTO);
        }
    }
    /*
     * control.sceneMode
     */
    {
        String legacySceneMode = params.getSceneMode();
        int mode = LegacyMetadataMapper.convertSceneModeFromLegacy(legacySceneMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_SCENE_MODE, mode);
            // In case of SCENE_MODE == FACE_PRIORITY, LegacyFaceDetectMapper will override
            // the result to say SCENE_MODE == FACE_PRIORITY.
        } else {
            Log.w(TAG, "Unknown scene mode " + legacySceneMode
                    + " returned by camera HAL, setting to disabled.");
            result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
        }
    }
    /*
     * control.effectMode
     */
    {
        String legacyEffectMode = params.getColorEffect();
        int mode = LegacyMetadataMapper.convertEffectModeFromLegacy(legacyEffectMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_EFFECT_MODE, mode);
        } else {
            Log.w(TAG, "Unknown effect mode " + legacyEffectMode
                    + " returned by camera HAL, setting to off.");
            result.set(CaptureResult.CONTROL_EFFECT_MODE, CONTROL_EFFECT_MODE_OFF);
        }
    }
    // control.videoStabilizationMode
    {
        int stabMode = (params.isVideoStabilizationSupported() && params.getVideoStabilization())
                ? CONTROL_VIDEO_STABILIZATION_MODE_ON : CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode);
    }
    /*
     * flash
     */
    {
        // flash.mode, flash.state mapped in mapAeAndFlashMode
    }
    /*
     * lens
     */
    // lens.focusDistance
    {
        if (Parameters.FOCUS_MODE_INFINITY.equals(params.getFocusMode())) {
            result.set(CaptureResult.LENS_FOCUS_DISTANCE, 0.0f);
        }
    }
    // lens.focalLength
    result.set(CaptureResult.LENS_FOCAL_LENGTH, params.getFocalLength());
    /*
     * request
     */
    // request.pipelineDepth
    result.set(REQUEST_PIPELINE_DEPTH,
            characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH));
    /*
     * scaler
     */
    mapScaler(result, zoomData, /*out*/params);
    /*
     * sensor
     */
    // sensor.timestamp varies every frame; mapping is done in #cachedConvertResultMetadata
    {
        // Unconditionally no test patterns
        result.set(SENSOR_TEST_PATTERN_MODE, SENSOR_TEST_PATTERN_MODE_OFF);
    }
    /*
     * jpeg
     */
    // jpeg.gpsLocation
    result.set(JPEG_GPS_LOCATION, request.get(CaptureRequest.JPEG_GPS_LOCATION));
    // jpeg.orientation
    result.set(JPEG_ORIENTATION, request.get(CaptureRequest.JPEG_ORIENTATION));
    // jpeg.quality
    result.set(JPEG_QUALITY, (byte) params.getJpegQuality());
    // jpeg.thumbnailQuality
    result.set(JPEG_THUMBNAIL_QUALITY, (byte) params.getJpegThumbnailQuality());
    // jpeg.thumbnailSize
    Camera.Size s = params.getJpegThumbnailSize();
    if (s != null) {
        result.set(JPEG_THUMBNAIL_SIZE, ParameterUtils.convertSize(s));
    } else {
        Log.w(TAG, "Null thumbnail size received from parameters.");
    }
    /*
     * noiseReduction.*
     */
    // noiseReduction.mode
    result.set(NOISE_REDUCTION_MODE, request.get(CaptureRequest.NOISE_REDUCTION_MODE));
    return result;
}
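The ParamsUtils.getOrDefault calls above follow a simple null-fallback pattern over boxed request keys; a hypothetical standalone equivalent (not the project's ParamsUtils) looks like this:

// Hypothetical equivalent of the getOrDefault pattern used for CONTROL_CAPTURE_INTENT
// and CONTROL_MODE above: boxed metadata values may be null, so fall back to a default.
static <T> T getOrDefault(CaptureRequest request, CaptureRequest.Key<T> key, T defaultValue) {
    T value = request.get(key);
    return (value != null) ? value : defaultValue;
}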
Use of android.hardware.camera2.CameraCharacteristics in project android_frameworks_base by crdroidandroid.
The class CameraDeviceUserShim, method connectBinderShim.
public static CameraDeviceUserShim connectBinderShim(ICameraDeviceCallbacks callbacks, int cameraId) {
    if (DEBUG) {
        Log.d(TAG, "Opening shim Camera device");
    }
    /*
     * Put the camera open on a separate thread with its own looper; otherwise
     * if the main thread is used then the callbacks might never get delivered
     * (e.g. in CTS, which runs its own default looper only after tests)
     */
    CameraLooper init = new CameraLooper(cameraId);
    CameraCallbackThread threadCallbacks = new CameraCallbackThread(callbacks);
    // TODO: Make this async instead of blocking
    int initErrors = init.waitForOpen(OPEN_CAMERA_TIMEOUT_MS);
    Camera legacyCamera = init.getCamera();
    // Check for errors from old HAL initialization
    LegacyExceptionUtils.throwOnServiceError(initErrors);
    // Disable shutter sounds (this will work unconditionally) for api2 clients
    legacyCamera.disableShutterSound();
    CameraInfo info = new CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    Camera.Parameters legacyParameters = null;
    try {
        legacyParameters = legacyCamera.getParameters();
    } catch (RuntimeException e) {
        throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION,
                "Unable to get initial parameters: " + e.getMessage());
    }
    CameraCharacteristics characteristics =
            LegacyMetadataMapper.createCharacteristics(legacyParameters, info);
    LegacyCameraDevice device =
            new LegacyCameraDevice(cameraId, legacyCamera, characteristics, threadCallbacks);
    return new CameraDeviceUserShim(cameraId, device, characteristics, init, threadCallbacks);
}
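A rough illustration of the "own looper" idea from the comment at the top of connectBinderShim (a sketch only; the thread name and the hand-off are assumptions, and the real shim uses its CameraLooper helper for this):

// Hypothetical: open the legacy camera on a dedicated HandlerThread so that its
// event callbacks are delivered to that thread's looper rather than to a main
// looper that may be blocked (or, as in CTS, not running yet).
HandlerThread cameraThread = new HandlerThread("shim-camera-open");
cameraThread.start();
Handler cameraHandler = new Handler(cameraThread.getLooper());
cameraHandler.post(() -> {
    Camera camera = Camera.open(cameraId); // callbacks use this thread's looper
    // ... hand the opened Camera back to the shim ...
});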