Use of android.hardware.Camera in the project ViseFace by xiaoyaoyou1212: class FaceUtil, method setCameraDisplayOrientation.
/**
 * Computes the clockwise rotation (in degrees) that makes the camera preview
 * appear upright for the current display orientation, applies it to the given
 * camera, and reports the raw sensor orientation to the face detector.
 *
 * @param context      an {@link Activity} context used to query the display rotation;
 *                     if {@code null} or not an Activity, {@code 0} is returned and
 *                     nothing is applied
 * @param faceDetector optional detector that is told the sensor's native orientation
 * @param camera       optional camera whose display orientation is updated in place
 * @param cameraId     id of the camera whose {@link Camera.CameraInfo} is queried
 * @param <T>          face type handled by the detector
 * @return the computed display orientation: 0, 90, 180 or 270
 */
public static <T> int setCameraDisplayOrientation(Context context, IFaceDetector<T> faceDetector, Camera camera, int cameraId) {
    // The display rotation can only be obtained from an Activity's WindowManager;
    // guarding with instanceof also covers the original null check and avoids a
    // ClassCastException for non-Activity contexts (e.g. an Application context).
    if (!(context instanceof Activity)) {
        return 0;
    }
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    int rotation = ((Activity) context).getWindowManager().getDefaultDisplay().getRotation();
    int degree = 0;
    switch (rotation) {
        case Surface.ROTATION_0:
            degree = 0;
            break;
        case Surface.ROTATION_90:
            degree = 90;
            break;
        case Surface.ROTATION_180:
            degree = 180;
            break;
        case Surface.ROTATION_270:
            degree = 270;
            break;
    }
    // Tell the detector the sensor's native orientation so detected face
    // coordinates can be mapped back to display coordinates.
    if (faceDetector != null) {
        faceDetector.setOrientionOfCamera(info.orientation);
    }
    int result;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        // Front camera: the preview is mirrored, so invert the compensation.
        result = (info.orientation + degree) % 360;
        result = (360 - result) % 360;
    } else {
        result = (info.orientation - degree + 360) % 360;
    }
    if (camera != null) {
        camera.setDisplayOrientation(result);
    }
    return result;
}
Use of android.hardware.Camera in the project android_frameworks_base by crdroidandroid: class LegacyMetadataMapper, method createCharacteristics.
/**
 * Builds static {@link CameraCharacteristics} for a legacy camera device from
 * its flattened parameter string and its camera info.
 *
 * @param parameters A string parseable by {@link Camera.Parameters#unflatten}
 * @param info Camera info with camera facing direction and angle of orientation
 * @return static camera characteristics for a camera device
 *
 * @throws NullPointerException if any of the args were {@code null}
 */
public static CameraCharacteristics createCharacteristics(String parameters, android.hardware.CameraInfo info) {
    checkNotNull(parameters, "parameters must not be null");
    checkNotNull(info, "info must not be null");
    checkNotNull(info.info, "info.info must not be null");

    // Map the camera info (facing, orientation) into the metadata first.
    CameraMetadataNative metadata = new CameraMetadataNative();
    mapCharacteristicsFromInfo(metadata, info.info);

    // Then fold in everything derivable from the legacy parameter string.
    Camera.Parameters legacyParams = Camera.getEmptyParameters();
    legacyParams.unflatten(parameters);
    mapCharacteristicsFromParameters(metadata, legacyParams);

    if (DEBUG) {
        Log.v(TAG, "createCharacteristics metadata:");
        Log.v(TAG, "--------------------------------------------------- (start)");
        metadata.dumpToLog();
        Log.v(TAG, "--------------------------------------------------- (end)");
    }
    return new CameraCharacteristics(metadata);
}
Use of android.hardware.Camera in the project android_frameworks_base by crdroidandroid: class LegacyResultMapper, method convertResultMetadata.
/**
 * Generate capture result metadata from the legacy camera request.
 *
 * <p>Maps each camera2 result section (colorCorrection, control.*, lens, jpeg, ...)
 * from the legacy request's {@link Camera.Parameters} and {@link CaptureRequest}.
 * Note that {@code sensor.timestamp} is deliberately not set here — it varies per
 * frame and is mapped by the caller (see {@code #cachedConvertResultMetadata}).
 *
 * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
 * @return a {@link CameraMetadataNative} object containing result metadata.
 */
private static CameraMetadataNative convertResultMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    CameraMetadataNative result = new CameraMetadataNative();
    Rect activeArraySize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    // Zoom/crop data is shared by several of the mapping helpers below.
    ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArraySize, request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
    /*
     * colorCorrection
     */
    // colorCorrection.aberrationMode
    {
        result.set(COLOR_CORRECTION_ABERRATION_MODE, request.get(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE));
    }
    /*
     * control
     */
    /*
     * control.ae*
     */
    mapAe(result, characteristics, request, activeArraySize, zoomData, /*out*/
    params);
    /*
     * control.af*
     */
    mapAf(result, activeArraySize, zoomData, /*out*/
    params);
    /*
     * control.awb*
     */
    mapAwb(result, /*out*/
    params);
    /*
     * control.captureIntent
     */
    {
        int captureIntent = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_CAPTURE_INTENT, /*defaultValue*/
        CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
        captureIntent = LegacyRequestMapper.filterSupportedCaptureIntent(captureIntent);
        result.set(CONTROL_CAPTURE_INTENT, captureIntent);
    }
    /*
     * control.mode
     */
    {
        // Legacy supports only AUTO and USE_SCENE_MODE; anything else collapses to AUTO.
        int controlMode = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
        if (controlMode == CaptureResult.CONTROL_MODE_USE_SCENE_MODE) {
            result.set(CONTROL_MODE, CONTROL_MODE_USE_SCENE_MODE);
        } else {
            result.set(CONTROL_MODE, CONTROL_MODE_AUTO);
        }
    }
    /*
     * control.sceneMode
     */
    {
        String legacySceneMode = params.getSceneMode();
        int mode = LegacyMetadataMapper.convertSceneModeFromLegacy(legacySceneMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_SCENE_MODE, mode);
            // In case of SCENE_MODE == FACE_PRIORITY, LegacyFaceDetectMapper will override
            // the result to say SCENE_MODE == FACE_PRIORITY.
        } else {
            Log.w(TAG, "Unknown scene mode " + legacySceneMode + " returned by camera HAL, setting to disabled.");
            result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
        }
    }
    /*
     * control.effectMode
     */
    {
        String legacyEffectMode = params.getColorEffect();
        int mode = LegacyMetadataMapper.convertEffectModeFromLegacy(legacyEffectMode);
        if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
            result.set(CaptureResult.CONTROL_EFFECT_MODE, mode);
        } else {
            Log.w(TAG, "Unknown effect mode " + legacyEffectMode + " returned by camera HAL, setting to off.");
            result.set(CaptureResult.CONTROL_EFFECT_MODE, CONTROL_EFFECT_MODE_OFF);
        }
    }
    // control.videoStabilizationMode
    {
        int stabMode = (params.isVideoStabilizationSupported() && params.getVideoStabilization()) ? CONTROL_VIDEO_STABILIZATION_MODE_ON : CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode);
    }
    /*
     * flash
     */
    {
        // flash.mode, flash.state mapped in mapAeAndFlashMode
    }
    /*
     * lens
     */
    // lens.focusDistance
    {
        // Infinity focus is the only distance the legacy API can report exactly (0 diopters).
        if (Parameters.FOCUS_MODE_INFINITY.equals(params.getFocusMode())) {
            result.set(CaptureResult.LENS_FOCUS_DISTANCE, 0.0f);
        }
    }
    // lens.focalLength
    result.set(CaptureResult.LENS_FOCAL_LENGTH, params.getFocalLength());
    /*
     * request
     */
    // request.pipelineDepth
    result.set(REQUEST_PIPELINE_DEPTH, characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH));
    /*
     * scaler
     */
    mapScaler(result, zoomData, /*out*/
    params);
    /*
     * sensor
     */
    // sensor.timestamp varies every frame; mapping is done in #cachedConvertResultMetadata
    {
        // Unconditionally no test patterns
        result.set(SENSOR_TEST_PATTERN_MODE, SENSOR_TEST_PATTERN_MODE_OFF);
    }
    /*
     * jpeg
     */
    // jpeg.gpsLocation
    result.set(JPEG_GPS_LOCATION, request.get(CaptureRequest.JPEG_GPS_LOCATION));
    // jpeg.orientation
    result.set(JPEG_ORIENTATION, request.get(CaptureRequest.JPEG_ORIENTATION));
    // jpeg.quality
    result.set(JPEG_QUALITY, (byte) params.getJpegQuality());
    // jpeg.thumbnailQuality
    result.set(JPEG_THUMBNAIL_QUALITY, (byte) params.getJpegThumbnailQuality());
    // jpeg.thumbnailSize
    Camera.Size s = params.getJpegThumbnailSize();
    if (s != null) {
        result.set(JPEG_THUMBNAIL_SIZE, ParameterUtils.convertSize(s));
    } else {
        Log.w(TAG, "Null thumbnail size received from parameters.");
    }
    /*
     * noiseReduction.*
     */
    // noiseReduction.mode
    result.set(NOISE_REDUCTION_MODE, request.get(CaptureRequest.NOISE_REDUCTION_MODE));
    return result;
}
Use of android.hardware.Camera in the project android_frameworks_base by crdroidandroid: class CameraDeviceUserShim, method connectBinderShim.
/**
 * Opens the legacy camera identified by {@code cameraId} and wraps it in a
 * {@link CameraDeviceUserShim} that exposes it through the camera2 binder interface.
 */
public static CameraDeviceUserShim connectBinderShim(ICameraDeviceCallbacks callbacks, int cameraId) {
    if (DEBUG) {
        Log.d(TAG, "Opening shim Camera device");
    }
    /*
     * Open the camera on a dedicated thread with its own looper; running on the
     * main thread could leave callbacks undelivered (e.g. CTS installs its
     * default looper only after the tests run).
     */
    CameraLooper cameraInit = new CameraLooper(cameraId);
    CameraCallbackThread callbackThread = new CameraCallbackThread(callbacks);
    // TODO: Make this async instead of blocking
    int openError = cameraInit.waitForOpen(OPEN_CAMERA_TIMEOUT_MS);
    Camera camera = cameraInit.getCamera();
    // Surface any old-HAL initialization errors as exceptions.
    LegacyExceptionUtils.throwOnServiceError(openError);
    // api2 clients may always disable the shutter sound, so do it unconditionally.
    camera.disableShutterSound();
    CameraInfo cameraInfo = new CameraInfo();
    Camera.getCameraInfo(cameraId, cameraInfo);
    Camera.Parameters initialParams;
    try {
        initialParams = camera.getParameters();
    } catch (RuntimeException e) {
        throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, "Unable to get initial parameters: " + e.getMessage());
    }
    CameraCharacteristics characteristics = LegacyMetadataMapper.createCharacteristics(initialParams, cameraInfo);
    LegacyCameraDevice device = new LegacyCameraDevice(cameraId, camera, characteristics, callbackThread);
    return new CameraDeviceUserShim(cameraId, device, characteristics, cameraInit, callbackThread);
}
Use of android.hardware.Camera in the project android_frameworks_base by AOSPA: class MediaRecorderTest, method validateGetSurface.
/**
 * Verifies the state machine of {@link MediaRecorder#getSurface()} for both a
 * SURFACE and a CAMERA video source: getSurface() must throw
 * IllegalStateException before prepare() and after stop(); between prepare()
 * and stop() it must succeed only when the video source is SURFACE.
 *
 * @param useSurface true to record from a SURFACE video source, false for CAMERA
 * @return true if every state check behaved as expected
 */
private boolean validateGetSurface(boolean useSurface) {
    Log.v(TAG, "validateGetSurface, useSurface=" + useSurface);
    MediaRecorder recorder = new MediaRecorder();
    boolean success = true;
    try {
        /* initialization */
        if (!useSurface) {
            mCamera = Camera.open(CAMERA_ID);
            Camera.Parameters parameters = mCamera.getParameters();
            parameters.setPreviewSize(352, 288);
            parameters.set("orientation", "portrait");
            mCamera.setParameters(parameters);
            // Unlock so the MediaRecorder process may take over the camera.
            mCamera.unlock();
            recorder.setCamera(mCamera);
            mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
            recorder.setPreviewDisplay(mSurfaceHolder.getSurface());
        }
        recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        int videoSource = useSurface ? MediaRecorder.VideoSource.SURFACE : MediaRecorder.VideoSource.CAMERA;
        recorder.setVideoSource(videoSource);
        recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        recorder.setOutputFile(MediaNames.RECORDED_SURFACE_3GP);
        recorder.setVideoFrameRate(30);
        recorder.setVideoSize(352, 288);
        recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
        recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        // getSurface() before prepare() should always throw IllegalStateException.
        assertGetSurfaceThrows(recorder);
        recorder.prepare();
        // getSurface() after prepare(): succeeds for surface source, throws for camera source.
        checkGetSurface(recorder, useSurface);
        recorder.start();
        // getSurface() after start(): same expectation as after prepare().
        checkGetSurface(recorder, useSurface);
        try {
            recorder.stop();
        } catch (Exception e) {
            // stop() could fail if the recording is empty, as we didn't render anything.
            // ignore any failure in stop, we just want it stopped.
        }
        // getSurface() after stop() should always throw IllegalStateException.
        assertGetSurfaceThrows(recorder);
    } catch (Exception e) {
        // fail
        success = false;
    }
    try {
        if (mCamera != null) {
            mCamera.lock();
            mCamera.release();
            mCamera = null;
        }
        recorder.release();
    } catch (Exception e) {
        success = false;
    }
    return success;
}

/** Asserts that getSurface() throws IllegalStateException in the recorder's current state. */
private static void assertGetSurfaceThrows(MediaRecorder recorder) throws Exception {
    try {
        recorder.getSurface();
        throw new Exception("getSurface failed to throw IllegalStateException");
    } catch (IllegalStateException e) {
        // expected
    }
}

/**
 * Asserts that getSurface() succeeds when {@code expectSuccess} is true and
 * throws IllegalStateException otherwise.
 */
private static void checkGetSurface(MediaRecorder recorder, boolean expectSuccess) throws Exception {
    try {
        recorder.getSurface();
        if (!expectSuccess) {
            throw new Exception("getSurface failed to throw IllegalStateException");
        }
    } catch (IllegalStateException e) {
        if (expectSuccess) {
            // The original message wrongly claimed getSurface "failed to throw";
            // here the failure is the opposite: it threw when it should succeed.
            // (The message is only observed by the outer catch, which discards it.)
            throw new Exception("getSurface unexpectedly threw IllegalStateException");
        }
    }
}
Aggregations