Use of android.hardware.camera2.impl.CameraMetadataNative in the ResurrectionRemix android_frameworks_base project: class LegacyResultMapper, method cachedConvertResultMetadata.
/**
 * Generate capture result metadata from the legacy camera request.
 *
 * <p>Reuses the result cached by the previous invocation whenever both the
 * {@code parameters} and the {@code captureRequest} of the incoming
 * {@link LegacyRequest} are unchanged; otherwise performs a full conversion
 * and refreshes the cache.</p>
 *
 * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
 * @param timestamp the timestamp to use for this result in nanoseconds.
 *
 * @return {@link CameraMetadataNative} object containing result metadata.
 */
public CameraMetadataNative cachedConvertResultMetadata(LegacyRequest legacyRequest, long timestamp) {
    // A cache hit requires both the legacy parameters and the capture request to match.
    final boolean cacheHit = (mCachedRequest != null)
            && legacyRequest.parameters.same(mCachedRequest.parameters)
            && legacyRequest.captureRequest.equals(mCachedRequest.captureRequest);
    final CameraMetadataNative result;
    if (cacheHit) {
        result = new CameraMetadataNative(mCachedResult);
    } else {
        result = convertResultMetadata(legacyRequest);
        // Always cache a *copy* of the metadata result, since api2's client
        // side takes ownership of it after it receives a result.
        mCachedRequest = legacyRequest;
        mCachedResult = new CameraMetadataNative(result);
    }
    // sensor.timestamp changes on every single frame; set it unconditionally.
    result.set(SENSOR_TIMESTAMP, timestamp);
    if (DEBUG) {
        Log.v(TAG, "cachedConvertResultMetadata - cached? " + cacheHit
                + " timestamp = " + timestamp);
        Log.v(TAG, "----- beginning of result dump ------");
        result.dumpToLog();
        Log.v(TAG, "----- end of result dump ------");
    }
    return result;
}
Use of android.hardware.camera2.impl.CameraMetadataNative in the ResurrectionRemix android_frameworks_base project: class LegacyMetadataMapper, method createRequestTemplate.
/**
 * Create a request template
 *
 * @param c a non-{@code null} camera characteristics for this camera
 * @param templateId a non-negative template ID
 *
 * @return a non-{@code null} request template
 *
 * @throws IllegalArgumentException if {@code templateId} was invalid
 *
 * @see android.hardware.camera2.CameraDevice#TEMPLATE_MANUAL
 */
public static CameraMetadataNative createRequestTemplate(CameraCharacteristics c, int templateId) {
    if (!ArrayUtils.contains(sAllowedTemplates, templateId)) {
        throw new IllegalArgumentException("templateId out of range");
    }
    CameraMetadataNative m = new CameraMetadataNative();
    /*
     * NOTE: If adding new code here and it needs to query the static info,
     * query the camera characteristics, so we can reuse this for api2 code later
     * to create our own templates in the framework
     */
    /*
     * control.*
     */
    // control.aeAntibandingMode
    m.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, CONTROL_AE_ANTIBANDING_MODE_AUTO);
    // control.aeExposureCompensation
    m.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
    // control.aeLock
    m.set(CaptureRequest.CONTROL_AE_LOCK, false);
    // control.aePrecaptureTrigger
    m.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
    // control.afTrigger
    m.set(CaptureRequest.CONTROL_AF_TRIGGER, CONTROL_AF_TRIGGER_IDLE);
    // control.awbMode -- AWB is always unconditionally available in API1 devices.
    // (Previously this key was redundantly set twice with the same value.)
    m.set(CaptureRequest.CONTROL_AWB_MODE, CONTROL_AWB_MODE_AUTO);
    // control.awbLock
    m.set(CaptureRequest.CONTROL_AWB_LOCK, false);
    // control.aeRegions, control.awbRegions, control.afRegions
    {
        // Default metering region: the full active array with weight 0 (i.e. no
        // explicit metering region).
        Rect activeArray = c.get(SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        MeteringRectangle[] activeRegions = new MeteringRectangle[] { new MeteringRectangle(/*x*/
        0, /*y*/
        0, /*width*/
        activeArray.width() - 1, /*height*/
        activeArray.height() - 1, /*weight*/
        0) };
        m.set(CaptureRequest.CONTROL_AE_REGIONS, activeRegions);
        m.set(CaptureRequest.CONTROL_AWB_REGIONS, activeRegions);
        m.set(CaptureRequest.CONTROL_AF_REGIONS, activeRegions);
    }
    // control.captureIntent
    {
        int captureIntent;
        switch(templateId) {
            case CameraDevice.TEMPLATE_PREVIEW:
                captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
                break;
            case CameraDevice.TEMPLATE_STILL_CAPTURE:
                captureIntent = CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
                break;
            case CameraDevice.TEMPLATE_RECORD:
                captureIntent = CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
                break;
            default:
                // Can't get anything else since it's guarded by the IAE check
                throw new AssertionError("Impossible; keep in sync with sAllowedTemplates");
        }
        m.set(CaptureRequest.CONTROL_CAPTURE_INTENT, captureIntent);
    }
    // control.aeMode -- AE is always unconditionally available in API1 devices
    m.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
    // control.mode
    m.set(CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
    // control.afMode
    {
        Float minimumFocusDistance = c.get(LENS_INFO_MINIMUM_FOCUS_DISTANCE);
        int afMode;
        if (minimumFocusDistance != null && minimumFocusDistance == LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS) {
            // Cannot control auto-focus with fixed-focus cameras
            afMode = CameraMetadata.CONTROL_AF_MODE_OFF;
        } else {
            // If a minimum focus distance is reported; the camera must have AF
            afMode = CameraMetadata.CONTROL_AF_MODE_AUTO;
            if (templateId == CameraDevice.TEMPLATE_RECORD || templateId == CameraDevice.TEMPLATE_VIDEO_SNAPSHOT) {
                if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES), CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
                    afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                }
            } else if (templateId == CameraDevice.TEMPLATE_PREVIEW || templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
                if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES), CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
                    afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                }
            }
        }
        if (DEBUG) {
            Log.v(TAG, "createRequestTemplate (templateId=" + templateId + ")," + " afMode=" + afMode + ", minimumFocusDistance=" + minimumFocusDistance);
        }
        m.set(CaptureRequest.CONTROL_AF_MODE, afMode);
    }
    {
        // control.aeTargetFpsRange
        Range<Integer>[] availableFpsRange = c.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
        // Pick FPS range with highest max value, tiebreak on higher min value
        Range<Integer> bestRange = availableFpsRange[0];
        for (Range<Integer> r : availableFpsRange) {
            if (bestRange.getUpper() < r.getUpper()) {
                bestRange = r;
            } else if (bestRange.getUpper().equals(r.getUpper()) && bestRange.getLower() < r.getLower()) {
                // BUG FIX: previously compared boxed Integers with '=='; API1 FPS
                // values are scaled by 1000 (e.g. 30000), outside the Integer
                // cache (-128..127), so the tiebreak never fired. Use equals().
                bestRange = r;
            }
        }
        m.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, bestRange);
    }
    // control.sceneMode -- DISABLED is always available
    m.set(CaptureRequest.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
    /*
     * statistics.*
     */
    // statistics.faceDetectMode
    m.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, STATISTICS_FACE_DETECT_MODE_OFF);
    /*
     * flash.*
     */
    // flash.mode
    m.set(CaptureRequest.FLASH_MODE, FLASH_MODE_OFF);
    /*
     * noiseReduction.* -- high quality only for still capture
     */
    if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
        m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_HIGH_QUALITY);
    } else {
        m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_FAST);
    }
    /*
     * colorCorrection.* -- high quality only for still capture
     */
    if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
        m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY);
    } else {
        m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, COLOR_CORRECTION_ABERRATION_MODE_FAST);
    }
    /*
     * lens.*
     */
    // lens.focalLength
    m.set(CaptureRequest.LENS_FOCAL_LENGTH, c.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)[0]);
    /*
     * jpeg.*
     */
    // jpeg.thumbnailSize - set smallest non-zero size if possible
    // (assumes the list is sorted ascending with a possible (0,0) first entry)
    Size[] sizes = c.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
    m.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, (sizes.length > 1) ? sizes[1] : sizes[0]);
    // TODO: map other request template values
    return m;
}
Use of android.hardware.camera2.impl.CameraMetadataNative in the ResurrectionRemix android_frameworks_base project: class CameraDeviceBinderTest, method testCreateDefaultRequest.
@SmallTest
public void testCreateDefaultRequest() throws Exception {
    // BUG FIX: metadata was initialized to null, so metadata.isEmpty() threw a
    // guaranteed NullPointerException before the first assertion could run.
    // Start from a freshly constructed (empty) metadata object instead.
    CameraMetadataNative metadata = new CameraMetadataNative();
    assertTrue(metadata.isEmpty());
    // The service-provided default request template must be non-empty.
    metadata = mCameraUser.createDefaultRequest(TEMPLATE_PREVIEW);
    assertFalse(metadata.isEmpty());
}
Use of android.hardware.camera2.impl.CameraMetadataNative in the ResurrectionRemix android_frameworks_base project: class CameraDeviceBinderTest, method createDefaultBuilder.
/**
 * Build a preview {@link CaptureRequest.Builder} from the camera service's
 * default request template, optionally creating an output stream and adding
 * {@code mSurface} as a target.
 *
 * @param needStream whether to create a stream and attach the test surface
 * @return a non-empty request builder
 */
private CaptureRequest.Builder createDefaultBuilder(boolean needStream) throws Exception {
    // BUG FIX: metadata was initialized to null, so metadata.isEmpty() threw a
    // guaranteed NullPointerException. Use a freshly constructed empty object.
    CameraMetadataNative metadata = new CameraMetadataNative();
    assertTrue(metadata.isEmpty());
    metadata = mCameraUser.createDefaultRequest(TEMPLATE_PREVIEW);
    assertFalse(metadata.isEmpty());
    CaptureRequest.Builder request = new CaptureRequest.Builder(metadata, /*reprocess*/
    false, CameraCaptureSession.SESSION_ID_NONE);
    assertFalse(request.isEmpty());
    // The builder should not have consumed/cleared the source metadata.
    assertFalse(metadata.isEmpty());
    if (needStream) {
        int streamId = mCameraUser.createStream(mOutputConfiguration);
        // First stream created for this device is expected to get id 0.
        assertEquals(0, streamId);
        request.addTarget(mSurface);
    }
    return request;
}
Use of android.hardware.camera2.impl.CameraMetadataNative in the ResurrectionRemix android_frameworks_base project: class CameraDeviceBinderTest, method testCameraCharacteristics.
@SmallTest
public void testCameraCharacteristics() throws RemoteException {
    // Fetch the static characteristics for this camera from the service and
    // sanity-check that they are populated.
    final CameraMetadataNative characteristics =
            mUtils.getCameraService().getCameraCharacteristics(mCameraId);
    assertFalse(characteristics.isEmpty());
    assertNotNull(characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_FORMATS));
}
Aggregations