Use of android.hardware.camera2.params.Face in the project platform_frameworks_base (by android):
class CameraMetadataNative, method setFaceRectangles.
/**
 * Converts face rectangles from the managed-side representation to the
 * native-side representation before storing them, as the two sides define
 * rectangles differently.
 * <p>
 * Managed-side face rectangles are (left, top, width, height); native-side
 * face rectangles are (left, top, right, bottom). The input rectangles are
 * converted to the native-side definition when set is called.
 * </p>
 *
 * @param faceRects input face rectangles in managed-side form; may be null.
 * @return true if the face rectangles were set successfully; false otherwise,
 *         letting the caller (setBase) handle the value appropriately.
 */
private boolean setFaceRectangles(Rect[] faceRects) {
    if (faceRects == null) {
        return false;
    }
    final Rect[] converted = new Rect[faceRects.length];
    for (int idx = 0; idx < converted.length; idx++) {
        final Rect managed = faceRects[idx];
        // Managed "right"/"bottom" fields actually carry width/height here;
        // add the origin to obtain native right/bottom coordinates.
        converted[idx] = new Rect(
                managed.left,
                managed.top,
                managed.left + managed.right,
                managed.top + managed.bottom);
    }
    setBase(CaptureResult.STATISTICS_FACE_RECTANGLES, converted);
    return true;
}
Use of android.hardware.camera2.params.Face in the project platform_frameworks_base (by android):
class CameraMetadataNative, method getFaces.
/**
 * Assembles the synthetic STATISTICS_FACES result from the individual face
 * metadata entries (detect mode, scores, rectangles, ids, landmarks).
 *
 * @return null when no face metadata is present at all; an empty array when
 *         face detection is OFF, the mode is unknown, or required entries are
 *         missing; otherwise one {@link Face} per consistent metadata entry.
 */
private Face[] getFaces() {
    Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
    byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES);
    Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES);
    int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS);
    int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS);
    // No face metadata at all: report the synthetic key as absent.
    if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) {
        return null;
    }
    if (faceDetectMode == null) {
        Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE");
        faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
    } else {
        if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
            return new Face[0];
        }
        if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE
                && faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
            Log.w(TAG, "Unknown face detect mode: " + faceDetectMode);
            return new Face[0];
        }
    }
    // Face scores and rectangles are required by SIMPLE and FULL mode.
    if (faceScores == null || faceRectangles == null) {
        Log.w(TAG, "Expect face scores and rectangles to be non-null");
        return new Face[0];
    } else if (faceScores.length != faceRectangles.length) {
        // Fixed log message: previously read "doesn match".
        Log.w(TAG, String.format("Face score size(%d) doesn't match face rectangle size(%d)!",
                faceScores.length, faceRectangles.length));
    }
    // To be safe, make number of faces the minimum of all face info metadata lengths.
    int numFaces = Math.min(faceScores.length, faceRectangles.length);
    // Face id and landmarks are only required by FULL mode.
    if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
        if (faceIds == null || faceLandmarks == null) {
            // Fixed log message: the concatenated literals previously produced
            // "FULL mode,fallback" with no separating space.
            Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode, "
                    + "fallback to SIMPLE mode");
            faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
        } else {
            if (faceIds.length != numFaces
                    || faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) {
                // Fixed log message: previously produced "don'tmatch" (missing space)
                // and logged faceLandmarks.length * FACE_LANDMARK_SIZE instead of the
                // actual landmark array length.
                Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't "
                        + "match face number(%d)!",
                        faceIds.length, faceLandmarks.length, numFaces));
            }
            // To be safe, make number of faces the minimum of all face info metadata lengths.
            numFaces = Math.min(numFaces, faceIds.length);
            numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE);
        }
    }
    ArrayList<Face> faceList = new ArrayList<Face>();
    if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
        for (int i = 0; i < numFaces; i++) {
            // Skip faces whose score is outside the valid [SCORE_MIN, SCORE_MAX] range.
            if (faceScores[i] <= Face.SCORE_MAX && faceScores[i] >= Face.SCORE_MIN) {
                faceList.add(new Face(faceRectangles[i], faceScores[i]));
            }
        }
    } else {
        // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL
        for (int i = 0; i < numFaces; i++) {
            // FULL mode additionally requires a non-negative face id.
            if (faceScores[i] <= Face.SCORE_MAX && faceScores[i] >= Face.SCORE_MIN
                    && faceIds[i] >= 0) {
                Point leftEye = new Point(faceLandmarks[i * FACE_LANDMARK_SIZE],
                        faceLandmarks[i * FACE_LANDMARK_SIZE + 1]);
                Point rightEye = new Point(faceLandmarks[i * FACE_LANDMARK_SIZE + 2],
                        faceLandmarks[i * FACE_LANDMARK_SIZE + 3]);
                Point mouth = new Point(faceLandmarks[i * FACE_LANDMARK_SIZE + 4],
                        faceLandmarks[i * FACE_LANDMARK_SIZE + 5]);
                Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i],
                        leftEye, rightEye, mouth);
                faceList.add(face);
            }
        }
    }
    Face[] faces = new Face[faceList.size()];
    faceList.toArray(faces);
    return faces;
}
Use of android.hardware.camera2.params.Face in the project android_frameworks_base (by AOSPA):
class CameraMetadataNative, method getFaces.
/**
 * Assembles the synthetic STATISTICS_FACES result from the individual face
 * metadata entries (detect mode, scores, rectangles, ids, landmarks).
 *
 * @return null when no face metadata is present at all; an empty array when
 *         face detection is OFF, the mode is unknown, or required entries are
 *         missing; otherwise one {@link Face} per consistent metadata entry.
 */
private Face[] getFaces() {
    Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
    byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES);
    Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES);
    int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS);
    int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS);
    // No face metadata at all: report the synthetic key as absent.
    if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) {
        return null;
    }
    if (faceDetectMode == null) {
        Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE");
        faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
    } else {
        if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
            return new Face[0];
        }
        if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE
                && faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
            Log.w(TAG, "Unknown face detect mode: " + faceDetectMode);
            return new Face[0];
        }
    }
    // Face scores and rectangles are required by SIMPLE and FULL mode.
    if (faceScores == null || faceRectangles == null) {
        Log.w(TAG, "Expect face scores and rectangles to be non-null");
        return new Face[0];
    } else if (faceScores.length != faceRectangles.length) {
        // Fixed log message: previously read "doesn match".
        Log.w(TAG, String.format("Face score size(%d) doesn't match face rectangle size(%d)!",
                faceScores.length, faceRectangles.length));
    }
    // To be safe, make number of faces the minimum of all face info metadata lengths.
    int numFaces = Math.min(faceScores.length, faceRectangles.length);
    // Face id and landmarks are only required by FULL mode.
    if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
        if (faceIds == null || faceLandmarks == null) {
            // Fixed log message: the concatenated literals previously produced
            // "FULL mode,fallback" with no separating space.
            Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode, "
                    + "fallback to SIMPLE mode");
            faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
        } else {
            if (faceIds.length != numFaces
                    || faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) {
                // Fixed log message: previously produced "don'tmatch" (missing space)
                // and logged faceLandmarks.length * FACE_LANDMARK_SIZE instead of the
                // actual landmark array length.
                Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't "
                        + "match face number(%d)!",
                        faceIds.length, faceLandmarks.length, numFaces));
            }
            // To be safe, make number of faces the minimum of all face info metadata lengths.
            numFaces = Math.min(numFaces, faceIds.length);
            numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE);
        }
    }
    ArrayList<Face> faceList = new ArrayList<Face>();
    if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
        for (int i = 0; i < numFaces; i++) {
            // Skip faces whose score is outside the valid [SCORE_MIN, SCORE_MAX] range.
            if (faceScores[i] <= Face.SCORE_MAX && faceScores[i] >= Face.SCORE_MIN) {
                faceList.add(new Face(faceRectangles[i], faceScores[i]));
            }
        }
    } else {
        // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL
        for (int i = 0; i < numFaces; i++) {
            // FULL mode additionally requires a non-negative face id.
            if (faceScores[i] <= Face.SCORE_MAX && faceScores[i] >= Face.SCORE_MIN
                    && faceIds[i] >= 0) {
                Point leftEye = new Point(faceLandmarks[i * FACE_LANDMARK_SIZE],
                        faceLandmarks[i * FACE_LANDMARK_SIZE + 1]);
                Point rightEye = new Point(faceLandmarks[i * FACE_LANDMARK_SIZE + 2],
                        faceLandmarks[i * FACE_LANDMARK_SIZE + 3]);
                Point mouth = new Point(faceLandmarks[i * FACE_LANDMARK_SIZE + 4],
                        faceLandmarks[i * FACE_LANDMARK_SIZE + 5]);
                Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i],
                        leftEye, rightEye, mouth);
                faceList.add(face);
            }
        }
    }
    Face[] faces = new Face[faceList.size()];
    faceList.toArray(faces);
    return faces;
}
Use of android.hardware.camera2.params.Face in the project android_frameworks_base (by DirtyUnicorns):
class CameraMetadataNative, method setFaces.
/**
 * Stores an array of faces by decomposing them into the individual face
 * metadata entries (rectangles, scores, ids, landmarks). Null entries in the
 * input are skipped with a warning. Ids and landmarks are only written when
 * every face carries a supported id (FULL-mode data); otherwise those two
 * entries are set to null.
 *
 * @param faces faces to store; may be null.
 * @return true if the faces were stored; false when the input is null so the
 *         caller can handle the value itself.
 */
private boolean setFaces(Face[] faces) {
    if (faces == null) {
        return false;
    }
    // First pass: count the non-null faces and decide whether all of them
    // carry full (id + landmark) data.
    int validCount = faces.length;
    boolean haveFullData = true;
    for (Face face : faces) {
        if (face == null) {
            validCount--;
            Log.w(TAG, "setFaces - null face detected, skipping");
        } else if (face.getId() == Face.ID_UNSUPPORTED) {
            haveFullData = false;
        }
    }
    Rect[] rects = new Rect[validCount];
    byte[] scores = new byte[validCount];
    int[] ids = haveFullData ? new int[validCount] : null;
    int[] landmarks = haveFullData ? new int[validCount * FACE_LANDMARK_SIZE] : null;
    // Second pass: flatten each face into the per-entry arrays.
    int out = 0;
    for (Face face : faces) {
        if (face == null) {
            continue;
        }
        rects[out] = face.getBounds();
        scores[out] = (byte) face.getScore();
        if (haveFullData) {
            ids[out] = face.getId();
            final int base = out * FACE_LANDMARK_SIZE;
            landmarks[base] = face.getLeftEyePosition().x;
            landmarks[base + 1] = face.getLeftEyePosition().y;
            landmarks[base + 2] = face.getRightEyePosition().x;
            landmarks[base + 3] = face.getRightEyePosition().y;
            landmarks[base + 4] = face.getMouthPosition().x;
            landmarks[base + 5] = face.getMouthPosition().y;
        }
        out++;
    }
    set(CaptureResult.STATISTICS_FACE_RECTANGLES, rects);
    set(CaptureResult.STATISTICS_FACE_IDS, ids);
    set(CaptureResult.STATISTICS_FACE_LANDMARKS, landmarks);
    set(CaptureResult.STATISTICS_FACE_SCORES, scores);
    return true;
}
Use of android.hardware.camera2.params.Face in the project android_frameworks_base (by DirtyUnicorns):
class CameraMetadataTest, method testReadWriteOverride.
@SmallTest
/**
 * Verifies the keys whose managed-side representation is overridden relative
 * to the native metadata: scaler available formats (HAL pixel formats mapped
 * to ImageFormat values), the synthetic STATISTICS_FACES key assembled from
 * the individual face metadata entries, and the synthetic TONEMAP_CURVE key
 * assembled from the per-channel curve entries.
 */
public void testReadWriteOverride() {
    //
    // android.scaler.availableFormats (int x n array)
    //
    // HAL-side pixel format values as written into the native metadata.
    int[] availableFormats = new int[] {
        0x20,       // RAW_SENSOR
        0x32315659, // YV12
        0x11,       // YCrCb_420_SP
        0x100,      // ImageFormat.JPEG
        0x22,       // IMPLEMENTATION_DEFINED
        0x23 };     // YCbCr_420_888
    // Managed-side values expected after the read override; note JPEG (0x100)
    // is reported as BLOB (0x21), all other values pass through unchanged.
    int[] expectedIntValues = new int[] {
        0x20,       // RAW_SENSOR
        0x32315659, // YV12
        0x11,       // YCrCb_420_SP
        0x21,       // BLOB
        0x22,       // IMPLEMENTATION_DEFINED
        0x23 };     // YCbCr_420_888
    int availableFormatTag = CameraMetadataNative.getTag("android.scaler.availableFormats");
    Key<int[]> formatKey = CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey();
    validateArrayMetadataReadWriteOverride(formatKey, availableFormats, expectedIntValues, availableFormatTag);
    //
    // android.statistics.faces (Face x n array)
    //
    int[] expectedFaceIds = new int[] { 1, 2, 3, 4, 5 };
    byte[] expectedFaceScores = new byte[] { 10, 20, 30, 40, 50 };
    int numFaces = expectedFaceIds.length;
    Rect[] expectedRects = new Rect[numFaces];
    for (int i = 0; i < numFaces; i++) {
        expectedRects[i] = new Rect(i * 4 + 1, i * 4 + 2, i * 4 + 3, i * 4 + 4);
    }
    // Six landmark ints per face: leftEye(x,y), rightEye(x,y), mouth(x,y).
    int[] expectedFaceLM = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 };
    // Three Points per face derived from the flat landmark array above.
    Point[] expectedFaceLMPoints = new Point[numFaces * 3];
    for (int i = 0; i < numFaces; i++) {
        expectedFaceLMPoints[i * 3] = new Point(expectedFaceLM[i * 6], expectedFaceLM[i * 6 + 1]);
        expectedFaceLMPoints[i * 3 + 1] = new Point(expectedFaceLM[i * 6 + 2], expectedFaceLM[i * 6 + 3]);
        expectedFaceLMPoints[i * 3 + 2] = new Point(expectedFaceLM[i * 6 + 4], expectedFaceLM[i * 6 + 5]);
    }
    /**
     * Read - FACE_DETECT_MODE == FULL
     */
    mMetadata.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL);
    mMetadata.set(CaptureResult.STATISTICS_FACE_IDS, expectedFaceIds);
    mMetadata.set(CaptureResult.STATISTICS_FACE_SCORES, expectedFaceScores);
    mMetadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES, expectedRects);
    mMetadata.set(CaptureResult.STATISTICS_FACE_LANDMARKS, expectedFaceLM);
    // In FULL mode each Face must carry id, bounds, score, and all landmarks.
    Face[] resultFaces = mMetadata.get(CaptureResult.STATISTICS_FACES);
    assertEquals(numFaces, resultFaces.length);
    for (int i = 0; i < numFaces; i++) {
        assertEquals(expectedFaceIds[i], resultFaces[i].getId());
        assertEquals(expectedFaceScores[i], resultFaces[i].getScore());
        assertEquals(expectedRects[i], resultFaces[i].getBounds());
        assertEquals(expectedFaceLMPoints[i * 3], resultFaces[i].getLeftEyePosition());
        assertEquals(expectedFaceLMPoints[i * 3 + 1], resultFaces[i].getRightEyePosition());
        assertEquals(expectedFaceLMPoints[i * 3 + 2], resultFaces[i].getMouthPosition());
    }
    /**
     * Read - FACE_DETECT_MODE == SIMPLE
     */
    mMetadata.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE);
    mMetadata.set(CaptureResult.STATISTICS_FACE_SCORES, expectedFaceScores);
    mMetadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES, expectedRects);
    // In SIMPLE mode ids are ID_UNSUPPORTED and landmark positions are null.
    Face[] resultSimpleFaces = mMetadata.get(CaptureResult.STATISTICS_FACES);
    assertEquals(numFaces, resultSimpleFaces.length);
    for (int i = 0; i < numFaces; i++) {
        assertEquals(Face.ID_UNSUPPORTED, resultSimpleFaces[i].getId());
        assertEquals(expectedFaceScores[i], resultSimpleFaces[i].getScore());
        assertEquals(expectedRects[i], resultSimpleFaces[i].getBounds());
        assertNull(resultSimpleFaces[i].getLeftEyePosition());
        assertNull(resultSimpleFaces[i].getRightEyePosition());
        assertNull(resultSimpleFaces[i].getMouthPosition());
    }
    /**
     * Read/Write TonemapCurve
     */
    float[] red = new float[] { 0.0f, 0.0f, 1.0f, 1.0f };
    float[] green = new float[] { 0.0f, 1.0f, 1.0f, 0.0f };
    float[] blue = new float[] { 0.0000f, 0.0000f, 0.0667f, 0.2920f, 0.1333f, 0.4002f, 0.2000f, 0.4812f, 0.2667f, 0.5484f, 0.3333f, 0.6069f, 0.4000f, 0.6594f, 0.4667f, 0.7072f, 0.5333f, 0.7515f, 0.6000f, 0.7928f, 0.6667f, 0.8317f, 0.7333f, 0.8685f, 0.8000f, 0.9035f, 0.8667f, 0.9370f, 0.9333f, 0.9691f, 1.0000f, 1.0000f };
    TonemapCurve tcIn = new TonemapCurve(red, green, blue);
    // Writing the synthetic TONEMAP_CURVE key must populate the per-channel keys.
    mMetadata.set(CaptureResult.TONEMAP_CURVE, tcIn);
    float[] redOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_RED);
    float[] greenOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_GREEN);
    float[] blueOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_BLUE);
    assertArrayEquals(red, redOut);
    assertArrayEquals(green, greenOut);
    assertArrayEquals(blue, blueOut);
    TonemapCurve tcOut = mMetadata.get(CaptureResult.TONEMAP_CURVE);
    assertEquals(tcIn, tcOut);
    mMetadata.set(CaptureResult.TONEMAP_CURVE_GREEN, null);
    // If any of channel has null curve, return a null TonemapCurve
    assertNull(mMetadata.get(CaptureResult.TONEMAP_CURVE));
}
Aggregations