Example 1 with Face

use of android.hardware.camera2.params.Face in project platform_frameworks_base by android.

the class CameraMetadataTest method testReadWriteOverride.

@SmallTest
public void testReadWriteOverride() {
    //
    // android.scaler.availableFormats (int x n array)
    //
    int[] availableFormats = new int[] {
            0x20,       // RAW_SENSOR
            0x32315659, // YV12
            0x11,       // YCrCb_420_SP
            0x100,      // ImageFormat.JPEG
            0x22,       // IMPLEMENTATION_DEFINED
            0x23        // YCbCr_420_888
    };
    int[] expectedIntValues = new int[] {
            0x20,       // RAW_SENSOR
            0x32315659, // YV12
            0x11,       // YCrCb_420_SP
            0x21,       // BLOB
            0x22,       // IMPLEMENTATION_DEFINED
            0x23        // YCbCr_420_888
    };
    int availableFormatTag = CameraMetadataNative.getTag("android.scaler.availableFormats");
    Key<int[]> formatKey = CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey();
    validateArrayMetadataReadWriteOverride(formatKey, availableFormats, expectedIntValues, availableFormatTag);
    //
    // android.statistics.faces (Face x n array)
    //
    int[] expectedFaceIds = new int[] { 1, 2, 3, 4, 5 };
    byte[] expectedFaceScores = new byte[] { 10, 20, 30, 40, 50 };
    int numFaces = expectedFaceIds.length;
    Rect[] expectedRects = new Rect[numFaces];
    for (int i = 0; i < numFaces; i++) {
        expectedRects[i] = new Rect(i * 4 + 1, i * 4 + 2, i * 4 + 3, i * 4 + 4);
    }
    int[] expectedFaceLM = new int[] {
            1, 2, 3, 4, 5, 6,        // face 0: leftEye, rightEye, mouth
            7, 8, 9, 10, 11, 12,     // face 1
            13, 14, 15, 16, 17, 18,  // face 2
            19, 20, 21, 22, 23, 24,  // face 3
            25, 26, 27, 28, 29, 30   // face 4
    };
    Point[] expectedFaceLMPoints = new Point[numFaces * 3];
    for (int i = 0; i < numFaces; i++) {
        expectedFaceLMPoints[i * 3] = new Point(expectedFaceLM[i * 6], expectedFaceLM[i * 6 + 1]);
        expectedFaceLMPoints[i * 3 + 1] = new Point(expectedFaceLM[i * 6 + 2], expectedFaceLM[i * 6 + 3]);
        expectedFaceLMPoints[i * 3 + 2] = new Point(expectedFaceLM[i * 6 + 4], expectedFaceLM[i * 6 + 5]);
    }
    /**
     * Read - FACE_DETECT_MODE == FULL
     */
    mMetadata.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL);
    mMetadata.set(CaptureResult.STATISTICS_FACE_IDS, expectedFaceIds);
    mMetadata.set(CaptureResult.STATISTICS_FACE_SCORES, expectedFaceScores);
    mMetadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES, expectedRects);
    mMetadata.set(CaptureResult.STATISTICS_FACE_LANDMARKS, expectedFaceLM);
    Face[] resultFaces = mMetadata.get(CaptureResult.STATISTICS_FACES);
    assertEquals(numFaces, resultFaces.length);
    for (int i = 0; i < numFaces; i++) {
        assertEquals(expectedFaceIds[i], resultFaces[i].getId());
        assertEquals(expectedFaceScores[i], resultFaces[i].getScore());
        assertEquals(expectedRects[i], resultFaces[i].getBounds());
        assertEquals(expectedFaceLMPoints[i * 3], resultFaces[i].getLeftEyePosition());
        assertEquals(expectedFaceLMPoints[i * 3 + 1], resultFaces[i].getRightEyePosition());
        assertEquals(expectedFaceLMPoints[i * 3 + 2], resultFaces[i].getMouthPosition());
    }
    /**
     * Read - FACE_DETECT_MODE == SIMPLE
     */
    mMetadata.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE);
    mMetadata.set(CaptureResult.STATISTICS_FACE_SCORES, expectedFaceScores);
    mMetadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES, expectedRects);
    Face[] resultSimpleFaces = mMetadata.get(CaptureResult.STATISTICS_FACES);
    assertEquals(numFaces, resultSimpleFaces.length);
    for (int i = 0; i < numFaces; i++) {
        assertEquals(Face.ID_UNSUPPORTED, resultSimpleFaces[i].getId());
        assertEquals(expectedFaceScores[i], resultSimpleFaces[i].getScore());
        assertEquals(expectedRects[i], resultSimpleFaces[i].getBounds());
        assertNull(resultSimpleFaces[i].getLeftEyePosition());
        assertNull(resultSimpleFaces[i].getRightEyePosition());
        assertNull(resultSimpleFaces[i].getMouthPosition());
    }
    /**
     * Read/Write TonemapCurve
     */
    float[] red = new float[] { 0.0f, 0.0f, 1.0f, 1.0f };
    float[] green = new float[] { 0.0f, 1.0f, 1.0f, 0.0f };
    float[] blue = new float[] {
            0.0000f, 0.0000f, 0.0667f, 0.2920f, 0.1333f, 0.4002f, 0.2000f, 0.4812f,
            0.2667f, 0.5484f, 0.3333f, 0.6069f, 0.4000f, 0.6594f, 0.4667f, 0.7072f,
            0.5333f, 0.7515f, 0.6000f, 0.7928f, 0.6667f, 0.8317f, 0.7333f, 0.8685f,
            0.8000f, 0.9035f, 0.8667f, 0.9370f, 0.9333f, 0.9691f, 1.0000f, 1.0000f };
    TonemapCurve tcIn = new TonemapCurve(red, green, blue);
    mMetadata.set(CaptureResult.TONEMAP_CURVE, tcIn);
    float[] redOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_RED);
    float[] greenOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_GREEN);
    float[] blueOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_BLUE);
    assertArrayEquals(red, redOut);
    assertArrayEquals(green, greenOut);
    assertArrayEquals(blue, blueOut);
    TonemapCurve tcOut = mMetadata.get(CaptureResult.TONEMAP_CURVE);
    assertEquals(tcIn, tcOut);
    mMetadata.set(CaptureResult.TONEMAP_CURVE_GREEN, null);
    // If any channel has a null curve, get() returns a null TonemapCurve
    assertNull(mMetadata.get(CaptureResult.TONEMAP_CURVE));
}
Also used: TonemapCurve(android.hardware.camera2.params.TonemapCurve) Rect(android.graphics.Rect) Point(android.graphics.Point) Face(android.hardware.camera2.params.Face) SmallTest(android.test.suitebuilder.annotation.SmallTest)
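
For context, a minimal app-side sketch (hypothetical code, not part of the test above) of how the synthesized Face array is typically consumed from a camera2 capture callback; onCaptureCompleted and CaptureResult.STATISTICS_FACES are the real camera2 API, while TAG and the logging are illustrative.

@Override
public void onCaptureCompleted(CameraCaptureSession session,
        CaptureRequest request, TotalCaptureResult result) {
    Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
    if (faces == null) {
        return;
    }
    for (Face face : faces) {
        // In SIMPLE mode only getBounds() and getScore() are meaningful;
        // getId() returns Face.ID_UNSUPPORTED and the landmarks are null.
        Log.d(TAG, "face id=" + face.getId() + " score=" + face.getScore()
                + " bounds=" + face.getBounds());
    }
}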

Example 2 with Face

use of android.hardware.camera2.params.Face in project platform_frameworks_base by android.

the class CameraMetadataNative method setFaces.

private boolean setFaces(Face[] faces) {
    if (faces == null) {
        return false;
    }
    int numFaces = faces.length;
    // Fall back to SIMPLE mode if any face has no id (ID_UNSUPPORTED);
    // also count the valid (non-null) faces
    boolean fullMode = true;
    for (Face face : faces) {
        if (face == null) {
            numFaces--;
            Log.w(TAG, "setFaces - null face detected, skipping");
            continue;
        }
        if (face.getId() == Face.ID_UNSUPPORTED) {
            fullMode = false;
        }
    }
    Rect[] faceRectangles = new Rect[numFaces];
    byte[] faceScores = new byte[numFaces];
    int[] faceIds = null;
    int[] faceLandmarks = null;
    if (fullMode) {
        faceIds = new int[numFaces];
        faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE];
    }
    int i = 0;
    for (Face face : faces) {
        if (face == null) {
            continue;
        }
        faceRectangles[i] = face.getBounds();
        faceScores[i] = (byte) face.getScore();
        if (fullMode) {
            faceIds[i] = face.getId();
            int j = 0;
            faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x;
            faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y;
            faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x;
            faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y;
            faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x;
            faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y;
        }
        i++;
    }
    set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles);
    set(CaptureResult.STATISTICS_FACE_IDS, faceIds);
    set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks);
    set(CaptureResult.STATISTICS_FACE_SCORES, faceScores);
    return true;
}
Also used: Rect(android.graphics.Rect) MarshalQueryableRect(android.hardware.camera2.marshal.impl.MarshalQueryableRect) Face(android.hardware.camera2.params.Face) Point(android.graphics.Point)
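
The landmark packing above (FACE_LANDMARK_SIZE is 6 ints per face) can be summarized with a small standalone sketch; packLandmarks is a hypothetical helper for illustration, not part of CameraMetadataNative.

// Flattens one FULL-mode face into the 6-int layout used by
// STATISTICS_FACE_LANDMARKS: [leftEye.x, leftEye.y, rightEye.x, rightEye.y,
// mouth.x, mouth.y]. Only valid when the landmark positions are non-null.
static int[] packLandmarks(Face face) {
    Point leftEye = face.getLeftEyePosition();
    Point rightEye = face.getRightEyePosition();
    Point mouth = face.getMouthPosition();
    return new int[] {
            leftEye.x, leftEye.y,
            rightEye.x, rightEye.y,
            mouth.x, mouth.y };
}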

Example 3 with Face

use of android.hardware.camera2.params.Face in project platform_frameworks_base by android.

the class CameraMetadataNative method getFaceRectangles.

// Face rectangles are defined as (left, top, right, bottom) instead of
// (left, top, width, height) at the native level, so the normal Rect
// conversion that does (l, t, w, h) -> (l, t, r, b) is unnecessary. Undo
// that conversion here for just the faces.
private Rect[] getFaceRectangles() {
    Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES);
    if (faceRectangles == null) {
        return null;
    }
    Rect[] fixedFaceRectangles = new Rect[faceRectangles.length];
    for (int i = 0; i < faceRectangles.length; i++) {
        fixedFaceRectangles[i] = new Rect(
                faceRectangles[i].left,
                faceRectangles[i].top,
                faceRectangles[i].right - faceRectangles[i].left,
                faceRectangles[i].bottom - faceRectangles[i].top);
    }
    return fixedFaceRectangles;
}
Also used: Rect(android.graphics.Rect) MarshalQueryableRect(android.hardware.camera2.marshal.impl.MarshalQueryableRect) Point(android.graphics.Point)
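
To make the undo step concrete, here is a worked sketch with assumed numbers: suppose the native face rectangle is (left, top, right, bottom) = (100, 50, 300, 250). The generic Rect marshaler treats the last two values as width/height and over-converts them, and the fixup above subtracts left/top to restore the original values.

// Marshaler read native (100, 50, 300, 250) as (l, t, w, h) and produced
// Rect(100, 50, 100 + 300, 50 + 250):
Rect marshaled = new Rect(100, 50, 400, 300);
// The fixup recovers the intended (l, t, r, b) rectangle:
Rect fixed = new Rect(marshaled.left, marshaled.top,
        marshaled.right - marshaled.left,    // 400 - 100 = 300
        marshaled.bottom - marshaled.top);   // 300 - 50  = 250
// fixed is now Rect(100, 50, 300, 250), matching the native value.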

Example 4 with Face

use of android.hardware.camera2.params.Face in project platform_frameworks_base by android.

the class LegacyFaceDetectMapper method mapResultFaces.

/**
     * Update the {@code result} camera metadata map with the new value for the
     * {@code statistics.faces} and {@code statistics.faceDetectMode}.
     *
     * <p>Face detect callbacks are processed in the background, and each call to
     * {@link #mapResultFaces} will have the latest faces as reflected by the camera1 callbacks.</p>
     *
     * <p>If the scene mode was set to {@code FACE_PRIORITY} but face detection is disabled,
     * the camera will still run face detection in the background, but no faces will be reported
     * in the capture result.</p>
     *
     * @param result a non-{@code null} result
     * @param legacyRequest a non-{@code null} request (read-only)
     */
public void mapResultFaces(CameraMetadataNative result, LegacyRequest legacyRequest) {
    checkNotNull(result, "result must not be null");
    checkNotNull(legacyRequest, "legacyRequest must not be null");
    Camera.Face[] faces, previousFaces;
    int fdMode;
    boolean fdScenePriority;
    synchronized (mLock) {
        fdMode = mFaceDetectReporting ? STATISTICS_FACE_DETECT_MODE_SIMPLE : STATISTICS_FACE_DETECT_MODE_OFF;
        if (mFaceDetectReporting) {
            faces = mFaces;
        } else {
            faces = null;
        }
        fdScenePriority = mFaceDetectScenePriority;
        previousFaces = mFacesPrev;
        mFacesPrev = faces;
    }
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArray,
            request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
    List<Face> convertedFaces = new ArrayList<>();
    if (faces != null) {
        for (Camera.Face face : faces) {
            if (face != null) {
                convertedFaces.add(ParameterUtils.convertFaceFromLegacy(face, activeArray, zoomData));
            } else {
                Log.w(TAG, "mapResultFaces - read NULL face from camera1 device");
            }
        }
    }
    if (DEBUG && previousFaces != faces) {
        // Log only in verbose and IF the faces changed
        Log.v(TAG, "mapResultFaces - changed to " + ListUtils.listToString(convertedFaces));
    }
    result.set(CaptureResult.STATISTICS_FACES, convertedFaces.toArray(new Face[0]));
    result.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, fdMode);
    // Override scene mode with FACE_PRIORITY if the request was using FACE_PRIORITY
    if (fdScenePriority) {
        result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_FACE_PRIORITY);
    }
}
Also used: Rect(android.graphics.Rect) Size(android.util.Size) ArrayList(java.util.ArrayList) CameraCharacteristics(android.hardware.camera2.CameraCharacteristics) ZoomData(android.hardware.camera2.legacy.ParameterUtils.ZoomData) CaptureRequest(android.hardware.camera2.CaptureRequest) Camera(android.hardware.Camera) Face(android.hardware.camera2.params.Face)
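
For reference, a minimal sketch (assumed wiring, not the mapper's actual registration code) of the camera1 side that supplies the faces consumed above; setFaceDetectionListener and startFaceDetection are real android.hardware.Camera APIs, and mFaces/mLock mirror the fields referenced in mapResultFaces.

camera.setFaceDetectionListener(new Camera.FaceDetectionListener() {
    @Override
    public void onFaceDetection(Camera.Face[] faces, Camera camera) {
        // Keep only the latest snapshot; mapResultFaces reads it under mLock.
        synchronized (mLock) {
            mFaces = faces;
        }
    }
});
camera.startFaceDetection();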

Example 5 with Face

use of android.hardware.camera2.params.Face in project platform_frameworks_base by android.

the class ParameterUtils method convertFaceFromLegacy.

/**
     * Convert an api1 face into an active-array based api2 face.
     *
     * <p>Out-of-range scores and ids will be clipped to be within range (with a warning).</p>
     *
     * @param face a non-{@code null} api1 face
     * @param activeArray active array size of the sensor (e.g. max jpeg size)
     * @param zoomData the calculated zoom data corresponding to this request
     *
     * @return a non-{@code null} api2 face
     *
     * @throws NullPointerException if the {@code face} was {@code null}
     */
public static Face convertFaceFromLegacy(Camera.Face face, Rect activeArray, ZoomData zoomData) {
    checkNotNull(face, "face must not be null");
    Face api2Face;
    Camera.Area fakeArea = new Camera.Area(face.rect, /*weight*/ 1);
    WeightedRectangle faceRect = convertCameraAreaToActiveArrayRectangle(activeArray, zoomData, fakeArea);
    Point leftEye = face.leftEye, rightEye = face.rightEye, mouth = face.mouth;
    // Landmarks are optional, and some legacy HALs report the out-of-range
    // sentinel (-2000, -2000) for unsupported landmarks; only convert them
    // when all three are present and valid. Note the original code passed
    // leftEye to all three conversions, a copy-paste bug fixed here.
    if (leftEye != null && rightEye != null && mouth != null
            && leftEye.x != -2000 && leftEye.y != -2000
            && rightEye.x != -2000 && rightEye.y != -2000
            && mouth.x != -2000 && mouth.y != -2000) {
        leftEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
                leftEye, /*usePreviewCrop*/ true);
        rightEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
                rightEye, /*usePreviewCrop*/ true);
        mouth = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
                mouth, /*usePreviewCrop*/ true);
        api2Face = faceRect.toFace(face.id, leftEye, rightEye, mouth);
    } else {
        api2Face = faceRect.toFace();
    }
    return api2Face;
}
Also used: Area(android.hardware.Camera.Area) Camera(android.hardware.Camera) Point(android.graphics.Point) Face(android.hardware.camera2.params.Face)
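
The landmark conversion relies on convertCameraPointToActiveArrayPoint, which maps camera1 coordinates (each axis spanning [-1000, 1000] across the preview crop) into active-array pixel coordinates. A simplified sketch of that mapping, ignoring the zoom/crop handling the real helper performs (toActiveArray is a hypothetical name):

static Point toActiveArray(Point legacy, Rect activeArray) {
    // Normalize each axis from [-1000, 1000] to [0, 1] ...
    float nx = (legacy.x + 1000) / 2000f;
    float ny = (legacy.y + 1000) / 2000f;
    // ... then scale into the active array's pixel bounds.
    return new Point(
            activeArray.left + Math.round(nx * activeArray.width()),
            activeArray.top + Math.round(ny * activeArray.height()));
}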

Aggregations

Point (android.graphics.Point): 30
Rect (android.graphics.Rect): 30
Face (android.hardware.camera2.params.Face): 25
MarshalQueryableRect (android.hardware.camera2.marshal.impl.MarshalQueryableRect): 20
Camera (android.hardware.Camera): 10
ArrayList (java.util.ArrayList): 10
Area (android.hardware.Camera.Area): 5
CameraCharacteristics (android.hardware.camera2.CameraCharacteristics): 5
CaptureRequest (android.hardware.camera2.CaptureRequest): 5
ZoomData (android.hardware.camera2.legacy.ParameterUtils.ZoomData): 5
MarshalQueryableNativeByteToInteger (android.hardware.camera2.marshal.impl.MarshalQueryableNativeByteToInteger): 5
TonemapCurve (android.hardware.camera2.params.TonemapCurve): 5
SmallTest (android.test.suitebuilder.annotation.SmallTest): 5
Size (android.util.Size): 5