Example 1 with TonemapCurve

Use of android.hardware.camera2.params.TonemapCurve in project platform_frameworks_base by android.

From the class CameraMetadataTest, method testReadWriteOverride:

@SmallTest
public void testReadWriteOverride() {
    //
    // android.scaler.availableFormats (int x n array)
    //
    int[] availableFormats = new int[] {
        0x20,       // RAW_SENSOR
        0x32315659, // YV12
        0x11,       // YCrCb_420_SP
        0x100,      // ImageFormat.JPEG
        0x22,       // IMPLEMENTATION_DEFINED
        0x23        // YCbCr_420_888
    };
    int[] expectedIntValues = new int[] {
        0x20,       // RAW_SENSOR
        0x32315659, // YV12
        0x11,       // YCrCb_420_SP
        0x21,       // BLOB
        0x22,       // IMPLEMENTATION_DEFINED
        0x23        // YCbCr_420_888
    };
    int availableFormatTag = CameraMetadataNative.getTag("android.scaler.availableFormats");
    Key<int[]> formatKey = CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey();
    validateArrayMetadataReadWriteOverride(formatKey, availableFormats, expectedIntValues, availableFormatTag);
    //
    // android.statistics.faces (Face x n array)
    //
    int[] expectedFaceIds = new int[] { 1, 2, 3, 4, 5 };
    byte[] expectedFaceScores = new byte[] { 10, 20, 30, 40, 50 };
    int numFaces = expectedFaceIds.length;
    Rect[] expectedRects = new Rect[numFaces];
    for (int i = 0; i < numFaces; i++) {
        expectedRects[i] = new Rect(i * 4 + 1, i * 4 + 2, i * 4 + 3, i * 4 + 4);
    }
    int[] expectedFaceLM = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 };
    Point[] expectedFaceLMPoints = new Point[numFaces * 3];
    for (int i = 0; i < numFaces; i++) {
        expectedFaceLMPoints[i * 3] = new Point(expectedFaceLM[i * 6], expectedFaceLM[i * 6 + 1]);
        expectedFaceLMPoints[i * 3 + 1] = new Point(expectedFaceLM[i * 6 + 2], expectedFaceLM[i * 6 + 3]);
        expectedFaceLMPoints[i * 3 + 2] = new Point(expectedFaceLM[i * 6 + 4], expectedFaceLM[i * 6 + 5]);
    }
    /**
     * Read - FACE_DETECT_MODE == FULL
     */
    mMetadata.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL);
    mMetadata.set(CaptureResult.STATISTICS_FACE_IDS, expectedFaceIds);
    mMetadata.set(CaptureResult.STATISTICS_FACE_SCORES, expectedFaceScores);
    mMetadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES, expectedRects);
    mMetadata.set(CaptureResult.STATISTICS_FACE_LANDMARKS, expectedFaceLM);
    Face[] resultFaces = mMetadata.get(CaptureResult.STATISTICS_FACES);
    assertEquals(numFaces, resultFaces.length);
    for (int i = 0; i < numFaces; i++) {
        assertEquals(expectedFaceIds[i], resultFaces[i].getId());
        assertEquals(expectedFaceScores[i], resultFaces[i].getScore());
        assertEquals(expectedRects[i], resultFaces[i].getBounds());
        assertEquals(expectedFaceLMPoints[i * 3], resultFaces[i].getLeftEyePosition());
        assertEquals(expectedFaceLMPoints[i * 3 + 1], resultFaces[i].getRightEyePosition());
        assertEquals(expectedFaceLMPoints[i * 3 + 2], resultFaces[i].getMouthPosition());
    }
    /**
     * Read - FACE_DETECT_MODE == SIMPLE
     */
    mMetadata.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE);
    mMetadata.set(CaptureResult.STATISTICS_FACE_SCORES, expectedFaceScores);
    mMetadata.set(CaptureResult.STATISTICS_FACE_RECTANGLES, expectedRects);
    Face[] resultSimpleFaces = mMetadata.get(CaptureResult.STATISTICS_FACES);
    assertEquals(numFaces, resultSimpleFaces.length);
    for (int i = 0; i < numFaces; i++) {
        assertEquals(Face.ID_UNSUPPORTED, resultSimpleFaces[i].getId());
        assertEquals(expectedFaceScores[i], resultSimpleFaces[i].getScore());
        assertEquals(expectedRects[i], resultSimpleFaces[i].getBounds());
        assertNull(resultSimpleFaces[i].getLeftEyePosition());
        assertNull(resultSimpleFaces[i].getRightEyePosition());
        assertNull(resultSimpleFaces[i].getMouthPosition());
    }
    /**
     * Read/Write TonemapCurve
     */
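    // Two-point test curves: red is the identity line, green is an inverted line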
    float[] red = new float[] { 0.0f, 0.0f, 1.0f, 1.0f };
    float[] green = new float[] { 0.0f, 1.0f, 1.0f, 0.0f };
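    // A 16-point blue curve given as flattened (Pin, Pout) pairs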
    float[] blue = new float[] { 0.0000f, 0.0000f, 0.0667f, 0.2920f, 0.1333f, 0.4002f, 0.2000f, 0.4812f, 0.2667f, 0.5484f, 0.3333f, 0.6069f, 0.4000f, 0.6594f, 0.4667f, 0.7072f, 0.5333f, 0.7515f, 0.6000f, 0.7928f, 0.6667f, 0.8317f, 0.7333f, 0.8685f, 0.8000f, 0.9035f, 0.8667f, 0.9370f, 0.9333f, 0.9691f, 1.0000f, 1.0000f };
    TonemapCurve tcIn = new TonemapCurve(red, green, blue);
    mMetadata.set(CaptureResult.TONEMAP_CURVE, tcIn);
    float[] redOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_RED);
    float[] greenOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_GREEN);
    float[] blueOut = mMetadata.get(CaptureResult.TONEMAP_CURVE_BLUE);
    assertArrayEquals(red, redOut);
    assertArrayEquals(green, greenOut);
    assertArrayEquals(blue, blueOut);
    TonemapCurve tcOut = mMetadata.get(CaptureResult.TONEMAP_CURVE);
    assertEquals(tcIn, tcOut);
    mMetadata.set(CaptureResult.TONEMAP_CURVE_GREEN, null);
    // If any channel's curve is null, reading TONEMAP_CURVE returns null
    assertNull(mMetadata.get(CaptureResult.TONEMAP_CURVE));
}
Also used : TonemapCurve(android.hardware.camera2.params.TonemapCurve) Rect(android.graphics.Rect) Point(android.graphics.Point) Face(android.hardware.camera2.params.Face) SmallTest(android.test.suitebuilder.annotation.SmallTest)
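
The round trip being tested maps directly onto how an application supplies a tone curve through the public API. The following is a minimal sketch, not taken from the test above: the builder variable is an assumed CaptureRequest.Builder obtained from CameraDevice.createCaptureRequest, and the identity curve values are illustrative.

    // Hypothetical usage: `builder` is assumed to come from
    // CameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW).
    float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f }; // identity curve as (Pin, Pout) pairs
    TonemapCurve curve = new TonemapCurve(linear, linear, linear);
    // CONTRAST_CURVE mode makes the pipeline apply the caller-supplied curve
    builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
    builder.set(CaptureRequest.TONEMAP_CURVE, curve);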

Example 2 with TonemapCurve

Use of android.hardware.camera2.params.TonemapCurve in project android_frameworks_base by DirtyUnicorns.

From the class CameraMetadataNative, method getTonemapCurve:

private <T> TonemapCurve getTonemapCurve() {
    float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED);
    float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN);
    float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE);
    if (areValuesAllNull(red, green, blue)) {
        return null;
    }
    if (red == null || green == null || blue == null) {
        Log.w(TAG, "getTonemapCurve - missing tone curve components");
        return null;
    }
    TonemapCurve tc = new TonemapCurve(red, green, blue);
    return tc;
}
Also used : TonemapCurve(android.hardware.camera2.params.TonemapCurve)
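
getTonemapCurve covers only the read direction; the write direction has to flatten a TonemapCurve back into the three per-channel arrays. Below is a sketch of that counterpart, assuming a setBase helper symmetric to the getBase calls above (the actual method in CameraMetadataNative may differ).

    private boolean setTonemapCurve(TonemapCurve tc) {
        if (tc == null) {
            return false;
        }
        float[][] curve = new float[3][];
        // CHANNEL_RED..CHANNEL_BLUE are the channel indices 0..2; each
        // control point is a (Pin, Pout) pair, so a channel's flat array
        // needs getPointCount(i) * POINT_SIZE floats.
        for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) {
            curve[i] = new float[tc.getPointCount(i) * TonemapCurve.POINT_SIZE];
            tc.copyColorCurve(i, curve[i], 0);
        }
        setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]);
        setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]);
        setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]);
        return true;
    }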

Example 3 with TonemapCurve

Use of android.hardware.camera2.params.TonemapCurve in project android_frameworks_base by AOSPA.

From the class CameraMetadataTest, method testReadWriteOverride. The code is identical to Example 1 above.

Example 4 with TonemapCurve

Use of android.hardware.camera2.params.TonemapCurve in project android_frameworks_base by ResurrectionRemix.

From the class CameraMetadataTest, method testReadWriteOverride. The code is identical to Example 1 above.

Example 5 with TonemapCurve

Use of android.hardware.camera2.params.TonemapCurve in project android_frameworks_base by ResurrectionRemix.

From the class CameraMetadataNative, method getTonemapCurve. The code is identical to Example 2 above.

Aggregations

TonemapCurve (android.hardware.camera2.params.TonemapCurve): 10
Point (android.graphics.Point): 5
Rect (android.graphics.Rect): 5
Face (android.hardware.camera2.params.Face): 5
SmallTest (android.test.suitebuilder.annotation.SmallTest): 5