Example 21 with FrameImage2D

Use of androidx.media.filterfw.FrameImage2D in the project android_frameworks_base by ResurrectionRemix.

From the class ImageGoodnessFilter, method onProcess.

/**
 * @see androidx.media.filterfw.Filter#onProcess()
 */
@Override
protected void onProcess() {
    FrameValue sharpnessFrameValue = getConnectedInputPort("sharpness").pullFrame().asFrameValue();
    float sharpness = ((Float) sharpnessFrameValue.getValue()).floatValue();
    FrameValue overExposureFrameValue = getConnectedInputPort("overExposure").pullFrame().asFrameValue();
    float overExposure = ((Float) overExposureFrameValue.getValue()).floatValue();
    FrameValue underExposureFrameValue = getConnectedInputPort("underExposure").pullFrame().asFrameValue();
    float underExposure = ((Float) underExposureFrameValue.getValue()).floatValue();
    FrameValue colorfulnessFrameValue = getConnectedInputPort("colorfulness").pullFrame().asFrameValue();
    float colorfulness = ((Float) colorfulnessFrameValue.getValue()).floatValue();
    FrameValue contrastRatingFrameValue = getConnectedInputPort("contrastRating").pullFrame().asFrameValue();
    float contrastRating = ((Float) contrastRatingFrameValue.getValue()).floatValue();
    FrameValue brightnessFrameValue = getConnectedInputPort("brightness").pullFrame().asFrameValue();
    float brightness = ((Float) brightnessFrameValue.getValue()).floatValue();
    FrameValue motionValuesFrameValue = getConnectedInputPort("motionValues").pullFrame().asFrameValue();
    float[] motionValues = (float[]) motionValuesFrameValue.getValue();
    float vectorAccel = (float) Math.sqrt(Math.pow(motionValues[0], 2) + Math.pow(motionValues[1], 2) + Math.pow(motionValues[2], 2));
    String outStr;
    FrameValue capturingFrameValue = getConnectedInputPort("capturing").pullFrame().asFrameValue();
    boolean capturing = (Boolean) capturingFrameValue.getValue();
    FrameImage2D inputImage = getConnectedInputPort("image").pullFrame().asFrameImage2D();
    // TODO: get rid of magic numbers
    float score = computePictureScore(vectorAccel, sharpness, underExposure, overExposure, contrastRating, colorfulness, brightness);
    if (scoreMean == 0)
        scoreMean = score;
    else
        scoreMean = scoreMean * (1 - DECAY) + score * DECAY;
    if (motionMean == 0)
        motionMean = vectorAccel;
    else
        motionMean = motionMean * (1 - DECAY) + vectorAccel * DECAY;
    float classifierScore = classifierComputeScore(vectorAccel, sharpness, underExposure, colorfulness, contrastRating, score);
    //        Log.v(TAG, "ClassifierScore:: " + classifierScore);
    final float GREAT_SCORE = 3.5f;
    final float GOOD_SCORE = 2.5f;
    final float OK_SCORE = 1.5f;
    final float BAD_SCORE = 0.5f;
    if (score >= GREAT_SCORE) {
        outStr = GREAT;
    } else if (score >= GOOD_SCORE) {
        outStr = GOOD;
    } else if (score >= OK_SCORE) {
        outStr = OK;
    } else if (score >= BAD_SCORE) {
        outStr = BAD;
    } else {
        outStr = AWFUL;
    }
    if (capturing) {
        if (outStr.equals(GREAT)) {
            // take a picture
            Bitmap bitmap = inputImage.toBitmap();
            new AsyncOperation().execute(bitmap);
            final float RESET_FEATURES = 0.01f;
            sharpnessMean = RESET_FEATURES;
            underExposureMean = RESET_FEATURES;
            overExposureMean = RESET_FEATURES;
            contrastMean = RESET_FEATURES;
            colorfulnessMean = RESET_FEATURES;
            brightnessMean = RESET_FEATURES;
        }
    }
    OutputPort outPort = getConnectedOutputPort("goodOrBadPic");
    FrameValue stringFrame = outPort.fetchAvailableFrame(null).asFrameValue();
    stringFrame.setValue(outStr);
    outPort.pushFrame(stringFrame);
    OutputPort scoreOutPort = getConnectedOutputPort("score");
    FrameValue scoreFrame = scoreOutPort.fetchAvailableFrame(null).asFrameValue();
    scoreFrame.setValue(score);
    scoreOutPort.pushFrame(scoreFrame);
}
Also used: OutputPort (androidx.media.filterfw.OutputPort), Bitmap (android.graphics.Bitmap), FrameImage2D (androidx.media.filterfw.FrameImage2D), FrameValue (androidx.media.filterfw.FrameValue)
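
For context, the scoreMean and motionMean fields above are maintained as exponential moving averages. A minimal standalone sketch of that update, using a hypothetical helper class and an assumed DECAY value (the filter defines its own), might look like this:

final class RunningMean {

    // Assumed decay constant for illustration only; the filter defines its own DECAY.
    private static final float DECAY = 0.05f;

    private float mean = 0f;

    float update(float sample) {
        // The first sample seeds the mean; later samples are blended with the history,
        // mirroring the scoreMean/motionMean updates in onProcess() above.
        mean = (mean == 0f) ? sample : mean * (1 - DECAY) + sample * DECAY;
        return mean;
    }
}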

Example 22 with FrameImage2D

Use of androidx.media.filterfw.FrameImage2D in the project android_frameworks_base by ResurrectionRemix.

From the class AvgBrightnessFilterTest, method testBrightnessFilter.

public void testBrightnessFilter() throws Exception {
    final int INPUT_WIDTH = 480;
    final int INPUT_HEIGHT = 640;
    FrameImage2D image = createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU), new int[] { INPUT_WIDTH, INPUT_HEIGHT }).asFrameImage2D();
    Bitmap bitmap = BitmapFactory.decodeStream(assetMgr.open("0002_000390.jpg"));
    image.setBitmap(bitmap);
    injectInputFrame("image", image);
    process();
    final float EXPECTED_RESULT = 0.35f;
    assertEquals(EXPECTED_RESULT, ((Float) getOutputFrame("brightnessRating").asFrameValue().getValue()).floatValue(), 0.01f);
}
Also used: Bitmap (android.graphics.Bitmap), FrameImage2D (androidx.media.filterfw.FrameImage2D)

Example 23 with FrameImage2D

Use of androidx.media.filterfw.FrameImage2D in the project android_frameworks_base by ResurrectionRemix.

From the class ExposureFilterTest, method testExposureFilter.

public void testExposureFilter() throws Exception {
    final int INPUT_WIDTH = 480;
    final int INPUT_HEIGHT = 640;
    FrameImage2D image = createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU), new int[] { INPUT_WIDTH, INPUT_HEIGHT }).asFrameImage2D();
    Bitmap bitmap = BitmapFactory.decodeStream(assetMgr.open("0002_000390.jpg"));
    image.setBitmap(bitmap);
    injectInputFrame("image", image);
    process();
    final float EXPECTED_OVEREXPOSURE = 0.00757f;
    assertEquals(EXPECTED_OVEREXPOSURE, ((Float) getOutputFrame("overExposureRating").asFrameValue().getValue()).floatValue(), 0.001f);
    final float EXPECTED_UNDEREXPOSURE = 0.2077f;
    assertEquals(EXPECTED_UNDEREXPOSURE, ((Float) getOutputFrame("underExposureRating").asFrameValue().getValue()).floatValue(), 0.001f);
}
Also used: Bitmap (android.graphics.Bitmap), FrameImage2D (androidx.media.filterfw.FrameImage2D)

Example 24 with FrameImage2D

Use of androidx.media.filterfw.FrameImage2D in the project android_frameworks_base by ResurrectionRemix.

From the class FaceSquareFilterTest, method testFaceSquareFilter.

public void testFaceSquareFilter() throws Exception {
    final int INPUT_WIDTH = 1536;
    final int INPUT_HEIGHT = 2048;
    FrameImage2D image = createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU), new int[] { INPUT_WIDTH, INPUT_HEIGHT }).asFrameImage2D();
    FrameValues facesFrame = createFrame(FrameType.array(Camera.Face.class), new int[] { 1, 1 }).asFrameValues();
    Bitmap bitmap = BitmapFactory.decodeStream(assetMgr.open("XZZ019.jpg"));
    image.setBitmap(bitmap);
    injectInputFrame("image", image);
    Face face = new Face();
    Rect faceRect = new Rect();
    // These are the values for image 141 with 1 face
    faceRect.set(-533, -453, 369, 224);
    face.rect = faceRect;
    Face[] faces = new Face[1];
    faces[0] = face;
    facesFrame.setValue(faces);
    injectInputFrame("faces", facesFrame);
    process();
    // ensure the output image has the rectangle in the right place
    FrameImage2D outputImage = getOutputFrame("image").asFrameImage2D();
    int[] pixels = new int[bitmap.getByteCount()];
    bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
    final int FACE_X_RANGE = 2000;
    final int WIDTH_OFFSET = 1000;
    final int HEIGHT_OFFSET = 1000;
    int top = (faceRect.top + HEIGHT_OFFSET) * bitmap.getHeight() / FACE_X_RANGE;
    int bottom = (faceRect.bottom + HEIGHT_OFFSET) * bitmap.getHeight() / FACE_X_RANGE;
    int left = (faceRect.left + WIDTH_OFFSET) * bitmap.getWidth() / FACE_X_RANGE;
    int right = (faceRect.right + WIDTH_OFFSET) * bitmap.getWidth() / FACE_X_RANGE;
    if (top < 0) {
        top = 0;
    } else if (top > bitmap.getHeight()) {
        top = bitmap.getHeight();
    }
    if (left < 0) {
        left = 0;
    } else if (left > bitmap.getWidth()) {
        left = bitmap.getWidth();
    }
    if (bottom > bitmap.getHeight()) {
        bottom = bitmap.getHeight();
    } else if (bottom < 0) {
        bottom = 0;
    }
    if (right > bitmap.getWidth()) {
        right = bitmap.getWidth();
    } else if (right < 0) {
        right = 0;
    }
    for (int j = 0; j < (bottom - top); j++) {
        // Left edge
        if (left > 0 && top > 0) {
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + left) + ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + left) + ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + left) + ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
        }
        // Right edge
        if (right > 0 && top > 0) {
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + right) + ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + right) + ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + right) + ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
        }
    }
    for (int k = 0; k < (right - left); k++) {
        // Top edge
        if (top < bitmap.getHeight()) {
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * top + left + k) + ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * top + left + k) + ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * top + left + k) + ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
        }
        // Bottom edge
        if (bottom < bitmap.getHeight()) {
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * bottom + left + k) + ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * bottom + left + k) + ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;
            pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * bottom + left + k) + ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;
        }
    }
    Bitmap outputBitmap = outputImage.toBitmap();
    int[] outputPixels = new int[outputBitmap.getByteCount()];
    outputBitmap.getPixels(outputPixels, 0, outputBitmap.getWidth(), 0, 0, outputBitmap.getWidth(), outputBitmap.getHeight());
    int equalCount = 0;
    for (int i = 0; i < outputBitmap.getByteCount(); i++) {
        if (pixels[i] == outputPixels[i])
            equalCount++;
    }
    if (equalCount + (0.05f * outputBitmap.getByteCount()) < outputBitmap.getByteCount()) {
        // Fewer than 95% of the entries matched; fail and surface the mismatch
        assertEquals(equalCount, outputBitmap.getByteCount());
    }
}
Also used: Bitmap (android.graphics.Bitmap), Rect (android.graphics.Rect), FrameValues (androidx.media.filterfw.FrameValues), FrameImage2D (androidx.media.filterfw.FrameImage2D), Camera (android.hardware.Camera), Face (android.hardware.Camera.Face)
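
The FACE_X_RANGE, WIDTH_OFFSET, and HEIGHT_OFFSET constants above reflect the Camera.Face coordinate space, which spans -1000 to 1000 on both axes. A hypothetical helper (not part of the test) showing the same mapping and clamping:

// Hypothetical helper illustrating the mapping used above: Camera.Face rectangles are
// reported in the driver's -1000..1000 coordinate space and scaled to bitmap pixels.
static int faceCoordToPixel(int faceCoord, int bitmapExtent) {
    final int FACE_RANGE = 2000;  // total span of the -1000..1000 face space
    final int FACE_OFFSET = 1000; // shifts -1000..1000 into 0..2000
    int pixel = (faceCoord + FACE_OFFSET) * bitmapExtent / FACE_RANGE;
    // Clamp to the bitmap bounds, as the test does for each edge.
    return Math.max(0, Math.min(pixel, bitmapExtent));
}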

Example 25 with FrameImage2D

Use of androidx.media.filterfw.FrameImage2D in the project android_frameworks_base by ResurrectionRemix.

From the class IfElseFilterTest, method testIfElseFilterTrue.

public void testIfElseFilterTrue() throws Exception {
    FrameImage2D image = createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU), new int[] { BIG_INPUT_WIDTH, BIG_INPUT_HEIGHT }).asFrameImage2D();
    FrameImage2D video = createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU), new int[] { SMALL_INPUT_WIDTH, SMALL_INPUT_HEIGHT }).asFrameImage2D();
    // Image of legs
    Bitmap videoBitmap = BitmapFactory.decodeStream(assetMgr.open("0002_000390.jpg"));
    // Image of a face
    Bitmap imageBitmap = BitmapFactory.decodeStream(assetMgr.open("XZZ019.jpg"));
    image.setBitmap(imageBitmap);
    injectInputFrame("falseResult", image);
    video.setBitmap(videoBitmap);
    injectInputFrame("trueResult", video);
    FrameValue conditionFrame = createFrame(FrameType.single(boolean.class), new int[] { 1 }).asFrameValue();
    conditionFrame.setValue(true);
    injectInputFrame("condition", conditionFrame);
    process();
    // Ensure that for true, we use the video input
    FrameImage2D outputImage = getOutputFrame("output").asFrameImage2D();
    assertEquals(outputImage, video);
}
Also used: Bitmap (android.graphics.Bitmap), FrameImage2D (androidx.media.filterfw.FrameImage2D), FrameValue (androidx.media.filterfw.FrameValue)
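
The false branch would presumably be covered by a mirrored test that flips the condition; a hypothetical sketch (not from the source project), reusing the same setup, might look like this:

// Hypothetical mirror of the test above: with the condition false, IfElseFilter
// should pass through the "falseResult" input (the still image) instead.
public void testIfElseFilterFalse() throws Exception {
    FrameImage2D image = createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU), new int[] { BIG_INPUT_WIDTH, BIG_INPUT_HEIGHT }).asFrameImage2D();
    FrameImage2D video = createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU), new int[] { SMALL_INPUT_WIDTH, SMALL_INPUT_HEIGHT }).asFrameImage2D();
    image.setBitmap(BitmapFactory.decodeStream(assetMgr.open("XZZ019.jpg")));
    injectInputFrame("falseResult", image);
    video.setBitmap(BitmapFactory.decodeStream(assetMgr.open("0002_000390.jpg")));
    injectInputFrame("trueResult", video);
    FrameValue conditionFrame = createFrame(FrameType.single(boolean.class), new int[] { 1 }).asFrameValue();
    conditionFrame.setValue(false);
    injectInputFrame("condition", conditionFrame);
    process();
    // For a false condition, the output should be the "falseResult" image frame.
    assertEquals(image, getOutputFrame("output").asFrameImage2D());
}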

Aggregations

FrameImage2D (androidx.media.filterfw.FrameImage2D): 72 uses
OutputPort (androidx.media.filterfw.OutputPort): 48 uses
Bitmap (android.graphics.Bitmap): 32 uses
FrameValue (androidx.media.filterfw.FrameValue): 32 uses
ByteBuffer (java.nio.ByteBuffer): 24 uses
Face (android.hardware.Camera.Face): 8 uses
FrameBuffer2D (androidx.media.filterfw.FrameBuffer2D): 8 uses
FrameValues (androidx.media.filterfw.FrameValues): 8 uses
Quad (androidx.media.filterfw.geometry.Quad): 8 uses
Canvas (android.graphics.Canvas): 4 uses
Matrix (android.graphics.Matrix): 4 uses
Paint (android.graphics.Paint): 4 uses
Rect (android.graphics.Rect): 4 uses
Camera (android.hardware.Camera): 4 uses
RenderTarget (androidx.media.filterfw.RenderTarget): 4 uses