Search in sources :

Example 16 with Mat

use of org.opencv.core.Mat in project Auto.js by hyb1996.

From the class Images, the method findImage:

/**
 * Finds the best match of {@code template} inside {@code image} using pyramid
 * template matching, optionally restricted to {@code rect}, and returns the
 * match location scaled to screen coordinates (or {@code null} if no match
 * passes {@code threshold}).
 */
public Point findImage(ImageWrapper image, ImageWrapper template, float weakThreshold, float threshold, Rect rect, int maxLevel) {
    if (image == null)
        throw new NullPointerException("image = null");
    if (template == null)
        throw new NullPointerException("template = null");
    Mat src = image.getMat();
    if (rect != null) {
        // Restrict the search to the requested region of interest (view, no copy).
        src = new Mat(src, rect);
    }
    org.opencv.core.Point point = TemplateMatching.fastTemplateMatching(src, template.getMat(), TemplateMatching.MATCHING_METHOD_DEFAULT, weakThreshold, threshold, maxLevel);
    if (point != null) {
        if (rect != null) {
            // Translate the ROI-relative match back into full-image coordinates.
            point.x += rect.x;
            point.y += rect.y;
        }
        point.x = mScreenMetrics.scaleX((int) point.x);
        // BUG FIX: the original scaled y with scaleX as well, which returns a
        // wrong y whenever the horizontal and vertical scale factors differ.
        point.y = mScreenMetrics.scaleY((int) point.y);
    }
    return point;
}
Also used : Mat(org.opencv.core.Mat) Point(org.opencv.core.Point)

Example 17 with Mat

use of org.opencv.core.Mat in project Relic_Main by TeamOverdrive.

From the class CryptoboxDetector, the method processFrame:

/**
 * Processes one camera frame: finds the vertical dividers of a FTC cryptobox
 * in the color mask, groups them into columns, tracks the three column gaps
 * across frames, and draws the results onto {@code rgba}.
 *
 * @param rgba full-color input frame; also used as the annotated output
 * @param gray grayscale companion frame (unused here)
 * @return the annotated {@code rgba} frame
 */
@Override
public Mat processFrame(Mat rgba, Mat gray) {
    // Work on a half-resolution copy to keep per-frame processing cheap.
    downScaleFactor = 0.5;
    Size initSize = rgba.size();
    newSize = new Size(initSize.width * downScaleFactor, initSize.height * downScaleFactor);
    rgba.copyTo(workingMat);
    avgPoints = new ArrayList<>();
    Imgproc.resize(workingMat, workingMat, newSize);
    if (rotateMat) {
        // Rotate via transpose + flip; release the temp to avoid a native leak.
        Mat tempBefore = workingMat.t();
        Core.flip(tempBefore, workingMat, 1);
        tempBefore.release();
    }
    // Produce a binary mask of the alliance color being tracked.
    switch(detectionMode) {
        case RED:
            Mat redMask = workingMat.clone();
            colorFilterRed.process(redMask, mask);
            redMask.release();
            break;
        case BLUE:
            Mat blueMask = workingMat.clone();
            colorFilterBlue.process(blueMask, mask);
            blueMask.release();
            break;
    }
    // Detect line segments in the mask and extend them so near-collinear
    // fragments of the same divider overlap.
    ArrayList<Line> lines = (ArrayList<Line>) Lines.getOpenCvLines(mask, 1, 55);
    lines = (ArrayList<Line>) Lines.linearExtend(lines, 4, newSize);
    // Keep only roughly vertical lines (> 45 degrees from the horizontal
    // reference line) — cryptobox dividers appear vertical in the frame.
    List<Line> linesVertical = new ArrayList<Line>();
    for (Line line : lines) {
        if (Lines.getAngularDistance(line, new Line(new Point(0, 0), new Point(100, 0))) > 45) {
            linesVertical.add(line);
        }
    }
    // Sort left-to-right by center x so lines of the same column are adjacent.
    Collections.sort(linesVertical, new Comparator<Line>() {

        @Override
        public int compare(Line line1, Line line2) {
            return Double.compare(line1.center().x, line2.center().x);
        }
    });
    if (linesVertical.size() == 0) {
        CryptoBoxDetected = false;
        ColumnDetected = false;
        return rgba;
    }
    Line left = linesVertical.get(0);
    Line right = linesVertical.get(linesVertical.size() - 1);
    // Use 1/6 of the leftmost-to-rightmost spread as the clustering distance
    // for lines that belong to the same physical column.
    double columnLength = Lines.getPerpindicularDistance(left, right) / 6;
    // Greedily group consecutive (sorted) lines that lie closer than columnLength.
    List<List<Line>> groupings = new ArrayList<List<Line>>();
    int j = 0;
    while (j < linesVertical.size()) {
        List<Line> group = new ArrayList<Line>();
        group.add(linesVertical.get(j));
        int i = j + 1;
        while (i < linesVertical.size() && Lines.getPerpindicularDistance(linesVertical.get(j), linesVertical.get(i)) < columnLength) {
            group.add(linesVertical.get(i));
            i++;
        }
        groupings.add(group);
        j = i;
    }
    // Merge adjacent groups when the mask is filled between them — they are
    // two halves of the same physical column.
    for (int i = 0; i < groupings.size() - 1; i++) {
        Point center = new Line(Lines.getMeanPoint(groupings.get(i)), Lines.getMeanPoint(groupings.get(i + 1))).center();
        int y = (int) MathFTC.clip(0.6 * center.y, 0, mask.height());
        double max = 1.4 * center.y;
        if (center.y < 125) {
            y = 1;
            max = 250;
        }
        int count = 0;
        while (y < mask.height() && y < max && count < 10) {
            if (mask.get(y, (int) center.x)[0] > 0) {
                count++;
            }
            y += 10;
        }
        if (count >= 10) {
            List<Line> appendee = groupings.get(i);
            appendee.addAll(groupings.get(i + 1));
            groupings.set(i, appendee);
            groupings.remove(i + 1);
            // Re-check the merged group against its new right-hand neighbor.
            i -= 1;
        }
    }
    // Discard groups not backed by enough mask pixels along their vertical
    // extent (spurious clusters from noise).
    for (int i = 0; i < groupings.size(); i++) {
        Point center = Lines.getMeanPoint(groupings.get(i));
        int y = (int) MathFTC.clip(0.2 * center.y, 0, mask.height());
        double max = 1.8 * center.y;
        if (center.y < 50) {
            y = 1;
            // BUG FIX: the original wrote "(int) 0.8 * mask.height()" — the
            // cast binds to 0.8 alone, truncating it to 0, so max was always
            // 0 and groups near the top of the frame were always rejected.
            // Apply the cast to the whole product instead.
            max = (int) (0.8 * mask.height());
        }
        int minX = (int) MathFTC.clip(center.x - 5, 0, mask.width());
        int maxX = (int) MathFTC.clip(center.x + 5, 0, mask.width());
        int count = 0;
        while (y < mask.height() && y < max && count < 10) {
            if (mask.get(y, (int) center.x)[0] > 0 || mask.get(y, minX)[0] > 0 || mask.get(y, maxX)[0] > 0) {
                count++;
            }
            y += 4;
        }
        if (count <= 9) {
            groupings.remove(i);
            i -= 1;
        }
    }
    // A cryptobox has exactly 4 columns; keep the 4 tightest groups
    // (smallest x spread) if more survived.
    if (groupings.size() > 4) {
        Collections.sort(groupings, new Comparator<List<Line>>() {

            @Override
            public int compare(List<Line> g1, List<Line> g2) {
                return Double.compare(Lines.stdDevX(g1), Lines.stdDevX(g2));
            }
        });
        groupings = groupings.subList(0, 4);
    }
    // Build one representative line per surviving group.
    List<Line> columns = new ArrayList<Line>();
    for (int i = 0; i < groupings.size(); i++) {
        columns.add(Lines.constructLine(Lines.getMeanPoint(groupings.get(i)), Lines.getMeanAngle(groupings.get(i)), 400));
    }
    // Scale groups back to full-frame coordinates (kept for debug drawing).
    for (int i = 0; i < groupings.size(); i++) {
        groupings.set(i, Lines.resize(groupings.get(i), 1 / downScaleFactor));
    }
    // Draw the detected column lines on the full-size frame.
    for (Line line : columns) {
        line.resize(1 / downScaleFactor);
        Imgproc.line(rgba, line.point1, line.point2, new Scalar(20, 165, 240), 20);
    }
    if (columns.size() < 3) {
        trackables = new ArrayList<>();
        CryptoBoxDetected = false;
        ColumnDetected = false;
        return rgba;
    }
    // Track the midpoints between adjacent columns (the three scoring slots)
    // across the last trackableMemory frames and average them for stability.
    for (int i = 0; i < columns.size() - 1; i++) {
        Line conec = Lines.getPerpindicularConnector(columns.get(i), columns.get(i + 1), rgba.size());
        Point centerPoint = conec.center();
        if (i < 3) {
            if (trackables.size() == 0) {
                for (int l = 0; l < trackableMemory; l++) {
                    trackables.add(new ArrayList<Point>());
                }
            }
            if (trackables.size() <= i) {
                trackables.add(new ArrayList<Point>());
            }
            if (trackables.get(i).size() < trackableMemory) {
                trackables.get(i).add(centerPoint);
            } else {
                // History full: drop the oldest sample, append the newest
                // (ring-buffer behavior via rotate).
                Collections.rotate(trackables.get(i), -1);
                trackables.get(i).set(trackableMemory - 1, centerPoint);
            }
        }
        Point avgPoint = Points.getMeanPoint(trackables.get(i));
        Imgproc.putText(rgba, "Col #" + i, new Point(avgPoint.x, avgPoint.y - 15), 0, 1.5, new Scalar(0, 255, 255), 2);
        Imgproc.circle(rgba, avgPoint, 15, new Scalar(0, 255, 0), 6);
        avgPoints.add(avgPoint);
        CryptoBoxPositions[i] = (int) avgPoint.x;
    }
    if (avgPoints.size() == 3) {
        CryptoBoxDetected = true;
    }
    ColumnDetected = true;
    // Reset the tracking history if the averaged detection jumped too far in
    // a single frame (tracking has latched onto something else).
    Point newFull = Points.getMeanPoint(avgPoints);
    Line newFullLine = new Line(newFull, fullAvgPoint);
    if (newFullLine.length() > 75) {
        trackables = new ArrayList<>();
        Log.d("DogeCV", "RESETTING TRACKABLE!");
    }
    fullAvgPoint = newFull;
    Imgproc.putText(rgba, "DogeCV 1.1 Crypto: " + newSize.toString() + " - " + speed.toString() + " - " + detectionMode.toString(), new Point(5, 30), 0, 1.2, new Scalar(0, 255, 255), 2);
    return rgba;
}
Also used : Mat(org.opencv.core.Mat) Size(org.opencv.core.Size) ArrayList(java.util.ArrayList) Point(org.opencv.core.Point) MatOfPoint(org.opencv.core.MatOfPoint) Point(org.opencv.core.Point) MatOfPoint(org.opencv.core.MatOfPoint) Scalar(org.opencv.core.Scalar) Line(com.disnodeteam.dogecv.math.Line) ArrayList(java.util.ArrayList) List(java.util.List)

Example 18 with Mat

use of org.opencv.core.Mat in project Relic_Main by TeamOverdrive.

From the class GenericDetector, the method processFrame:

/**
 * Processes one camera frame: color-filters it to a mask, finds contours,
 * scores each contour's bounding rect by aspect ratio and area, and records
 * the best-scoring rect (lowest score wins) in resultRect/resultLocation.
 *
 * @param rgba full-color input frame
 * @param gray grayscale companion frame (unused here)
 * @return the annotated working frame, resized back to the input size
 */
@Override
public Mat processFrame(Mat rgba, Mat gray) {
    Size initSize = rgba.size();
    // Work at reduced resolution to keep per-frame processing cheap.
    newSize = new Size(initSize.width * downScaleFactor, initSize.height * downScaleFactor);
    rgba.copyTo(workingMat);
    Imgproc.resize(workingMat, workingMat, newSize);
    if (rotateMat) {
        // Rotate via transpose + flip; release the temp to avoid a native leak.
        Mat tempBefore = workingMat.t();
        Core.flip(tempBefore, workingMat, -1);
        tempBefore.release();
    }
    Mat preConvert = workingMat.clone();
    colorFilter.process(preConvert, mask);
    if (stretch) {
        // Morphological close to bridge small gaps so broken blobs merge
        // before contour extraction.
        structure = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, stretchKernal);
        Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_CLOSE, structure);
    }
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(mask, contours, hiarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
    Imgproc.drawContours(workingMat, contours, -1, new Scalar(230, 70, 70), 2);
    Rect chosenRect = null;
    // Lower score = better candidate; 0 would be a perfect match.
    double chosenScore = Integer.MAX_VALUE;
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    for (MatOfPoint c : contours) {
        MatOfPoint2f contour2f = new MatOfPoint2f(c.toArray());
        // Simplify the contour to a polygon before taking its bounding rect.
        double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02;
        Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true);
        MatOfPoint points = new MatOfPoint(approxCurve.toArray());
        Rect rect = Imgproc.boundingRect(points);
        double area = Imgproc.contourArea(c);
        double areaDiffrence = 0;
        switch(detectionMode) {
            case MAX_AREA:
                // BUG FIX: the original pre-multiplied by areaWeight here AND
                // again in finalDiffrence below, weighting area by
                // areaWeight^2 in MAX_AREA mode while PERFECT_AREA was
                // weighted once. Weight exactly once, in finalDiffrence.
                // NOTE(review): confirm existing areaWeight/maxDiffrence
                // tunings against the single-weighted score.
                areaDiffrence = -area;
                break;
            case PERFECT_AREA:
                areaDiffrence = Math.abs(perfectArea - area);
                break;
        }
        double x = rect.x;
        double y = rect.y;
        double w = rect.width;
        double h = rect.height;
        Point centerPoint = new Point(x + (w / 2), y + (h / 2));
        // max() guards against h and w being swapped by rotation handling.
        double cubeRatio = Math.max(Math.abs(h / w), Math.abs(w / h));
        double ratioDiffrence = Math.abs(cubeRatio - perfectRatio);
        double finalDiffrence = (ratioDiffrence * ratioWeight) + (areaDiffrence * areaWeight);
        if (finalDiffrence < chosenScore && finalDiffrence < maxDiffrence && area > minArea) {
            chosenScore = finalDiffrence;
            chosenRect = rect;
        }
        if (debugContours && area > 100) {
            Imgproc.circle(workingMat, centerPoint, 3, new Scalar(0, 255, 255), 3);
            Imgproc.putText(workingMat, "Area: " + String.format("%.1f", area), centerPoint, 0, 0.5, new Scalar(0, 255, 255));
        }
    }
    if (chosenRect != null) {
        Imgproc.rectangle(workingMat, new Point(chosenRect.x, chosenRect.y), new Point(chosenRect.x + chosenRect.width, chosenRect.y + chosenRect.height), new Scalar(0, 255, 0), 3);
        Imgproc.putText(workingMat, "Result: " + String.format("%.2f", chosenScore), new Point(chosenRect.x - 5, chosenRect.y - 10), Core.FONT_HERSHEY_PLAIN, 1.3, new Scalar(0, 255, 0), 2);
        // Divide by 2.0, not 2: integer division dropped the half-pixel.
        Point centerPoint = new Point(chosenRect.x + (chosenRect.width / 2.0), chosenRect.y + (chosenRect.height / 2.0));
        resultRect = chosenRect;
        resultLocation = centerPoint;
        resultFound = true;
    } else {
        resultFound = false;
        resultRect = null;
        resultLocation = null;
    }
    Imgproc.resize(workingMat, workingMat, initSize);
    preConvert.release();
    Imgproc.putText(workingMat, "DogeCV v1.1 Generic: " + newSize.toString() + " - " + speed.toString() + " - " + detectionMode.toString(), new Point(5, 30), 0, 1.2, new Scalar(0, 255, 255), 2);
    return workingMat;
}
Also used : Mat(org.opencv.core.Mat) Rect(org.opencv.core.Rect) Size(org.opencv.core.Size) MatOfPoint2f(org.opencv.core.MatOfPoint2f) ArrayList(java.util.ArrayList) MatOfPoint(org.opencv.core.MatOfPoint) Point(org.opencv.core.Point) MatOfPoint(org.opencv.core.MatOfPoint) Scalar(org.opencv.core.Scalar)

Example 19 with Mat

use of org.opencv.core.Mat in project Relic_Main by TeamOverdrive.

From the class Imgproc, the method drawContours:

// javadoc: drawContours(image, contours, contourIdx, color, thickness)
public static void drawContours(Mat image, List<MatOfPoint> contours, int contourIdx, Scalar color, int thickness) {
    // Pack the Java list of contours into the single Mat layout expected by
    // the native layer, then delegate to the JNI binding.
    int contourCount = (contours != null) ? contours.size() : 0;
    List<Mat> contourMats = new ArrayList<Mat>(contourCount);
    Mat packedContours = Converters.vector_vector_Point_to_Mat(contours, contourMats);
    drawContours_1(image.nativeObj, packedContours.nativeObj, contourIdx, color.val[0], color.val[1], color.val[2], color.val[3], thickness);
}
Also used : Mat(org.opencv.core.Mat) ArrayList(java.util.ArrayList)

Example 20 with Mat

use of org.opencv.core.Mat in project Relic_Main by TeamOverdrive.

From the class Imgproc, the method polylines:

// 
// C++:  void polylines(Mat& img, vector_vector_Point pts, bool isClosed, Scalar color, int thickness = 1, int lineType = LINE_8, int shift = 0)
// 
// javadoc: polylines(img, pts, isClosed, color, thickness, lineType, shift)
public static void polylines(Mat img, List<MatOfPoint> pts, boolean isClosed, Scalar color, int thickness, int lineType, int shift) {
    // Pack the Java list of polylines into the single Mat layout expected by
    // the native layer, then delegate to the JNI binding.
    int polyCount = (pts != null) ? pts.size() : 0;
    List<Mat> polyMats = new ArrayList<Mat>(polyCount);
    Mat packedPts = Converters.vector_vector_Point_to_Mat(pts, polyMats);
    polylines_0(img.nativeObj, packedPts.nativeObj, isClosed, color.val[0], color.val[1], color.val[2], color.val[3], thickness, lineType, shift);
}
Also used : Mat(org.opencv.core.Mat) ArrayList(java.util.ArrayList)

Aggregations

Mat (org.opencv.core.Mat)285 Point (org.opencv.core.Point)50 ArrayList (java.util.ArrayList)45 MatOfPoint (org.opencv.core.MatOfPoint)43 MatOfKeyPoint (org.opencv.core.MatOfKeyPoint)20 Size (org.opencv.core.Size)20 KeyPoint (org.opencv.core.KeyPoint)18 Scalar (org.opencv.core.Scalar)17 Rect (org.opencv.core.Rect)16 File (java.io.File)8 RotatedRect (org.opencv.core.RotatedRect)6 BufferedImage (java.awt.image.BufferedImage)5 FilterContext (de.serviceflow.frankenstein.plugin.api.FilterContext)4 SegmentVideoFilter (de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter)4 DefaultFilterContext (de.serviceflow.frankenstein.vf.DefaultFilterContext)4 VideoFilter (de.serviceflow.frankenstein.vf.VideoFilter)4 DataBufferByte (java.awt.image.DataBufferByte)4 IOException (java.io.IOException)4 FilterElement (de.serviceflow.frankenstein.vf.FilterElement)3 InvocationTargetException (java.lang.reflect.InvocationTargetException)3