Search in sources :

Example 1 with FeatureDetector

use of org.opencv.features2d.FeatureDetector in project seleniumRobot by bhecquet.

In class ImageDetector, the method detectCorrespondingZone:

/**
 * Computes the rectangle where the searched picture is located in the scene image,
 * and the rotation angle between both images.
 * <p>
 * Throws {@link ImageSearchException} if the picture is not found.
 *
 * @throws ImageSearchException if either image has no keypoints (uniformly coloured
 *                              picture) or if no matching zone can be found
 * @deprecated Kept here for information, but OpenCV 3 does not include SURF anymore for java build
 */
@Deprecated
public void detectCorrespondingZone() {
    // load both images in colour mode
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);

    // detect SURF keypoints in object and scene
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);
    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    // compute SURF descriptors for the detected keypoints
    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    // debug helper: write the object picture with its keypoints to a temp file
    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        File tmpImg = File.createTempFile("img", ".png");
        tmpImg.deleteOnExit();
        writeComparisonPictureToFile(tmpImg.getAbsolutePath(), outImage);
    } catch (IOException e) {
        // best effort only: a failed debug picture must not abort the detection
        logger.warn("could not write keypoint debug picture", e);
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: " + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }

    // FLANN matcher requires CV_32F descriptors
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    // materialize the native match list ONCE; toList() copies from native memory each call
    List<DMatch> matchesList = matches.toList();

    // compute min/max distances for logging purposes only
    double maxDist = 0;
    double minDist = 10000;
    for (DMatch match : matchesList) {
        double dist = match.distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }
    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    // keep only matches whose distance is below the configured threshold
    LinkedList<DMatch> goodMatches = new LinkedList<>();
    for (DMatch match : matchesList) {
        if (match.distance < detectionThreshold) {
            goodMatches.addLast(match);
        }
    }
    MatOfDMatch gm = new MatOfDMatch();
    gm.fromList(goodMatches);
    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch, Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    // collect the matched point pairs (iterate, don't index: LinkedList.get(i) is O(n))
    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();
    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();
    for (DMatch goodMatch : goodMatches) {
        objList.addLast(objectKeyPointsList.get(goodMatch.queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatch.trainIdx).pt);
    }
    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);
    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    // project the object's corners into the scene through the homography
    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);
    objectCorners.put(0, 0, 0, 0);
    objectCorners.put(1, 0, objectImageMat.cols(), 0);
    objectCorners.put(2, 0, objectImageMat.cols(), objectImageMat.rows());
    objectCorners.put(3, 0, 0, objectImageMat.rows());
    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));
    // point of object in scene
    // top left
    Point p1 = new Point(sceneCorners.get(0, 0));
    // top right
    Point p2 = new Point(sceneCorners.get(1, 0));
    // bottom right
    Point p3 = new Point(sceneCorners.get(2, 0));
    // bottom left
    Point p4 = new Point(sceneCorners.get(3, 0));
    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    // top left
    logger.debug(p1);
    // top right
    logger.debug(p2);
    // bottom right
    logger.debug(p3);
    // bottom left
    logger.debug(p4);

    if (debug) {
        try {
            // translate corners to the scene half of the side-by-side match picture
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });
            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);
            showResultingPicture(imgMatch);
        } catch (IOException e) {
            // debug display only; log and continue with detection
            logger.warn("could not show resulting picture", e);
        }
    }
    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);
    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270°
    reworkOnScenePoints(p1, p2, p3, p4);
    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);
    recordDetectedRectangle(p1, p2, p3, p4);
}
Also used : Mat(org.opencv.core.Mat) MatOfKeyPoint(org.opencv.core.MatOfKeyPoint) MatOfPoint2f(org.opencv.core.MatOfPoint2f) ImageSearchException(com.seleniumtests.customexception.ImageSearchException) IOException(java.io.IOException) Point(org.opencv.core.Point) MatOfKeyPoint(org.opencv.core.MatOfKeyPoint) KeyPoint(org.opencv.core.KeyPoint) Point(org.opencv.core.Point) MatOfKeyPoint(org.opencv.core.MatOfKeyPoint) KeyPoint(org.opencv.core.KeyPoint) LinkedList(java.util.LinkedList) FeatureDetector(org.opencv.features2d.FeatureDetector) Scalar(org.opencv.core.Scalar) DescriptorExtractor(org.opencv.features2d.DescriptorExtractor) MatOfKeyPoint(org.opencv.core.MatOfKeyPoint) KeyPoint(org.opencv.core.KeyPoint) MatOfDMatch(org.opencv.core.MatOfDMatch) DMatch(org.opencv.core.DMatch) MatOfDMatch(org.opencv.core.MatOfDMatch) DescriptorMatcher(org.opencv.features2d.DescriptorMatcher) MatOfByte(org.opencv.core.MatOfByte) File(java.io.File)

Example 2 with FeatureDetector

use of org.opencv.features2d.FeatureDetector in project kifu-recorder by leonardost.

In class BoardDetectorByImageSimilarity, the method generateDescriptorMatches:

/**
 * Detects ORB keypoints on clones of both input images, computes their descriptors,
 * brute-force matches them with Hamming distance, and stores the matches in
 * {@code matchesList} sorted by ascending distance (best matches first).
 *
 * @param image1 first image to compare (not modified; a clone is processed)
 * @param image2 second image to compare (not modified; a clone is processed)
 */
private void generateDescriptorMatches(Mat image1, Mat image2) {
    // work on clones so the caller's images are left untouched
    Mat processedImage1 = image1.clone();
    Mat processedImage2 = image2.clone();

    // ORB keypoint detection on both images
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    MatOfKeyPoint keypointsA = new MatOfKeyPoint();
    MatOfKeyPoint keypointsB = new MatOfKeyPoint();
    detector.detect(processedImage1, keypointsA);
    detector.detect(processedImage2, keypointsB);

    // compute ORB descriptors for the detected keypoints
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    Mat descriptorsA = new Mat();
    Mat descriptorsB = new Mat();
    extractor.compute(processedImage1, keypointsA, descriptorsA);
    extractor.compute(processedImage2, keypointsB, descriptorsB);

    // ORB descriptors are binary, so Hamming distance is the appropriate metric
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptorsA, descriptorsB, matches);
    matchesList = matches.toList();

    // sort best (smallest distance) matches first; Float.compare gives a
    // consistent total order, including NaN handling
    Collections.sort(matchesList, new Comparator<DMatch>() {

        @Override
        public int compare(DMatch a, DMatch b) {
            return Float.compare(a.distance, b.distance);
        }
    });
}
Also used : Mat(org.opencv.core.Mat) DescriptorExtractor(org.opencv.features2d.DescriptorExtractor) MatOfKeyPoint(org.opencv.core.MatOfKeyPoint) MatOfDMatch(org.opencv.core.MatOfDMatch) DMatch(org.opencv.core.DMatch) MatOfDMatch(org.opencv.core.MatOfDMatch) DescriptorMatcher(org.opencv.features2d.DescriptorMatcher) FeatureDetector(org.opencv.features2d.FeatureDetector)

Example 3 with FeatureDetector

use of org.opencv.features2d.FeatureDetector in project openpnp by openpnp.

In class SimpleBlobDetector, the method process:

/**
 * Runs OpenCV's SimpleBlobDetector over the pipeline's working image and returns
 * the detected blob keypoints.
 * <p>
 * The detector parameters are passed through a temporary YAML file because the
 * OpenCV Java bindings expose no direct setter API for SimpleBlobDetector
 * parameters ("YAML backdoor"). The temp file is always removed, even when
 * writing or reading the parameters fails.
 *
 * @param pipeline the pipeline providing the working image
 * @return a Result with a null model and the detected blob keypoints
 * @throws Exception if the temporary parameter file cannot be created or written
 */
public Result process(CvPipeline pipeline) throws Exception {
    Mat mat = pipeline.getWorkingImage();
    FeatureDetector blobDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
    File outputFile = File.createTempFile("SimpleBlobDetector", ".YAML");
    try {
        // negative "max" settings mean unbounded: substitute FLT_MAX (3.4028...E+38)
        writeToFile(outputFile, // java
        "%YAML:1.0" + // backdoor
        "\nthresholdStep: " + thresholdStep + "\nminThreshold: " + thresholdMin + "\nmaxThreshold: " + thresholdMax + "\nminRepeatability: " + repeatability + "\nminDistBetweenBlobs: " + distBetweenBlobs + "\nfilterByColor: " + (color ? 1 : 0) + "\nblobColor: " + colorValue + "\nfilterByArea: " + (area ? 1 : 0) + "\nminArea: " + areaMin + "\nmaxArea: " + (areaMax < 0. ? 3.4028234663852886E+038 : areaMax) + "\nfilterByCircularity: " + (circularity ? 1 : 0) + "\nminCircularity: " + circularityMin + "\nmaxCircularity: " + (circularityMax < 0. ? 3.4028234663852886E+038 : circularityMax) + "\nfilterByInertia: " + (inertia ? 1 : 0) + "\nminInertiaRatio: " + inertiaRatioMin + "\nmaxInertiaRatio: " + (inertiaRatioMax < 0. ? 3.4028234663852886E+038 : inertiaRatioMax) + "\nfilterByConvexity: " + (convexity ? 1 : 0) + "\nminConvexity: " + convexityMin + "\nmaxConvexity: " + (convexityMax < 0. ? 3.4028234663852886E+038 : convexityMax) + "\n");
        blobDetector.read(outputFile.getAbsolutePath());
    } finally {
        // previously the temp file leaked when writeToFile/read threw
        outputFile.delete();
    }
    MatOfKeyPoint kpMat = new MatOfKeyPoint();
    blobDetector.detect(mat, kpMat);
    List<KeyPoint> keypoints = kpMat.toList();
    // free the native keypoint matrix eagerly rather than waiting for GC
    kpMat.release();
    return new Result(null, keypoints);
}
Also used : Mat(org.opencv.core.Mat) KeyPoint(org.opencv.core.KeyPoint) MatOfKeyPoint(org.opencv.core.MatOfKeyPoint) MatOfKeyPoint(org.opencv.core.MatOfKeyPoint) File(java.io.File) FeatureDetector(org.opencv.features2d.FeatureDetector)

Aggregations

Mat (org.opencv.core.Mat)3 MatOfKeyPoint (org.opencv.core.MatOfKeyPoint)3 FeatureDetector (org.opencv.features2d.FeatureDetector)3 File (java.io.File)2 DMatch (org.opencv.core.DMatch)2 KeyPoint (org.opencv.core.KeyPoint)2 MatOfDMatch (org.opencv.core.MatOfDMatch)2 DescriptorExtractor (org.opencv.features2d.DescriptorExtractor)2 DescriptorMatcher (org.opencv.features2d.DescriptorMatcher)2 ImageSearchException (com.seleniumtests.customexception.ImageSearchException)1 IOException (java.io.IOException)1 LinkedList (java.util.LinkedList)1 MatOfByte (org.opencv.core.MatOfByte)1 MatOfPoint2f (org.opencv.core.MatOfPoint2f)1 Point (org.opencv.core.Point)1 Scalar (org.opencv.core.Scalar)1