Use of org.opencv.features2d.DescriptorExtractor in the project seleniumRobot by bhecquet.
From the class ImageDetector, method detectCorrespondingZone:
/**
 * Computes the rectangle where the searched picture is located inside the scene image
 * and the rotation angle between both images, using SURF keypoint detection, SURF
 * descriptors and FLANN-based matching.
 * <p>
 * Side effects: draws the good matches into {@code imgMatch} and records the detected
 * rectangle via {@code recordDetectedRectangle}. In debug mode, also shows the resulting
 * picture with the detected zone outlined.
 *
 * @throws ImageSearchException if either image has no keypoints (e.g. uniformly coloured)
 *                              or if no matching zone can be found
 * @deprecated Kept here for information, but OpenCV 3 does not include SURF anymore for java build
 */
public void detectCorrespondingZone() {
    // Load both images in colour
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);

    // Detect SURF keypoints in both images
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);
    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    // Compute SURF descriptors for the detected keypoints
    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    // Best-effort: dump the object keypoints to a temp image for debugging
    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        File tmpImg = File.createTempFile("img", ".png");
        tmpImg.deleteOnExit();
        String tempFile = tmpImg.getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {
        // not fatal: the debug image is informational only, but do not swallow silently
        logger.warn("could not write keypoints debug image", e);
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: " + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }

    // FLANN requires CV_32F descriptors
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }
    matcher.match(objectDescriptor, sceneDescriptor, matches);

    // Materialize the native match list ONCE: toList() converts the whole Mat each call,
    // so calling it inside the loops below was accidentally O(n^2)
    List<DMatch> matchList = matches.toList();

    // Single pass: track min/max distance (debug info) and keep the good matches
    double maxDist = 0;
    double minDist = 10000;
    LinkedList<DMatch> goodMatches = new LinkedList<>();
    for (DMatch match : matchList) {
        double dist = match.distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
        if (dist < detectionThreshold) {
            goodMatches.addLast(match);
        }
    }
    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    MatOfDMatch gm = new MatOfDMatch();
    gm.fromList(goodMatches);
    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch, Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);
    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    // Collect the matched point pairs (object -> scene)
    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();
    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();
    for (DMatch goodMatch : goodMatches) {
        objList.addLast(objectKeyPointsList.get(goodMatch.queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatch.trainIdx).pt);
    }
    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);
    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Homography mapping object coordinates into scene coordinates
    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    // Project the object's corners into the scene
    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);
    objectCorners.put(0, 0, 0, 0);
    objectCorners.put(1, 0, objectImageMat.cols(), 0);
    objectCorners.put(2, 0, objectImageMat.cols(), objectImageMat.rows());
    objectCorners.put(3, 0, 0, objectImageMat.rows());
    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));
    // point of object in scene: top-left, top-right, bottom-right, bottom-left
    Point p1 = new Point(sceneCorners.get(0, 0));
    Point p2 = new Point(sceneCorners.get(1, 0));
    Point p3 = new Point(sceneCorners.get(2, 0));
    Point p4 = new Point(sceneCorners.get(3, 0));
    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    // top left
    logger.debug(p1);
    // top right
    logger.debug(p2);
    // bottom right
    logger.debug(p3);
    // bottom left
    logger.debug(p4);

    if (debug) {
        try {
            // translate corners: imgMatch shows both pictures side by side, so the scene
            // coordinates are shifted right by the object image width
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });
            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);
            showResultingPicture(imgMatch);
        } catch (IOException e) {
            // not fatal: only the debug visualization failed
            logger.warn("could not show debug match picture", e);
        }
    }
    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);
    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270°
    reworkOnScenePoints(p1, p2, p3, p4);
    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);
    recordDetectedRectangle(p1, p2, p3, p4);
}
Use of org.opencv.features2d.DescriptorExtractor in the project kifu-recorder by leonardost.
From the class BoardDetectorByImageSimilarity, method generateDescriptorMatches:
/**
 * Detects ORB keypoints in both images, computes their descriptors, matches them with a
 * brute-force Hamming matcher and stores the matches in {@code matchesList}, sorted by
 * ascending match distance (best matches first).
 *
 * @param image1 first image to compare
 * @param image2 second image to compare
 */
private void generateDescriptorMatches(Mat image1, Mat image2) {
    // Defensive copies: kept for parity with the (currently disabled) preprocessing /
    // imwrite steps below, so re-enabling them cannot mutate the callers' Mats
    Mat processedImage1 = image1.clone();
    Mat processedImage2 = image2.clone();
    // Imgcodecs.imwrite("processing/difference_between_" + imageIndex + "_1.jpg", processedImage1);
    // Imgcodecs.imwrite("processing/difference_between_" + imageIndex + "_2.jpg", processedImage2);

    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    MatOfKeyPoint keypointsA = new MatOfKeyPoint();
    MatOfKeyPoint keypointsB = new MatOfKeyPoint();
    detector.detect(processedImage1, keypointsA);
    detector.detect(processedImage2, keypointsB);

    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    Mat descriptorsA = new Mat();
    Mat descriptorsB = new Mat();
    extractor.compute(processedImage1, keypointsA, descriptorsA);
    extractor.compute(processedImage2, keypointsB, descriptorsB);

    // ORB descriptors are binary, so Hamming distance is the appropriate metric
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptorsA, descriptorsB, matches);

    matchesList = matches.toList();
    // Float.compare provides a total order (handles NaN/-0.0); the previous hand-rolled
    // three-way comparison returned 0 for any NaN operand, which violates the Comparator
    // contract and can make TimSort throw "Comparison method violates its general contract!"
    Collections.sort(matchesList, new Comparator<DMatch>() {
        public int compare(DMatch a, DMatch b) {
            return Float.compare(a.distance, b.distance);
        }
    });
}
Aggregations