use of boofcv.gui.feature.AssociationPanel in project BoofCV by lessthanoptimal.
the class CompareConvertedDescriptionsApp method visualize.
public static <TD extends TupleDesc> void visualize( String title, BufferedImage image1, BufferedImage image2,
        InterestPointDetector<GrayF32> detector, DescribeRegionPoint<GrayF32, TD> describe, ScoreAssociation<TD> scorer ) {
    AssociateDescription<TD> assoc = FactoryAssociation.greedy(scorer, Double.MAX_VALUE, false);

    List<Point2D_F64> locationSrc = new ArrayList<>();
    List<Point2D_F64> locationDst = new ArrayList<>();

    GrayF32 input1 = ConvertBufferedImage.convertFrom(image1, (GrayF32) null);
    GrayF32 input2 = ConvertBufferedImage.convertFrom(image2, (GrayF32) null);

    // detect interest points and compute a descriptor for each one
    FastQueue<TD> listSrc = describeImage(input1, detector, describe, locationSrc);
    FastQueue<TD> listDst = describeImage(input2, detector, describe, locationDst);

    // greedily associate descriptors between the two images
    assoc.setSource(listSrc);
    assoc.setDestination(listDst);
    assoc.associate();
    FastQueue<AssociatedIndex> matches = assoc.getMatches();

    // visualize the associations
    AssociationPanel panel = new AssociationPanel(20);
    panel.setImages(image1, image2);
    panel.setAssociation(locationSrc, locationDst, matches);
    ShowImages.showWindow(panel, title);
}
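A hypothetical driver for this method could look like the sketch below. The kayak image paths, the SURF factory calls, and their settings are assumptions based on older BoofCV releases, not code taken from this project:
public static void main( String[] args ) {
    BufferedImage image1 = UtilImageIO.loadImage(UtilIO.pathExample("stitch/kayak_02.jpg"));
    BufferedImage image2 = UtilImageIO.loadImage(UtilIO.pathExample("stitch/kayak_03.jpg"));

    // Fast Hessian detector and stable SURF descriptor with default settings (assumed factories)
    InterestPointDetector<GrayF32> detector = FactoryInterestPoint.fastHessian(new ConfigFastHessian());
    DescribeRegionPoint<GrayF32, BrightFeature> describe = FactoryDescribeRegionPoint.surfStable(null, GrayF32.class);
    ScoreAssociation<BrightFeature> scorer = FactoryAssociation.scoreEuclidean(BrightFeature.class, true);

    visualize("SURF Associations", image1, image2, detector, describe, scorer);
}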
use of boofcv.gui.feature.AssociationPanel in project BoofCV by lessthanoptimal.
the class ExampleFundamentalMatrix method main.
public static void main( String[] args ) {
    String dir = UtilIO.pathExample("structure/");

    BufferedImage imageA = UtilImageIO.loadImage(dir, "undist_cyto_01.jpg");
    BufferedImage imageB = UtilImageIO.loadImage(dir, "undist_cyto_02.jpg");

    List<AssociatedPair> matches = computeMatches(imageA, imageB);

    // Where the fundamental matrix is stored
    DMatrixRMaj F;
    // List of matches that matched the model
    List<AssociatedPair> inliers = new ArrayList<>();

    // Estimate and print the results using a robust and a simple estimator.
    // The results should be different since there are many false associations, which the simple
    // estimator cannot handle. Also note that the fundamental matrix is only defined up to a scale factor.
    F = robustFundamental(matches, inliers);
    System.out.println("Robust");
    F.print();

    F = simpleFundamental(matches);
    System.out.println("Simple");
    F.print();

    // display the inlier matches found using the robust estimator
    AssociationPanel panel = new AssociationPanel(20);
    panel.setAssociation(inliers);
    panel.setImages(imageA, imageB);
    ShowImages.showWindow(panel, "Inlier Pairs");
}
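The helpers computeMatches(), robustFundamental(), and simpleFundamental() are defined elsewhere in the example. As a rough illustration of the robust path, robustFundamental() could be sketched with RANSAC as follows; FactoryMultiViewRobust.fundamentalRansac and the iteration/threshold values are assumptions, not the example's exact implementation:
public static DMatrixRMaj robustFundamental( List<AssociatedPair> matches, List<AssociatedPair> inliers ) {
    // RANSAC wrapper around a fundamental-matrix estimator; 2000 iterations and a
    // 0.5 pixel inlier tolerance are arbitrary choices for this sketch
    ModelMatcher<DMatrixRMaj, AssociatedPair> ransac =
            FactoryMultiViewRobust.fundamentalRansac(new ConfigFundamental(), new ConfigRansac(2000, 0.5));

    if (!ransac.process(matches))
        throw new RuntimeException("RANSAC failed to estimate a fundamental matrix");

    // keep the observations which are consistent with the estimated model
    inliers.addAll(ransac.getMatchSet());

    // the returned matrix is only defined up to a scale factor
    return ransac.getModelParameters().copy();
}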
use of boofcv.gui.feature.AssociationPanel in project BoofCV by lessthanoptimal.
the class CompareConvertedDescriptionsApp method visualize.
public static <TD extends TupleDesc<TD>> void visualize( String title, BufferedImage image1, BufferedImage image2,
        InterestPointDetector<GrayF32> detector, DescribePointRadiusAngle<GrayF32, TD> describe, ScoreAssociation<TD> scorer ) {
    AssociateDescription<TD> assoc = FactoryAssociation.greedy(new ConfigAssociateGreedy(false), scorer);

    List<Point2D_F64> locationSrc = new ArrayList<>();
    List<Point2D_F64> locationDst = new ArrayList<>();

    GrayF32 input1 = ConvertBufferedImage.convertFrom(image1, (GrayF32) null);
    GrayF32 input2 = ConvertBufferedImage.convertFrom(image2, (GrayF32) null);

    // detect interest points and compute a descriptor for each one
    FastAccess<TD> listSrc = describeImage(input1, detector, describe, locationSrc);
    FastAccess<TD> listDst = describeImage(input2, detector, describe, locationDst);

    // greedily associate descriptors between the two images
    assoc.setSource(listSrc);
    assoc.setDestination(listDst);
    assoc.associate();
    FastAccess<AssociatedIndex> matches = assoc.getMatches();

    // visualize the associations
    AssociationPanel panel = new AssociationPanel(20);
    panel.setImages(image1, image2);
    panel.setAssociation(locationSrc, locationDst, matches);
    ShowImages.showWindow(panel, title);
}
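As with the older snippet above, the describeImage() helper is defined elsewhere in the class. A minimal sketch against the newer DescribePointRadiusAngle API might look like this; DogArray and the per-feature detector getters (getRadius, getOrientation) are assumptions about the surrounding code:
public static <TD extends TupleDesc<TD>> FastAccess<TD> describeImage( GrayF32 input,
        InterestPointDetector<GrayF32> detector, DescribePointRadiusAngle<GrayF32, TD> describe,
        List<Point2D_F64> location ) {
    // descriptor storage which recycles instances created by the describer
    DogArray<TD> list = new DogArray<>(describe::createDescription);

    detector.detect(input);
    describe.setImage(input);

    for (int i = 0; i < detector.getNumberOfFeatures(); i++) {
        Point2D_F64 p = detector.getLocation(i);

        // compute the descriptor at the detected location, orientation, and size
        if (describe.process(p.x, p.y, detector.getOrientation(i), detector.getRadius(i), list.grow()))
            location.add(p.copy());
        else
            list.removeTail();  // drop the descriptor if it could not be computed
    }
    return list;
}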
use of boofcv.gui.feature.AssociationPanel in project BoofCV by lessthanoptimal.
the class ExampleRectifyUncalibratedStereo method main.
public static void main( String[] args ) {
    // Load images with lens distortion removed. If lens distortion has not been
    // removed then the results will be approximate.
    String dir = UtilIO.pathExample("stereo/");
    BufferedImage imageA = UtilImageIO.loadImage(dir, "mono_wall_01_undist.jpg");
    BufferedImage imageB = UtilImageIO.loadImage(dir, "mono_wall_03_undist.jpg");

    // Find a set of point feature matches
    List<AssociatedPair> matches = ExampleFundamentalMatrix.computeMatches(imageA, imageB);

    // Prune matches using the epipolar constraint
    List<AssociatedPair> inliers = new ArrayList<>();
    DMatrixRMaj F = ExampleFundamentalMatrix.robustFundamental(matches, inliers);

    // display the inlier matches found using the robust estimator
    AssociationPanel panel = new AssociationPanel(20);
    panel.setAssociation(inliers);
    panel.setImages(imageA, imageB);
    ShowImages.showWindow(panel, "Inlier Pairs");

    rectify(F, inliers, imageA, imageB);
}
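The rectify() call at the end is defined elsewhere in the example. A minimal sketch of the uncalibrated rectification step is shown below, assuming RectifyImageOps.createUncalibrated(); the rectification-matrix getters (getRect1/getRect2) follow older releases and may differ in newer ones, and rendering the rectified images is omitted:
public static void rectify( DMatrixRMaj F, List<AssociatedPair> inliers,
        BufferedImage imageA, BufferedImage imageB ) {
    // compute rectifying homographies from the fundamental matrix and the inlier observations
    RectifyFundamental rectifyAlg = RectifyImageOps.createUncalibrated();
    rectifyAlg.process(F, inliers, imageA.getWidth(), imageA.getHeight());

    DMatrixRMaj rect1 = rectifyAlg.getRect1();
    DMatrixRMaj rect2 = rectifyAlg.getRect2();

    // adjust the rectification so the left image keeps its full field of view
    RectifyImageOps.fullViewLeft(imageA.getWidth(), imageA.getHeight(), rect1, rect2);

    // rect1 and rect2 can now be used to warp each image into the rectified view
}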
use of boofcv.gui.feature.AssociationPanel in project BoofCV by lessthanoptimal.
the class ExampleStereoTwoViewsOneCamera method drawInliers.
/**
 * Draw inliers for debugging purposes. Need to convert from normalized to pixel coordinates.
 */
public static void drawInliers( BufferedImage left, BufferedImage right, CameraPinholeRadial intrinsic,
        List<AssociatedPair> normalized ) {
    // transform from normalized image coordinates back into pixel coordinates
    Point2Transform2_F64 n_to_p = LensDistortionOps.narrow(intrinsic).distort_F64(false, true);

    List<AssociatedPair> pixels = new ArrayList<>();
    for (AssociatedPair n : normalized) {
        AssociatedPair p = new AssociatedPair();
        n_to_p.compute(n.p1.x, n.p1.y, p.p1);
        n_to_p.compute(n.p2.x, n.p2.y, p.p2);
        pixels.add(p);
    }

    // display the results
    AssociationPanel panel = new AssociationPanel(20);
    panel.setAssociation(pixels);
    panel.setImages(left, right);
    ShowImages.showWindow(panel, "Inlier Features");
}
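Callers need the matches in normalized image coordinates before invoking this method. A hypothetical helper for the inverse conversion (pixels to normalized), using the same LensDistortionOps factory as above, could look like this:
public static List<AssociatedPair> pixelsToNormalized( CameraPinholeRadial intrinsic, List<AssociatedPair> pixels ) {
    // inverse of the transform used in drawInliers(): pixel coordinates -> normalized image coordinates
    Point2Transform2_F64 p_to_n = LensDistortionOps.narrow(intrinsic).undistort_F64(true, false);

    List<AssociatedPair> normalized = new ArrayList<>();
    for (AssociatedPair p : pixels) {
        AssociatedPair n = new AssociatedPair();
        p_to_n.compute(p.p1.x, p.p1.y, n.p1);
        p_to_n.compute(p.p2.x, p.p2.y, n.p2);
        normalized.add(n);
    }
    return normalized;
}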