Use of boofcv.struct.geo.AssociatedPair in project BoofCV by lessthanoptimal: class ExampleFundamentalMatrix, method computeMatches.
/**
 * Use the associate point feature example to create a list of {@link AssociatedPair} for use in computing the
 * fundamental matrix.
 */
public static List<AssociatedPair> computeMatches(BufferedImage left, BufferedImage right) {
    // detect, describe, and associate point features between the two views
    DetectDescribePoint detDesc = FactoryDetectDescribe.surfStable(
            new ConfigFastHessian(1, 2, 200, 1, 9, 4, 4), null, null, GrayF32.class);
    // DetectDescribePoint detDesc = FactoryDetectDescribe.sift(null, new ConfigSiftDetector(2, 0, 200, 5), null, null);
    ScoreAssociation<BrightFeature> scorer = FactoryAssociation.scoreEuclidean(BrightFeature.class, true);
    AssociateDescription<BrightFeature> associate = FactoryAssociation.greedy(scorer, 1, true);
    ExampleAssociatePoints<GrayF32, BrightFeature> findMatches = new ExampleAssociatePoints<>(detDesc, associate, GrayF32.class);
    findMatches.associate(left, right);
    // convert the matched descriptor indexes into a list of matched point locations
    List<AssociatedPair> matches = new ArrayList<>();
    FastQueue<AssociatedIndex> matchIndexes = associate.getMatches();
    for (int i = 0; i < matchIndexes.size; i++) {
        AssociatedIndex a = matchIndexes.get(i);
        AssociatedPair p = new AssociatedPair(findMatches.pointsA.get(a.src), findMatches.pointsB.get(a.dst));
        matches.add(p);
    }
    return matches;
}
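The returned list can be consumed directly. Below is a minimal, hypothetical usage sketch (the class name ComputeMatchesDemo is invented here; the image file names are taken from the rectification example that follows, and ExampleFundamentalMatrix is assumed to be on the classpath):

import java.awt.image.BufferedImage;
import java.util.List;

import boofcv.io.UtilIO;
import boofcv.io.image.UtilImageIO;
import boofcv.struct.geo.AssociatedPair;

public class ComputeMatchesDemo {
    public static void main(String[] args) {
        // hypothetical demo: reuse the undistorted example pair loaded in the next snippet
        String dir = UtilIO.pathExample("stereo/");
        BufferedImage left = UtilImageIO.loadImage(dir, "mono_wall_01_undist.jpg");
        BufferedImage right = UtilImageIO.loadImage(dir, "mono_wall_03_undist.jpg");

        List<AssociatedPair> matches = ExampleFundamentalMatrix.computeMatches(left, right);

        // each AssociatedPair stores the matched pixel locations p1 (left image) and p2 (right image)
        System.out.println("Found " + matches.size() + " putative matches");
        for (int i = 0; i < Math.min(5, matches.size()); i++) {
            AssociatedPair p = matches.get(i);
            System.out.printf("  (%.1f, %.1f) <-> (%.1f, %.1f)%n", p.p1.x, p.p1.y, p.p2.x, p.p2.y);
        }
    }
}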
Use of boofcv.struct.geo.AssociatedPair in project BoofCV by lessthanoptimal: class ExampleRectifyUncalibratedStereo, method main.
public static void main(String[] args) {
    // Load images with lens distortion removed. If lens distortion has not been
    // removed then the results will be approximate
    String dir = UtilIO.pathExample("stereo/");
    BufferedImage imageA = UtilImageIO.loadImage(dir, "mono_wall_01_undist.jpg");
    BufferedImage imageB = UtilImageIO.loadImage(dir, "mono_wall_03_undist.jpg");
    // Find a set of point feature matches
    List<AssociatedPair> matches = ExampleFundamentalMatrix.computeMatches(imageA, imageB);
    // Prune matches using the epipolar constraint
    List<AssociatedPair> inliers = new ArrayList<>();
    DMatrixRMaj F = ExampleFundamentalMatrix.robustFundamental(matches, inliers);
    // display the inlier matches found using the robust estimator
    AssociationPanel panel = new AssociationPanel(20);
    panel.setAssociation(inliers);
    panel.setImages(imageA, imageB);
    ShowImages.showWindow(panel, "Inlier Pairs");
    rectify(F, inliers, imageA, imageB);
}
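Since robustFundamental prunes matches with the epipolar constraint, the surviving inliers can be sanity-checked directly: for a pixel pair (p1, p2) the algebraic error p2' * F * p1 should be close to zero. A hedged sketch of such a check, using only field access on AssociatedPair and element access on the EJML matrix (the helper name printEpipolarError is hypothetical and assumes the same imports as the example):

/**
 * Hedged sketch: prints the algebraic epipolar error p2' * F * p1 for each pair.
 * For genuine inliers the printed values should be close to zero.
 */
public static void printEpipolarError(DMatrixRMaj F, List<AssociatedPair> pairs) {
    for (AssociatedPair p : pairs) {
        double x1 = p.p1.x, y1 = p.p1.y;
        double x2 = p.p2.x, y2 = p.p2.y;
        // epipolar line in the second image: l = F * [x1, y1, 1]'
        double lx = F.get(0, 0) * x1 + F.get(0, 1) * y1 + F.get(0, 2);
        double ly = F.get(1, 0) * x1 + F.get(1, 1) * y1 + F.get(1, 2);
        double lz = F.get(2, 0) * x1 + F.get(2, 1) * y1 + F.get(2, 2);
        // algebraic error: [x2, y2, 1] . l
        System.out.printf("epipolar error = %.6f%n", x2 * lx + y2 * ly + lz);
    }
}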
Use of boofcv.struct.geo.AssociatedPair in project BoofCV by lessthanoptimal: class ExampleStereoTwoViewsOneCamera, method drawInliers.
/**
 * Draw inliers for debugging purposes. Need to convert from normalized to pixel coordinates.
 */
public static void drawInliers(BufferedImage left, BufferedImage right, CameraPinholeRadial intrinsic, List<AssociatedPair> normalized) {
    Point2Transform2_F64 n_to_p = LensDistortionOps.narrow(intrinsic).distort_F64(false, true);
    List<AssociatedPair> pixels = new ArrayList<>();
    for (AssociatedPair n : normalized) {
        AssociatedPair p = new AssociatedPair();
        n_to_p.compute(n.p1.x, n.p1.y, p.p1);
        n_to_p.compute(n.p2.x, n.p2.y, p.p2);
        pixels.add(p);
    }
    // display the results
    AssociationPanel panel = new AssociationPanel(20);
    panel.setAssociation(pixels);
    panel.setImages(left, right);
    ShowImages.showWindow(panel, "Inlier Features");
}
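The next snippet calls convertToNormalizedCoordinates, which performs the opposite mapping of drawInliers: distorted pixels into normalized image coordinates. A hedged sketch of what such a conversion could look like, assuming the example's imports and that undistort_F64(true, false) on the same LensDistortionOps.narrow(intrinsic) transform family selects the pixel-in, normalized-out direction:

/**
 * Hedged sketch of a pixel-to-normalized conversion, the inverse of the mapping used above.
 */
public static List<AssociatedPair> convertToNormalizedCoordinates(List<AssociatedPair> pixels,
                                                                  CameraPinholeRadial intrinsic) {
    // pixel in, normalized out; also removes lens distortion
    Point2Transform2_F64 p_to_n = LensDistortionOps.narrow(intrinsic).undistort_F64(true, false);
    List<AssociatedPair> normalized = new ArrayList<>();
    for (AssociatedPair pair : pixels) {
        AssociatedPair n = new AssociatedPair();
        p_to_n.compute(pair.p1.x, pair.p1.y, n.p1);
        p_to_n.compute(pair.p2.x, pair.p2.y, n.p2);
        normalized.add(n);
    }
    return normalized;
}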
Use of boofcv.struct.geo.AssociatedPair in project BoofCV by lessthanoptimal: class ExampleStereoTwoViewsOneCamera, method main.
public static void main(String[] args) {
    // specify location of images and calibration
    String calibDir = UtilIO.pathExample("calibration/mono/Sony_DSC-HX5V_Chess/");
    String imageDir = UtilIO.pathExample("stereo/");
    // Camera parameters
    CameraPinholeRadial intrinsic = CalibrationIO.load(new File(calibDir, "intrinsic.yaml"));
    // Input images from the camera moving left to right
    BufferedImage origLeft = UtilImageIO.loadImage(imageDir, "mono_wall_01.jpg");
    BufferedImage origRight = UtilImageIO.loadImage(imageDir, "mono_wall_02.jpg");
    // Input images with lens distortion
    GrayU8 distortedLeft = ConvertBufferedImage.convertFrom(origLeft, (GrayU8) null);
    GrayU8 distortedRight = ConvertBufferedImage.convertFrom(origRight, (GrayU8) null);
    // matched features between the two images
    List<AssociatedPair> matchedFeatures = ExampleFundamentalMatrix.computeMatches(origLeft, origRight);
    // convert from pixel coordinates into normalized image coordinates
    List<AssociatedPair> matchedCalibrated = convertToNormalizedCoordinates(matchedFeatures, intrinsic);
    // Robustly estimate camera motion
    List<AssociatedPair> inliers = new ArrayList<>();
    Se3_F64 leftToRight = estimateCameraMotion(intrinsic, matchedCalibrated, inliers);
    drawInliers(origLeft, origRight, intrinsic, inliers);
    // Rectify and remove lens distortion for stereo processing
    DMatrixRMaj rectifiedK = new DMatrixRMaj(3, 3);
    GrayU8 rectifiedLeft = distortedLeft.createSameShape();
    GrayU8 rectifiedRight = distortedRight.createSameShape();
    rectifyImages(distortedLeft, distortedRight, leftToRight, intrinsic, rectifiedLeft, rectifiedRight, rectifiedK);
    // compute disparity
    StereoDisparity<GrayS16, GrayF32> disparityAlg = FactoryStereoDisparity.regionSubpixelWta(DisparityAlgorithms.RECT_FIVE,
            minDisparity, maxDisparity, 5, 5, 20, 1, 0.1, GrayS16.class);
    // Apply the Laplacian across the image to add extra resistance to changes in lighting or camera gain
    GrayS16 derivLeft = new GrayS16(rectifiedLeft.width, rectifiedLeft.height);
    GrayS16 derivRight = new GrayS16(rectifiedLeft.width, rectifiedLeft.height);
    LaplacianEdge.process(rectifiedLeft, derivLeft);
    LaplacianEdge.process(rectifiedRight, derivRight);
    // process and return the results
    disparityAlg.process(derivLeft, derivRight);
    GrayF32 disparity = disparityAlg.getDisparity();
    // show results
    BufferedImage visualized = VisualizeImageData.disparity(disparity, null, minDisparity, maxDisparity, 0);
    BufferedImage outLeft = ConvertBufferedImage.convertTo(rectifiedLeft, null);
    BufferedImage outRight = ConvertBufferedImage.convertTo(rectifiedRight, null);
    ShowImages.showWindow(new RectifiedPairPanel(true, outLeft, outRight), "Rectification");
    ShowImages.showWindow(visualized, "Disparity");
    showPointCloud(disparity, outLeft, leftToRight, rectifiedK, minDisparity, maxDisparity);
    System.out.println("Total found " + matchedCalibrated.size());
    System.out.println("Total Inliers " + inliers.size());
}
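With the rectified calibration matrix and the estimated motion in hand, a disparity value maps to depth through z = f * B / d. A hedged sketch of that conversion (the helper name disparityToDepth, the invalid-disparity convention, and the assumption that the disparity image stores the offset from minDisparity are not taken from the example; it also assumes the example's imports):

/**
 * Hedged sketch: convert the disparity at pixel (x, y) into depth along the rectified left
 * camera's optical axis. Returns -1 when no depth can be recovered.
 */
public static double disparityToDepth(GrayF32 disparity, DMatrixRMaj rectifiedK, Se3_F64 leftToRight,
                                      int x, int y, int minDisparity, int maxDisparity) {
    double d = disparity.get(x, y);
    // assumption: values at or above the search range mark pixels with no valid disparity
    if (d >= maxDisparity - minDisparity)
        return -1;
    // assumption: the stored value is the offset from minDisparity
    double total = d + minDisparity;
    if (total <= 0)
        return -1;
    double focalLength = rectifiedK.get(0, 0);   // fx of the rectified camera
    double baseline = leftToRight.getT().norm(); // distance between the two camera centers
    return focalLength * baseline / total;
}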
Use of boofcv.struct.geo.AssociatedPair in project BoofCV by lessthanoptimal: class TestDistanceHomographySq, method createRandomData.
@Override
public AssociatedPair createRandomData() {
    // generate a random pair of Gaussian-distributed observations for the distance test
    Point2D_F64 p1 = new Point2D_F64(rand.nextGaussian(), rand.nextGaussian());
    Point2D_F64 p2 = new Point2D_F64(rand.nextGaussian(), rand.nextGaussian());
    return new AssociatedPair(p1, p2, false);
}
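The third constructor argument controls how the supplied points are handled; passing false, as the test does, appears to keep references to the original Point2D_F64 objects rather than copying them. A small hedged sketch illustrating that assumption (the class name AssociatedPairFlagDemo is invented here):

import boofcv.struct.geo.AssociatedPair;
import georegression.struct.point.Point2D_F64;

public class AssociatedPairFlagDemo {
    public static void main(String[] args) {
        Point2D_F64 p1 = new Point2D_F64(1, 2);
        Point2D_F64 p2 = new Point2D_F64(3, 4);
        // assumption: 'false' stores references to the supplied points instead of copying them,
        // so the test above avoids allocating extra point objects for each random pair
        AssociatedPair byReference = new AssociatedPair(p1, p2, false);
        p1.x = 10;
        p1.y = 20;
        // the change is visible through the pair because the point was stored by reference
        System.out.println(byReference.p1);
    }
}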