Use of boofcv.struct.feature.AssociatedIndex in project BoofCV by lessthanoptimal: the class CompareConvertedDescriptionsApp, method visualize.
public static <TD extends TupleDesc> void visualize(String title, BufferedImage image1, BufferedImage image2,
                                                     InterestPointDetector<GrayF32> detector,
                                                     DescribeRegionPoint<GrayF32, TD> describe,
                                                     ScoreAssociation<TD> scorer) {
    AssociateDescription<TD> assoc = FactoryAssociation.greedy(scorer, Double.MAX_VALUE, false);

    List<Point2D_F64> locationSrc = new ArrayList<>();
    List<Point2D_F64> locationDst = new ArrayList<>();

    GrayF32 input1 = ConvertBufferedImage.convertFrom(image1, (GrayF32) null);
    GrayF32 input2 = ConvertBufferedImage.convertFrom(image2, (GrayF32) null);

    FastQueue<TD> listSrc = describeImage(input1, detector, describe, locationSrc);
    FastQueue<TD> listDst = describeImage(input2, detector, describe, locationDst);

    assoc.setSource(listSrc);
    assoc.setDestination(listDst);
    assoc.associate();

    FastQueue<AssociatedIndex> matches = assoc.getMatches();

    AssociationPanel panel = new AssociationPanel(20);
    panel.setImages(image1, image2);
    panel.setAssociation(locationSrc, locationDst, matches);
    ShowImages.showWindow(panel, title);
}
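As a point of reference, here is a minimal sketch of how visualize might be driven (imports omitted, matching the other snippets on this page). The image paths and the factory calls FactoryInterestPoint.fastHessian, FactoryDescribeRegionPoint.surfStable, and FactoryAssociation.defaultScore are my assumptions and are not taken from the snippet above:

// Hypothetical driver for the visualize() helper above; SURF detector/descriptor are assumed choices
BufferedImage image1 = UtilImageIO.loadImage("imageA.jpg"); // hypothetical image paths
BufferedImage image2 = UtilImageIO.loadImage("imageB.jpg");

InterestPointDetector<GrayF32> detector =
        FactoryInterestPoint.fastHessian(new ConfigFastHessian(1, 2, 200, 1, 9, 4, 4));
DescribeRegionPoint<GrayF32, BrightFeature> describe =
        FactoryDescribeRegionPoint.surfStable(null, GrayF32.class);
ScoreAssociation<BrightFeature> scorer =
        FactoryAssociation.defaultScore(describe.getDescriptionType());

CompareConvertedDescriptionsApp.visualize("SURF associations", image1, image2, detector, describe, scorer);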
Use of boofcv.struct.feature.AssociatedIndex in project BoofCV by lessthanoptimal: the class DetectDescribeAssociate, method updateTrackState.
/**
 * Update each track's location and description (if configured to do so) and mark tracks as being associated.
 */
protected void updateTrackState(FastQueue<AssociatedIndex> matches) {
    // update tracks
    for (int i = 0; i < matches.size; i++) {
        AssociatedIndex indexes = matches.data[i];
        PointTrack track = tracksAll.get(indexes.src);
        Point2D_F64 loc = locDst.data[indexes.dst];
        track.set(loc.x, loc.y);
        tracksActive.add(track);

        // update the description
        if (updateDescription) {
            ((Desc) track.getDescription()).setTo(featDst.get(indexes.dst));
        }

        isAssociated[indexes.src] = true;
    }
}
Use of boofcv.struct.feature.AssociatedIndex in project BoofCV by lessthanoptimal: the class DetectDescribeAssociateTwoPass, method updateTrackLocation.
/**
 * Update each track's location only, not its description, and update the active list.
 */
protected void updateTrackLocation(FastQueue<AssociatedIndex> matches) {
    tracksActive.clear();
    for (int i = 0; i < matches.size; i++) {
        AssociatedIndex indexes = matches.data[i];
        PointTrack track = tracksAll.get(indexes.src);
        Point2D_F64 loc = locDst.data[indexes.dst];
        track.set(loc.x, loc.y);
        tracksActive.add(track);
    }
    this.matches = matches;
}
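Both tracker methods above rely on the same contract: AssociatedIndex.src indexes the feature set passed to setSource() and AssociatedIndex.dst indexes the set passed to setDestination(). As a rough sketch (not part of the tracker; locSrc and locDst are hypothetical lists of feature locations), the saved matches could later be turned into point pairs for geometric model fitting:

// Convert matches into AssociatedPair objects; src/dst index the source/destination feature lists
List<AssociatedPair> pairs = new ArrayList<>();
for (int i = 0; i < matches.size; i++) {
    AssociatedIndex m = matches.get(i);
    Point2D_F64 src = locSrc.get(m.src); // hypothetical source-frame locations
    Point2D_F64 dst = locDst.get(m.dst);
    pairs.add(new AssociatedPair(src.x, src.y, dst.x, dst.y));
}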
Use of boofcv.struct.feature.AssociatedIndex in project BoofCV by lessthanoptimal: the class ExampleMultiviewSceneReconstruction, method initialize.
/**
 * Initialize the 3D world given these two images. imageA is assumed to be the origin of the world.
 */
private void initialize(int imageA, int imageB) {
    System.out.println("Initializing 3D world using " + imageA + " and " + imageB);

    // Compute the 3D pose and find valid image features
    Se3_F64 motionAtoB = new Se3_F64();
    List<AssociatedIndex> inliers = new ArrayList<>();

    if (!estimateStereoPose(imageA, imageB, motionAtoB, inliers))
        throw new RuntimeException("The first image pair is a bad keyframe!");

    motionWorldToCamera[imageB].set(motionAtoB);
    estimatedImage[imageB] = true;
    processedImage[imageB] = true;

    // create tracks for only those features in the inlier list
    FastQueue<Point2D_F64> pixelsA = imagePixels.get(imageA);
    FastQueue<Point2D_F64> pixelsB = imagePixels.get(imageB);
    List<Feature3D> tracksA = imageFeature3D.get(imageA);
    List<Feature3D> tracksB = imageFeature3D.get(imageB);
    GrowQueue_I32 colorsA = imageColors.get(imageA);

    for (int i = 0; i < inliers.size(); i++) {
        AssociatedIndex a = inliers.get(i);

        Feature3D t = new Feature3D();
        t.color = colorsA.get(a.src);
        t.obs.grow().set(pixelsA.get(a.src));
        t.obs.grow().set(pixelsB.get(a.dst));
        t.frame.add(imageA);
        t.frame.add(imageB);

        // compute the 3D coordinate of the feature
        Point2D_F64 pa = pixelsA.get(a.src);
        Point2D_F64 pb = pixelsB.get(a.dst);

        if (!triangulate.triangulate(pa, pb, motionAtoB, t.worldPt))
            continue;

        // the feature has to be in front of the camera
        if (t.worldPt.z > 0) {
            featuresAll.add(t);
            tracksA.add(t);
            tracksB.add(t);
        }
    }

    // adjust the scale so that it's not excessively large or small
    normalizeScale(motionWorldToCamera[imageB], tracksA);
}
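For context, here is a rough sketch of how the two-view triangulator used as triangulate above might be constructed and exercised on a single inlier. The factory call FactoryMultiView.triangulateTwoGeometric() and the coordinate conventions are my assumptions and do not appear in the snippet:

// Assumed construction of the calibrated two-view triangulator (verify against the BoofCV version in use)
TriangulateTwoViewsCalibrated triangulate = FactoryMultiView.triangulateTwoGeometric();

// Observations of one inlier in each view, indexed through AssociatedIndex.src/.dst
Point2D_F64 pa = pixelsA.get(inliers.get(0).src);
Point2D_F64 pb = pixelsB.get(inliers.get(0).dst);

Point3D_F64 worldPt = new Point3D_F64();
if (triangulate.triangulate(pa, pb, motionAtoB, worldPt)) {
    // the world frame coincides with camera A, so z > 0 means the point lies in front of that camera
    System.out.println("triangulated point = " + worldPt);
}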
Use of boofcv.struct.feature.AssociatedIndex in project BoofCV by lessthanoptimal: the class TestAssociateStereo2D, method positive.
/**
 * Very simple positive case with only a perfect observation and descriptor
 */
@Test
public void positive() {
    Point3D_F64 X = new Point3D_F64(0.02, -0.5, 3);
    SfmTestHelper.renderPointPixel(param, X, leftP, rightP);

    pointsLeft.grow().set(leftP);
    pointsRight.grow().set(rightP);
    descLeft.grow();
    descRight.grow();

    AssociateStereo2D<TupleDesc_F64> alg = new AssociateStereo2D<>(scorer, 0.5, TupleDesc_F64.class);
    alg.setCalibration(param);
    alg.setSource(pointsLeft, descLeft);
    alg.setDestination(pointsRight, descRight);
    alg.associate();

    FastQueue<AssociatedIndex> matches = alg.getMatches();
    assertEquals(1, matches.size);
}
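A natural follow-up check, sketched under the assumption that the scorer reports a zero error for identical descriptors, is to verify the indices and fit score of the single returned match:

// The only left observation should be paired with the only right observation
AssociatedIndex found = matches.get(0);
assertEquals(0, found.src);
assertEquals(0, found.dst);
// identical (default-initialized) descriptors should give a near-zero association error
assertEquals(0, found.fitScore, 1e-8);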