Usage example of boofcv.struct.feature.TupleDesc in the BoofCV project by lessthanoptimal.
From the class VisualizeRegionDescriptionApp, method updateTargetDescription:
/**
 * Computes the descriptor at the currently selected target point and shows it in the
 * tuple panel; clears the panel when no target is selected.
 * Should only be called from a swing thread.
 */
private void updateTargetDescription() {
    if (targetPt == null) {
        // no target selected — blank the display
        tuplePanel.setDescription(null);
    } else {
        TupleDesc desc = describe.createDescription();
        describe.process(targetPt.x, targetPt.y, targetOrientation, targetRadius, desc);
        tuplePanel.setDescription(desc);
    }
    tuplePanel.repaint();
}
Usage example of boofcv.struct.feature.TupleDesc in the BoofCV project by lessthanoptimal.
From the class TupleDescPanel, method paintComponent:
/**
 * Renders the current descriptor as a normalized line plot: the horizontal gray line is the
 * zero axis, the red polyline is each element's value scaled so the largest magnitude spans
 * half the panel height, and blue dots mark the individual samples. The magnitude used for
 * normalization is printed in the top-left corner. A white panel is drawn when no descriptor
 * is set.
 *
 * @param g graphics context supplied by Swing
 */
@Override
public synchronized void paintComponent(Graphics g) {
    super.paintComponent(g);
    Graphics2D g2 = (Graphics2D) g;

    // snapshot the field so it can't change to null mid-paint
    @Nullable TupleDesc desc = this.desc;
    if (desc == null) {
        g2.setColor(Color.WHITE);
        g2.fillRect(0, 0, getWidth(), getHeight());
        return;
    }

    int h = getHeight();
    int w = getWidth();
    int m = h / 2; // vertical midpoint = zero axis

    int N = desc.size();
    int[] x = new int[N];
    int[] y = new int[N];

    // find the maximum magnitude of any of the elements
    double max = 0;
    for (int i = 0; i < N; i++) {
        double v = Math.abs(desc.getDouble(i));
        if (v > max) {
            max = v;
        }
    }
    // BUG FIX: an all-zero descriptor previously divided by zero below, producing NaN
    // y-coordinates ((int)NaN == 0 drew the plot along the top edge). Scale by 1 instead
    // so a zero descriptor renders as a flat line on the axis; still display the true max.
    double scale = (max == 0) ? 1 : max;

    // draw a normalized histogram plot
    double stepX = 1.0 / N;
    for (int i = 0; i < N; i++) {
        x[i] = (int) (w * i * stepX);
        y[i] = (int) ((m * desc.getDouble(i) / scale) + m);
    }

    // zero axis
    g2.setColor(Color.GRAY);
    g2.drawLine(0, m, w, m);

    // the descriptor plot itself
    g2.setStroke(new BasicStroke(2));
    g2.setColor(Color.RED);
    g2.drawPolyline(x, y, x.length);

    // print out the magnitude
    g2.setColor(Color.BLACK);
    String s = String.format("%4.1e", max);
    g2.drawString(s, 0, 20);

    // mark each sample; use a dedicated diameter variable instead of clobbering 'w'
    g2.setColor(Color.BLUE);
    int r = 1;
    int diameter = r * 2 + 1;
    for (int i = 0; i < N; i++) {
        g2.fillOval(x[i] - r, y[i] - r, diameter, diameter);
    }
}
Usage example of boofcv.struct.feature.TupleDesc in the BoofCV project by lessthanoptimal.
From the class VisualizeAssociationMatchesApp, method extractImageFeatures:
/**
 * Detects features in the gray image and computes a descriptor (plus orientation, when the
 * describer requires one) for each detection. Successful descriptors are accumulated in
 * {@code descs} with their locations in {@code locs}; detections whose descriptor could not
 * be computed are discarded.
 */
private void extractImageFeatures(Planar<T> color, T gray, FastQueue<TupleDesc> descs, List<Point2D_F64> locs) {
    detector.detect(gray);

    // feed the describer whichever image family it expects
    if (describe.getImageType().getFamily() == ImageType.Family.GRAY) {
        describe.setImage(gray);
    } else {
        describe.setImage(color);
    }
    orientation.setImage(gray);

    int total = detector.getNumberOfFeatures();
    if (detector.hasScale()) {
        for (int idx = 0; idx < total; idx++) {
            Point2D_F64 where = detector.getLocation(idx);
            double radius = detector.getRadius(idx);
            double angle = 0;
            if (describe.requiresOrientation()) {
                orientation.setObjectRadius(radius);
                angle = orientation.compute(where.x, where.y);
            }
            TupleDesc description = descs.grow();
            if (describe.process(where.x, where.y, angle, radius, description)) {
                locs.add(where.copy());
            } else {
                // descriptor failed — release the slot that was just grown
                descs.removeTail();
            }
        }
    } else {
        // no per-feature scale: fixed radius for orientation, unit radius for description
        orientation.setObjectRadius(10);
        for (int idx = 0; idx < total; idx++) {
            Point2D_F64 where = detector.getLocation(idx);
            double angle = 0;
            if (describe.requiresOrientation()) {
                angle = orientation.compute(where.x, where.y);
            }
            TupleDesc description = descs.grow();
            if (describe.process(where.x, where.y, angle, 1, description)) {
                locs.add(where.copy());
            } else {
                descs.removeTail();
            }
        }
    }
}
Usage example of boofcv.struct.feature.TupleDesc in the BoofCV project by lessthanoptimal.
From the class FactoryVisualOdometry, method stereoQuadPnP:
/**
 * Stereo visual odometry which uses the two most recent stereo observations (total of four views) to estimate
 * motion.
 *
 * @see VisOdomQuadPnP
 *
 * @param inlierPixelTol Pixel tolerance for RANSAC inliers - Euclidean distance
 * @param epipolarPixelTol Feature association tolerance in pixels.
 * @param maxDistanceF2F Maximum allowed distance between two features in pixels
 * @param maxAssociationError Maximum error between two features when associating.
 * @param ransacIterations Number of iterations RANSAC will perform
 * @param refineIterations Number of refinement iterations
 * @param detector Which feature detector to use
 * @param imageType Type of input image
 */
public static <T extends ImageGray<T>, Desc extends TupleDesc> StereoVisualOdometry<T> stereoQuadPnP(double inlierPixelTol, double epipolarPixelTol, double maxDistanceF2F, double maxAssociationError, int ransacIterations, int refineIterations, DetectDescribeMulti<T, Desc> detector, Class<T> imageType) {
    // pose estimation: P3P hypotheses scored by reprojection error in mono and stereo views
    EstimateNofPnP pnp = FactoryMultiView.computePnP_N(EnumPNP.P3P_FINSTERWALDER, -1);
    DistanceModelMonoPixels<Se3_F64, Point2D3D> distMono = new PnPDistanceReprojectionSq();
    PnPStereoDistanceReprojectionSq distStereo = new PnPStereoDistanceReprojectionSq();
    PnPStereoEstimator pnpStereo = new PnPStereoEstimator(pnp, distMono, 0);

    // robust motion estimation; threshold is the euclidean error squared from left + right images
    double ransacThreshold = 2 * inlierPixelTol * inlierPixelTol;
    ModelManagerSe3_F64 manager = new ModelManagerSe3_F64();
    EstimatorToGenerator<Se3_F64, Stereo2D3D> generator = new EstimatorToGenerator<>(pnpStereo);
    ModelMatcher<Se3_F64, Stereo2D3D> motion = new Ransac<>(2323, manager, generator, distStereo, ransacIterations, ransacThreshold);

    // optional non-linear refinement of the estimated pose
    RefinePnPStereo refinePnP = refineIterations > 0 ? new PnPStereoRefineRodrigues(1e-12, refineIterations) : null;

    // association: frame-to-frame (optionally distance-limited) and left-to-right with epipolar constraint
    Class<Desc> descType = detector.getDescriptionType();
    ScoreAssociation<Desc> scorer = FactoryAssociation.defaultScore(descType);
    AssociateDescription2D<Desc> assocSame = maxDistanceF2F > 0
            ? new AssociateMaxDistanceNaive<>(scorer, true, maxAssociationError, maxDistanceF2F)
            : new AssociateDescTo2D<>(FactoryAssociation.greedy(scorer, maxAssociationError, true));

    AssociateStereo2D<Desc> associateStereo = new AssociateStereo2D<>(scorer, epipolarPixelTol, descType);
    associateStereo.setThreshold(maxAssociationError);

    TriangulateTwoViewsCalibrated triangulate = FactoryMultiView.triangulateTwoGeometric();

    VisOdomQuadPnP<T, Desc> alg = new VisOdomQuadPnP<>(detector, assocSame, associateStereo, triangulate, motion, refinePnP);
    return new WrapVisOdomQuadPnP<>(alg, refinePnP, associateStereo, distStereo, distMono, imageType);
}
Usage example of boofcv.struct.feature.TupleDesc in the BoofCV project by lessthanoptimal.
From the class FactoryVisualOdometry, method stereoDualTrackerPnP:
/**
 * Creates a stereo visual odometry algorithm that independently tracks features in left and right camera.
 *
 * @see VisOdomDualTrackPnP
 *
 * @param thresholdAdd When the number of inliers is below this number new features are detected
 * @param thresholdRetire When a feature has not been in the inlier list for this many ticks it is dropped
 * @param inlierPixelTol Tolerance in pixels for defining an inlier during robust model matching. Typically 1.5
 * @param epipolarPixelTol Tolerance in pixels for enforcing the epipolar constraint
 * @param ransacIterations Number of iterations performed by RANSAC. Try 300 or more.
 * @param refineIterations Number of iterations done during non-linear optimization. Try 50 or more.
 * @param trackerLeft Tracker used for left camera
 * @param trackerRight Tracker used for right camera
 * @param descriptor Computes region descriptors; its descriptor type drives the left-right association scorer
 * @param imageType Type of image being processed
 * @return Stereo visual odometry algorithm.
 */
public static <T extends ImageGray<T>, Desc extends TupleDesc> StereoVisualOdometry<T> stereoDualTrackerPnP(int thresholdAdd, int thresholdRetire, double inlierPixelTol, double epipolarPixelTol, int ransacIterations, int refineIterations, PointTracker<T> trackerLeft, PointTracker<T> trackerRight, DescribeRegionPoint<T, Desc> descriptor, Class<T> imageType) {
// Pose estimation: P3P hypotheses scored by reprojection error in mono and stereo views
EstimateNofPnP pnp = FactoryMultiView.computePnP_N(EnumPNP.P3P_FINSTERWALDER, -1);
DistanceModelMonoPixels<Se3_F64, Point2D3D> distanceMono = new PnPDistanceReprojectionSq();
PnPStereoDistanceReprojectionSq distanceStereo = new PnPStereoDistanceReprojectionSq();
PnPStereoEstimator pnpStereo = new PnPStereoEstimator(pnp, distanceMono, 0);
ModelManagerSe3_F64 manager = new ModelManagerSe3_F64();
EstimatorToGenerator<Se3_F64, Stereo2D3D> generator = new EstimatorToGenerator<>(pnpStereo);
// Pixel tolerance for RANSAC inliers - euclidean error squared from left + right images
double ransacTOL = 2 * inlierPixelTol * inlierPixelTol;
ModelMatcher<Se3_F64, Stereo2D3D> motion = new Ransac<>(2323, manager, generator, distanceStereo, ransacIterations, ransacTOL);
RefinePnPStereo refinePnP = null;
// Left-right association scored on the descriptor type, constrained by the epipolar geometry
Class<Desc> descType = descriptor.getDescriptionType();
ScoreAssociation<Desc> scorer = FactoryAssociation.defaultScore(descType);
AssociateStereo2D<Desc> associateStereo = new AssociateStereo2D<>(scorer, epipolarPixelTol, descType);
// need to make sure associations are unique
AssociateDescription2D<Desc> associateUnique = associateStereo;
if (!associateStereo.uniqueDestination() || !associateStereo.uniqueSource()) {
associateUnique = new EnforceUniqueByScore.Describe2D<>(associateStereo, true, true);
}
// Optional non-linear refinement of the estimated pose
if (refineIterations > 0) {
refinePnP = new PnPStereoRefineRodrigues(1e-12, refineIterations);
}
TriangulateTwoViewsCalibrated triangulate = FactoryMultiView.triangulateTwoGeometric();
VisOdomDualTrackPnP<T, Desc> alg = new VisOdomDualTrackPnP<>(thresholdAdd, thresholdRetire, epipolarPixelTol, trackerLeft, trackerRight, descriptor, associateUnique, triangulate, motion, refinePnP);
return new WrapVisOdomDualTrackPnP<>(pnpStereo, distanceMono, distanceStereo, associateStereo, alg, refinePnP, imageType);
}
Aggregations