
Example 1 with EstimateNofPnP

Use of boofcv.abst.geo.EstimateNofPnP in the project BoofCV by lessthanoptimal.

From the class TestPnPStereoEstimator, method perfectData:

private void perfectData(int numExtra) {
    EstimateNofPnP pnp = FactoryMultiView.computePnP_N(EnumPNP.P3P_FINSTERWALDER, -1);
    DistanceModelMonoPixels<Se3_F64, Point2D3D> distanceMono = new PnPDistanceReprojectionSq();
    PnPStereoEstimator alg = new PnPStereoEstimator(pnp, distanceMono, numExtra);
    Se3_F64 expected = new Se3_F64();
    expected.getR().set(ConvertRotation3D_F64.eulerToMatrix(EulerType.XYZ, 0.05, -0.03, 0.02, null));
    expected.getT().set(0.2, -0.1, 0.01);
    generateScene(alg.getMinimumPoints(), expected, false);
    Se3_F64 found = new Se3_F64();
    alg.setLeftToRight(leftToRight);
    assertTrue(alg.process(pointPose, found));
    // found.print();
    // expected.print();
    assertTrue(MatrixFeatures_DDRM.isIdentical(expected.getR(), found.getR(), 1e-8));
    assertTrue(found.getT().isIdentical(expected.getT(), 1e-8));
}
Also used: Point2D3D (boofcv.struct.geo.Point2D3D), EstimateNofPnP (boofcv.abst.geo.EstimateNofPnP), Se3_F64 (georegression.struct.se.Se3_F64)
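
For context, here is a minimal sketch, not taken from the BoofCV sources above, of invoking the N-solution estimator on its own. It assumes the GeoModelEstimatorN contract of this BoofCV/DDogleg generation, i.e. process(List<Point2D3D>, FastQueue<Se3_F64>), and uses a hypothetical helper class name; treat the signatures as assumptions rather than the canonical API.

import java.util.ArrayList;
import java.util.List;

import org.ddogleg.struct.FastQueue;

import boofcv.abst.geo.EstimateNofPnP;
import boofcv.factory.geo.EnumPNP;
import boofcv.factory.geo.FactoryMultiView;
import boofcv.struct.geo.Point2D3D;
import georegression.struct.se.Se3_F64;

public class EstimateNofPnPSketch {
    // Returns all candidate poses produced by the minimal P3P solver.  Observations are
    // assumed to be 2D points in normalized image coordinates paired with 3D world points.
    public static List<Se3_F64> solveCandidatePoses(List<Point2D3D> observations) {
        EstimateNofPnP pnp = FactoryMultiView.computePnP_N(EnumPNP.P3P_FINSTERWALDER, -1);
        // P3P can produce several geometrically valid hypotheses for the same observations
        FastQueue<Se3_F64> solutions = new FastQueue<>(Se3_F64.class, true);
        if (!pnp.process(observations, solutions))
            return new ArrayList<>();
        // the caller disambiguates the hypotheses, e.g. by reprojection error on extra points
        return solutions.toList();
    }
}

This is exactly the role PnPStereoEstimator plays in the test above: it resolves the ambiguity by checking the candidates against the second (right) camera view.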

Example 2 with EstimateNofPnP

Use of boofcv.abst.geo.EstimateNofPnP in the project BoofCV by lessthanoptimal.

From the class FactoryVisualOdometry, method stereoQuadPnP:

/**
 * Stereo visual odometry which uses the two most recent stereo observations (total of four views) to estimate
 * motion.
 *
 * @see VisOdomQuadPnP
 *
 * @param inlierPixelTol Pixel tolerance for RANSAC inliers - Euclidean distance
 * @param epipolarPixelTol Tolerance in pixels for enforcing the epipolar constraint during stereo association
 * @param maxDistanceF2F Maximum allowed distance in pixels between two associated features from frame to frame
 * @param maxAssociationError Maximum error between two feature descriptors when associating
 * @param ransacIterations Number of iterations RANSAC will perform
 * @param refineIterations Number of refinement iterations
 * @param detector Which feature detector to use
 * @param imageType Type of input image
 * @return Stereo visual odometry algorithm
 */
public static <T extends ImageGray<T>, Desc extends TupleDesc> StereoVisualOdometry<T> stereoQuadPnP(double inlierPixelTol, double epipolarPixelTol, double maxDistanceF2F, double maxAssociationError, int ransacIterations, int refineIterations, DetectDescribeMulti<T, Desc> detector, Class<T> imageType) {
    EstimateNofPnP pnp = FactoryMultiView.computePnP_N(EnumPNP.P3P_FINSTERWALDER, -1);
    DistanceModelMonoPixels<Se3_F64, Point2D3D> distanceMono = new PnPDistanceReprojectionSq();
    PnPStereoDistanceReprojectionSq distanceStereo = new PnPStereoDistanceReprojectionSq();
    PnPStereoEstimator pnpStereo = new PnPStereoEstimator(pnp, distanceMono, 0);
    ModelManagerSe3_F64 manager = new ModelManagerSe3_F64();
    EstimatorToGenerator<Se3_F64, Stereo2D3D> generator = new EstimatorToGenerator<>(pnpStereo);
    // euclidean error squared from left + right images
    double ransacTOL = 2 * inlierPixelTol * inlierPixelTol;
    ModelMatcher<Se3_F64, Stereo2D3D> motion = new Ransac<>(2323, manager, generator, distanceStereo, ransacIterations, ransacTOL);
    RefinePnPStereo refinePnP = null;
    if (refineIterations > 0) {
        refinePnP = new PnPStereoRefineRodrigues(1e-12, refineIterations);
    }
    Class<Desc> descType = detector.getDescriptionType();
    ScoreAssociation<Desc> scorer = FactoryAssociation.defaultScore(descType);
    AssociateDescription2D<Desc> assocSame;
    if (maxDistanceF2F > 0)
        assocSame = new AssociateMaxDistanceNaive<>(scorer, true, maxAssociationError, maxDistanceF2F);
    else
        assocSame = new AssociateDescTo2D<>(FactoryAssociation.greedy(scorer, maxAssociationError, true));
    AssociateStereo2D<Desc> associateStereo = new AssociateStereo2D<>(scorer, epipolarPixelTol, descType);
    TriangulateTwoViewsCalibrated triangulate = FactoryMultiView.triangulateTwoGeometric();
    associateStereo.setThreshold(maxAssociationError);
    VisOdomQuadPnP<T, Desc> alg = new VisOdomQuadPnP<>(detector, assocSame, associateStereo, triangulate, motion, refinePnP);
    return new WrapVisOdomQuadPnP<>(alg, refinePnP, associateStereo, distanceStereo, distanceMono, imageType);
}
Also used: AssociateMaxDistanceNaive (boofcv.alg.feature.associate.AssociateMaxDistanceNaive), AssociateDescTo2D (boofcv.abst.feature.associate.AssociateDescTo2D), Ransac (org.ddogleg.fitting.modelset.ransac.Ransac), EstimatorToGenerator (boofcv.factory.geo.EstimatorToGenerator), Point2D3D (boofcv.struct.geo.Point2D3D), TupleDesc (boofcv.struct.feature.TupleDesc), EstimateNofPnP (boofcv.abst.geo.EstimateNofPnP), AssociateStereo2D (boofcv.alg.feature.associate.AssociateStereo2D), Stereo2D3D (boofcv.struct.sfm.Stereo2D3D), TriangulateTwoViewsCalibrated (boofcv.abst.geo.TriangulateTwoViewsCalibrated), ModelManagerSe3_F64 (georegression.fitting.se.ModelManagerSe3_F64), Se3_F64 (georegression.struct.se.Se3_F64)
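
A minimal usage sketch of this factory method, not from the BoofCV sources: the class name, tolerance and iteration values are illustrative only, the detector, calibration and images are assumed to be supplied by the caller, and the StereoVisualOdometry calls (setCalibration, process, getCameraToWorld) reflect the interface of this BoofCV generation as best recalled; import paths may differ slightly between versions.

import boofcv.abst.feature.detdesc.DetectDescribeMulti;
import boofcv.abst.sfm.d3.StereoVisualOdometry;
import boofcv.factory.sfm.FactoryVisualOdometry;
import boofcv.struct.calib.StereoParameters;
import boofcv.struct.feature.TupleDesc;
import boofcv.struct.image.GrayF32;
import georegression.struct.se.Se3_F64;

public class QuadPnPUsageSketch {
    // Configures the quad PnP odometry, feeds it one stereo pair, and returns the
    // estimated camera-to-world motion.  All numeric values are placeholders.
    public static <Desc extends TupleDesc> Se3_F64 estimateMotion(DetectDescribeMulti<GrayF32, Desc> detector,
                                                                  StereoParameters calibration,
                                                                  GrayF32 left, GrayF32 right) {
        StereoVisualOdometry<GrayF32> vo = FactoryVisualOdometry.stereoQuadPnP(
                1.5,              // inlierPixelTol
                2.0,              // epipolarPixelTol
                -1,               // maxDistanceF2F: <= 0 selects plain greedy association
                Double.MAX_VALUE, // maxAssociationError: effectively unbounded
                300,              // ransacIterations
                50,               // refineIterations
                detector, GrayF32.class);
        vo.setCalibration(calibration);
        if (!vo.process(left, right))
            throw new RuntimeException("Motion estimation failed for this stereo pair");
        return vo.getCameraToWorld();
    }
}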

Example 3 with EstimateNofPnP

Use of boofcv.abst.geo.EstimateNofPnP in the project BoofCV by lessthanoptimal.

From the class FactoryVisualOdometry, method stereoDualTrackerPnP:

/**
 * Creates a stereo visual odometry algorithm that independently tracks features in left and right camera.
 *
 * @see VisOdomDualTrackPnP
 *
 * @param thresholdAdd When the number of inliers is below this number new features are detected
 * @param thresholdRetire When a feature has not been in the inlier list for this many ticks it is dropped
 * @param inlierPixelTol Tolerance in pixels for defining an inlier during robust model matching.  Typically 1.5
 * @param epipolarPixelTol Tolerance in pixels for enforcing the epipolar constraint
 * @param ransacIterations Number of iterations performed by RANSAC.  Try 300 or more.
 * @param refineIterations Number of iterations done during non-linear optimization.  Try 50 or more.
 * @param trackerLeft Tracker used for left camera
 * @param trackerRight Tracker used for right camera
 * @param imageType Type of image being processed
 * @return Stereo visual odometry algorithm.
 */
public static <T extends ImageGray<T>, Desc extends TupleDesc> StereoVisualOdometry<T> stereoDualTrackerPnP(int thresholdAdd, int thresholdRetire, double inlierPixelTol, double epipolarPixelTol, int ransacIterations, int refineIterations, PointTracker<T> trackerLeft, PointTracker<T> trackerRight, DescribeRegionPoint<T, Desc> descriptor, Class<T> imageType) {
    EstimateNofPnP pnp = FactoryMultiView.computePnP_N(EnumPNP.P3P_FINSTERWALDER, -1);
    DistanceModelMonoPixels<Se3_F64, Point2D3D> distanceMono = new PnPDistanceReprojectionSq();
    PnPStereoDistanceReprojectionSq distanceStereo = new PnPStereoDistanceReprojectionSq();
    PnPStereoEstimator pnpStereo = new PnPStereoEstimator(pnp, distanceMono, 0);
    ModelManagerSe3_F64 manager = new ModelManagerSe3_F64();
    EstimatorToGenerator<Se3_F64, Stereo2D3D> generator = new EstimatorToGenerator<>(pnpStereo);
    // Pixel tolerance for RANSAC inliers - euclidean error squared from left + right images
    double ransacTOL = 2 * inlierPixelTol * inlierPixelTol;
    ModelMatcher<Se3_F64, Stereo2D3D> motion = new Ransac<>(2323, manager, generator, distanceStereo, ransacIterations, ransacTOL);
    RefinePnPStereo refinePnP = null;
    Class<Desc> descType = descriptor.getDescriptionType();
    ScoreAssociation<Desc> scorer = FactoryAssociation.defaultScore(descType);
    AssociateStereo2D<Desc> associateStereo = new AssociateStereo2D<>(scorer, epipolarPixelTol, descType);
    // need to make sure associations are unique
    AssociateDescription2D<Desc> associateUnique = associateStereo;
    if (!associateStereo.uniqueDestination() || !associateStereo.uniqueSource()) {
        associateUnique = new EnforceUniqueByScore.Describe2D<>(associateStereo, true, true);
    }
    if (refineIterations > 0) {
        refinePnP = new PnPStereoRefineRodrigues(1e-12, refineIterations);
    }
    TriangulateTwoViewsCalibrated triangulate = FactoryMultiView.triangulateTwoGeometric();
    VisOdomDualTrackPnP<T, Desc> alg = new VisOdomDualTrackPnP<>(thresholdAdd, thresholdRetire, epipolarPixelTol, trackerLeft, trackerRight, descriptor, associateUnique, triangulate, motion, refinePnP);
    return new WrapVisOdomDualTrackPnP<>(pnpStereo, distanceMono, distanceStereo, associateStereo, alg, refinePnP, imageType);
}
Also used: Ransac (org.ddogleg.fitting.modelset.ransac.Ransac), EstimatorToGenerator (boofcv.factory.geo.EstimatorToGenerator), Point2D3D (boofcv.struct.geo.Point2D3D), TupleDesc (boofcv.struct.feature.TupleDesc), EstimateNofPnP (boofcv.abst.geo.EstimateNofPnP), AssociateStereo2D (boofcv.alg.feature.associate.AssociateStereo2D), EnforceUniqueByScore (boofcv.abst.feature.associate.EnforceUniqueByScore), Stereo2D3D (boofcv.struct.sfm.Stereo2D3D), TriangulateTwoViewsCalibrated (boofcv.abst.geo.TriangulateTwoViewsCalibrated), ModelManagerSe3_F64 (georegression.fitting.se.ModelManagerSe3_F64), Se3_F64 (georegression.struct.se.Se3_F64)
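
A corresponding usage sketch for the dual-tracker variant, again not from the BoofCV sources: the class name and numeric values are illustrative, the trackers and descriptor are assumed to be created elsewhere (e.g. via the point-tracker and describe factories), and import paths may differ slightly between BoofCV versions.

import boofcv.abst.feature.describe.DescribeRegionPoint;
import boofcv.abst.feature.tracker.PointTracker;
import boofcv.abst.sfm.d3.StereoVisualOdometry;
import boofcv.factory.sfm.FactoryVisualOdometry;
import boofcv.struct.feature.TupleDesc;
import boofcv.struct.image.GrayF32;

public class DualTrackerUsageSketch {
    // Builds the dual-tracker stereo odometry from caller-supplied trackers and descriptor.
    public static <Desc extends TupleDesc> StereoVisualOdometry<GrayF32> create(
            PointTracker<GrayF32> trackerLeft, PointTracker<GrayF32> trackerRight,
            DescribeRegionPoint<GrayF32, Desc> descriptor) {
        return FactoryVisualOdometry.stereoDualTrackerPnP(
                120,  // thresholdAdd: detect new features when inliers drop below this
                2,    // thresholdRetire: drop a track after this many frames outside the inlier set
                1.5,  // inlierPixelTol (the Javadoc suggests around 1.5)
                2.0,  // epipolarPixelTol
                300,  // ransacIterations (Javadoc: try 300 or more)
                50,   // refineIterations (Javadoc: try 50 or more)
                trackerLeft, trackerRight, descriptor, GrayF32.class);
    }
}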

Aggregations

EstimateNofPnP (boofcv.abst.geo.EstimateNofPnP): 3 uses
Point2D3D (boofcv.struct.geo.Point2D3D): 3 uses
Se3_F64 (georegression.struct.se.Se3_F64): 3 uses
TriangulateTwoViewsCalibrated (boofcv.abst.geo.TriangulateTwoViewsCalibrated): 2 uses
AssociateStereo2D (boofcv.alg.feature.associate.AssociateStereo2D): 2 uses
EstimatorToGenerator (boofcv.factory.geo.EstimatorToGenerator): 2 uses
TupleDesc (boofcv.struct.feature.TupleDesc): 2 uses
Stereo2D3D (boofcv.struct.sfm.Stereo2D3D): 2 uses
ModelManagerSe3_F64 (georegression.fitting.se.ModelManagerSe3_F64): 2 uses
Ransac (org.ddogleg.fitting.modelset.ransac.Ransac): 2 uses
AssociateDescTo2D (boofcv.abst.feature.associate.AssociateDescTo2D): 1 use
EnforceUniqueByScore (boofcv.abst.feature.associate.EnforceUniqueByScore): 1 use
AssociateMaxDistanceNaive (boofcv.alg.feature.associate.AssociateMaxDistanceNaive): 1 use