Use of boofcv.abst.feature.describe.DescribeRegionPoint in the project BoofCV by lessthanoptimal: class CompareConvertedDescriptionsApp, method describeImage.
/**
 * Detects interest points in the input image and computes a descriptor for each one.
 * Features whose descriptor cannot be computed are discarded.
 *
 * @param input Gray scale image being processed.
 * @param detector Interest point detector; run on {@code input} before describing.
 * @param describe Region descriptor used to compute each feature's description.
 * @param location Output list of point locations, parallel to the returned queue.
 * @return Queue of descriptors for all successfully described features.
 */
public static <TD extends TupleDesc> FastQueue<TD> describeImage(GrayF32 input, InterestPointDetector<GrayF32> detector, DescribeRegionPoint<GrayF32, TD> describe, List<Point2D_F64> location) {
    FastQueue<TD> descriptors = new FastQueue<>(100, describe.getDescriptionType(), false);

    System.out.println("Detecting");
    detector.detect(input);

    System.out.println("Describing");
    describe.setImage(input);

    for (int featureIdx = 0; featureIdx < detector.getNumberOfFeatures(); featureIdx++) {
        Point2D_F64 where = detector.getLocation(featureIdx);
        double featureRadius = detector.getRadius(featureIdx);
        double featureAngle = detector.getOrientation(featureIdx);

        TD desc = describe.createDescription();
        // keep only features whose descriptor could actually be computed
        if (describe.process(where.x, where.y, featureAngle, featureRadius, desc)) {
            descriptors.add(desc);
            location.add(where.copy());
        }
    }
    return descriptors;
}
Use of boofcv.abst.feature.describe.DescribeRegionPoint in the project BoofCV by lessthanoptimal: class TestDetectDescribeFusion, method checkFeatureNotInBounds.
/**
 * If a feature is not in bounds make sure everything is handled correctly.
 */
@Test
public void checkFeatureNotInBounds() {
    InterestPointDetector dummyDetector = new DummyDetector();
    DescribeRegionPoint dummyDescribe = new DummyRegionPoint();

    DetectDescribeFusion fusion = new DetectDescribeFusion(dummyDetector, null, dummyDescribe);
    fusion.detect(new GrayF32(2, 2));

    // one of the dummy features lies outside the 2x2 image, so only 9 should remain
    assertEquals(9, fusion.getNumberOfFeatures());

    // every surviving feature must report sane radius, orientation, descriptor, and location
    for (int idx = 0; idx < 9; idx++) {
        assertEquals(2, fusion.getRadius(idx), 1e-8);
        assertEquals(1, fusion.getOrientation(idx), 1e-8);
        assertTrue(fusion.getDescription(idx) != null);
        assertTrue(fusion.getLocation(idx) != null);
    }
}
Use of boofcv.abst.feature.describe.DescribeRegionPoint in the project BoofCV by lessthanoptimal: class TestWrapVisOdomQuadPnP, method createAlgorithm.
@Override
public StereoVisualOdometry<GrayF32> createAlgorithm() {
    // Shi-Tomasi corner intensity combined with non-maximum suppression forms the detector
    GeneralFeatureIntensity cornerIntensity = FactoryIntensityPoint.shiTomasi(1, false, GrayF32.class);
    NonMaxSuppression suppression = FactoryFeatureExtractor.nonmax(new ConfigExtract(2, 1, 0, true, false, true));

    GeneralFeatureDetector<GrayF32, GrayF32> cornerDetector = new GeneralFeatureDetector<>(cornerIntensity, suppression);
    cornerDetector.setMaxFeatures(600);

    // adapt the corner detector into a multi-region interest point detector
    DetectorInterestPointMulti multiDetector = new GeneralToInterestMulti(cornerDetector, 2, GrayF32.class, GrayF32.class);

    // describe each detected point with fast SURF (default configuration)
    DescribeRegionPoint surfDescribe = FactoryDescribeRegionPoint.surfFast(null, GrayF32.class);

    DetectDescribeMulti fused = new DetectDescribeMultiFusion(multiDetector, null, surfDescribe);

    return FactoryVisualOdometry.stereoQuadPnP(1.5, 0.5, 200, Double.MAX_VALUE, 300, 50, fused, GrayF32.class);
}
Use of boofcv.abst.feature.describe.DescribeRegionPoint in the project BoofCV by lessthanoptimal: class ExampleDetectDescribe, method createFromComponents.
/**
 * Any arbitrary implementation of InterestPointDetector, OrientationImage, and DescribeRegionPoint
 * can be combined into a DetectDescribePoint. The syntax is more complex, but the end result is
 * more flexible. This should only be done if there isn't a pre-made DetectDescribePoint.
 *
 * @param imageType Type of gray scale image the detector/descriptor will process.
 * @return Fused detector + descriptor built from independent components.
 */
public static <T extends ImageGray<T>, TD extends TupleDesc> DetectDescribePoint<T, TD> createFromComponents(Class<T> imageType) {
    // corner detector: Shi-Tomasi, capped at 1000 features
    Class derivativeType = GImageDerivativeOps.getDerivativeType(imageType);
    GeneralFeatureDetector shiTomasi = FactoryDetectPoint.createShiTomasi(new ConfigGeneralDetector(1000, 5, 1), false, derivativeType);
    InterestPointDetector pointDetector = FactoryInterestPoint.wrapPoint(shiTomasi, 1, imageType, derivativeType);

    // describe points using BRIEF
    DescribeRegionPoint briefDescribe = FactoryDescribeRegionPoint.brief(new ConfigBrief(true), imageType);

    // NOTE: orientation will not be estimated (null orientation estimator is passed)
    return FactoryDetectDescribe.fuseTogether(pointDetector, null, briefDescribe);
}
Use of boofcv.abst.feature.describe.DescribeRegionPoint in the project BoofCV by lessthanoptimal: class VisualizeDepthVisualOdometryApp, method changeSelectedAlgortihm.
// NOTE(review): method name contains a typo ("Algortihm" instead of "Algorithm");
// renaming would break existing callers, so it is left as-is here.
/**
 * Reconfigures the visual odometry algorithm based on the user's selection and swaps the
 * matching control panel into the UI when the algorithm category changes.
 *
 * whichAlg == 0: KLT two-pass feature tracker + depth PnP odometry
 * whichAlg == 1: Shi-Tomasi + BRIEF DDA tracker + depth PnP odometry
 * whichAlg == 2: combined Shi-Tomasi/SURF/KLT tracker + depth PnP odometry
 * whichAlg == 3: direct (dense) depth visual odometry
 * Any other value throws a RuntimeException.
 */
private void changeSelectedAlgortihm(int whichAlg) {
this.whichAlg = whichAlg;
// remember the previous category so we only rebuild the side panel on a category change
AlgType prevAlgType = this.algType;
Class imageType = GrayU8.class;
Class derivType = GImageDerivativeOps.getDerivativeType(imageType);
// 1e-3 scales raw depth values; presumably millimeters -> meters — TODO confirm against DepthSparse3D.I docs
DepthSparse3D<GrayU16> sparseDepth = new DepthSparse3D.I<>(1e-3);
// pyramid KLT configuration shared by the tracker-based options (0 and 2)
PkltConfig pkltConfig = new PkltConfig();
pkltConfig.templateRadius = 3;
pkltConfig.pyramidScaling = new int[] { 1, 2, 4, 8 };
algType = AlgType.UNKNOWN;
if (whichAlg == 0) {
algType = AlgType.FEATURE;
// sparse feature odometry with a pyramid KLT two-pass point tracker
ConfigGeneralDetector configDetector = new ConfigGeneralDetector(600, 3, 1);
PointTrackerTwoPass tracker = FactoryPointTrackerTwoPass.klt(pkltConfig, configDetector, imageType, derivType);
alg = FactoryVisualOdometry.depthDepthPnP(1.5, 120, 2, 200, 50, false, sparseDepth, tracker, imageType, GrayU16.class);
} else if (whichAlg == 1) {
algType = AlgType.FEATURE;
// detect-describe-associate tracker: Shi-Tomasi corners + BRIEF descriptors + greedy Hamming association
ConfigGeneralDetector configExtract = new ConfigGeneralDetector(600, 3, 1);
GeneralFeatureDetector detector = FactoryPointTracker.createShiTomasi(configExtract, derivType);
DescribeRegionPoint describe = FactoryDescribeRegionPoint.brief(null, imageType);
ScoreAssociateHamming_B score = new ScoreAssociateHamming_B();
AssociateDescription2D<TupleDesc_B> associate = new AssociateDescTo2D<>(FactoryAssociation.greedy(score, 150, true));
PointTrackerTwoPass tracker = FactoryPointTrackerTwoPass.dda(detector, describe, associate, null, 1, imageType);
alg = FactoryVisualOdometry.depthDepthPnP(1.5, 80, 3, 200, 50, false, sparseDepth, tracker, imageType, GrayU16.class);
} else if (whichAlg == 2) {
algType = AlgType.FEATURE;
// combined tracker (Shi-Tomasi detection, SURF description, KLT tracking) wrapped as two-pass
PointTracker tracker = FactoryPointTracker.combined_ST_SURF_KLT(new ConfigGeneralDetector(600, 3, 1), pkltConfig, 50, null, null, imageType, derivType);
PointTrackerTwoPass twopass = new PointTrackerToTwoPass<>(tracker);
alg = FactoryVisualOdometry.depthDepthPnP(1.5, 120, 3, 200, 50, false, sparseDepth, twopass, imageType, GrayU16.class);
} else if (whichAlg == 3) {
algType = AlgType.DIRECT;
// dense direct method on a 3-band planar float image instead of tracked features
alg = FactoryVisualOdometry.depthDirect(sparseDepth, ImageType.pl(3, GrayF32.class), GrayU16.class);
} else {
throw new RuntimeException("Unknown selection");
}
// swap the category-specific control panel only when the category actually changed
if (algType != prevAlgType) {
// remove the panel that belonged to the previous category
switch(prevAlgType) {
case FEATURE:
mainPanel.remove(featurePanel);
break;
case DIRECT:
mainPanel.remove(directPanel);
break;
default:
mainPanel.remove(algorithmPanel);
break;
}
// install the panel for the newly selected category
switch(algType) {
case FEATURE:
mainPanel.add(featurePanel, BorderLayout.NORTH);
break;
case DIRECT:
mainPanel.add(directPanel, BorderLayout.NORTH);
break;
default:
mainPanel.add(algorithmPanel, BorderLayout.NORTH);
break;
}
mainPanel.invalidate();
}
// propagate the newly selected algorithm's image types to the display
setImageTypes(alg.getVisualType(), ImageType.single(alg.getDepthType()));
}
Aggregations