Use of boofcv.abst.feature.detect.interest.ConfigGeneralDetector in project BoofCV (by lessthanoptimal) — class FactoryDetectPoint, method createFast.
/**
 * Creates a FAST corner detector wrapped inside a {@link GeneralFeatureDetector}.
 *
 * @param configFast Configuration for FAST feature detector. If null, default settings are used.
 * @param configDetector Configuration for feature extractor. Not modified.
 * @param imageType Type of input image.
 * @return Configured FAST corner detector.
 * @see FastCornerIntensity
 */
public static <T extends ImageGray<T>, D extends ImageGray<D>> GeneralFeatureDetector<T, D> createFast(@Nullable ConfigFast configFast, ConfigGeneralDetector configDetector, Class<T> imageType) {
	if (configFast == null)
		configFast = new ConfigFast();
	configFast.checkValidity();

	FastCornerIntensity<T> alg = FactoryIntensityPointAlg.fast(configFast.pixelTol, configFast.minContinuous, imageType);
	GeneralFeatureIntensity<T, D> intensity = new WrapperFastCornerIntensity<>(alg);

	// Copy the caller's detector settings while overriding the extractor-specific
	// arguments (ignoreBorder=0 plus the trailing boolean flags — semantics per the
	// ConfigGeneralDetector constructor) required by the FAST intensity wrapper.
	ConfigGeneralDetector configExtract = new ConfigGeneralDetector(configDetector.maxFeatures, configDetector.radius, configDetector.threshold, 0, true, false, true);

	return createGeneral(intensity, configExtract);
}
Use of boofcv.abst.feature.detect.interest.ConfigGeneralDetector in project BoofCV (by lessthanoptimal) — class FactoryDetectPoint, method createGeneral.
/**
 * Creates a {@link GeneralFeatureDetector} from an intensity function and a non-maximum
 * suppression extractor built from the provided configuration.
 *
 * @param intensity Computes the feature intensity image.
 * @param config Configuration for the feature extractor. Not modified — an internal copy is made.
 * @return Configured feature detector.
 */
public static <T extends ImageGray<T>, D extends ImageGray<D>> GeneralFeatureDetector<T, D> createGeneral(GeneralFeatureIntensity<T, D> intensity, ConfigGeneralDetector config) {
	// Work on a copy so the caller's configuration object is never mutated
	ConfigGeneralDetector localConfig = new ConfigGeneralDetector();
	localConfig.setTo(config);

	// Expand the ignored border by the extractor radius — presumably so features are
	// not selected where the extractor window would extend past the image edge
	localConfig.ignoreBorder += localConfig.radius;

	NonMaxSuppression extractor = FactoryFeatureExtractor.nonmax(localConfig);
	GeneralFeatureDetector<T, D> detector = new GeneralFeatureDetector<>(intensity, extractor);
	detector.setMaxFeatures(localConfig.maxFeatures);
	return detector;
}
Use of boofcv.abst.feature.detect.interest.ConfigGeneralDetector in project BoofCV (by lessthanoptimal) — class ExampleDetectDescribe, method createFromComponents.
/**
 * Demonstrates combining arbitrary implementations of InterestPointDetector, OrientationImage,
 * and DescribeRegionPoint into a single DetectDescribePoint. More verbose than using a pre-made
 * DetectDescribePoint, but considerably more flexible — prefer a pre-made one when available.
 */
public static <T extends ImageGray<T>, TD extends TupleDesc> DetectDescribePoint<T, TD> createFromComponents(Class<T> imageType) {
	Class derivType = GImageDerivativeOps.getDerivativeType(imageType);

	// Shi-Tomasi corners, wrapped so they can be used as an interest point detector
	GeneralFeatureDetector cornerDetector = FactoryDetectPoint.createShiTomasi(new ConfigGeneralDetector(1000, 5, 1), false, derivType);
	InterestPointDetector interestPoints = FactoryInterestPoint.wrapPoint(cornerDetector, 1, imageType, derivType);

	// Each detected point is described with a BRIEF descriptor
	DescribeRegionPoint briefDescriptor = FactoryDescribeRegionPoint.brief(new ConfigBrief(true), imageType);

	// Orientation estimator is null, so feature orientation will not be estimated
	return FactoryDetectDescribe.fuseTogether(interestPoints, null, briefDescriptor);
}
Use of boofcv.abst.feature.detect.interest.ConfigGeneralDetector in project BoofCV (by lessthanoptimal) — class VisualizeDepthVisualOdometryApp, method changeSelectedAlgortihm.
/**
 * Switches the active depth visual odometry algorithm and swaps the side panel shown
 * in the UI when the algorithm category (FEATURE vs DIRECT) changes.
 *
 * @param whichAlg Selection index: 0-2 are feature-based variants, 3 is the direct
 *                 method. Any other value throws a RuntimeException.
 */
private void changeSelectedAlgortihm(int whichAlg) {
this.whichAlg = whichAlg;
// Remember the previous category so the panel is only rebuilt when it actually changes
AlgType prevAlgType = this.algType;
Class imageType = GrayU8.class;
Class derivType = GImageDerivativeOps.getDerivativeType(imageType);
// Depth values are scaled by 1e-3 — presumably millimeters to meters; confirm against sensor config
DepthSparse3D<GrayU16> sparseDepth = new DepthSparse3D.I<>(1e-3);
// Shared KLT pyramid tracker configuration used by selections 0 and 2
PkltConfig pkltConfig = new PkltConfig();
pkltConfig.templateRadius = 3;
pkltConfig.pyramidScaling = new int[] { 1, 2, 4, 8 };
algType = AlgType.UNKNOWN;
if (whichAlg == 0) {
// Feature-based: KLT point tracker + depth PnP odometry
algType = AlgType.FEATURE;
ConfigGeneralDetector configDetector = new ConfigGeneralDetector(600, 3, 1);
PointTrackerTwoPass tracker = FactoryPointTrackerTwoPass.klt(pkltConfig, configDetector, imageType, derivType);
alg = FactoryVisualOdometry.depthDepthPnP(1.5, 120, 2, 200, 50, false, sparseDepth, tracker, imageType, GrayU16.class);
} else if (whichAlg == 1) {
// Feature-based: Shi-Tomasi detection + BRIEF description + Hamming-distance association
algType = AlgType.FEATURE;
ConfigGeneralDetector configExtract = new ConfigGeneralDetector(600, 3, 1);
GeneralFeatureDetector detector = FactoryPointTracker.createShiTomasi(configExtract, derivType);
DescribeRegionPoint describe = FactoryDescribeRegionPoint.brief(null, imageType);
ScoreAssociateHamming_B score = new ScoreAssociateHamming_B();
AssociateDescription2D<TupleDesc_B> associate = new AssociateDescTo2D<>(FactoryAssociation.greedy(score, 150, true));
PointTrackerTwoPass tracker = FactoryPointTrackerTwoPass.dda(detector, describe, associate, null, 1, imageType);
alg = FactoryVisualOdometry.depthDepthPnP(1.5, 80, 3, 200, 50, false, sparseDepth, tracker, imageType, GrayU16.class);
} else if (whichAlg == 2) {
// Feature-based: combined Shi-Tomasi/SURF/KLT tracker adapted to the two-pass interface
algType = AlgType.FEATURE;
PointTracker tracker = FactoryPointTracker.combined_ST_SURF_KLT(new ConfigGeneralDetector(600, 3, 1), pkltConfig, 50, null, null, imageType, derivType);
PointTrackerTwoPass twopass = new PointTrackerToTwoPass<>(tracker);
alg = FactoryVisualOdometry.depthDepthPnP(1.5, 120, 3, 200, 50, false, sparseDepth, twopass, imageType, GrayU16.class);
} else if (whichAlg == 3) {
// Direct (dense) method works on a 3-band planar float image instead of tracked features
algType = AlgType.DIRECT;
alg = FactoryVisualOdometry.depthDirect(sparseDepth, ImageType.pl(3, GrayF32.class), GrayU16.class);
} else {
throw new RuntimeException("Unknown selection");
}
// Swap the side panel only when the algorithm category changed
if (algType != prevAlgType) {
// Remove whichever panel the previous category was showing
switch(prevAlgType) {
case FEATURE:
mainPanel.remove(featurePanel);
break;
case DIRECT:
mainPanel.remove(directPanel);
break;
default:
mainPanel.remove(algorithmPanel);
break;
}
// Install the panel for the new category
switch(algType) {
case FEATURE:
mainPanel.add(featurePanel, BorderLayout.NORTH);
break;
case DIRECT:
mainPanel.add(directPanel, BorderLayout.NORTH);
break;
default:
mainPanel.add(algorithmPanel, BorderLayout.NORTH);
break;
}
// Trigger a re-layout so the swapped panel becomes visible
mainPanel.invalidate();
}
setImageTypes(alg.getVisualType(), ImageType.single(alg.getDepthType()));
}
Use of boofcv.abst.feature.detect.interest.ConfigGeneralDetector in project BoofCV (by lessthanoptimal) — class VisualizeMonocularPlaneVisualOdometryApp, method createVisualOdometry.
/**
 * Creates the selected monocular plane visual odometry algorithm. Both selections share
 * an identical KLT tracker configuration, which is constructed once here instead of being
 * duplicated in each branch (as the original code did).
 *
 * @param whichAlg Selection index: 0 = plane-infinity model, 1 = overhead-plane model.
 * @return Configured monocular plane visual odometry.
 * @throws RuntimeException if {@code whichAlg} is not a known selection.
 */
private MonocularPlaneVisualOdometry<I> createVisualOdometry(int whichAlg) {
	// Validate the selection before doing any setup work
	if (whichAlg != 0 && whichAlg != 1)
		throw new RuntimeException("Unknown selection");

	Class derivType = GImageDerivativeOps.getDerivativeType(imageClass);

	// KLT pyramid tracker configuration common to both algorithms
	PkltConfig config = new PkltConfig();
	config.pyramidScaling = new int[] { 1, 2, 4, 8 };
	config.templateRadius = 3;
	ConfigGeneralDetector configDetector = new ConfigGeneralDetector(600, 3, 1);
	PointTracker<I> tracker = FactoryPointTracker.klt(config, configDetector, imageClass, derivType);

	if (whichAlg == 0) {
		return FactoryVisualOdometry.monoPlaneInfinity(75, 2, 1.5, 200, tracker, imageType);
	} else {
		double cellSize = 0.06;
		double inlierGroundTol = 1.5;
		return FactoryVisualOdometry.monoPlaneOverhead(cellSize, 25, 0.7, inlierGroundTol, 300, 2, 100, 0.5, 0.6, tracker, imageType);
	}
}
Aggregations