Usage of boofcv.abst.feature.detect.interest.ConfigFastHessian in the BoofCV project by lessthanoptimal:
the main method of the CompareConvertedDescriptionsApp class.
/**
 * Compares feature association quality between the original F64 SURF descriptor
 * and a converted S8 version of the same descriptor on a pair of example images.
 */
public static void main(String[] args) {
	// Input image pair used for the comparison
	String imagePathA = UtilIO.pathExample("stitch/kayak_01.jpg");
	String imagePathB = UtilIO.pathExample("stitch/kayak_02.jpg");

	// Fast-Hessian interest point detector shared by both descriptor variants
	InterestPointDetector<GrayF32> detector =
			FactoryInterestPoint.fastHessian(new ConfigFastHessian(1, 10, -1, 2, 9, 4, 4));

	// Original F64 SURF descriptor, and the same descriptor converted down to S8
	DescribeRegionPoint<GrayF32, TupleDesc_F64> describeOriginal =
			(DescribeRegionPoint) FactoryDescribeRegionPoint.surfStable(null, GrayF32.class);
	ConvertTupleDesc<TupleDesc_F64, TupleDesc_S8> converter =
			FactoryConvertTupleDesc.real_F64_S8(describeOriginal.createDescription().size());
	DescribeRegionPoint<GrayF32, TupleDesc_S8> describeConverted =
			new DescribeRegionPointConvert<>(describeOriginal, converter);

	// SAD association score for each descriptor type
	ScoreAssociation<TupleDesc_F64> scoreOriginal = FactoryAssociation.scoreSad(TupleDesc_F64.class);
	ScoreAssociation<TupleDesc_S8> scoreConverted = FactoryAssociation.scoreSad(TupleDesc_S8.class);

	BufferedImage imageA = UtilImageIO.loadImage(imagePathA);
	BufferedImage imageB = UtilImageIO.loadImage(imagePathB);

	// Visualize association results for both descriptor variants side by side
	visualize("Original", imageA, imageB, detector, describeOriginal, scoreOriginal);
	visualize("Modified", imageA, imageB, detector, describeConverted, scoreConverted);

	System.out.println("Done");
}
Usage of boofcv.abst.feature.detect.interest.ConfigFastHessian in the BoofCV project by lessthanoptimal:
the createSURF method of the ExamplePointFeatureTracker class.
/**
 * Creates a SURF feature tracker backed by a tuned Fast-Hessian detector.
 */
public void createSURF() {
	// Tune the Fast-Hessian detector: coarser sampling, wider non-max radius,
	// and a cap on the number of features found at each scale
	ConfigFastHessian config = new ConfigFastHessian();
	config.initialSampleSize = 2;
	config.extractRadius = 3;
	config.maxFeaturesPerScale = 250;

	tracker = FactoryPointTracker.dda_FH_SURF_Fast(config, null, null, imageType);
}
Usage of boofcv.abst.feature.detect.interest.ConfigFastHessian in the BoofCV project by lessthanoptimal:
the stitch method of the ExampleImageStitching class.
/**
 * Given two input images create and display an image where the two have been overlayed on top of each other.
 *
 * @param imageA    first input image
 * @param imageB    second input image
 * @param imageType gray image type used internally for detection
 */
public static <T extends ImageGray<T>> void stitch(BufferedImage imageA, BufferedImage imageB, Class<T> imageType) {
	// Convert both inputs into the single-band gray format the detector expects
	T grayA = ConvertBufferedImage.convertFromSingle(imageA, null, imageType);
	T grayB = ConvertBufferedImage.convertFromSingle(imageB, null, imageType);

	// Detect using the standard SURF feature descriptor and describer
	DetectDescribePoint detDesc =
			FactoryDetectDescribe.surfStable(new ConfigFastHessian(1, 2, 200, 1, 9, 4, 4), null, null, imageType);

	// Greedily associate SURF descriptors by Euclidean distance
	ScoreAssociation<BrightFeature> scorer = FactoryAssociation.scoreEuclidean(BrightFeature.class, true);
	AssociateDescription<BrightFeature> associate = FactoryAssociation.greedy(scorer, 2, true);

	// Fit a homography between the views with RANSAC; a homography works well
	// for rotations and distant objects
	ModelMatcher<Homography2D_F64, AssociatedPair> modelMatcher =
			FactoryMultiViewRobust.homographyRansac(null, new ConfigRansac(60, 3));

	Homography2D_F64 homography = computeTransform(grayA, grayB, detDesc, associate, modelMatcher);
	renderStitching(imageA, imageB, homography);
}
Usage of boofcv.abst.feature.detect.interest.ConfigFastHessian in the narchy project by automenta:
the computeMatches method of the ExampleStereoTwoViewsOneCamera class.
/**
 * Use the associate point feature example to create a list of {@link AssociatedPair} for use in computing the
 * fundamental matrix. Results are accumulated into {@code matchedFeatures}.
 */
public void computeMatches(GrayF32 left, GrayF32 right) {
	// SURF detector + descriptor (a SIFT-based DetectDescribePoint could be substituted here)
	DetectDescribePoint detDesc = FactoryDetectDescribe.surfStable(
			new ConfigFastHessian(1, 2, 0, 1, 9, 4, 4), null, null, GrayF32.class);

	// Greedy association by Euclidean distance with a 0.9 maximum error
	ScoreAssociation<BrightFeature> scorer = FactoryAssociation.scoreEuclidean(BrightFeature.class, true);
	AssociateDescription<BrightFeature> associate = FactoryAssociation.greedy(scorer, 0.9, true);

	ExampleAssociatePoints<GrayF32, BrightFeature> matcher =
			new ExampleAssociatePoints<>(detDesc, associate, GrayF32.class);
	matcher.associate(left, right);

	// Translate associated indexes into point pairs for fundamental-matrix estimation
	FastQueue<AssociatedIndex> matches = associate.getMatches();
	matchedFeatures.clear();
	for (int idx = 0; idx < matches.size; idx++) {
		AssociatedIndex match = matches.get(idx);
		matchedFeatures.add(new AssociatedPair(matcher.pointsA.get(match.src), matcher.pointsB.get(match.dst)));
	}
}
Usage of boofcv.abst.feature.detect.interest.ConfigFastHessian in the BoofCV project by lessthanoptimal:
the perform method of the VideoDetectInterestPoints class.
/**
 * Loads a video sequence, detects Fast-Hessian interest points in each frame,
 * and displays the result.
 *
 * @param fileName  path of the video file to process
 * @param imageType gray image type frames are decoded into
 * @param derivType derivative image type (currently unused by the active code path)
 */
public static <T extends ImageGray<T>, D extends ImageGray<D>> void perform(String fileName, Class<T> imageType, Class<D> derivType) {
	SimpleImageSequence<T> sequence =
			BoofVideoManager.loadManagerDefault().load(fileName, ImageType.single(imageType));

	int radius = 2;

	// Orientation estimator for detected points; if null, no orientation is computed
	OrientationImageAverage<T> orientation = null;
	orientation = FactoryOrientationAlgs.nogradient(1.0 / 2.0, radius, imageType);

	InterestPointDetector<T> detector;
	detector = FactoryInterestPoint.fastHessian(new ConfigFastHessian(1, 2, 100, 2, 9, 4, 4));

	VideoDetectInterestPoints<T> display = new VideoDetectInterestPoints<>(sequence, detector, orientation);
	display.process();
}
Aggregations