Example usage of boofcv.abst.feature.detect.extract.ConfigExtract in the BoofCV project (by lessthanoptimal),
from the class ExampleNonMaximumSupression, method renderNonMax:
/**
 * Detects local maximums in a feature intensity image using non-maximum suppression and
 * renders each detection as a circle on top of the colorized intensity image.
 *
 * @param intensity Feature intensity image. Not modified.
 * @param radius Radius used both for the suppression region and the rendered circles.
 * @param threshold Minimum intensity value a local maximum must exceed to be accepted.
 * @return A new image showing the colorized intensity with detected maximums drawn on top.
 */
public static BufferedImage renderNonMax(GrayF32 intensity, int radius, float threshold) {
	// Create and configure the feature detector
	NonMaxSuppression nonmax = FactoryFeatureExtractor.nonmax(new ConfigExtract(radius, threshold));

	// We will only be searching for the maximums. Other variants will look for minimums or will
	// exclude previous candidate detections from being detected twice
	QueueCorner maximums = new QueueCorner();
	nonmax.process(intensity, null, null, null, maximums);

	// Visualize the intensity image
	BufferedImage output = new BufferedImage(intensity.width, intensity.height, BufferedImage.TYPE_INT_RGB);
	VisualizeImageData.colorizeSign(intensity, output, -1);

	// render each maximum with a circle
	Graphics2D g2 = output.createGraphics();
	g2.setColor(Color.blue);
	for (int i = 0; i < maximums.size(); i++) {
		Point2D_I16 c = maximums.get(i);
		VisualizeFeatures.drawCircle(g2, c.x, c.y, radius);
	}
	// Release the graphics context's native resources now that rendering is complete
	g2.dispose();

	return output;
}
Example usage of boofcv.abst.feature.detect.extract.ConfigExtract in the BoofCV project (by lessthanoptimal),
from the class ExampleFeatureSurf, method harder:
/**
 * Configured exactly the same as the easy example above, but requires a lot more code and a more in-depth
 * understanding of how SURF works and is configured. Instead of TupleDesc_F64, SurfFeature are computed in
 * this case. They are almost the same as TupleDesc_F64, but contain the Laplacian's sign which can be used
 * to speed up association. That is an example of how using less generalized interfaces can improve performance.
 *
 * @param image Input image. This overload takes GrayF32; the same technique applies to other
 *              single-band types (e.g. GrayU8) by changing the integral-type lookup accordingly.
 */
public static <II extends ImageGray<II>> void harder(GrayF32 image) {
	// SURF works off of integral images
	Class<II> integralType = GIntegralImageOps.getIntegralType(GrayF32.class);

	// define the feature detection algorithm
	NonMaxSuppression extractor = FactoryFeatureExtractor.nonmax(new ConfigExtract(2, 0, 5, true));
	FastHessianFeatureDetector<II> detector = new FastHessianFeatureDetector<>(extractor, 200, 2, 9, 4, 4, 6);

	// estimate orientation (null = default algorithm configuration)
	OrientationIntegral<II> orientation = FactoryOrientationAlgs.sliding_ii(null, integralType);
	DescribePointSurf<II> descriptor = FactoryDescribePointAlgs.<II>surfStability(null, integralType);

	// compute the integral image of 'image'
	II integral = GeneralizedImageOps.createSingleBand(integralType, image.width, image.height);
	GIntegralImageOps.transform(image, integral);

	// detect fast hessian features
	detector.detect(integral);
	// tell algorithms which image to process
	orientation.setImage(integral);
	descriptor.setImage(integral);

	List<ScalePoint> points = detector.getFoundPoints();
	List<BrightFeature> descriptions = new ArrayList<>();
	for (ScalePoint p : points) {
		// estimate orientation
		orientation.setObjectRadius(p.scale * BoofDefaults.SURF_SCALE_TO_RADIUS);
		double angle = orientation.compute(p.x, p.y);
		// extract the SURF description for this region
		BrightFeature desc = descriptor.createDescription();
		descriptor.describe(p.x, p.y, angle, p.scale, desc);
		// save everything for processing later on
		descriptions.add(desc);
	}

	System.out.println("Found Features: " + points.size());
	// Guard against an empty detection set so the demo doesn't throw IndexOutOfBoundsException
	if (!descriptions.isEmpty()) {
		System.out.println("First descriptor's first value: " + descriptions.get(0).value[0]);
	}
}
Example usage of boofcv.abst.feature.detect.extract.ConfigExtract in the BoofCV project (by lessthanoptimal),
from the class TestSiftDetector, method createDetector:
/**
 * Builds the SIFT detector instance exercised by these tests, using a fixed
 * scale-space configuration and a capped non-maximum suppression extractor.
 */
private SiftDetector createDetector() {
	SiftScaleSpace scaleSpace = new SiftScaleSpace(-1, 5, 3, 1.6);
	NonMaxSuppression suppression = FactoryFeatureExtractor.nonmax(new ConfigExtract(1, 0, 1, true, true, true));
	NonMaxLimiter capped = new NonMaxLimiter(suppression, 1000);
	return new SiftDetector(scaleSpace, 10, capped);
}
Example usage of boofcv.abst.feature.detect.extract.ConfigExtract in the BoofCV project (by lessthanoptimal),
from the class TestHoughTransformLinePolar, method obviousLines:
/**
* See if it can detect an obvious line in the image
*/
@Test
public void obviousLines() {
GrayU8 image = new GrayU8(width, height);
for (int i = 0; i < height; i++) {
image.set(5, i, 1);
}
NonMaxSuppression extractor = FactoryFeatureExtractor.nonmax(new ConfigExtract(4, 5, 0, true));
HoughTransformLinePolar alg = new HoughTransformLinePolar(extractor, 40, 180);
alg.transform(image);
FastQueue<LineParametric2D_F32> lines = alg.extractLines();
assertTrue(lines.size() > 0);
for (int i = 0; i < lines.size(); i++) {
LineParametric2D_F32 l = lines.get(i);
assertEquals(l.p.x, 5, 0.1);
assertEquals(Math.abs(l.slope.x), 0, 1e-4);
assertEquals(Math.abs(l.slope.y), 1, 0.1);
}
}
Example usage of boofcv.abst.feature.detect.extract.ConfigExtract in the BoofCV project (by lessthanoptimal),
from the class GeneralTemplateMatchTests, method checkExpected:
/**
 * Asserts that each expected point corresponds to exactly one local maximum
 * (within one pixel, after border offset) in the algorithm's intensity image.
 */
private void checkExpected(Point2D_I32... points) {
	// I'm being lazy, update this in the future
	assertFalse(alg.isBorderProcessed());

	// only process the regions which are not considered the border
	int borderX = alg.getBorderX0();
	int borderY = alg.getBorderY0();

	// solutions should be local maximums
	NonMaxSuppression extractor = FactoryFeatureExtractor.nonmax(new ConfigExtract(2, -Float.MAX_VALUE, 0, true));
	QueueCorner found = new QueueCorner(10);
	extractor.process(alg.getIntensity(), null, null, null, found);
	assertTrue(found.size >= points.length);

	// each expected point must be matched by exactly one detection
	for (Point2D_I32 expected : points) {
		int hits = 0;
		for (Point2D_I16 candidate : found.toList()) {
			double dist = UtilPoint2D_F64.distance(candidate.x - borderX, candidate.y - borderY, expected.x, expected.y);
			if (dist <= 1) {
				hits++;
			}
		}
		assertEquals(1, hits);
	}
}
End of aggregated usage examples.