Usage example of boofcv.struct.feature.ScalePoint in the BoofCV project (by lessthanoptimal):
class TestCompleteSift, method basic.
/**
 * Sanity check: runs the algorithm on random data and verifies that the returned
 * structures are non-trivial and mutually consistent in size. Descriptor values
 * themselves are not validated here.
 */
@Test
public void basic() {
	GrayF32 image = new GrayF32(300, 290);
	GImageMiscOps.fillUniform(image, rand, 0, 200);

	CompleteSift alg = createAlg();
	alg.process(image);

	// SIFT descriptors are always 128 elements long
	assertEquals(128, alg.getDescriptorLength());

	FastQueue<ScalePoint> foundLocations = alg.getLocations();
	FastQueue<BrightFeature> foundDescriptions = alg.getDescriptions();
	GrowQueue_F64 foundOrientations = alg.getOrientations();

	// a reasonable number of features should be found, and every feature must have
	// exactly one location, one orientation, and one description
	assertTrue(foundOrientations.size > 10);
	assertEquals(foundOrientations.size, foundLocations.size);
	assertEquals(foundOrientations.size, foundDescriptions.size);
}
Usage example of boofcv.struct.feature.ScalePoint in the BoofCV project (by lessthanoptimal):
class ExampleFeatureSurf, method harder.
/**
 * Configured exactly the same as the easy example above, but requires a lot more code and a more
 * in-depth understanding of how SURF works and is configured. Instead of TupleDesc_F64,
 * SurfFeature/BrightFeature are computed here. They are almost the same as TupleDesc_F64, but
 * contain the Laplacian's sign, which can be used to speed up association. That is an example of
 * how using less generalized interfaces can improve performance.
 *
 * @param image Input image. NOTE(review): the Javadoc originally claimed other gray image types
 *              work too, but the signature hard-codes GrayF32 and the integral type is derived
 *              from GrayF32.class — confirm whether the parameter was meant to be generic.
 */
public static <II extends ImageGray<II>> void harder(GrayF32 image) {
	// SURF works off of integral images
	Class<II> integralType = GIntegralImageOps.getIntegralType(GrayF32.class);

	// define the feature detection algorithm
	NonMaxSuppression extractor = FactoryFeatureExtractor.nonmax(new ConfigExtract(2, 0, 5, true));
	FastHessianFeatureDetector<II> detector = new FastHessianFeatureDetector<>(extractor, 200, 2, 9, 4, 4, 6);

	// estimate orientation
	OrientationIntegral<II> orientation = FactoryOrientationAlgs.sliding_ii(null, integralType);

	DescribePointSurf<II> descriptor = FactoryDescribePointAlgs.<II>surfStability(null, integralType);

	// compute the integral image of 'image'
	II integral = GeneralizedImageOps.createSingleBand(integralType, image.width, image.height);
	GIntegralImageOps.transform(image, integral);

	// detect fast hessian features
	detector.detect(integral);

	// tell algorithms which image to process
	orientation.setImage(integral);
	descriptor.setImage(integral);

	List<ScalePoint> points = detector.getFoundPoints();
	List<BrightFeature> descriptions = new ArrayList<>();

	for (ScalePoint p : points) {
		// estimate orientation of this feature from its detected scale
		orientation.setObjectRadius(p.scale * BoofDefaults.SURF_SCALE_TO_RADIUS);
		double angle = orientation.compute(p.x, p.y);

		// extract the SURF description for this region
		BrightFeature desc = descriptor.createDescription();
		descriptor.describe(p.x, p.y, angle, p.scale, desc);

		// save everything for processing later on
		descriptions.add(desc);
	}

	System.out.println("Found Features: " + points.size());
	// BUG FIX: previously descriptions.get(0) was dereferenced unconditionally, throwing
	// IndexOutOfBoundsException when no features were detected
	if (!descriptions.isEmpty()) {
		System.out.println("First descriptor's first value: " + descriptions.get(0).value[0]);
	}
}
Usage example of boofcv.struct.feature.ScalePoint in the BoofCV project (by lessthanoptimal):
class ScaleSpacePointPanel, method setPoints.
/**
 * Replaces the currently displayed points with the provided set. Old point objects are
 * recycled through the 'unused' pool to avoid allocating on every update. The caller's
 * points are copied, never stored directly, so the caller retains ownership.
 *
 * <p>Synchronized: the point list is presumably shared with the rendering thread —
 * NOTE(review) confirm against the paint code.</p>
 *
 * @param points detected points to display
 */
public synchronized void setPoints(List<ScalePoint> points) {
	// return all currently displayed points to the recycle pool
	unused.addAll(this.points);
	this.points.clear();
	this.activeLevel = 0;
	for (ScalePoint p : points) {
		if (unused.isEmpty()) {
			// pool exhausted: store a defensive copy of the caller's point
			this.points.add(p.copy());
		} else {
			// BUG FIX: the recycled instance 'c' must be the one added. Previously the
			// caller's 'p' was added unconditionally, duplicating the entry in the
			// pool-empty branch and leaking the recycled object in this branch.
			ScalePoint c = unused.remove(unused.size() - 1);
			c.set(p);
			this.points.add(c);
		}
	}
}
Usage example of boofcv.struct.feature.ScalePoint in the BoofCV project (by lessthanoptimal):
class ScaleSpacePyramidPointPanel, method setPoints.
/**
 * Replaces the currently displayed points with the provided set and resets the view to
 * level 0. Old point objects are recycled through the 'unused' pool to avoid allocating
 * on every update. The caller's points are copied, never stored directly.
 *
 * <p>Synchronized: the point list is presumably shared with the rendering thread —
 * NOTE(review) confirm against the paint code.</p>
 *
 * @param points detected points to display
 */
public synchronized void setPoints(List<ScalePoint> points) {
	// return all currently displayed points to the recycle pool
	unused.addAll(this.points);
	this.points.clear();
	for (ScalePoint p : points) {
		if (unused.isEmpty()) {
			// pool exhausted: store a defensive copy of the caller's point
			this.points.add(p.copy());
		} else {
			// BUG FIX: the recycled instance 'c' must be the one added. Previously the
			// caller's 'p' was added unconditionally, duplicating the entry in the
			// pool-empty branch and leaking the recycled object in this branch.
			ScalePoint c = unused.remove(unused.size() - 1);
			c.set(p);
			this.points.add(c);
		}
	}
	setLevel(0);
}
Usage example of boofcv.struct.feature.ScalePoint in the BoofCV project (by lessthanoptimal):
class TestSiftDetector, method process.
/**
 * Tests the ability to detect a single square feature at multiple scales and color
 */
@Test
public void process() {
	final int centerX = 40, centerY = 42;
	SiftDetector alg = createDetector();

	for (int radius : new int[]{ 2, 5 }) {
		int squareWidth = radius * 2 + 1;
		for (boolean isWhite : new boolean[]{ true, false }) {
			// render one square, either bright-on-dark or dark-on-bright
			GrayF32 input = new GrayF32(80, 70);
			if (isWhite) {
				GImageMiscOps.fillRectangle(input, 200, centerX - radius, centerY - radius, squareWidth, squareWidth);
			} else {
				GImageMiscOps.fill(input, 200);
				GImageMiscOps.fillRectangle(input, 0, centerX - radius, centerY - radius, squareWidth, squareWidth);
			}

			alg.process(input);

			FastQueue<ScalePoint> detections = alg.getDetections();
			assertTrue(detections.size > 0);

			// look for a detection at the square's center with the expected scale and polarity
			boolean found = false;
			for (int i = 0; i < detections.size(); i++) {
				ScalePoint p = detections.get(i);
				if (p.distance(centerX, centerY) <= 0.2) {
					assertEquals(radius * 1.25, p.scale, 0.5);
					assertTrue(isWhite == p.white);
					found = true;
				}
			}
			assertTrue(found);
		}
	}
}
Aggregations