Use of boofcv.struct.feature.ScalePoint in project BoofCV by lessthanoptimal.
The class DetectDescribeSurfPlanar, method detect.
/**
 * Detects and describes features inside the provided images. All images are integral images.
 *
 * @param grayII Gray-scale integral image
 * @param colorII Color integral image
 */
public void detect(II grayII, Planar<II> colorII) {
    orientation.setImage(grayII);
    describe.setImage(grayII, colorII);

    descriptions.reset();
    featureAngles.reset();

    // detect features
    detector.detect(grayII);

    // describe the found interest points
    foundPoints = detector.getFoundPoints();
    for (int i = 0; i < foundPoints.size(); i++) {
        ScalePoint p = foundPoints.get(i);

        orientation.setObjectRadius(p.scale);
        double angle = orientation.compute(p.x, p.y);
        describe.describe(p.x, p.y, angle, p.scale, descriptions.grow());
        featureAngles.push(angle);
    }
}
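Both arguments to detect() are required to already be integral images, which is what lets the SURF detector and the describers above sample rectangular regions in constant time. As a self-contained refresher (a sketch with hypothetical names, independent of BoofCV's GIntegralImageOps and its border conventions): an integral image stores at each pixel the sum of everything above and to the left, so any box sum reduces to four lookups.

/** Minimal integral-image sketch; hypothetical helper, not BoofCV code. */
public class IntegralSketch {

    /** ii[y][x] = sum of img[0..y][0..x], inclusive. */
    public static double[][] transform(double[][] img) {
        int h = img.length, w = img[0].length;
        double[][] ii = new double[h][w];
        for (int y = 0; y < h; y++) {
            double rowSum = 0;
            for (int x = 0; x < w; x++) {
                rowSum += img[y][x];
                ii[y][x] = rowSum + (y > 0 ? ii[y - 1][x] : 0);
            }
        }
        return ii;
    }

    /** Sum of img over the inclusive rectangle [x0,x1] x [y0,y1] using four lookups. */
    public static double blockSum(double[][] ii, int x0, int y0, int x1, int y1) {
        double a = ii[y1][x1];
        double b = (x0 > 0) ? ii[y1][x0 - 1] : 0;
        double c = (y0 > 0) ? ii[y0 - 1][x1] : 0;
        double d = (x0 > 0 && y0 > 0) ? ii[y0 - 1][x0 - 1] : 0;
        return a - b - c + d;
    }
}

In the WrapDetectDescribeSurf snippet further down, GIntegralImageOps.transform plays the role of transform() here, producing the integral image that is then handed to the detector and describers.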
Use of boofcv.struct.feature.ScalePoint in project BoofCV by lessthanoptimal.
The class TestSiftDetector, method processFeatureCandidate_Shift.
/**
* The feature intensity is no longer symmetric. See if the interpolated peak moves in the expected direction
* away from the pixel level peak.
*/
@Test
public void processFeatureCandidate_Shift() {
    GrayF32 upper = new GrayF32(30, 40);
    GrayF32 current = new GrayF32(30, 40);
    GrayF32 lower = new GrayF32(30, 40);

    SiftDetector alg = createDetector();
    alg.pixelScaleToInput = 2.0;
    alg.sigmaLower = 4;
    alg.sigmaTarget = 5;
    alg.sigmaUpper = 6;
    alg.dogLower = lower;
    alg.dogTarget = current;
    alg.dogUpper = upper;
    alg.derivXX.setImage(current);
    alg.derivXY.setImage(current);
    alg.derivYY.setImage(current);

    int x = 15, y = 16;
    for (float sign : new float[] { -1, 1 }) {
        alg.detections.reset();

        current.set(x, y - 1, sign * 90);
        current.set(x, y, sign * 100);
        current.set(x, y + 1, sign * 80);
        current.set(x - 1, y, sign * 90);
        current.set(x + 1, y, sign * 80);
        upper.set(x, y, sign * 80);
        lower.set(x, y, sign * 90);

        alg.processFeatureCandidate(15, 16, sign * 100, sign > 0);

        ScalePoint p = alg.getDetections().get(0);

        // make sure it is close
        assertTrue(Math.abs(x * 2 - p.x) < 2);
        assertTrue(Math.abs(y * 2 - p.y) < 2);
        assertTrue(Math.abs(5 - p.scale) < 2);

        // see if it's shifted in the correct direction
        assertTrue(x * 2 > p.x);
        assertTrue(y * 2 > p.y);
        assertTrue(5 > p.scale);

        // do a test just for scale since the code branches depending on the sign
        upper.set(x, y, sign * 90);
        lower.set(x, y, sign * 80);
        alg.detections.reset();
        alg.processFeatureCandidate(15, 16, sign * 100, sign > 0);
        // the detections queue recycles its elements, so 'p' now refers to the updated detection
        assertTrue(Math.abs(5 - p.scale) < 2);
        assertTrue(5 < p.scale);
    }
}
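The samples planted around (x, y) are deliberately asymmetric: 90 on one side of the pixel peak and 80 on the other. Fitting a parabola through three such samples pulls the interpolated peak toward the larger neighbor, which is what the directional assertions verify. A minimal sketch of that three-point fit with the test's numbers (hypothetical class; BoofCV's polyPeak may differ in details such as clamping, this is just the standard formula):

/** Hypothetical sketch of a three-sample parabola peak, mirroring what the test relies on. */
public class PolyPeakSketch {

    /** Offset of the parabola peak through (-1, v0), (0, v1), (1, v2), relative to the center sample. */
    public static double polyPeak(double v0, double v1, double v2) {
        return 0.5 * (v0 - v2) / (v0 - 2 * v1 + v2);
    }

    public static void main(String[] args) {
        // same samples as the test: 90 at (x, y - 1), 100 at (x, y), 80 at (x, y + 1)
        double offset = polyPeak(90, 100, 80);
        System.out.println(offset); // -0.1666..., pulled toward the larger neighbor
    }
}

With pixelScaleToInput = 2, the interpolated y becomes roughly 2 * (16 - 1/6) ≈ 31.67, slightly below 2 * y = 32, which is exactly what assertTrue(y * 2 > p.y) checks; the same asymmetry between the lower and upper DoG images drives the scale assertions.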
Use of boofcv.struct.feature.ScalePoint in project BoofCV by lessthanoptimal.
The class WrapDetectDescribeSurf, method detect.
@Override
public void detect(T input) {
    if (ii != null) {
        ii.reshape(input.width, input.height);
    }

    // compute integral image
    ii = GIntegralImageOps.transform(input, ii);
    orientation.setImage(ii);
    describe.setImage(ii);

    features.reset();
    featureAngles.reset();

    // detect features
    detector.detect(ii);

    // describe the found interest points
    foundPoints = detector.getFoundPoints();
    for (int i = 0; i < foundPoints.size(); i++) {
        ScalePoint p = foundPoints.get(i);

        double radius = p.scale * BoofDefaults.SURF_SCALE_TO_RADIUS;
        orientation.setObjectRadius(radius);
        double angle = orientation.compute(p.x, p.y);
        describe.describe(p.x, p.y, angle, p.scale, features.grow());
        featureAngles.push(angle);
    }
}
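This wrapper is normally driven through BoofCV's DetectDescribePoint interface rather than called directly. The hedged sketch below shows one way a caller might pull the results back out after detect(); the class and method names I introduce (ConsumeDetections, locations) are hypothetical, and while detect, getNumberOfFeatures, getLocation and getDescription follow the interface as used in BoofCV's own examples, exact generics and signatures vary between releases.

import boofcv.abst.feature.detdesc.DetectDescribePoint;
import boofcv.struct.feature.TupleDesc_F64;
import boofcv.struct.image.GrayF32;
import georegression.struct.point.Point2D_F64;

import java.util.ArrayList;
import java.util.List;

/** Hypothetical consumer of a SURF detect-describe wrapper. */
public class ConsumeDetections {

    /** Runs detection once and copies out every feature location. */
    public static List<Point2D_F64> locations(DetectDescribePoint<GrayF32, TupleDesc_F64> detDesc,
                                              GrayF32 image) {
        detDesc.detect(image); // runs the integral-image / detect / describe pipeline shown above
        List<Point2D_F64> out = new ArrayList<>();
        for (int i = 0; i < detDesc.getNumberOfFeatures(); i++) {
            Point2D_F64 loc = detDesc.getLocation(i);
            out.add(new Point2D_F64(loc.x, loc.y));
            TupleDesc_F64 desc = detDesc.getDescription(i); // 64-element SURF descriptor
            // ... hand 'desc' to an associator, nearest-neighbor index, etc.
        }
        return out;
    }
}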
Use of boofcv.struct.feature.ScalePoint in project BoofCV by lessthanoptimal.
The class FeaturePyramid, method findLocalScaleSpaceMax.
/**
* Searches the pyramid layers up and down to see if the found 2D features are also scale space maximums.
*/
protected void findLocalScaleSpaceMax(PyramidFloat<T> ss, int layerID) {
    int index0 = spaceIndex;
    int index1 = (spaceIndex + 1) % 3;
    int index2 = (spaceIndex + 2) % 3;

    List<Point2D_I16> candidates = maximums[index1];
    ImageBorder_F32 inten0 = (ImageBorder_F32) FactoryImageBorderAlgs.value(intensities[index0], 0);
    GrayF32 inten1 = intensities[index1];
    ImageBorder_F32 inten2 = (ImageBorder_F32) FactoryImageBorderAlgs.value(intensities[index2], 0);

    float scale0 = (float) ss.scale[layerID - 1];
    float scale1 = (float) ss.scale[layerID];
    float scale2 = (float) ss.scale[layerID + 1];

    float sigma0 = (float) ss.getSigma(layerID - 1);
    float sigma1 = (float) ss.getSigma(layerID);
    float sigma2 = (float) ss.getSigma(layerID + 1);

    // not sure if this is the correct way to handle the change in scale
    float ss0 = (float) (Math.pow(sigma0, scalePower) / scale0);
    float ss1 = (float) (Math.pow(sigma1, scalePower) / scale1);
    float ss2 = (float) (Math.pow(sigma2, scalePower) / scale2);

    for (Point2D_I16 c : candidates) {
        float val = ss1 * inten1.get(c.x, c.y);

        // find pixel location in each image's local coordinate
        int x0 = (int) (c.x * scale1 / scale0);
        int y0 = (int) (c.y * scale1 / scale0);
        int x2 = (int) (c.x * scale1 / scale2);
        int y2 = (int) (c.y * scale1 / scale2);

        if (checkMax(inten0, val / ss0, x0, y0) && checkMax(inten2, val / ss2, x2, y2)) {
            // put features into the scale of the upper image
            foundPoints.add(new ScalePoint(c.x * scale1, c.y * scale1, sigma1));
        }
    }
}
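Two details carry this method: each layer's intensity is weighted by Math.pow(sigma, scalePower) / scale before layers are compared, and the candidate's pixel coordinate has to be remapped into the neighboring layers' sampling grids. The self-contained sketch below (hypothetical names, plain float arrays instead of BoofCV images) restates those pieces plus a 3x3 neighborhood check in the spirit of checkMax; the zero-valued border mirrors FactoryImageBorderAlgs.value(..., 0).

/** Hypothetical sketch of the scale-space comparison in findLocalScaleSpaceMax. */
public class ScaleSpaceMaxSketch {

    /** Weight for a layer's intensity: sigma^p compensates for blur, dividing by scale for subsampling. */
    static float normalization(double sigma, double scale, double scalePower) {
        return (float) (Math.pow(sigma, scalePower) / scale);
    }

    /** Maps a pixel coordinate from the layer sampled at scaleFrom into the layer sampled at scaleTo. */
    static int mapCoordinate(int x, double scaleFrom, double scaleTo) {
        return (int) (x * scaleFrom / scaleTo);
    }

    /** True if value beats every sample in the 3x3 neighborhood of (cx, cy); out-of-bounds samples read as 0. */
    static boolean checkMax(float[][] layer, float value, int cx, int cy) {
        for (int y = cy - 1; y <= cy + 1; y++) {
            for (int x = cx - 1; x <= cx + 1; x++) {
                boolean inside = y >= 0 && y < layer.length && x >= 0 && x < layer[0].length;
                float sample = inside ? layer[y][x] : 0;
                if (sample >= value)
                    return false;
            }
        }
        return true;
    }
}

A candidate in the middle layer survives when its normalized value, divided by the neighboring layer's own normalization, still beats that layer's raw intensities around the remapped point, once for the layer below and once for the layer above.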
Use of boofcv.struct.feature.ScalePoint in project BoofCV by lessthanoptimal.
The class SiftDetector, method processFeatureCandidate.
/**
 * Examines a local spatial extremum and interpolates its coordinates using a quadratic function. The very first
 * thing it does is check whether the feature is really an edge/false positive. After that it interpolates
 * the coordinate along each axis independently using a quadratic function. The resulting coordinate will be
 * in the input image's coordinate system.
 *
 * @param x x-coordinate of extremum
 * @param y y-coordinate of extremum
 * @param value value of the extremum
 * @param maximum true if it was a maximum
 */
protected void processFeatureCandidate(int x, int y, float value, boolean maximum) {
    // suppress response along edges
    if (isEdge(x, y))
        return;

    // Estimate the scale and 2D point by fitting 2nd order polynomials
    // This is different from the original paper
    float signAdj = maximum ? 1 : -1;
    value *= signAdj;
    float x0 = dogTarget.unsafe_get(x - 1, y) * signAdj;
    float x2 = dogTarget.unsafe_get(x + 1, y) * signAdj;
    float y0 = dogTarget.unsafe_get(x, y - 1) * signAdj;
    float y2 = dogTarget.unsafe_get(x, y + 1) * signAdj;
    float s0 = dogLower.unsafe_get(x, y) * signAdj;
    float s2 = dogUpper.unsafe_get(x, y) * signAdj;

    ScalePoint p = detections.grow();

    // Compute the interpolated coordinate of the point in the original image coordinates
    p.x = pixelScaleToInput * (x + polyPeak(x0, value, x2));
    p.y = pixelScaleToInput * (y + polyPeak(y0, value, y2));

    // find the peak, then linearly interpolate between the two appropriate sigmas
    // scaled from -1 to 1
    double sigmaInterp = polyPeak(s0, value, s2);
    if (sigmaInterp < 0) {
        p.scale = sigmaLower * (-sigmaInterp) + (1 + sigmaInterp) * sigmaTarget;
    } else {
        p.scale = sigmaUpper * sigmaInterp + (1 - sigmaInterp) * sigmaTarget;
    }

    // a maximum corresponds to a dark object and a minimum to a whiter object
    p.white = !maximum;

    handleDetection(p);
}
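The scale branch is the least obvious step: polyPeak over (dogLower, dogTarget, dogUpper) yields an offset, and the detection's scale is a linear blend between sigmaTarget and whichever neighboring sigma the offset points toward. The sketch below (hypothetical class, not the BoofCV implementation) reproduces that blend with the numbers from processFeatureCandidate_Shift above, where sigmaLower = 4, sigmaTarget = 5, sigmaUpper = 6.

/** Hypothetical sketch of the sigma interpolation performed at the end of processFeatureCandidate. */
public class SigmaInterpSketch {

    /** Three-point parabola fit: offset of the peak relative to the center sample. */
    static double polyPeak(double v0, double v1, double v2) {
        return 0.5 * (v0 - v2) / (v0 - 2 * v1 + v2);
    }

    /** Blends the target sigma with the lower or upper sigma, depending on the sign of the offset. */
    static double interpolateSigma(double offset, double sigmaLower, double sigmaTarget, double sigmaUpper) {
        return offset < 0
                ? sigmaLower * (-offset) + (1 + offset) * sigmaTarget
                : sigmaUpper * offset + (1 - offset) * sigmaTarget;
    }

    public static void main(String[] args) {
        // lower/upper DoG samples of 90/80 push the scale below the target sigma ...
        double down = interpolateSigma(polyPeak(90, 100, 80), 4, 5, 6); // ~4.83
        // ... and swapping them (80/90) pushes it above, matching the test's two scale assertions
        double up = interpolateSigma(polyPeak(80, 100, 90), 4, 5, 6);   // ~5.17
        System.out.println(down + " " + up);
    }
}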