Use of boofcv.struct.geo.AssociatedPair in the BoofCV project by lessthanoptimal.
From the class CommonFundamentalChecks, method checkEpipolarMatrix.
/**
 * Runs the epipolar-matrix solver on several randomly generated point sets and verifies
 * that at least one returned hypothesis is a valid matrix (non-zero, near-zero determinant)
 * that is consistent with the stored observations.
 *
 * @param N number of associated pairs fed to the solver on each trial
 * @param isFundamental true to generate observations in pixel coordinates (fundamental),
 *                      false for normalized image coordinates (essential)
 */
public void checkEpipolarMatrix(int N, boolean isFundamental) {
	init(100, isFundamental);

	// run several trials with different inputs of size N
	for (int trial = 0; trial < 20; trial++) {
		List<AssociatedPair> observations = randomPairs(N);
		computeFundamental(observations, solutions);

		// At least one solution should have been found
		assertTrue(solutions.size() > 0);

		int numConsistent = 0;   // hypotheses matching every stored pair
		int numValidMatrix = 0;  // hypotheses passing the matrix sanity checks

		for (DMatrixRMaj F : solutions.toList()) {
			// normalize F to ensure a consistent scale
			CommonOps_DDRM.scale(1.0 / CommonOps_DDRM.elementMaxAbs(F), F);

			// sanity check, F is not zero
			if (NormOps_DDRM.normF(F) <= 0.1)
				continue;

			// the determinant should be zero for a valid epipolar matrix
			if (Math.abs(CommonOps_DDRM.det(F)) > zeroTol)
				continue;
			numValidMatrix++;

			// see if this hypothesis satisfies the epipolar constraint for all stored points
			boolean consistent = true;
			for (AssociatedPair p : this.pairs) {
				double residual = GeometryMath_F64.innerProd(p.p2, F, p.p1);
				if (Math.abs(residual) > zeroTol) {
					consistent = false;
					break;
				}
			}
			if (consistent)
				numConsistent++;
		}

		// At least one of them should be consistent with the original set of points
		assertTrue(numConsistent > 0);
		assertTrue(numValidMatrix > 0);
	}
}
Use of boofcv.struct.geo.AssociatedPair in the BoofCV project by lessthanoptimal.
From the class EpipolarTestSimulation, method init.
/**
 * Sets up the simulated two-view scene: defines the camera motion, generates random 3-D
 * points, and projects each point into both views to create associated observations.
 *
 * @param N number of 3-D points to generate
 * @param isFundamental true to convert observations into pixel coordinates using K,
 *                      false to leave them in normalized image coordinates
 */
public void init(int N, boolean isFundamental) {
	// define the camera's motion
	worldToCamera = new Se3_F64();
	worldToCamera.getR().set(ConvertRotation3D_F64.eulerToMatrix(EulerType.XYZ, 0.05, -0.03, 0.02, null));
	worldToCamera.getT().set(0.1, -0.1, 0.01);

	// randomly generate points in space
	worldPts = GeoTestingOps.randomPoints_F64(-1, 1, -1, 1, 2, 3, N, rand);

	pairs = new ArrayList<>();
	currentObs = new ArrayList<>();

	for (Point3D_F64 ptView1 : worldPts) {
		// transform the point into the second camera's reference frame
		Point3D_F64 ptView2 = SePointOps_F64.transform(worldToCamera, ptView1, null);

		// pinhole projection: divide by depth to get normalized image coordinates
		AssociatedPair observation = new AssociatedPair();
		observation.p1.set(ptView1.x / ptView1.z, ptView1.y / ptView1.z);
		observation.p2.set(ptView2.x / ptView2.z, ptView2.y / ptView2.z);
		pairs.add(observation);

		if (isFundamental) {
			// convert normalized coordinates into pixels with the calibration matrix
			GeometryMath_F64.mult(K, observation.p1, observation.p1);
			GeometryMath_F64.mult(K, observation.p2, observation.p2);
		}

		currentObs.add(observation.p2);
	}
}
Use of boofcv.struct.geo.AssociatedPair in the BoofCV project by lessthanoptimal.
From the class TestDistanceEpipolarConstraint, method basicCheck.
/**
 * Give it a perfect observation and a noisy one. Perfect should have a smaller distance
 */
@Test
public void basicCheck() {
	DistanceEpipolarConstraint alg = new DistanceEpipolarConstraint();
	alg.setModel(F);

	// distance for an observation that perfectly satisfies the epipolar constraint
	double distancePerfect = alg.computeDistance(new AssociatedPair(p1, p2));

	// corrupt the first observation and measure again
	p1.x += 0.2;
	p1.y += 0.2;
	double distanceNoisy = alg.computeDistance(new AssociatedPair(p1, p2));

	// the noisy observation should be at least an order of magnitude worse
	assertTrue(distancePerfect < distanceNoisy * 0.1);
}
Use of boofcv.struct.geo.AssociatedPair in the BoofCV project by lessthanoptimal.
From the class TestFundamentalResidualSampson, method checkChangeInCost.
/**
 * First check to see if the error is very low for perfect parameters. Then
 * give it incorrect parameters and make sure it is not zero.
 */
@Test
public void checkChangeInCost() {
	init(30, false);

	// compute true essential matrix from the simulated camera motion
	DMatrixRMaj E = MultiViewOps.createEssential(worldToCamera.getR(), worldToCamera.getT());

	FundamentalResidualSampson alg = new FundamentalResidualSampson();

	// see if it returns no error for the perfect model
	alg.setModel(E);
	for (AssociatedPair pair : pairs)
		assertEquals(0, alg.computeResidual(pair), 1e-8);

	// now perturb the model a bit and verify the residual is no longer zero
	E.data[1] += 0.1;
	alg.setModel(E);
	for (AssociatedPair pair : pairs)
		assertTrue(Math.abs(alg.computeResidual(pair)) > 1e-8);
}
Use of boofcv.struct.geo.AssociatedPair in the BoofCV project by lessthanoptimal.
From the class SparseFlowObjectTracker, method trackFeatures.
/**
 * Tracks features from the previous image into the current image. Tracks are created inside the specified
 * region in a grid pattern. Each grid point is tracked forward (previous -> current), then the result is
 * tracked backward (current -> previous); a track is kept only if the backward track lands close to the
 * starting point (forward-backward validation). Surviving tracks are appended to {@code pairs} as
 * (previous location, current location).
 *
 * @param input current input image, converted into the image pyramid {@code currentImage}
 * @param region rotated rectangle (center, size, angle) specifying where tracks are seeded
 */
private void trackFeatures(Image input, RectangleRotate_F64 region) {
pairs.reset();
currentImage.process(input);
// precompute the gradient for every layer of the current pyramid; KLT needs it
for (int i = 0; i < currentImage.getNumLayers(); i++) {
Image layer = currentImage.getLayer(i);
gradient.process(layer, currentDerivX[i], currentDerivY[i]);
}
// convert to float to avoid excessive conversions from double to float
float cx = (float) region.cx;
float cy = (float) region.cy;
float height = (float) (region.height);
float width = (float) (region.width);
float c = (float) Math.cos(region.theta);
float s = (float) Math.sin(region.theta);
// grid spacing as a fraction of the region's size (numberOfSamples points span the region)
float p = 1.0f / (config.numberOfSamples - 1);
for (int i = 0; i < config.numberOfSamples; i++) {
// sample coordinate in the region's local frame, centered at the region's middle
float y = (p * i - 0.5f) * height;
for (int j = 0; j < config.numberOfSamples; j++) {
float x = (p * j - 0.5f) * width;
// rotate the local grid point by theta and translate to image coordinates
float xx = cx + x * c - y * s;
float yy = cy + x * s + y * c;
// track in the forward direction
track.x = xx;
track.y = yy;
klt.setImage(previousImage, previousDerivX, previousDerivY);
if (!klt.setDescription(track)) {
continue;
}
klt.setImage(currentImage, currentDerivX, currentDerivY);
KltTrackFault fault = klt.track(track);
if (fault != KltTrackFault.SUCCESS) {
continue;
}
// location found in the current image
float xc = track.x;
float yc = track.y;
// validate by tracking backwards
if (!klt.setDescription(track)) {
continue;
}
klt.setImage(previousImage, previousDerivX, previousDerivY);
fault = klt.track(track);
if (fault != KltTrackFault.SUCCESS) {
continue;
}
// forward-backward error: squared distance between the seed point and where the
// backward track ended up
// NOTE(review): this is a SQUARED distance — confirm maximumErrorFB is expressed
// in squared pixels, not pixels
float error = UtilPoint2D_F32.distanceSq(track.x, track.y, xx, yy);
if (error > maximumErrorFB) {
continue;
}
// record the validated observation: p1 = previous location, p2 = current location
AssociatedPair a = pairs.grow();
a.p1.x = xx;
a.p1.y = yy;
a.p2.x = xc;
a.p2.y = yc;
}
}
}
Aggregations