Use of boofcv.struct.sfm.Stereo2D3D in project BoofCV by lessthanoptimal.
From the class TestPnPStereoDistanceReprojectionSq, method checkBehind.
public void checkBehind(double Tz, double Pz) {
    // Point location in the world frame
    Point3D_F64 X = new Point3D_F64(0.1, -0.04, Pz);

    DMatrixRMaj K_left = PerspectiveOps.calibrationMatrix(param.left, (DMatrixRMaj) null);
    DMatrixRMaj K_right = PerspectiveOps.calibrationMatrix(param.right, (DMatrixRMaj) null);

    // render the observed pixel in each camera
    Point2D_F64 obsLeft = PerspectiveOps.renderPixel(worldToLeft, K_left, X);
    Point2D_F64 obsRight = PerspectiveOps.renderPixel(worldToRight, K_right, X);

    // convert to normalized image coordinates
    PerspectiveOps.convertPixelToNorm(K_left, obsLeft, obsLeft);
    PerspectiveOps.convertPixelToNorm(K_right, obsRight, obsRight);

    PnPStereoDistanceReprojectionSq alg = new PnPStereoDistanceReprojectionSq();
    StereoParameters param = new StereoParameters(this.param.left, this.param.right, new Se3_F64());
    param.rightToLeft.getT().set(0.1, 0, Tz);
    alg.setStereoParameters(param);
    alg.setModel(new Se3_F64());

    double found = alg.computeDistance(new Stereo2D3D(obsLeft, obsRight, X));
    assertTrue(Double.MAX_VALUE == found);
}
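This check exercises the behind-camera guard: when the point ends up behind either camera (a negative Pz, or a Tz that places it behind the translated right camera), computeDistance returns Double.MAX_VALUE instead of a squared reprojection error. A minimal sketch of using that sentinel to pre-filter observations for a hypothesized pose follows; the helper name filterVisible and the maxError threshold are illustrative assumptions, not BoofCV API.

// Sketch only: drop observations the distance function flags as impossible for a
// hypothesized worldToLeft pose. Points behind a camera score Double.MAX_VALUE and
// therefore never pass the threshold. 'alg' is assumed to already have its
// StereoParameters set; names below are illustrative, not BoofCV API.
public static List<Stereo2D3D> filterVisible( PnPStereoDistanceReprojectionSq alg,
                                              Se3_F64 worldToLeft,
                                              List<Stereo2D3D> observations,
                                              double maxError ) {
    alg.setModel(worldToLeft);
    List<Stereo2D3D> visible = new ArrayList<>();
    for (Stereo2D3D o : observations) {
        if (alg.computeDistance(o) <= maxError)
            visible.add(o);
    }
    return visible;
}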
From the class TestPnPStereoJacobianRodrigues, method compareToNumerical.
private void compareToNumerical(double noise) {
    Se3_F64 worldToLeft = new Se3_F64();
    ConvertRotation3D_F64.eulerToMatrix(EulerType.XYZ, 0.1, 1, -0.2, worldToLeft.getR());
    worldToLeft.getT().set(-0.3, 0.4, 1);

    generateScene(numPoints, worldToLeft, false);
    addNoise(noise);

    PnPStereoJacobianRodrigues alg = new PnPStereoJacobianRodrigues();
    alg.setLeftToRight(leftToRight);
    alg.setObservations(pointPose);

    StereoPose storage = new StereoPose(new Se3_F64(), leftToRight);
    ResidualsCodecToMatrix<StereoPose, Stereo2D3D> func =
            new ResidualsCodecToMatrix<>(codec, new PnPStereoResidualReprojection(), storage);
    func.setObservations(pointPose);

    StereoPose pose = new StereoPose(worldToLeft, leftToRight);
    double[] param = new double[codec.getParamLength()];
    codec.encode(pose, param);

    // DerivativeChecker.jacobianPrint(func,alg,param,1e-6);
    assertTrue(DerivativeChecker.jacobian(func, alg, param, 1e-6));
}
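The test encodes the stereo pose into a Rodrigues parameter array and then asks DDogleg's DerivativeChecker to compare the analytic Jacobian against a numerically differentiated one. The commented-out jacobianPrint call is the usual way to diagnose a failure; here is a hedged sketch of that pattern, reusing the codec, func, and alg configured above (the 1e-6 tolerance is simply the value used in the test).

// Sketch: re-check the analytic Jacobian and, on failure, print the comparison so the
// mismatching entries can be localized. Assumes codec, func, alg, worldToLeft and
// leftToRight are set up exactly as in the test above.
double[] param = new double[codec.getParamLength()];
codec.encode(new StereoPose(worldToLeft, leftToRight), param);
if (!DerivativeChecker.jacobian(func, alg, param, 1e-6)) {
    DerivativeChecker.jacobianPrint(func, alg, param, 1e-6);
}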
From the class TestPnPStereoDistanceReprojectionSq, method checkErrorArray.
@Test
public void checkErrorArray() {
    DMatrixRMaj K_left = PerspectiveOps.calibrationMatrix(param.left, (DMatrixRMaj) null);
    DMatrixRMaj K_right = PerspectiveOps.calibrationMatrix(param.right, (DMatrixRMaj) null);

    PnPStereoDistanceReprojectionSq alg = new PnPStereoDistanceReprojectionSq();
    alg.setStereoParameters(param);
    alg.setModel(worldToLeft);

    int N = 10;
    double[] expected = new double[N * 4];
    List<Stereo2D3D> obs = new ArrayList<>();

    for (int i = 0; i < N; i++) {
        // Point location in the world frame
        Point3D_F64 X = new Point3D_F64(rand.nextGaussian(), rand.nextGaussian(), 2.3);

        // create a noisy observation in each camera
        Point2D_F64 obsLeft = PerspectiveOps.renderPixel(worldToLeft, K_left, X);
        obsLeft.x += rand.nextGaussian() * 0.05;
        obsLeft.y += rand.nextGaussian() * 0.05;
        Point2D_F64 obsRight = PerspectiveOps.renderPixel(worldToRight, K_right, X);
        obsRight.x += rand.nextGaussian() * 0.05;
        obsRight.y += rand.nextGaussian() * 0.05;

        // convert to normalized image coordinates
        PerspectiveOps.convertPixelToNorm(K_left, obsLeft, obsLeft);
        PerspectiveOps.convertPixelToNorm(K_right, obsRight, obsRight);

        Stereo2D3D p = new Stereo2D3D(obsLeft, obsRight, X);
        expected[i] = alg.computeDistance(p);
        obs.add(p);
    }

    double[] found = new double[N];
    alg.computeDistance(obs, found);

    for (int i = 0; i < N; i++)
        assertEquals(expected[i], found[i], 1e-8);
}
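Because the batch overload writes into a caller-supplied array, the same array can be reused while scoring many candidate poses, which is how robust estimators typically use this kind of distance function. A hedged sketch of that usage; hypotheses is an illustrative List<Se3_F64> of candidate worldToLeft transforms, not something defined in the test.

// Sketch only: reuse one error array across candidate poses to avoid reallocation.
// 'hypotheses' is an illustrative list; 'alg' and 'obs' are the objects from the test above.
double[] errors = new double[obs.size()];
for (Se3_F64 hypothesis : hypotheses) {
    alg.setModel(hypothesis);
    alg.computeDistance(obs, errors);
    // ... score or threshold 'errors' for this hypothesis ...
}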
From the class TestPnPStereoDistanceReprojectionSq, method checkErrorSingle.
@Test
public void checkErrorSingle() {
    // Point location in the world frame
    Point3D_F64 X = new Point3D_F64(0.1, -0.04, 2.3);

    DMatrixRMaj K_left = PerspectiveOps.calibrationMatrix(param.left, (DMatrixRMaj) null);
    DMatrixRMaj K_right = PerspectiveOps.calibrationMatrix(param.right, (DMatrixRMaj) null);

    // pixel errors added to each observation
    double deltaX0 = 0.1;
    double deltaY0 = -0.2;
    double deltaX1 = -0.3;
    double deltaY1 = 0.05;

    // create a noisy observation in each camera
    Point2D_F64 obsLeft = PerspectiveOps.renderPixel(worldToLeft, K_left, X);
    obsLeft.x += deltaX0;
    obsLeft.y += deltaY0;
    Point2D_F64 obsRight = PerspectiveOps.renderPixel(worldToRight, K_right, X);
    obsRight.x += deltaX1;
    obsRight.y += deltaY1;

    // convert to normalized image coordinates
    PerspectiveOps.convertPixelToNorm(K_left, obsLeft, obsLeft);
    PerspectiveOps.convertPixelToNorm(K_right, obsRight, obsRight);

    PnPStereoDistanceReprojectionSq alg = new PnPStereoDistanceReprojectionSq();
    alg.setStereoParameters(param);
    alg.setModel(worldToLeft);

    double found = alg.computeDistance(new Stereo2D3D(obsLeft, obsRight, X));
    double expected = deltaX0 * deltaX0 + deltaY0 * deltaY0 + deltaX1 * deltaX1 + deltaY1 * deltaY1;
    assertEquals(expected, found, 1e-8);
}
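Note that although the observations are handed over in normalized image coordinates, the distance is measured in pixel space: it is the sum of squared pixel residuals over both views, here 0.1² + 0.2² + 0.3² + 0.05² = 0.1425. A hedged sketch of deriving an inlier threshold from a per-axis pixel tolerance follows; tolPixels and maxError are illustrative names, and the factor of 4 simply bounds the four squared terms.

// Sketch only: turn a per-axis pixel tolerance into a threshold on the summed squared
// error returned by computeDistance(). 'alg', 'obsLeft', 'obsRight' and 'X' are the
// objects from the test above; the tolerance value is illustrative.
double tolPixels = 2.0;
double maxError = 4.0 * tolPixels * tolPixels; // four squared residual terms: (x, y) in left and right
boolean isInlier = alg.computeDistance(new Stereo2D3D(obsLeft, obsRight, X)) <= maxError;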
From the class VisOdomDualTrackPnP, method addNewTracks.
/**
 * Spawns tracks in each image and associates features together.
 */
private void addNewTracks() {
    trackerLeft.spawnTracks();
    trackerRight.spawnTracks();

    List<PointTrack> newLeft = trackerLeft.getNewTracks(null);
    List<PointTrack> newRight = trackerRight.getNewTracks(null);

    // get a list of new tracks and their descriptions
    addNewToList(inputLeft, newLeft, pointsLeft, descLeft);
    addNewToList(inputRight, newRight, pointsRight, descRight);

    // associate using L2R
    assocL2R.setSource(pointsLeft, descLeft);
    assocL2R.setDestination(pointsRight, descRight);
    assocL2R.associate();
    FastQueue<AssociatedIndex> matches = assocL2R.getMatches();

    // storage for the triangulated location in the camera frame
    Point3D_F64 cameraP3 = new Point3D_F64();

    for (int i = 0; i < matches.size; i++) {
        AssociatedIndex m = matches.get(i);

        PointTrack trackL = newLeft.get(m.src);
        PointTrack trackR = newRight.get(m.dst);

        // declare additional track information stored in each track. Tracks can be recycled so it
        // might not always need to be declared
        LeftTrackInfo infoLeft = trackL.getCookie();
        if (infoLeft == null)
            trackL.cookie = infoLeft = new LeftTrackInfo();

        RightTrackInfo infoRight = trackR.getCookie();
        if (infoRight == null)
            trackR.cookie = infoRight = new RightTrackInfo();

        Stereo2D3D p2d3d = infoLeft.location;

        // convert pixel observations into normalized image coordinates
        leftImageToNorm.compute(trackL.x, trackL.y, p2d3d.leftObs);
        rightImageToNorm.compute(trackR.x, trackR.y, p2d3d.rightObs);

        // triangulate 3D coordinate in the current camera frame
        if (triangulate.triangulate(p2d3d.leftObs, p2d3d.rightObs, leftToRight, cameraP3)) {
            // put the track into the current keyframe coordinate system
            SePointOps_F64.transform(currToKey, cameraP3, p2d3d.location);

            // save a reference to the matching track in the right camera frame
            infoLeft.right = trackR;
            infoLeft.lastConsistent = infoLeft.lastInlier = tick;
            infoRight.left = trackL;
        } else {
            // triangulation failed, drop track
            trackerLeft.dropTrack(trackL);
            // TODO need way to mark right tracks which are unassociated after this loop
            throw new RuntimeException("This special case needs to be handled!");
        }
    }

    // drop tracks that were not associated
    GrowQueue_I32 unassignedRight = assocL2R.getUnassociatedDestination();
    for (int i = 0; i < unassignedRight.size; i++) {
        int index = unassignedRight.get(i);
        // System.out.println("  unassigned right "+newRight.get(index).x+" "+newRight.get(index).y);
        trackerRight.dropTrack(newRight.get(index));
    }

    GrowQueue_I32 unassignedLeft = assocL2R.getUnassociatedSource();
    for (int i = 0; i < unassignedLeft.size; i++) {
        int index = unassignedLeft.get(i);
        trackerLeft.dropTrack(newLeft.get(index));
    }

    // System.out.println("Total left "+trackerLeft.getAllTracks(null).size()+" right "+trackerRight.getAllTracks(null).size());
    // System.out.println("Associated: "+matches.size+" new left "+newLeft.size()+" new right "+newRight.size());
    // System.out.println("New Tracks: Total: Left "+trackerLeft.getAllTracks(null).size()+" right "+
    //         trackerRight.getAllTracks(null).size());
    // List<PointTrack> temp = trackerLeft.getActiveTracks(null);
    // for( PointTrack t : temp ) {
    //     if( t.cookie == null )
    //         System.out.println("BUG!");
    // }
    // temp = trackerRight.getActiveTracks(null);
    // for( PointTrack t : temp ) {
    //     if( t.cookie == null )
    //         System.out.println("BUG!");
    // }
}
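The Stereo2D3D populated here is exactly what the distance function shown earlier consumes: leftObs and rightObs hold the normalized observations and location holds the triangulated point in keyframe coordinates. Below is a hedged sketch of how the per-track structures could be gathered and scored against a candidate keyframe-to-camera motion; distance and keyToCurr are illustrative names for a configured PnPStereoDistanceReprojectionSq and an Se3_F64 pose, not fields shown above.

// Sketch only: collect each left track's Stereo2D3D and evaluate it against a candidate
// key-frame-to-left-camera motion. 'distance' is assumed to be a
// PnPStereoDistanceReprojectionSq with its StereoParameters already set;
// 'keyToCurr' is an illustrative Se3_F64.
List<Stereo2D3D> obs = new ArrayList<>();
for (PointTrack t : trackerLeft.getActiveTracks(null)) {
    LeftTrackInfo info = t.getCookie();
    if (info != null)
        obs.add(info.location);
}
double[] errors = new double[obs.size()];
distance.setModel(keyToCurr);
distance.computeDistance(obs, errors); // squared pixel reprojection error per track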