Usage of georegression.struct.se.Se3_F64 in project BoofCV by lessthanoptimal.
Class TestRectifyCalibrated, method createPose:
private Se3_F64 createPose(double rotX, double rotY, double rotZ, double x, double y, double z) {
    Se3_F64 ret = new Se3_F64();

    // rotation from XYZ Euler angles, translation from (x, y, z)
    ConvertRotation3D_F64.eulerToMatrix(EulerType.XYZ, rotX, rotY, rotZ, ret.getR());
    ret.getT().set(x, y, z);

    return ret;
}
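As a quick illustration (not part of the BoofCV test itself), a pose built by createPose can be chained with another pose and applied to points using GeoRegression's own operations. The values and variable names below are made up for the example, and it assumes Se3_F64.concat applies the receiver first and the argument second:

// A minimal sketch, not from the BoofCV sources: chain two createPose() results
Se3_F64 aToB = createPose(0.01, -0.02, 0.03, 0.2, 0, 0);
Se3_F64 bToC = createPose(0, 0.05, 0, 0, 0.1, 0);
Se3_F64 aToC = aToB.concat(bToC, null);

// applying the compound motion should match applying the two poses in sequence
Point3D_F64 p = new Point3D_F64(0.5, -0.25, 3);
Point3D_F64 direct = SePointOps_F64.transform(aToC, p, null);
Point3D_F64 chained = SePointOps_F64.transform(bToC, SePointOps_F64.transform(aToB, p, null), null);
assertTrue(direct.distance(chained) < 1e-12);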
Usage of georegression.struct.se.Se3_F64 in project BoofCV by lessthanoptimal.
Class TestRectifyFundamental, method createScene:
public void createScene() {
    DMatrixRMaj K = new DMatrixRMaj(3, 3, true, 500, 0, 250, 0, 520, 270, 0, 0, 1);

    // define the camera's motion
    motion = new Se3_F64();
    motion.getR().set(ConvertRotation3D_F64.eulerToMatrix(EulerType.XYZ, -0.01, 0.1, 0.05, null));
    motion.getT().set(-0.5, 0.1, -0.05);

    DMatrixRMaj E = MultiViewOps.createEssential(motion.getR(), motion.getT());
    F = MultiViewOps.createFundamental(E, K);

    // randomly generate points in space
    List<Point3D_F64> worldPts = GeoTestingOps.randomPoints_F64(-1, 1, -1, 1, 2, 3, N, rand);

    // transform points into the second camera's reference frame and project into both views
    pairs = new ArrayList<>();
    for (Point3D_F64 p1 : worldPts) {
        Point3D_F64 p2 = SePointOps_F64.transform(motion, p1, null);

        AssociatedPair pair = new AssociatedPair();
        pair.p1.set(p1.x / p1.z, p1.y / p1.z);
        pair.p2.set(p2.x / p2.z, p2.y / p2.z);
        pairs.add(pair);

        // convert the normalized image coordinates into pixel coordinates
        GeometryMath_F64.mult(K, pair.p1, pair.p1);
        GeometryMath_F64.mult(K, pair.p2, pair.p2);
    }
}
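For context, here is a sketch (not in the original createScene) of how the generated scene could be sanity checked: every pair should satisfy the epipolar constraint p2' * F * p1 = 0 in pixel coordinates. It only relies on the pairs list and F built above plus plain matrix element access; the tolerance is illustrative.

// A minimal sketch checking p2' * F * p1 = 0 for every generated pair
for (AssociatedPair pair : pairs) {
    double[] a = {pair.p1.x, pair.p1.y, 1};
    double[] b = {pair.p2.x, pair.p2.y, 1};

    // evaluate b^T * F * a element by element
    double sum = 0;
    for (int row = 0; row < 3; row++) {
        for (int col = 0; col < 3; col++) {
            sum += b[row] * F.get(row, col) * a[col];
        }
    }
    assertEquals(0, sum, 1e-8);
}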
Usage of georegression.struct.se.Se3_F64 in project BoofCV by lessthanoptimal.
Class EpipolarTestSimulation, method init:
public void init(int N, boolean isFundamental) {
    // define the camera's motion
    worldToCamera = new Se3_F64();
    worldToCamera.getR().set(ConvertRotation3D_F64.eulerToMatrix(EulerType.XYZ, 0.05, -0.03, 0.02, null));
    worldToCamera.getT().set(0.1, -0.1, 0.01);

    // randomly generate points in space
    worldPts = GeoTestingOps.randomPoints_F64(-1, 1, -1, 1, 2, 3, N, rand);

    // transform points into the second camera's reference frame and project into both views
    pairs = new ArrayList<>();
    currentObs = new ArrayList<>();
    for (Point3D_F64 p1 : worldPts) {
        Point3D_F64 p2 = SePointOps_F64.transform(worldToCamera, p1, null);

        AssociatedPair pair = new AssociatedPair();
        pair.p1.set(p1.x / p1.z, p1.y / p1.z);
        pair.p2.set(p2.x / p2.z, p2.y / p2.z);
        pairs.add(pair);

        // for the fundamental matrix case, convert normalized coordinates into pixels
        if (isFundamental) {
            GeometryMath_F64.mult(K, pair.p1, pair.p1);
            GeometryMath_F64.mult(K, pair.p2, pair.p2);
        }

        currentObs.add(pair.p2);
    }
}
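A small illustrative sketch (not part of EpipolarTestSimulation): since worldToCamera is a rigid-body transform, inverting it maps a camera-frame point back onto the original world point. The Se3_F64.invert call and the tolerance are assumptions made for this example.

// A minimal sketch: round-trip a world point through worldToCamera and its inverse
Se3_F64 cameraToWorld = worldToCamera.invert(null);

Point3D_F64 world = worldPts.get(0);
Point3D_F64 inCamera = SePointOps_F64.transform(worldToCamera, world, null);
Point3D_F64 recovered = SePointOps_F64.transform(cameraToWorld, inCamera, null);

// the round trip should reproduce the original point up to floating point error
assertTrue(recovered.distance(world) < 1e-12);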
Usage of georegression.struct.se.Se3_F64 in project BoofCV by lessthanoptimal.
Class TestQuadPoseEstimator, method estimateP3P:
@Test
public void estimateP3P() {
    LensDistortionNarrowFOV distortion = createDistortion();

    // ground truth fiducial-to-camera pose
    Se3_F64 fiducialToCamera = new Se3_F64();
    fiducialToCamera.getT().set(0.2, -0.15, 2);
    ConvertRotation3D_F64.eulerToMatrix(EulerType.XYZ, 0.05, 0.015, 0.001, fiducialToCamera.R);

    QuadPoseEstimator alg = new QuadPoseEstimator(1e-8, 200);
    alg.setLensDistoriton(distortion);

    double r = 1.5;
    alg.setFiducial(r, -r, r, r, -r, r, -r, -r);

    // render pixel observations of the four fiducial corners
    WorldToCameraToPixel worldToPixel = PerspectiveOps.createWorldToPixel(distortion, fiducialToCamera);
    alg.listObs.add(worldToPixel.transform(alg.points.get(0).location));
    alg.listObs.add(worldToPixel.transform(alg.points.get(1).location));
    alg.listObs.add(worldToPixel.transform(alg.points.get(2).location));
    alg.listObs.add(worldToPixel.transform(alg.points.get(3).location));

    // convert the pixel observations into normalized image coordinates
    Point2Transform2_F64 pixelToNorm = distortion.undistort_F64(true, false);
    for (int i = 0; i < 4; i++) {
        Point2D_F64 pixel = alg.listObs.get(i);
        pixelToNorm.compute(pixel.x, pixel.y, alg.points.get(i).observation);
    }

    // each call should recover the ground truth pose with negligible error
    for (int i = 0; i < 4; i++) {
        alg.bestError = Double.MAX_VALUE;
        alg.estimateP3P(i);
        assertEquals(0, alg.bestError, 1e-6);

        assertTrue(alg.bestPose.T.distance(fiducialToCamera.T) < 1e-6);
        assertTrue(MatrixFeatures_DDRM.isIdentical(alg.bestPose.R, fiducialToCamera.R, 1e-6));
    }
}
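As a follow-on sketch (not in the original test), the estimated pose could also be checked by reprojection: build a second WorldToCameraToPixel from alg.bestPose and compare the reprojected corners against the stored observations. The pixel tolerance here is a guess, not a value from the BoofCV tests.

// Illustrative only; this would run after the loop above, using the last estimated pose
WorldToCameraToPixel estimatedToPixel = PerspectiveOps.createWorldToPixel(distortion, alg.bestPose);
for (int i = 0; i < 4; i++) {
    Point2D_F64 reprojected = estimatedToPixel.transform(alg.points.get(i).location);

    // reprojection error should be tiny since the pose matches the ground truth
    assertTrue(reprojected.distance(alg.listObs.get(i)) < 1e-3);
}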
Usage of georegression.struct.se.Se3_F64 in project BoofCV by lessthanoptimal.
Class TestQuadPoseEstimator, method basicTest:
@Test
public void basicTest() {
    LensDistortionNarrowFOV distortion = createDistortion();

    // ground truth world-to-camera pose
    Se3_F64 expectedW2C = new Se3_F64();
    expectedW2C.T.set(0.1, -0.05, 4);
    ConvertRotation3D_F64.eulerToMatrix(EulerType.XYZ, 0.03, 0, 0, expectedW2C.R);

    // project the square fiducial's corners into the camera view
    Quadrilateral_F64 quadPlane = new Quadrilateral_F64(-0.5, 0.5, 0.5, 0.5, 0.5, -0.5, -0.5, -0.5);
    Quadrilateral_F64 quadViewed = new Quadrilateral_F64();
    project(expectedW2C, quadPlane.a, quadViewed.a);
    project(expectedW2C, quadPlane.b, quadViewed.b);
    project(expectedW2C, quadPlane.c, quadViewed.c);
    project(expectedW2C, quadPlane.d, quadViewed.d);

    QuadPoseEstimator alg = new QuadPoseEstimator(1e-8, 200);
    alg.setFiducial(-0.5, 0.5, 0.5, 0.5, 0.5, -0.5, -0.5, -0.5);
    alg.setLensDistoriton(distortion);

    assertTrue(alg.process(quadViewed, false));

    // the estimated pose should match the ground truth
    Se3_F64 found = alg.getWorldToCamera();
    assertTrue(found.T.distance(expectedW2C.T) < 1e-6);
    assertTrue(MatrixFeatures_DDRM.isIdentical(found.R, expectedW2C.R, 1e-6));

    // map each observed corner from pixels back onto the marker and
    // check that it lands on the expected marker corner
    Point2D_F64 foundMarker = new Point2D_F64();
    Point2D_F64 pixel = new Point2D_F64();

    alg.normToPixel.compute(quadViewed.a.x, quadViewed.a.y, pixel);
    alg.pixelToMarker(pixel.x, pixel.y, foundMarker);
    assertTrue(foundMarker.distance(-0.5, 0.5) < 1e-6);

    alg.normToPixel.compute(quadViewed.b.x, quadViewed.b.y, pixel);
    alg.pixelToMarker(pixel.x, pixel.y, foundMarker);
    assertTrue(foundMarker.distance(0.5, 0.5) < 1e-6);

    alg.normToPixel.compute(quadViewed.c.x, quadViewed.c.y, pixel);
    alg.pixelToMarker(pixel.x, pixel.y, foundMarker);
    assertTrue(foundMarker.distance(0.5, -0.5) < 1e-6);

    alg.normToPixel.compute(quadViewed.d.x, quadViewed.d.y, pixel);
    alg.pixelToMarker(pixel.x, pixel.y, foundMarker);
    assertTrue(foundMarker.distance(-0.5, -0.5) < 1e-6);
}
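One more illustrative check that could be appended to basicTest: decode the recovered rotation back into XYZ Euler angles and compare them with the angles used to construct expectedW2C. The ConvertRotation3D_F64.matrixToEuler signature and the tolerance used here are assumptions for the sketch.

// Decode the found rotation matrix back into XYZ Euler angles (assumed API)
double[] euler = new double[3];
ConvertRotation3D_F64.matrixToEuler(found.R, EulerType.XYZ, euler);

assertEquals(0.03, euler[0], 1e-6);
assertEquals(0.0, euler[1], 1e-6);
assertEquals(0.0, euler[2], 1e-6);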