Use of georegression.struct.point.Point4D_F64 in project BoofCV by lessthanoptimal.
The class CommonTriangulationChecks, method convertH:
protected Point4D_F64 convertH(Point3D_F64 X) {
    // Pick a random, non-zero scale factor
    double scale = rand.nextGaussian();
    if (Math.abs(scale) < 1e-5)
        scale = 0.001;

    // Scale every component and store the scale in w. The result is the same 3D point
    // expressed in homogeneous coordinates
    Point4D_F64 P = new Point4D_F64();
    P.x = X.x*scale;
    P.y = X.y*scale;
    P.z = X.z*scale;
    P.w = scale;

    return P;
}
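A brief sketch (not part of the BoofCV test class) of why the random scale is harmless: a homogeneous point is only defined up to scale, so dividing by w recovers the original Euclidean coordinates.

// Sketch: any non-zero scale produces an equivalent homogeneous point
Point3D_F64 X = new Point3D_F64(1.0, 2.0, 3.0);
double scale = 0.7;
Point4D_F64 P = new Point4D_F64(X.x*scale, X.y*scale, X.z*scale, scale);
// Dividing by w recovers the original coordinates
Point3D_F64 recovered = new Point3D_F64(P.x/P.w, P.y/P.w, P.z/P.w);
assert recovered.distance(X) < 1e-8;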
Use of georegression.struct.point.Point4D_F64 in project BoofCV by lessthanoptimal.
The class TestTriangulateProjectiveLinearDLT, method triangulate_projective_noise:
/**
 * Add a tiny bit of noise and see if it blows up
 */
@Test
void triangulate_projective_noise() {
    createScene();

    TriangulateProjectiveLinearDLT alg = new TriangulateProjectiveLinearDLT();

    Point4D_F64 foundX = new Point4D_F64();

    // Perturb one observation slightly
    obsPixels.get(0).x += 0.01;
    obsPixels.get(0).y -= 0.01;

    alg.triangulate(obsPixels, cameraMatrices, foundX);

    // Project the found coordinate back on to each image
    Point2D_F64 foundPixel = new Point2D_F64();
    for (int i = 0; i < cameraMatrices.size(); i++) {
        DMatrixRMaj P = cameraMatrices.get(i);
        Point2D_F64 expected = obsPixels.get(i);

        GeometryMath_F64.mult(P, foundX, foundPixel);

        assertEquals(0, expected.distance(foundPixel), 0.03);
    }
}
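For reference, this is roughly what GeometryMath_F64.mult computes for a 3x4 camera matrix and a homogeneous point: the product P*X followed by division by the third row. The helper below is a hypothetical illustration, not a BoofCV method.

// Hypothetical helper illustrating the projection used in the test above: pixel ~ P*X
static void projectHomogeneous( DMatrixRMaj P, Point4D_F64 X, Point2D_F64 pixel ) {
    double u = P.get(0, 0)*X.x + P.get(0, 1)*X.y + P.get(0, 2)*X.z + P.get(0, 3)*X.w;
    double v = P.get(1, 0)*X.x + P.get(1, 1)*X.y + P.get(1, 2)*X.z + P.get(1, 3)*X.w;
    double w = P.get(2, 0)*X.x + P.get(2, 1)*X.y + P.get(2, 2)*X.z + P.get(2, 3)*X.w;
    // Divide by the third row to go from homogeneous image coordinates to pixels
    pixel.x = u/w;
    pixel.y = v/w;
}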
Use of georegression.struct.point.Point4D_F64 in project BoofCV by lessthanoptimal.
The class ExampleMultiViewSparseReconstruction, method visualizeSparseCloud:
/**
 * To visualize the results we will render a sparse point cloud along with the location of each camera in the
 * scene.
 */
public void visualizeSparseCloud() {
    checkTrue(scene.isHomogenous());
    List<Point3D_F64> cloudXyz = new ArrayList<>();
    Point4D_F64 world = new Point4D_F64();

    // NOTE: By default the colors found below are not used. Look below to see why and how to turn them on.
    //
    // Colorize the cloud by reprojecting the images. The math is straightforward, but there's a lot of
    // bookkeeping that needs to be done due to the scene data structure. A class is provided to make this
    // process easy.
    var imageLookup = new LookUpImageFilesByIndex(imageFiles);
    var colorize = new ColorizeMultiViewStereoResults<>(new LookUpColorRgbFormats.PL_U8(), imageLookup);

    DogArray_I32 rgb = new DogArray_I32();
    rgb.resize(scene.points.size);
    colorize.processScenePoints(scene,
            (viewIdx) -> viewIdx + "", // String encodes the image's index
            (pointIdx, r, g, b) -> rgb.set(pointIdx, (r << 16) | (g << 8) | b)); // Assign the RGB color

    // Convert the structure into regular 3D points from homogeneous coordinates
    for (int i = 0; i < scene.points.size; i++) {
        scene.points.get(i).get(world);
        // A point at infinity can't be converted. Don't skip it, or the cloud and the color
        // array would be out of sync. Let's just throw it far far away then.
        if (world.w == 0.0)
            cloudXyz.add(new Point3D_F64(0, 0, Double.MAX_VALUE));
        else
            cloudXyz.add(new Point3D_F64(world.x/world.w, world.y/world.w, world.z/world.w));
    }

    PointCloudViewer viewer = VisualizeData.createPointCloudViewer();
    viewer.setFog(true);
    // We just did a bunch of work to look up the true color of points, however for sparse data it's easy to see
    // the structure with pseudo color. Comment out the line below to see the true color.
    viewer.setColorizer(new TwoAxisRgbPlane.Z_XY(1.0).fperiod(40));
    viewer.setDotSize(1);
    viewer.setTranslationStep(0.15);
    viewer.addCloud((idx, p) -> p.setTo(cloudXyz.get(idx)), rgb::get, rgb.size);
    viewer.setCameraHFov(UtilAngle.radian(60));

    SwingUtilities.invokeLater(() -> {
        // Show where the cameras are
        BoofSwingUtil.visualizeCameras(scene, viewer);

        // Size the window and show it to the user
        viewer.getComponent().setPreferredSize(new Dimension(600, 600));
        ShowImages.showWindow(viewer.getComponent(), "Refined Scene", true);

        // Save the rendered cloud to disk in PLY format
        var copy = new DogArray<>(Point3dRgbI_F64::new);
        viewer.copyCloud(copy);
        try (var out = new FileOutputStream("saved_cloud.ply")) {
            PointCloudIO.save3D(PointCloudIO.Format.PLY, PointCloudReader.wrapF64RGB(copy.toList()), true, out);
        } catch (IOException e) {
            e.printStackTrace();
        }
    });
}
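The homogeneous-to-Euclidean conversion in the loop above can be factored into a small helper. This is a sketch under the same convention, where points at infinity (w == 0) are kept so the cloud stays index-aligned with the rgb array; it is not part of the BoofCV example.

// Hypothetical helper mirroring the conversion in the example above
static Point3D_F64 homogeneousToCloudPoint( Point4D_F64 world ) {
    if (world.w == 0.0) {
        // Point at infinity: keep it so indexes stay aligned with the color array, just push it far away
        return new Point3D_F64(0, 0, Double.MAX_VALUE);
    }
    return new Point3D_F64(world.x/world.w, world.y/world.w, world.z/world.w);
}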
Use of georegression.struct.point.Point4D_F64 in project BoofCV by lessthanoptimal.
The class TestCompatibleProjectiveHomography, method refineWorld:
@Test
void refineWorld() {
    createScene(20, false);

    // Find the homography
    DMatrixRMaj H = new DMatrixRMaj(4, 4);
    CompatibleProjectiveHomography alg = new CompatibleProjectiveHomography();

    List<Point3D_F64> world3a = new ArrayList<>();
    List<Point3D_F64> world3b = new ArrayList<>();
    for (int i = 0; i < worldPts.size(); i++) {
        Point4D_F64 a = worldPts.get(i);
        Point4D_F64 b = sceneB.get(i);
        world3a.add(new Point3D_F64(a.x/a.w, a.y/a.w, a.z/a.w));
        world3b.add(new Point3D_F64(b.x/b.w, b.y/b.w, b.z/b.w));
    }

    // Get the initial value of H
    assertTrue(alg.fitPoints(worldPts, sceneB, H));

    // Break the model
    H.data[3] += 0.1;
    H.data[7] -= 0.8;

    // Refine it
    // alg.lm.setVerbose(System.out, 0);
    alg.refineWorld(world3a, world3b, H);

    checkCamerasH(H, 1e-5);
}
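For context, the 4x4 matrix H being refined maps homogeneous points of one projective reconstruction into the other, up to scale. Below is a minimal sketch of that mapping using EJML; the transform helper is hypothetical and not part of CompatibleProjectiveHomography.

// Sketch: apply a 4x4 projective homography to a homogeneous point, X_b ~ H*X_a
static Point4D_F64 transform( DMatrixRMaj H, Point4D_F64 Xa ) {
    DMatrixRMaj xa = new DMatrixRMaj(4, 1, true, Xa.x, Xa.y, Xa.z, Xa.w);
    DMatrixRMaj xb = new DMatrixRMaj(4, 1);
    CommonOps_DDRM.mult(H, xa, xb);
    return new Point4D_F64(xb.get(0), xb.get(1), xb.get(2), xb.get(3));
}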
Use of georegression.struct.point.Point4D_F64 in project BoofCV by lessthanoptimal.
The class TestCompatibleProjectiveHomography, method createScene:
@Override
public void createScene(int numFeatures, boolean planar) {
    super.createScene(numFeatures, planar);

    // Triangulate 3D points in another projective frame
    TrifocalTensor T = MultiViewOps.createTrifocal(cameras.get(0), cameras.get(1), cameras.get(2), null);
    DMatrixRMaj P1 = new DMatrixRMaj(3, 4);
    DMatrixRMaj P2 = new DMatrixRMaj(3, 4);
    DMatrixRMaj P3 = new DMatrixRMaj(3, 4);
    MultiViewOps.trifocalToCameraMatrices(T, P2, P3);
    CommonOps_DDRM.setIdentity(P1);

    TriangulateNViewsProjective triangulator = FactoryMultiView.triangulateNViewProj(ConfigTriangulation.GEOMETRIC());

    List<Point2D_F64> observations = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
        observations.add(new Point2D_F64());
    }

    camerasB = new ArrayList<>();
    camerasB.add(P1);
    camerasB.add(P2);
    camerasB.add(P3);

    sceneB = new ArrayList<>();
    for (int i = 0; i < numFeatures; i++) {
        AssociatedTriple t = triples.get(i);
        observations.get(0).setTo(t.p1);
        observations.get(1).setTo(t.p2);
        observations.get(2).setTo(t.p3);

        Point4D_F64 location = new Point4D_F64();
        assertTrue(triangulator.triangulate(observations, camerasB, location));

        sceneB.add(location);
    }
}
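A quick way to sanity check the second reconstruction (not in the test) is to reproject each triangulated point with the camerasB matrices and compare against the pixel observations used to create it; the tolerance below is illustrative.

// Sketch: reproject triangulated points and compare to the observations used to create them
Point2D_F64 pixel = new Point2D_F64();
for (int i = 0; i < sceneB.size(); i++) {
    AssociatedTriple t = triples.get(i);
    GeometryMath_F64.mult(camerasB.get(0), sceneB.get(i), pixel);
    assertEquals(0, pixel.distance(t.p1), 1e-6);
    GeometryMath_F64.mult(camerasB.get(1), sceneB.get(i), pixel);
    assertEquals(0, pixel.distance(t.p2), 1e-6);
    GeometryMath_F64.mult(camerasB.get(2), sceneB.get(i), pixel);
    assertEquals(0, pixel.distance(t.p3), 1e-6);
}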