Use of boofcv.struct.distort.Point2Transform3_F64 in the BoofCV project by lessthanoptimal.
From the class ExampleFisheyeToEquirectangular, method createMask.
/**
 * Creates a mask telling the algorithm which pixels are valid and which are not. The field-of-view (FOV) of the
 * camera is known, so that information is used to do a better job of filtering out invalid pixels than the
 * algorithm could do on its own.
 */
public static GrayU8 createMask( CameraUniversalOmni model,
								 LensDistortionWideFOV distortion, double fov ) {
	GrayU8 mask = new GrayU8(model.width, model.height);

	// Transform from distorted pixel coordinates to a 3D direction on the unit sphere
	Point2Transform3_F64 p2s = distortion.undistortPtoS_F64();

	// Reference direction along the camera's optical axis
	Point3D_F64 ref = new Point3D_F64(0, 0, 1);
	Point3D_F64 X = new Point3D_F64();

	p2s.compute(model.cx, model.cy, X);

	for (int y = 0; y < model.height; y++) {
		for (int x = 0; x < model.width; x++) {
			p2s.compute(x, y, X);

			// Pixels the distortion model cannot map produce NaN and are skipped
			if (Double.isNaN(X.x) || Double.isNaN(X.y) || Double.isNaN(X.z)) {
				continue;
			}

			// Angle between this pixel's ray and the optical axis
			double angle = UtilVector3D_F64.acute(ref, X);
			if (Double.isNaN(angle)) {
				continue;
			}

			// Mark the pixel as valid only if it lies inside the camera's known FOV
			if (angle <= fov / 2.0)
				mask.unsafe_set(x, y, 1);
		}
	}
	return mask;
}
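The per-pixel logic above makes the role of Point2Transform3_F64 easy to see: it turns a pixel into a 3D direction, and the FOV test is just an angle check against the optical axis. Below is a minimal sketch of that check in isolation, assuming the same imports as createMask; the helper name isValidPixel is hypothetical and not part of BoofCV.

/**
 * Sketch only: reports whether a single pixel lies inside the camera's known FOV.
 * Reuses exactly the calls shown in createMask above; the method itself is illustrative.
 */
public static boolean isValidPixel( LensDistortionWideFOV distortion, double x, double y, double fov ) {
	// pixel -> unit-sphere direction
	Point2Transform3_F64 p2s = distortion.undistortPtoS_F64();

	Point3D_F64 ref = new Point3D_F64(0, 0, 1); // optical axis
	Point3D_F64 X = new Point3D_F64();

	p2s.compute(x, y, X);
	if (Double.isNaN(X.x) || Double.isNaN(X.y) || Double.isNaN(X.z))
		return false;

	double angle = UtilVector3D_F64.acute(ref, X);
	return !Double.isNaN(angle) && angle <= fov / 2.0;
}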
Use of boofcv.struct.distort.Point2Transform3_F64 in the BoofCV project by lessthanoptimal.
From the class TestNarrowToWidePtoP_F64, method checkFOVBounds.
/**
 * Request points at the image border and check that the transform spans the expected horizontal and vertical FOV
 */
@Test
public void checkFOVBounds() {
	NarrowToWidePtoP_F64 alg = createAlg();

	Point2D_F64 foundA = new Point2D_F64();
	Point2D_F64 foundB = new Point2D_F64();

	Point3D_F64 vA = new Point3D_F64();
	Point3D_F64 vB = new Point3D_F64();

	// Compute the horizontal FOV
	alg.compute(0, 250, foundA);
	alg.compute(500, 250, foundB);

	Point2Transform3_F64 wideToSphere = createModelWide().undistortPtoS_F64();

	wideToSphere.compute(foundA.x, foundA.y, vA);
	wideToSphere.compute(foundB.x, foundB.y, vB);

	double found = UtilVector3D_F64.acute(new Vector3D_F64(vA), new Vector3D_F64(vB));
	double expected = 2.0 * Math.atan(250.0 / 400.0);

	assertEquals(expected, found, 0.01);

	// Compute the vertical FOV
	alg.compute(250, 0, foundA);
	alg.compute(250, 500, foundB);

	wideToSphere.compute(foundA.x, foundA.y, vA);
	wideToSphere.compute(foundB.x, foundB.y, vB);

	found = UtilVector3D_F64.acute(new Vector3D_F64(vA), new Vector3D_F64(vB));
	expected = 2.0 * Math.atan(250.0 / 400.0);

	assertEquals(expected, found, 0.001);
}
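The expected angle in both assertions is the standard pinhole FOV formula. Below is a short worked check, assuming createAlg() configures a 500 x 500 narrow view with a focal length of 400 pixels; those values are not shown in the excerpt, only implied by the constants 250 and 400.

// FOV of a pinhole view: 2 * atan((half image width) / focal length)
double halfWidth = 500.0 / 2.0;                          // 250 pixels from center to border
double focalLength = 400.0;                              // assumed narrow-camera focal length
double expectedFOV = 2.0 * Math.atan(halfWidth / focalLength);
System.out.println(Math.toDegrees(expectedFOV));         // prints roughly 64.0 degrees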
Use of boofcv.struct.distort.Point2Transform3_F64 in the BoofCV project by lessthanoptimal.
From the class GeneralLensDistortionWideFOVChecks, method pixel_unit_pixel_F64.
@Test
public void pixel_unit_pixel_F64() {
	LensDistortionWideFOV alg = create();

	Point2Transform3_F64 undistort = alg.undistortPtoS_F64();
	Point3Transform2_F64 distort = alg.distortStoP_F64();

	Point3D_F64 middle = new Point3D_F64();
	Point2D_F64 found = new Point2D_F64();

	undistort.compute(240, 260, middle);
	distort.compute(middle.x, middle.y, middle.z, found);

	assertEquals(240, found.x, pixel_tol_F64);
	assertEquals(260, found.y, pixel_tol_F64);
}
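The test above verifies the pixel -> unit sphere -> pixel round trip for a single pixel. A minimal sketch of the same idea swept over a grid of pixels follows; it is not part of GeneralLensDistortionWideFOVChecks, and the width, height, and tol values are assumed for illustration rather than taken from the camera model returned by create().

// Sketch only: apply the round trip to a grid of pixels.
// 'alg' is the model from create() above; width, height, and tol are assumed values.
int width = 640, height = 480;
double tol = pixel_tol_F64;

Point2Transform3_F64 undistort = alg.undistortPtoS_F64();
Point3Transform2_F64 distort = alg.distortStoP_F64();

Point3D_F64 sphere = new Point3D_F64();
Point2D_F64 pixel = new Point2D_F64();

for (int y = 0; y < height; y += 20) {
	for (int x = 0; x < width; x += 20) {
		undistort.compute(x, y, sphere);                      // pixel -> direction on unit sphere
		distort.compute(sphere.x, sphere.y, sphere.z, pixel); // direction -> pixel
		assertEquals(x, pixel.x, tol);
		assertEquals(y, pixel.y, tol);
	}
}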