Example usage of georegression.struct.point.Point3D_F64 in the BoofCV project (by lessthanoptimal): the init() method of the EpipolarTestSimulation class.
/**
 * Sets up the simulated two-view scene: fixes the rigid-body motion between the
 * cameras, generates N random 3-D world points, and records their projections
 * in both views.
 *
 * @param N             number of random world points to generate
 * @param isFundamental if true, observations are converted into pixel
 *                      coordinates using the intrinsic matrix K (fundamental
 *                      matrix setting); otherwise they stay in normalized
 *                      image coordinates (essential matrix setting)
 */
public void init(int N, boolean isFundamental) {
    // Fixed rigid-body motion taking world (first camera) coordinates into
    // the second camera's frame.
    worldToCamera = new Se3_F64();
    worldToCamera.getR().set(ConvertRotation3D_F64.eulerToMatrix(EulerType.XYZ, 0.05, -0.03, 0.02, null));
    worldToCamera.getT().set(0.1, -0.1, 0.01);

    // Scatter points inside a box in front of the first camera (z in [2,3])
    worldPts = GeoTestingOps.randomPoints_F64(-1, 1, -1, 1, 2, 3, N, rand);

    pairs = new ArrayList<>();
    currentObs = new ArrayList<>();
    for (Point3D_F64 viewA : worldPts) {
        // Express the same point in the second camera's reference frame
        Point3D_F64 viewB = SePointOps_F64.transform(worldToCamera, viewA, null);

        // Pinhole projection into normalized image coordinates for each view
        AssociatedPair observation = new AssociatedPair();
        observation.p1.set(viewA.x / viewA.z, viewA.y / viewA.z);
        observation.p2.set(viewB.x / viewB.z, viewB.y / viewB.z);
        pairs.add(observation);

        if (isFundamental) {
            // Apply the intrinsics so the pair is expressed in pixels
            GeometryMath_F64.mult(K, observation.p1, observation.p1);
            GeometryMath_F64.mult(K, observation.p2, observation.p2);
        }
        currentObs.add(observation.p2);
    }
}
Example usage of georegression.struct.point.Point3D_F64 in the BoofCV project (by lessthanoptimal): the project() method of the TestQuadPoseEstimator class.
/**
 * Projects a 2-D point, interpreted as lying on the z = 0 plane of the world
 * frame, through the given transform and writes its pinhole projection into v.
 *
 * @param worldToCamera transform from the point's frame into the camera frame
 * @param p             input point on the z = 0 plane
 * @param v             (output) projected point in normalized image coordinates
 */
private void project(Se3_F64 worldToCamera, Point2D_F64 p, Point2D_F64 v) {
    // Lift the 2-D point onto the z = 0 plane, then move it into camera frame
    Point3D_F64 inCamera = new Point3D_F64();
    SePointOps_F64.transform(worldToCamera, new Point3D_F64(p.x, p.y, 0), inCamera);
    // Pinhole projection: divide by depth
    v.x = inCamera.x / inCamera.z;
    v.y = inCamera.y / inCamera.z;
}
Example usage of georegression.struct.point.Point3D_F64 in the BoofCV project (by lessthanoptimal): the checkImageLocation() method of the GenericFiducialDetectorChecks class.
/**
 * Verifies that the fiducial center reported by {@code getCenter} agrees with
 * the pixel obtained by reprojecting the fiducial's origin through the
 * estimated fiducial-to-camera pose, for every supported image type.
 */
@Test
public void checkImageLocation() {
    for (ImageType type : types) {
        ImageBase image = loadImage(type);
        FiducialDetector detector = createDetector(type);

        // It's not specified if the center should be undistorted or distorted.
        // Just make it easier by using undistorted
        LensDistortionNarrowFOV distortion = loadDistortion(false);
        detector.setLensDistortion(distortion, image.width, image.height);

        detector.detect(image);
        assertTrue(detector.totalFound() >= 1);
        assertTrue(detector.is3D());

        for (int which = 0; which < detector.totalFound(); which++) {
            Se3_F64 fidToCam = new Se3_F64();
            Point2D_F64 reported = new Point2D_F64();
            detector.getFiducialToCamera(which, fidToCam);
            detector.getCenter(which, reported);

            // Reproject the fiducial's origin and compare against the
            // detector-reported center pixel
            WorldToCameraToPixel worldToPixel = PerspectiveOps.createWorldToPixel(distortion, fidToCam);
            Point2D_F64 reprojected = new Point2D_F64();
            worldToPixel.transform(new Point3D_F64(0, 0, 0), reprojected);

            assertTrue(reprojected.distance(reported) <= pixelAndProjectedTol);
        }
    }
}
Example usage of georegression.struct.point.Point3D_F64 in the BoofCV project (by lessthanoptimal): the updateGUI() method of the VisualizeDepthVisualOdometryApp class.
/**
 * Refreshes the GUI with the latest visual-odometry results: renders tracks or
 * direct-method status onto the RGB image, colorizes the depth image, pushes
 * panel updates onto the Swing event thread, and draws the current camera
 * footprint in the 3-D view.
 *
 * NOTE(review): appears to be called from the processing thread, with only the
 * panel updates marshalled onto the EDT via invokeLater — confirm against callers.
 */
protected void updateGUI() {
    // A fault this frame means the pose estimate is invalid: count it and skip
    if (!noFault) {
        numFaults++;
        return;
    }
    // Snapshot the display toggles once per frame
    showTracks = statusPanel.isShowAll();
    showInliers = statusPanel.isShowInliers();
    // Lazily allocate the depth visualization buffer on first use
    if (renderedDepth == null) {
        renderedDepth = new BufferedImage(imageDepth.width, imageDepth.height, BufferedImage.TYPE_INT_RGB);
    }
    // Algorithm-specific overlay / statistics
    switch(algType) {
        case FEATURE:
            drawFeatures((AccessPointTracks3D) alg, bufferedRGB);
            break;
        case DIRECT:
            fractionInBounds = ((PyramidDirectColorDepth_to_DepthVisualOdometry) alg).getFractionInBounds();
            break;
    }
    // Copy the pose so the value captured by the Runnable below (and used for
    // the 3-D rendering) can't be mutated by the odometry thread afterwards
    final Se3_F64 leftToWorld = ((Se3_F64) alg.getCameraToWorld()).copy();
    // TODO magic value from kinect. Add to config file?
    VisualizeImageData.disparity(imageDepth, renderedDepth, 0, 10000, 0);
    // All Swing component updates must happen on the event dispatch thread
    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            guiLeft.setImage(bufferedRGB);
            guiDepth.setImage(renderedDepth);
            guiLeft.autoSetPreferredSize();
            guiDepth.autoSetPreferredSize();
            guiLeft.repaint();
            guiDepth.repaint();
            statusPanel.setCameraToWorld(leftToWorld);
            statusPanel.setNumFaults(numFaults);
            statusPanel.setFps(fps);
            // frameNumber has already been advanced past the displayed frame
            statusPanel.setFrameNumber(frameNumber - 1);
            statusPanel.setPaused(streamPaused);
            switch(algType) {
                case FEATURE:
                    {
                        featurePanel.setNumTracks(numTracks);
                        featurePanel.setNumInliers(numInliers);
                    }
                    break;
                case DIRECT:
                    {
                        directPanel.setInBounds(fractionInBounds);
                    }
                    break;
            }
        }
    });
    // Draw the camera's footprint in the 3-D view: a square of half-width r
    // in the camera plane, transformed into world coordinates
    double r = 0.15;
    Point3D_F64 p1 = new Point3D_F64(-r, -r, 0);
    Point3D_F64 p2 = new Point3D_F64(r, -r, 0);
    Point3D_F64 p3 = new Point3D_F64(r, r, 0);
    Point3D_F64 p4 = new Point3D_F64(-r, r, 0);
    SePointOps_F64.transform(leftToWorld, p1, p1);
    SePointOps_F64.transform(leftToWorld, p2, p2);
    SePointOps_F64.transform(leftToWorld, p3, p3);
    SePointOps_F64.transform(leftToWorld, p4, p4);
    guiCam3D.add(p1, p2, p3, p4);
    guiCam3D.repaint();
}
Example usage of georegression.struct.point.Point3D_F64 in the BoofCV project (by lessthanoptimal): the drawFeatures() method of the VisualizeDepthVisualOdometryApp class.
/**
 * Renders the current feature tracks on top of the RGB image and updates the
 * numTracks / numInliers statistics shown in the GUI.
 *
 * Drawing rules: new tracks are green; inliers are hollow blue circles (when
 * enabled); established tracks are colored red-to-yellow by depth relative to
 * the 80th-percentile track depth.
 *
 * Fixes vs. original: the Graphics2D context is now created only after the
 * empty-list guard and is always disposed (it was previously leaked on every
 * call), and dead commented-out debug code was removed.
 *
 * @param tracker source of track pixel locations and 3-D positions
 * @param image   image the tracks are drawn onto (modified)
 */
private void drawFeatures(AccessPointTracks3D tracker, BufferedImage image) {
    numInliers = 0;
    List<Point2D_F64> points = tracker.getAllTracks();
    // NOTE(review): numTracks is deliberately left at its previous value when
    // there are no tracks, matching the original behavior — confirm intended
    if (points.size() == 0)
        return;
    Graphics2D g2 = image.createGraphics();
    try {
        // 80th percentile of track depth, used to normalize the color scale
        double[] ranges = new double[points.size()];
        for (int i = 0; i < points.size(); i++) {
            ranges[i] = tracker.getTrackLocation(i).z;
        }
        Arrays.sort(ranges);
        double maxRange = ranges[(int) (ranges.length * 0.8)];
        for (int i = 0; i < points.size(); i++) {
            Point2D_F64 pixel = points.get(i);
            // Newly spawned tracks are drawn green and skip all other styling
            if (showTracks && tracker.isNew(i)) {
                VisualizeFeatures.drawPoint(g2, (int) pixel.x, (int) pixel.y, 3, Color.GREEN);
                continue;
            }
            if (tracker.isInlier(i)) {
                if (showInliers)
                    VisualizeFeatures.drawPoint(g2, (int) pixel.x, (int) pixel.y, 7, Color.BLUE, false);
                numInliers++;
            }
            if (!showTracks)
                continue;
            // Map depth into [0,1] and encode it in the green channel:
            // near tracks render red, far tracks render yellow
            Point3D_F64 p3 = tracker.getTrackLocation(i);
            double r = p3.z / maxRange;
            if (r < 0)
                r = 0;
            else if (r > 1)
                r = 1;
            int color = (255 << 16) | ((int) (255 * r) << 8);
            VisualizeFeatures.drawPoint(g2, (int) pixel.x, (int) pixel.y, 3, new Color(color));
        }
        numTracks = points.size();
    } finally {
        // Release the native drawing context (previously leaked)
        g2.dispose();
    }
}
End of aggregated Point3D_F64 usage examples.