Example 1 with PointCloudViewer

Use of boofcv.gui.d3.PointCloudViewer in the BoofCV project by lessthanoptimal.

From the class ExampleStereoDisparity3D, method main:

public static void main(String[] args) {
    // ------------- Compute Stereo Correspondence
    // Load camera images and stereo camera parameters
    String calibDir = UtilIO.pathExample("calibration/stereo/Bumblebee2_Chess/");
    String imageDir = UtilIO.pathExample("stereo/");
    StereoParameters param = CalibrationIO.load(new File(calibDir, "stereo.yaml"));
    // load and convert images into a BoofCV format
    BufferedImage origLeft = UtilImageIO.loadImage(imageDir, "chair01_left.jpg");
    BufferedImage origRight = UtilImageIO.loadImage(imageDir, "chair01_right.jpg");
    GrayU8 distLeft = ConvertBufferedImage.convertFrom(origLeft, (GrayU8) null);
    GrayU8 distRight = ConvertBufferedImage.convertFrom(origRight, (GrayU8) null);
    // re-scale input images
    GrayU8 scaledLeft = new GrayU8((int) (distLeft.width * scale), (int) (distLeft.height * scale));
    GrayU8 scaledRight = new GrayU8((int) (distRight.width * scale), (int) (distRight.height * scale));
    new FDistort(distLeft, scaledLeft).scaleExt().apply();
    new FDistort(distRight, scaledRight).scaleExt().apply();
    // Don't forget to adjust camera parameters for the change in scale!
    PerspectiveOps.scaleIntrinsic(param.left, scale);
    PerspectiveOps.scaleIntrinsic(param.right, scale);
    // rectify images and compute disparity
    GrayU8 rectLeft = new GrayU8(scaledLeft.width, scaledLeft.height);
    GrayU8 rectRight = new GrayU8(scaledRight.width, scaledRight.height);
    RectifyCalibrated rectAlg = ExampleStereoDisparity.rectify(scaledLeft, scaledRight, param, rectLeft, rectRight);
    // GrayU8 disparity = ExampleStereoDisparity.denseDisparity(rectLeft, rectRight, 3,minDisparity, maxDisparity);
    GrayF32 disparity = ExampleStereoDisparity.denseDisparitySubpixel(rectLeft, rectRight, 3, minDisparity, maxDisparity);
    // ------------- Convert disparity image into a 3D point cloud
    // The point cloud will be in the left camera's reference frame
    DMatrixRMaj rectK = rectAlg.getCalibrationMatrix();
    DMatrixRMaj rectR = rectAlg.getRectifiedRotation();
    // used to display the point cloud
    PointCloudViewer viewer = new PointCloudViewer(rectK, 10);
    viewer.setPreferredSize(new Dimension(rectLeft.width, rectLeft.height));
    // extract intrinsic parameters from rectified camera
    double baseline = param.getBaseline();
    double fx = rectK.get(0, 0);
    double fy = rectK.get(1, 1);
    double cx = rectK.get(0, 2);
    double cy = rectK.get(1, 2);
    // Iterate through each pixel in disparity image and compute its 3D coordinate
    Point3D_F64 pointRect = new Point3D_F64();
    Point3D_F64 pointLeft = new Point3D_F64();
    for (int y = 0; y < disparity.height; y++) {
        for (int x = 0; x < disparity.width; x++) {
            double d = disparity.unsafe_get(x, y) + minDisparity;
            // skip over pixels where no correspondence was found
            if (d >= rangeDisparity)
                continue;
            // Coordinate in rectified camera frame
            pointRect.z = baseline * fx / d;
            pointRect.x = pointRect.z * (x - cx) / fx;
            pointRect.y = pointRect.z * (y - cy) / fy;
            // rotate into the original left camera frame
            GeometryMath_F64.multTran(rectR, pointRect, pointLeft);
            // add the pixel to the viewer for display purposes and set its gray-scale value
            int v = rectLeft.unsafe_get(x, y);
            viewer.addPoint(pointLeft.x, pointLeft.y, pointLeft.z, v << 16 | v << 8 | v);
        }
    }
    // display the results. Click and drag to change the point cloud viewpoint
    BufferedImage visualized = VisualizeImageData.disparity(disparity, null, minDisparity, maxDisparity, 0);
    ShowImages.showWindow(visualized, "Disparity");
    ShowImages.showWindow(viewer, "Point Cloud");
}
Also used : Point3D_F64(georegression.struct.point.Point3D_F64) RectifyCalibrated(boofcv.alg.geo.rectify.RectifyCalibrated) DMatrixRMaj(org.ejml.data.DMatrixRMaj) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage) GrayF32(boofcv.struct.image.GrayF32) FDistort(boofcv.abst.distort.FDistort) PointCloudViewer(boofcv.gui.d3.PointCloudViewer) GrayU8(boofcv.struct.image.GrayU8) StereoParameters(boofcv.struct.calib.StereoParameters) File(java.io.File)
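
The geometric core of the example above is the back-projection from a disparity value to a 3D point in the rectified camera frame: depth is z = baseline * fx / d, and x and y then follow from the pinhole model. Below is a minimal sketch of that step pulled out into a standalone helper; the method name and signature are illustrative only and not part of BoofCV's API.

// Back-project a rectified pixel (px, py) with disparity d into the rectified
// camera frame, using the intrinsics extracted above: focal lengths fx, fy,
// principal point (cx, cy), and the stereo baseline. Illustrative helper only.
public static Point3D_F64 disparityTo3D( double px, double py, double d,
                                          double baseline, double fx, double fy,
                                          double cx, double cy ) {
    Point3D_F64 p = new Point3D_F64();
    p.z = baseline * fx / d;        // depth is inversely proportional to disparity
    p.x = p.z * (px - cx) / fx;     // back-project the pixel's x coordinate
    p.y = p.z * (py - cy) / fy;     // back-project the pixel's y coordinate
    return p;
}

As in the example, the result is still in the rectified frame; multiplying by the transpose of the rectification rotation (GeometryMath_F64.multTran) brings it back into the original left camera frame.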

Example 2 with PointCloudViewer

Use of boofcv.gui.d3.PointCloudViewer in the BoofCV project by lessthanoptimal.

From the class ExampleMultiviewSceneReconstruction, method process:

/**
 * Process the images and reconstruct the scene as a point cloud using matching interest points between
 * images.
 */
public void process(CameraPinholeRadial intrinsic, List<BufferedImage> colorImages) {
    pixelToNorm = LensDistortionOps.narrow(intrinsic).undistort_F64(true, false);
    estimateEssential = FactoryMultiViewRobust.essentialRansac(new ConfigEssential(intrinsic), new ConfigRansac(4000, inlierTol));
    estimatePnP = FactoryMultiViewRobust.pnpRansac(new ConfigPnP(intrinsic), new ConfigRansac(4000, inlierTol));
    // find features in each image
    detectImageFeatures(colorImages);
    // see which images are the most similar to each other
    double[][] matrix = computeConnections();
    printConnectionMatrix(matrix);
    // find the image which is connected to the most other images.  Use that as the origin of the arbitrary
    // coordinate system
    int bestImage = selectMostConnectFrame(colorImages, matrix);
    // Use two images to initialize the scene reconstruction
    initializeReconstruction(colorImages, matrix, bestImage);
    // Process rest of the images and compute 3D coordinates
    List<Integer> seed = new ArrayList<>();
    seed.add(bestImage);
    performReconstruction(seed, -1, matrix);
    // Bundle adjustment would normally be done at this point, but has been omitted since the current
    // implementation is too slow for a large number of points
    // display a point cloud from the 3D features
    PointCloudViewer gui = new PointCloudViewer(intrinsic, 1);
    for (Feature3D t : featuresAll) {
        gui.addPoint(t.worldPt.x, t.worldPt.y, t.worldPt.z, t.color);
    }
    gui.setPreferredSize(new Dimension(500, 500));
    ShowImages.showWindow(gui, "Points");
}
Also used : ArrayList(java.util.ArrayList) PointCloudViewer(boofcv.gui.d3.PointCloudViewer) DetectDescribePoint(boofcv.abst.feature.detdesc.DetectDescribePoint)

Example 3 with PointCloudViewer

Use of boofcv.gui.d3.PointCloudViewer in the BoofCV project by lessthanoptimal.

From the class DisplayKinectPointCloudApp, method main:

public static void main(String[] args) throws IOException {
    String baseDir = UtilIO.pathExample("kinect/basket");
    String nameRgb = "basket_rgb.png";
    String nameDepth = "basket_depth.png";
    String nameCalib = "intrinsic.yaml";
    CameraPinholeRadial param = CalibrationIO.load(new File(baseDir, nameCalib));
    GrayU16 depth = UtilImageIO.loadImage(new File(baseDir, nameDepth), false, ImageType.single(GrayU16.class));
    Planar<GrayU8> rgb = UtilImageIO.loadImage(new File(baseDir, nameRgb), true, ImageType.pl(3, GrayU8.class));
    FastQueue<Point3D_F64> cloud = new FastQueue<Point3D_F64>(Point3D_F64.class, true);
    FastQueueArray_I32 cloudColor = new FastQueueArray_I32(3);
    VisualDepthOps.depthTo3D(param, rgb, depth, cloud, cloudColor);
    DMatrixRMaj K = PerspectiveOps.calibrationMatrix(param, (DMatrixRMaj) null);
    PointCloudViewer viewer = new PointCloudViewer(K, 10.0);
    viewer.setPreferredSize(new Dimension(rgb.width, rgb.height));
    for (int i = 0; i < cloud.size; i++) {
        Point3D_F64 p = cloud.get(i);
        int[] color = cloudColor.get(i);
        int c = (color[0] << 16) | (color[1] << 8) | color[2];
        viewer.addPoint(p.x, p.y, p.z, c);
    }
    ShowImages.showWindow(viewer, "Point Cloud", true);
    System.out.println("Total points = " + cloud.size);
// BufferedImage depthOut = VisualizeImageData.disparity(depth, null, 0, UtilOpenKinect.FREENECT_DEPTH_MM_MAX_VALUE, 0);
// ShowImages.showWindow(depthOut,"Depth Image", true);
}
Also used : Point3D_F64(georegression.struct.point.Point3D_F64) GrayU16(boofcv.struct.image.GrayU16) FastQueue(org.ddogleg.struct.FastQueue) DMatrixRMaj(org.ejml.data.DMatrixRMaj) FastQueueArray_I32(boofcv.struct.FastQueueArray_I32) CameraPinholeRadial(boofcv.struct.calib.CameraPinholeRadial) PointCloudViewer(boofcv.gui.d3.PointCloudViewer) GrayU8(boofcv.struct.image.GrayU8) File(java.io.File)
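
Both Kinect-based examples pack the per-point color into a single int in the usual 0xRRGGBB layout before passing it to addPoint. A small hedged helper, with an illustrative name, makes the packing explicit:

// Pack 8-bit red, green, and blue values into one 0xRRGGBB int, matching the
// color argument addPoint() receives above. Helper name is illustrative only.
public static int packRgb( int r, int g, int b ) {
    return (r << 16) | (g << 8) | b;
}

For a gray-scale value v, as in the stereo example, the same packing simply repeats v in all three channels: (v << 16) | (v << 8) | v.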

Example 4 with PointCloudViewer

Use of boofcv.gui.d3.PointCloudViewer in the BoofCV project by lessthanoptimal.

From the class ExamplePoseOfCalibrationTarget, method main:

public static void main(String[] args) {
    // Load camera calibration
    CameraPinholeRadial intrinsic = CalibrationIO.load(UtilIO.pathExample("calibration/mono/Sony_DSC-HX5V_Chess/intrinsic.yaml"));
    LensDistortionNarrowFOV lensDistortion = new LensDistortionRadialTangential(intrinsic);
    // load the video file
    String fileName = UtilIO.pathExample("tracking/chessboard_SonyDSC_01.mjpeg");
    SimpleImageSequence<GrayF32> video = DefaultMediaManager.INSTANCE.openVideo(fileName, ImageType.single(GrayF32.class));
    // DefaultMediaManager.INSTANCE.openCamera(null, 640, 480, ImageType.single(GrayF32.class));
    // Let's use the FiducialDetector interface since it is much easier than coding up
    // the entire thing ourselves.  Look at FiducialDetector's code if you want to understand how it works.
    CalibrationFiducialDetector<GrayF32> detector = FactoryFiducial.calibChessboard(new ConfigChessboard(4, 5, 0.03), GrayF32.class);
    detector.setLensDistortion(lensDistortion, intrinsic.width, intrinsic.height);
    // Get the 2D coordinate of calibration points for visualization purposes
    List<Point2D_F64> calibPts = detector.getCalibrationPoints();
    // Set up visualization
    PointCloudViewer viewer = new PointCloudViewer(intrinsic, 0.01);
    // make the view more interesting: look at the scene from the side
    DMatrixRMaj rotY = ConvertRotation3D_F64.rotY(-Math.PI / 2.0, null);
    viewer.setWorldToCamera(new Se3_F64(rotY, new Vector3D_F64(0.75, 0, 1.25)));
    ImagePanel imagePanel = new ImagePanel(intrinsic.width, intrinsic.height);
    viewer.setPreferredSize(new Dimension(intrinsic.width, intrinsic.height));
    PanelGridPanel gui = new PanelGridPanel(1, imagePanel, viewer);
    gui.setMaximumSize(gui.getPreferredSize());
    ShowImages.showWindow(gui, "Calibration Target Pose", true);
    // Allows the user to click on the image and pause
    MousePauseHelper pauseHelper = new MousePauseHelper(gui);
    // saves the target's center location
    List<Point3D_F64> path = new ArrayList<>();
    // Process each frame in the video sequence
    Se3_F64 targetToCamera = new Se3_F64();
    while (video.hasNext()) {
        // detect calibration points
        detector.detect(video.next());
        if (detector.totalFound() == 1) {
            detector.getFiducialToCamera(0, targetToCamera);
            // Visualization.  Show a path with green points and the calibration points in black
            viewer.reset();
            Point3D_F64 center = new Point3D_F64();
            SePointOps_F64.transform(targetToCamera, center, center);
            path.add(center);
            for (Point3D_F64 p : path) {
                viewer.addPoint(p.x, p.y, p.z, 0x00FF00);
            }
            for (int j = 0; j < calibPts.size(); j++) {
                Point2D_F64 p = calibPts.get(j);
                Point3D_F64 p3 = new Point3D_F64(p.x, p.y, 0);
                SePointOps_F64.transform(targetToCamera, p3, p3);
                viewer.addPoint(p3.x, p3.y, p3.z, 0);
            }
        }
        imagePanel.setImage((BufferedImage) video.getGuiImage());
        viewer.repaint();
        imagePanel.repaint();
        BoofMiscOps.pause(30);
        while (pauseHelper.isPaused()) {
            BoofMiscOps.pause(30);
        }
    }
}
Also used : Point3D_F64(georegression.struct.point.Point3D_F64) LensDistortionNarrowFOV(boofcv.alg.distort.LensDistortionNarrowFOV) DMatrixRMaj(org.ejml.data.DMatrixRMaj) ArrayList(java.util.ArrayList) LensDistortionRadialTangential(boofcv.alg.distort.radtan.LensDistortionRadialTangential) Vector3D_F64(georegression.struct.point.Vector3D_F64) PanelGridPanel(boofcv.gui.PanelGridPanel) GrayF32(boofcv.struct.image.GrayF32) CameraPinholeRadial(boofcv.struct.calib.CameraPinholeRadial) Point2D_F64(georegression.struct.point.Point2D_F64) PointCloudViewer(boofcv.gui.d3.PointCloudViewer) ConfigChessboard(boofcv.abst.fiducial.calib.ConfigChessboard) Se3_F64(georegression.struct.se.Se3_F64) ImagePanel(boofcv.gui.image.ImagePanel) MousePauseHelper(boofcv.gui.MousePauseHelper)
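
In the detection loop above, each 2D calibration point on the target's plane is lifted to 3D by setting z = 0 and then mapped into the camera frame with the estimated targetToCamera pose. Isolated as a hedged sketch (the helper name is illustrative and not part of BoofCV):

// Lift a 2D point on the calibration target's plane into 3D (the target's local
// frame has z = 0) and transform it into the camera frame using the pose
// estimated by the fiducial detector.
public static Point3D_F64 targetPointToCamera( Point2D_F64 onTarget, Se3_F64 targetToCamera ) {
    Point3D_F64 p = new Point3D_F64(onTarget.x, onTarget.y, 0);
    SePointOps_F64.transform(targetToCamera, p, p);
    return p;
}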

Example 5 with PointCloudViewer

Use of boofcv.gui.d3.PointCloudViewer in the BoofCV project by lessthanoptimal.

From the class ExampleDepthPointCloud, method main:

public static void main(String[] args) throws IOException {
    String nameRgb = UtilIO.pathExample("kinect/basket/basket_rgb.png");
    String nameDepth = UtilIO.pathExample("kinect/basket/basket_depth.png");
    String nameCalib = UtilIO.pathExample("kinect/basket/visualdepth.yaml");
    VisualDepthParameters param = CalibrationIO.load(nameCalib);
    BufferedImage buffered = UtilImageIO.loadImage(nameRgb);
    Planar<GrayU8> rgb = ConvertBufferedImage.convertFromPlanar(buffered, null, true, GrayU8.class);
    GrayU16 depth = ConvertBufferedImage.convertFrom(UtilImageIO.loadImage(nameDepth), null, GrayU16.class);
    FastQueue<Point3D_F64> cloud = new FastQueue<>(Point3D_F64.class, true);
    FastQueueArray_I32 cloudColor = new FastQueueArray_I32(3);
    VisualDepthOps.depthTo3D(param.visualParam, rgb, depth, cloud, cloudColor);
    DMatrixRMaj K = PerspectiveOps.calibrationMatrix(param.visualParam, (DMatrixRMaj) null);
    PointCloudViewer viewer = new PointCloudViewer(K, 15);
    viewer.setPreferredSize(new Dimension(rgb.width, rgb.height));
    for (int i = 0; i < cloud.size; i++) {
        Point3D_F64 p = cloud.get(i);
        int[] color = cloudColor.get(i);
        int c = (color[0] << 16) | (color[1] << 8) | color[2];
        viewer.addPoint(p.x, p.y, p.z, c);
    }
    // ---------- Display depth image
    // use the actual max value in the image to maximize its appearance
    int maxValue = ImageStatistics.max(depth);
    BufferedImage depthOut = VisualizeImageData.disparity(depth, null, 0, maxValue, 0);
    ShowImages.showWindow(depthOut, "Depth Image");
    // ---------- Display colorized point cloud
    ShowImages.showWindow(viewer, "Point Cloud");
    System.out.println("Total points = " + cloud.size);
}
Also used : VisualDepthParameters(boofcv.struct.calib.VisualDepthParameters) Point3D_F64(georegression.struct.point.Point3D_F64) GrayU16(boofcv.struct.image.GrayU16) FastQueue(org.ddogleg.struct.FastQueue) DMatrixRMaj(org.ejml.data.DMatrixRMaj) FastQueueArray_I32(boofcv.struct.FastQueueArray_I32) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage) PointCloudViewer(boofcv.gui.d3.PointCloudViewer) GrayU8(boofcv.struct.image.GrayU8)
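
Across all five examples the viewer is driven the same way: construct it from the camera intrinsics (a calibration matrix or a CameraPinholeRadial) plus a scale parameter, size it to the source image, add one colored point per 3D sample, and hand it to ShowImages. A condensed sketch of that shared pattern, assuming cloud, cloudColor, K, width, and height were produced by one of the pipelines above:

// Shared display pattern used by every example on this page. The inputs are
// assumed to come from one of the pipelines shown earlier; the second
// constructor argument is the same kind of scale value the examples pass
// (0.01 to 15 above).
PointCloudViewer viewer = new PointCloudViewer(K, 10.0);
viewer.setPreferredSize(new Dimension(width, height));
for (int i = 0; i < cloud.size; i++) {
    Point3D_F64 p = cloud.get(i);
    int[] rgb = cloudColor.get(i);
    viewer.addPoint(p.x, p.y, p.z, (rgb[0] << 16) | (rgb[1] << 8) | rgb[2]);
}
// click and drag inside the window to move the virtual camera
ShowImages.showWindow(viewer, "Point Cloud", true);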

Aggregations

PointCloudViewer (boofcv.gui.d3.PointCloudViewer): 5
Point3D_F64 (georegression.struct.point.Point3D_F64): 4
DMatrixRMaj (org.ejml.data.DMatrixRMaj): 4
GrayU8 (boofcv.struct.image.GrayU8): 3
ConvertBufferedImage (boofcv.io.image.ConvertBufferedImage): 2
FastQueueArray_I32 (boofcv.struct.FastQueueArray_I32): 2
CameraPinholeRadial (boofcv.struct.calib.CameraPinholeRadial): 2
GrayF32 (boofcv.struct.image.GrayF32): 2
GrayU16 (boofcv.struct.image.GrayU16): 2
BufferedImage (java.awt.image.BufferedImage): 2
File (java.io.File): 2
ArrayList (java.util.ArrayList): 2
FastQueue (org.ddogleg.struct.FastQueue): 2
FDistort (boofcv.abst.distort.FDistort): 1
DetectDescribePoint (boofcv.abst.feature.detdesc.DetectDescribePoint): 1
ConfigChessboard (boofcv.abst.fiducial.calib.ConfigChessboard): 1
LensDistortionNarrowFOV (boofcv.alg.distort.LensDistortionNarrowFOV): 1
LensDistortionRadialTangential (boofcv.alg.distort.radtan.LensDistortionRadialTangential): 1
RectifyCalibrated (boofcv.alg.geo.rectify.RectifyCalibrated): 1
MousePauseHelper (boofcv.gui.MousePauseHelper): 1