Search in sources :

Example 6 with GrayU16

use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal.

The following is taken from the class ExampleVisualOdometryDepth, method main.

public static void main(String[] args) throws IOException {
    MediaManager mediaManager = DefaultMediaManager.INSTANCE;
    String directory = UtilIO.pathExample("kinect/straight");

    // Load the camera description that accompanies the video sequence
    VisualDepthParameters param = CalibrationIO.load(mediaManager.openFile(directory + "visualdepth.yaml"));

    // Configure how image features will be tracked (pyramidal KLT)
    PkltConfig kltConfig = new PkltConfig();
    kltConfig.pyramidScaling = new int[] { 1, 2, 4, 8 };
    kltConfig.templateRadius = 3;
    PointTrackerTwoPass<GrayU8> tracker =
            FactoryPointTrackerTwoPass.klt(kltConfig, new ConfigGeneralDetector(600, 3, 1), GrayU8.class, GrayS16.class);

    DepthSparse3D<GrayU16> sparseDepth = new DepthSparse3D.I<>(1e-3);

    // Declare the depth-based visual odometry algorithm
    DepthVisualOdometry<GrayU8, GrayU16> visualOdometry =
            FactoryVisualOdometry.depthDepthPnP(1.5, 120, 2, 200, 50, true,
                    sparseDepth, tracker, GrayU8.class, GrayU16.class);

    // Pass in intrinsic/extrinsic calibration.  This can be changed in the future.
    visualOdometry.setCalibration(param.visualParam, new DoNothing2Transform2_F32());

    // Open the RGB and depth streams, then process the sequence and
    // print the estimated location plus the inlier percentage
    SimpleImageSequence<GrayU8> rgbSequence =
            mediaManager.openVideo(directory + "rgb.mjpeg", ImageType.single(GrayU8.class));
    SimpleImageSequence<GrayU16> depthSequence =
            mediaManager.openVideo(directory + "depth.mpng", ImageType.single(GrayU16.class));

    while (rgbSequence.hasNext()) {
        GrayU8 rgbFrame = rgbSequence.next();
        GrayU16 depthFrame = depthSequence.next();

        if (!visualOdometry.process(rgbFrame, depthFrame)) {
            throw new RuntimeException("VO Failed!");
        }

        Se3_F64 cameraToWorld = visualOdometry.getCameraToWorld();
        Vector3D_F64 position = cameraToWorld.getT();
        System.out.printf("Location %8.2f %8.2f %8.2f      inliers %s\n",
                position.x, position.y, position.z, inlierPercent(visualOdometry));
    }
}
Also used : VisualDepthParameters(boofcv.struct.calib.VisualDepthParameters) GrayU16(boofcv.struct.image.GrayU16) PkltConfig(boofcv.alg.tracker.klt.PkltConfig) ConfigGeneralDetector(boofcv.abst.feature.detect.interest.ConfigGeneralDetector) DoNothing2Transform2_F32(boofcv.struct.distort.DoNothing2Transform2_F32) Vector3D_F64(georegression.struct.point.Vector3D_F64) MediaManager(boofcv.io.MediaManager) DefaultMediaManager(boofcv.io.wrapper.DefaultMediaManager) GrayU8(boofcv.struct.image.GrayU8) Se3_F64(georegression.struct.se.Se3_F64)

Example 7 with GrayU16

use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal.

The following is taken from the class CaptureCalibrationImagesApp, method process.

/**
 * Opens the Kinect, displays the live RGB feed, and periodically pauses so the
 * user can accept or discard a captured RGB+depth image pair for calibration.
 * Accepted pairs are written into the "log" directory.  Runs until the
 * application is externally terminated.
 *
 * @throws IOException if saving an image pair fails
 */
public void process() throws IOException {
    // make sure there is a "log" directory to save into
    new File("log").mkdir();

    int w = UtilOpenKinect.getWidth(resolution);
    int h = UtilOpenKinect.getHeight(resolution);

    buffRgb = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
    savedRgb = new Planar<>(GrayU8.class, w, h, 3);
    savedDepth = new GrayU16(w, h);

    gui = ShowImages.showWindow(buffRgb, "Kinect RGB");
    gui.addKeyListener(this);
    gui.requestFocus();

    StreamOpenKinectRgbDepth stream = new StreamOpenKinectRgbDepth();
    Context kinect = Freenect.createContext();
    // BUG FIX: numDevices() returns a device count, which is never negative.
    // The original check "< 0" could never fire; "no kinect" means a count of 0.
    if (kinect.numDevices() <= 0)
        throw new RuntimeException("No kinect found!");
    Device device = kinect.openDevice(0);
    stream.start(device, resolution, this);

    long targetTime = System.currentTimeMillis() + period;
    updateDisplay = true;
    while (true) {
        BoofMiscOps.pause(100);
        if (targetTime < System.currentTimeMillis()) {
            // time to capture: freeze the display and wait for the stream
            // callback to save a frame and for the user to accept/reject it
            userChoice = -1;
            savedImages = false;
            updateDisplay = false;
            while (true) {
                if (savedImages && userChoice != -1) {
                    if (userChoice == 1) {
                        UtilImageIO.savePPM(savedRgb, String.format(directory + "rgb%07d.ppm", frameNumber), buffer);
                        UtilOpenKinect.saveDepth(savedDepth, String.format(directory + "depth%07d.depth", frameNumber), buffer);
                        frameNumber++;
                        text = "Image Saved!";
                    } else {
                        text = "Image Discarded!";
                    }
                    // show feedback text briefly, then resume the live display
                    timeText = System.currentTimeMillis() + 500;
                    updateDisplay = true;
                    targetTime = System.currentTimeMillis() + period;
                    break;
                }
                BoofMiscOps.pause(50);
            }
        }
    }
}
Also used : Context(org.openkinect.freenect.Context) StreamOpenKinectRgbDepth(boofcv.openkinect.StreamOpenKinectRgbDepth) GrayU16(boofcv.struct.image.GrayU16) Device(org.openkinect.freenect.Device) GrayU8(boofcv.struct.image.GrayU8) File(java.io.File) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage)

Example 8 with GrayU16

use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal.

The following is taken from the class CreateRgbPointCloudFileApp, method main.

/**
 * Loads a saved Kinect RGB image, depth image, and intrinsic calibration from
 * the "log/" directory, converts them into a colorized 3D point cloud, and
 * writes the cloud as a text file ("kinect_pointcloud.txt"), one
 * "X Y Z R G B" point per line with a comment header.
 *
 * @throws IOException if reading the inputs or writing the output fails
 */
public static void main(String[] args) throws IOException {
    String baseDir = "log/";
    String nameRgb = baseDir + "rgb0000000.ppm";
    String nameDepth = baseDir + "depth0000000.depth";
    String nameCalib = baseDir + "intrinsic.yaml";

    CameraPinholeRadial param = CalibrationIO.load(nameCalib);

    // Images start at 1x1; the loaders resize them to the file's dimensions
    GrayU16 depth = new GrayU16(1, 1);
    Planar<GrayU8> rgb = new Planar<>(GrayU8.class, 1, 1, 3);
    UtilImageIO.loadPPM_U8(nameRgb, rgb, null);
    UtilOpenKinect.parseDepth(nameDepth, depth, null);

    // Convert the depth image into a 3D point cloud with per-point RGB color
    FastQueue<Point3D_F64> cloud = new FastQueue<Point3D_F64>(Point3D_F64.class, true);
    FastQueueArray_I32 cloudColor = new FastQueueArray_I32(3);
    VisualDepthOps.depthTo3D(param, rgb, depth, cloud, cloudColor);

    // FIX: try-with-resources guarantees the stream is closed even when a
    // write throws; the original leaked the file handle on failure
    try (DataOutputStream file = new DataOutputStream(new FileOutputStream("kinect_pointcloud.txt"))) {
        file.write("# Kinect RGB Point cloud. Units: millimeters. Format: X Y Z R G B\n".getBytes());
        for (int i = 0; i < cloud.size; i++) {
            Point3D_F64 p = cloud.get(i);
            int[] color = cloudColor.get(i);
            String line = String.format("%.10f %.10f %.10f %d %d %d\n",
                    p.x, p.y, p.z, color[0], color[1], color[2]);
            file.write(line.getBytes());
        }
    }
    System.out.println("Total points = " + cloud.size);
}
Also used : Point3D_F64(georegression.struct.point.Point3D_F64) GrayU16(boofcv.struct.image.GrayU16) FastQueue(org.ddogleg.struct.FastQueue) DataOutputStream(java.io.DataOutputStream) FastQueueArray_I32(boofcv.struct.FastQueueArray_I32) CameraPinholeRadial(boofcv.struct.calib.CameraPinholeRadial) FileOutputStream(java.io.FileOutputStream) Planar(boofcv.struct.image.Planar) GrayU8(boofcv.struct.image.GrayU8)

Example 9 with GrayU16

use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal.

The following is taken from the class DisplayKinectPointCloudApp, method main.

public static void main(String[] args) throws IOException {
    String baseDir = UtilIO.pathExample("kinect/basket");
    String nameRgb = "basket_rgb.png";
    String nameDepth = "basket_depth.png";
    String nameCalib = "intrinsic.yaml";

    // Load the intrinsic calibration plus the RGB and depth images
    CameraPinholeRadial intrinsic = CalibrationIO.load(new File(baseDir, nameCalib));
    GrayU16 depthImage = UtilImageIO.loadImage(new File(baseDir, nameDepth), false, ImageType.single(GrayU16.class));
    Planar<GrayU8> rgbImage = UtilImageIO.loadImage(new File(baseDir, nameRgb), true, ImageType.pl(3, GrayU8.class));

    // Convert depth + color into a colorized 3D point cloud
    FastQueue<Point3D_F64> pointCloud = new FastQueue<Point3D_F64>(Point3D_F64.class, true);
    FastQueueArray_I32 pointColors = new FastQueueArray_I32(3);
    VisualDepthOps.depthTo3D(intrinsic, rgbImage, depthImage, pointCloud, pointColors);

    // Set up the 3D viewer using the camera's calibration matrix
    DMatrixRMaj K = PerspectiveOps.calibrationMatrix(intrinsic, (DMatrixRMaj) null);
    PointCloudViewer viewer = new PointCloudViewer(K, 10.0);
    viewer.setPreferredSize(new Dimension(rgbImage.width, rgbImage.height));

    // Add each point with its RGB color packed into a single int
    for (int idx = 0; idx < pointCloud.size; idx++) {
        Point3D_F64 pt = pointCloud.get(idx);
        int[] rgbTriplet = pointColors.get(idx);
        int packed = (rgbTriplet[0] << 16) | (rgbTriplet[1] << 8) | rgbTriplet[2];
        viewer.addPoint(pt.x, pt.y, pt.z, packed);
    }

    ShowImages.showWindow(viewer, "Point Cloud", true);
    System.out.println("Total points = " + pointCloud.size);
// BufferedImage depthOut = VisualizeImageData.disparity(depth, null, 0, UtilOpenKinect.FREENECT_DEPTH_MM_MAX_VALUE, 0);
// ShowImages.showWindow(depthOut,"Depth Image", true);
}
Also used : Point3D_F64(georegression.struct.point.Point3D_F64) GrayU16(boofcv.struct.image.GrayU16) FastQueue(org.ddogleg.struct.FastQueue) DMatrixRMaj(org.ejml.data.DMatrixRMaj) FastQueueArray_I32(boofcv.struct.FastQueueArray_I32) CameraPinholeRadial(boofcv.struct.calib.CameraPinholeRadial) PointCloudViewer(boofcv.gui.d3.PointCloudViewer) GrayU8(boofcv.struct.image.GrayU8) File(java.io.File)

Example 10 with GrayU16

use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal.

The following is taken from the class ExampleImageConvert, method convert.

void convert() {
    // ConvertImage copies pixel values from one BoofCV image type into another.
    // When converting you must keep each type's storage range in mind.

    // Unsigned 8-bit into unsigned 16-bit always fits
    GrayU16 gray16 = new GrayU16(gray.width, gray.height);
    ConvertImage.convert(gray, gray16);

    // Converting back to 8-bit is safe here because gray16 never used
    // more than 8 bits of its 16-bit range
    ConvertImage.convert(gray16, gray);

    // Example of overflow: a signed 16-bit derivative squeezed into 8 bits.
    // No exception is thrown; the output is silently corrupted.
    GrayU8 overflowed = new GrayU8(derivX.width, derivX.height);
    ConvertImage.convert(derivX, overflowed);

    // One work-around: rescale pixel values into a displayable range first
    GrayS16 rescaled = new GrayS16(derivX.width, derivX.height);
    GPixelMath.abs(derivX, rescaled);
    GPixelMath.multiply(rescaled, 255.0 / ImageStatistics.max(rescaled), rescaled);

    // Built-in utilities can also visualize 16-bit image values directly
    BufferedImage visualized = VisualizeImageData.colorizeSign(derivX, null, -1);

    // ConvertBufferedImage, like ConvertImage, performs a direct conversion
    // without adjusting pixel values — so the corrupted image stays corrupted
    BufferedImage badDisplay = new BufferedImage(overflowed.width, overflowed.height, BufferedImage.TYPE_INT_RGB);
    BufferedImage scaledDisplay = new BufferedImage(overflowed.width, overflowed.height, BufferedImage.TYPE_INT_RGB);

    ListDisplayPanel listPanel = new ListDisplayPanel();
    listPanel.addImage(ConvertBufferedImage.convertTo(rescaled, scaledDisplay), "Scaled");
    listPanel.addImage(visualized, "Visualized");
    listPanel.addImage(ConvertBufferedImage.convertTo(overflowed, badDisplay), "Bad");
    ShowImages.showWindow(listPanel, "Image Convert", true);
}
Also used : ListDisplayPanel(boofcv.gui.ListDisplayPanel) GrayU16(boofcv.struct.image.GrayU16) GrayS16(boofcv.struct.image.GrayS16) GrayU8(boofcv.struct.image.GrayU8) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage)

Aggregations

GrayU16 (boofcv.struct.image.GrayU16)16 GrayU8 (boofcv.struct.image.GrayU8)12 Test (org.junit.Test)8 Point3D_F64 (georegression.struct.point.Point3D_F64)6 FastQueue (org.ddogleg.struct.FastQueue)5 ConvertBufferedImage (boofcv.io.image.ConvertBufferedImage)4 FastQueueArray_I32 (boofcv.struct.FastQueueArray_I32)4 BufferedImage (java.awt.image.BufferedImage)4 ConfigGeneralDetector (boofcv.abst.feature.detect.interest.ConfigGeneralDetector)3 PkltConfig (boofcv.alg.tracker.klt.PkltConfig)3 CameraPinholeRadial (boofcv.struct.calib.CameraPinholeRadial)3 Planar (boofcv.struct.image.Planar)3 File (java.io.File)3 PointCloudViewer (boofcv.gui.d3.PointCloudViewer)2 VisualDepthParameters (boofcv.struct.calib.VisualDepthParameters)2 Vector3D_F64 (georegression.struct.point.Vector3D_F64)2 Se3_F64 (georegression.struct.se.Se3_F64)2 DMatrixRMaj (org.ejml.data.DMatrixRMaj)2 AccessPointTracks3D (boofcv.abst.sfm.AccessPointTracks3D)1 DoNothingPixelTransform_F32 (boofcv.alg.distort.DoNothingPixelTransform_F32)1