
Example 11 with GrayU16

Use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal, from the class TestUtilOpenKinect, method saveDepth_parseDepth. The test fills a depth image with random values, writes it to disk with UtilOpenKinect.saveDepth, reads it back with parseDepth, and verifies every pixel survives the round trip.

@Test
public void saveDepth_parseDepth() throws IOException {
    GrayU16 depth = new GrayU16(width, height);
    ImageMiscOps.fillUniform(depth, rand, 0, 10000);
    GrowQueue_I8 data = new GrowQueue_I8();
    GrayU16 found = new GrayU16(width, height);
    UtilOpenKinect.saveDepth(depth, "temp.depth", data);
    UtilOpenKinect.parseDepth("temp.depth", found, data);
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            int a = depth.get(j, i);
            int b = found.get(j, i);
            assertEquals(a, b);
        }
    }
    // clean up
    File f = new File("temp.depth");
    assertTrue(f.delete());
}
Also used : GrayU16(boofcv.struct.image.GrayU16) GrowQueue_I8(org.ddogleg.struct.GrowQueue_I8) File(java.io.File) Test(org.junit.Test)
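
The round trip above relies on GrayU16 preserving the full unsigned 16-bit range of depth values. Below is a minimal standalone sketch of that behavior, not taken from the BoofCV sources: get() masks the underlying short with 0xFFFF, so values above Short.MAX_VALUE survive, while the raw data array still holds signed shorts.

public void unsignedRange() {
    // GrayU16 stores pixels in a Java short[] but interprets them as unsigned 16-bit values
    GrayU16 depth = new GrayU16(4, 3);
    // 40000 is larger than Short.MAX_VALUE but fits the unsigned 16-bit range
    depth.set(2, 1, 40000);
    // get() masks the stored short with 0xFFFF, so this prints 40000
    System.out.println(depth.get(2, 1));
    // the raw array element is a signed short, so this prints -25536
    System.out.println(depth.data[depth.getIndex(2, 1)]);
}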

Example 12 with GrayU16

Use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal, from the class TestConvolveNormalizedNaive_SB, method vertical2_U16_U8. The test runs the two-kernel vertical pass of normalized convolution on a GrayU16 input with a GrayU8 output and compares every output pixel against the brute-force reference vertical2().

/**
 * Check it against one specific type to see if the core algorithm is correct
 */
@Test
public void vertical2_U16_U8() {
    Kernel1D_S32 kernelY = new Kernel1D_S32(new int[] { 1, 2, 3, 4, 5, 6 }, 6, 4);
    Kernel1D_S32 kernelX = new Kernel1D_S32(new int[] { 4, 2, 1, 4, 3, 6 }, 5, 2);
    GrayU16 input = new GrayU16(15, 16);
    ImageMiscOps.fillUniform(input, rand, 0, 80);
    GrayU8 output = new GrayU8(15, 16);
    ConvolveNormalizedNaive_SB.vertical(kernelX, kernelY, input, output);
    GrayU8 alt = new GrayU8(15, 16);
    ConvolveImageNoBorder.vertical(kernelY, input, alt, kernelX.computeSum() * kernelY.computeSum());
    for (int y = 0; y < output.height; y++) {
        for (int x = 0; x < output.width; x++) {
            int expected = vertical2(x, y, kernelX, kernelY, input);
            int found = output.get(x, y);
            assertEquals(x + "  " + y, expected, found);
        }
    }
}
Also used : Kernel1D_S32(boofcv.struct.convolve.Kernel1D_S32) GrayU16(boofcv.struct.image.GrayU16) GrayU8(boofcv.struct.image.GrayU8) Test(org.junit.Test)
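
Normalized convolution, which this test exercises, divides the raw weighted sum at each pixel by the kernel's weight sum; for the two-kernel vertical pass above the divisor is the product kernelX.computeSum() * kernelY.computeSum(). A minimal 1-D sketch of that idea, standalone code rather than anything from the test:

public void normalizedSample() {
    int[] signal = { 10, 20, 30, 40, 50 };
    // constructor arguments are (data, width, offset), as in the kernels above
    Kernel1D_S32 kernel = new Kernel1D_S32(new int[]{ 1, 2, 1 }, 3, 1);
    int sum = 0;
    for (int i = 0; i < kernel.width; i++) {
        // weight kernel element i against the sample it overlaps when centered at index 2
        sum += kernel.data[i] * signal[2 - kernel.offset + i];
    }
    // (1*20 + 2*30 + 1*40) / 4 = 30
    int normalized = sum / kernel.computeSum();
    System.out.println(normalized);
}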

Example 13 with GrayU16

Use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal, from the class TestVisualDepthOps, method depthTo3D. The test sets two depth pixels and checks that VisualDepthOps.depthTo3D produces exactly two 3-D points matching the expected back-projections.

@Test
public void depthTo3D() {
    GrayU16 depth = new GrayU16(width, height);
    depth.set(200, 80, 3400);
    depth.set(600, 420, 50);
    FastQueue<Point3D_F64> pts = new FastQueue<>(Point3D_F64.class, true);
    VisualDepthOps.depthTo3D(param, depth, pts);
    assertEquals(2, pts.size());
    assertEquals(0, compute(200, 80, 3400).distance(pts.get(0)), 1e-8);
    assertEquals(0, compute(600, 420, 50).distance(pts.get(1)), 1e-8);
}
Also used : Point3D_F64(georegression.struct.point.Point3D_F64) GrayU16(boofcv.struct.image.GrayU16) FastQueue(org.ddogleg.struct.FastQueue) Test(org.junit.Test)
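
The compute() helper used in the assertions is not shown above. For an undistorted pinhole camera it amounts to the standard back-projection sketched below; backProject is a hypothetical stand-in assuming zero lens distortion, not the test's actual helper.

public Point3D_F64 backProject(CameraPinholeRadial param, int x, int y, int depth) {
    // the depth value becomes the z coordinate
    double z = depth;
    // standard pinhole relations: X = (u - cx)/fx * Z, Y = (v - cy)/fy * Z
    double px = (x - param.cx) / param.fx * z;
    double py = (y - param.cy) / param.fy * z;
    return new Point3D_F64(px, py, z);
}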

Example 14 with GrayU16

Use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal, from the class TestDepthSparse3D, method basicTest. The test wires DepthSparse3D.I to pinhole intrinsics, a pixel transform, and a GrayU16 depth image, then checks the 3-D point it computes for a single pixel.

@Test
public void basicTest() {
    GrayU16 depth = new GrayU16(w, h);
    depth.set(5, 6, 1000);
    CameraPinholeRadial param = new CameraPinholeRadial(1, 1, 0, 5, 10, w, h).fsetRadial(0, 0);
    PixelTransform2_F32 v2d = new PixelTransform2_F32() {

        @Override
        public void compute(int x, int y) {
            distX = x + 1;
            distY = y + 2;
        }
    };
    DepthSparse3D<GrayU16> alg = new DepthSparse3D.I<>(2.1);
    alg.configure(LensDistortionOps.narrow(param), v2d);
    alg.setDepthImage(depth);
    assertTrue(alg.process(4, 4));
    Point3D_F64 found = alg.getWorldPt();
    Point2D_F64 norm = new Point2D_F64();
    PerspectiveOps.convertPixelToNorm(param, new Point2D_F64(4, 4), norm);
    double z = 1000 * 2.1;
    assertEquals(z, found.z, 1e-8);
    assertEquals(norm.x * z, found.x, 1e-8);
    assertEquals(norm.y * z, found.y, 1e-8);
}
Also used : Point3D_F64(georegression.struct.point.Point3D_F64) GrayU16(boofcv.struct.image.GrayU16) CameraPinholeRadial(boofcv.struct.calib.CameraPinholeRadial) Point2D_F64(georegression.struct.point.Point2D_F64) PixelTransform2_F32(boofcv.struct.distort.PixelTransform2_F32) Test(org.junit.Test)
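
The PixelTransform2_F32 in this test maps a pixel in the visual camera to the corresponding pixel in the depth image, so the lookup for (4, 4) lands on (5, 6), the one pixel that was set. A hedged sketch of the arithmetic behind the assertions, reusing param from the test above rather than standing on its own:

// normalized image coordinates of the queried pixel (4, 4)
Point2D_F64 norm = new Point2D_F64();
PerspectiveOps.convertPixelToNorm(param, new Point2D_F64(4, 4), norm);
// raw depth 1000 at the transformed pixel (5, 6), scaled by the 2.1 passed to DepthSparse3D.I
double z = 1000 * 2.1;
// back-projection: normalized coordinates times depth
Point3D_F64 expected = new Point3D_F64(norm.x * z, norm.y * z, z);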

Example 15 with GrayU16

Use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal, from the class ExampleDepthPointCloud, method main. The example loads a registered RGB image, a GrayU16 depth image, and the sensor calibration, converts them into a colored 3-D point cloud, and displays both the depth image and the cloud.

public static void main(String[] args) throws IOException {
    String nameRgb = UtilIO.pathExample("kinect/basket/basket_rgb.png");
    String nameDepth = UtilIO.pathExample("kinect/basket/basket_depth.png");
    String nameCalib = UtilIO.pathExample("kinect/basket/visualdepth.yaml");
    VisualDepthParameters param = CalibrationIO.load(nameCalib);
    BufferedImage buffered = UtilImageIO.loadImage(nameRgb);
    Planar<GrayU8> rgb = ConvertBufferedImage.convertFromPlanar(buffered, null, true, GrayU8.class);
    GrayU16 depth = ConvertBufferedImage.convertFrom(UtilImageIO.loadImage(nameDepth), null, GrayU16.class);
    FastQueue<Point3D_F64> cloud = new FastQueue<>(Point3D_F64.class, true);
    FastQueueArray_I32 cloudColor = new FastQueueArray_I32(3);
    VisualDepthOps.depthTo3D(param.visualParam, rgb, depth, cloud, cloudColor);
    DMatrixRMaj K = PerspectiveOps.calibrationMatrix(param.visualParam, (DMatrixRMaj) null);
    PointCloudViewer viewer = new PointCloudViewer(K, 15);
    viewer.setPreferredSize(new Dimension(rgb.width, rgb.height));
    for (int i = 0; i < cloud.size; i++) {
        Point3D_F64 p = cloud.get(i);
        int[] color = cloudColor.get(i);
        int c = (color[0] << 16) | (color[1] << 8) | color[2];
        viewer.addPoint(p.x, p.y, p.z, c);
    }
    // ---------- Display depth image
    // use the actual max value in the image to maximize its appearance
    int maxValue = ImageStatistics.max(depth);
    BufferedImage depthOut = VisualizeImageData.disparity(depth, null, 0, maxValue, 0);
    ShowImages.showWindow(depthOut, "Depth Image");
    // ---------- Display colorized point cloud
    ShowImages.showWindow(viewer, "Point Cloud");
    System.out.println("Total points = " + cloud.size);
}
Also used : VisualDepthParameters(boofcv.struct.calib.VisualDepthParameters) Point3D_F64(georegression.struct.point.Point3D_F64) GrayU16(boofcv.struct.image.GrayU16) FastQueue(org.ddogleg.struct.FastQueue) DMatrixRMaj(org.ejml.data.DMatrixRMaj) FastQueueArray_I32(boofcv.struct.FastQueueArray_I32) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage) PointCloudViewer(boofcv.gui.d3.PointCloudViewer) GrayU8(boofcv.struct.image.GrayU8)
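
The loop above packs each point's color into a single int with red in the high byte. A minimal sketch of the packing and its inverse, plain Java with no BoofCV calls:

public void packUnpack() {
    int r = 200, g = 150, b = 100;
    // pack exactly as in the loop above: red in bits 16-23, green in 8-15, blue in 0-7
    int packed = (r << 16) | (g << 8) | b;
    // unpack by shifting back and masking out one byte at a time
    int r2 = (packed >> 16) & 0xFF; // 200
    int g2 = (packed >> 8) & 0xFF;  // 150
    int b2 = packed & 0xFF;         // 100
}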

Aggregations

GrayU16 (boofcv.struct.image.GrayU16) 16
GrayU8 (boofcv.struct.image.GrayU8) 12
Test (org.junit.Test) 8
Point3D_F64 (georegression.struct.point.Point3D_F64) 6
FastQueue (org.ddogleg.struct.FastQueue) 5
ConvertBufferedImage (boofcv.io.image.ConvertBufferedImage) 4
FastQueueArray_I32 (boofcv.struct.FastQueueArray_I32) 4
BufferedImage (java.awt.image.BufferedImage) 4
ConfigGeneralDetector (boofcv.abst.feature.detect.interest.ConfigGeneralDetector) 3
PkltConfig (boofcv.alg.tracker.klt.PkltConfig) 3
CameraPinholeRadial (boofcv.struct.calib.CameraPinholeRadial) 3
Planar (boofcv.struct.image.Planar) 3
File (java.io.File) 3
PointCloudViewer (boofcv.gui.d3.PointCloudViewer) 2
VisualDepthParameters (boofcv.struct.calib.VisualDepthParameters) 2
Vector3D_F64 (georegression.struct.point.Vector3D_F64) 2
Se3_F64 (georegression.struct.se.Se3_F64) 2
DMatrixRMaj (org.ejml.data.DMatrixRMaj) 2
AccessPointTracks3D (boofcv.abst.sfm.AccessPointTracks3D) 1
DoNothingPixelTransform_F32 (boofcv.alg.distort.DoNothingPixelTransform_F32) 1