
Example 1 with Point3dRgbI_F64

Use of boofcv.struct.Point3dRgbI_F64 in project BoofCV by lessthanoptimal.

From the class PointCloudUtils_F64, the method filter:

/**
 * Creates a new array of colorized points, copying only those accepted by the filter
 */
public static DogArray<Point3dRgbI_F64> filter( AccessPointIndex<Point3D_F64> accessPoint, AccessColorIndex accessColor,
                                                int size, BoofLambdas.FilterInt filter,
                                                @Nullable DogArray<Point3dRgbI_F64> output ) {
    if (output == null)
        output = new DogArray<>(Point3dRgbI_F64::new);
    output.reset();
    // Guess how much memory is needed
    output.reserve(size / 5);
    // Grow one element ahead of time; it is only kept once the filter accepts an index
    Point3dRgbI_F64 p = output.grow();
    for (int pointIdx = 0; pointIdx < size; pointIdx++) {
        if (!filter.keep(pointIdx))
            continue;
        // Copy the coordinates and look up the packed RGB color for this index
        accessPoint.getPoint(pointIdx, p);
        p.rgb = accessColor.getRGB(pointIdx);
        p = output.grow();
    }
    // The most recently grown element was never filled in, so remove it
    output.removeTail();
    return output;
}
Also used: Point3dRgbI_F64 (boofcv.struct.Point3dRgbI_F64), DogArray (org.ddogleg.struct.DogArray)
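
As a quick usage sketch (mirroring the call in Example 5 further down), the two lambdas adapt a plain point list and color array to the accessor interfaces. The synthetic cloud, the green color, and the 10-unit radius are illustrative assumptions, not values from the example:

// Build a small synthetic cloud plus a parallel array of packed RGB colors
List<Point3D_F64> cloud = new ArrayList<>();
DogArray_I32 colors = new DogArray_I32();
for (int i = 0; i < 100; i++) {
    cloud.add(new Point3D_F64(i * 0.5, 0, 0));
    colors.add(0x00FF00);
}
// Keep only points within 10 units of the origin; passing null lets filter() allocate the output
DogArray<Point3dRgbI_F64> kept = PointCloudUtils_F64.filter(
        (idx, p) -> p.setTo(cloud.get(idx)), // copy the coordinates at this index
        colors::get,                         // look up the packed RGB color
        cloud.size(),
        (idx) -> cloud.get(idx).norm() <= 10.0,
        null);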

Example 2 with Point3dRgbI_F64

Use of boofcv.struct.Point3dRgbI_F64 in project BoofCV by lessthanoptimal.

From the class TestDisparityToColorPointCloud, the method doesItCrash:

/**
 * Simple test to see if it crashes. Very little validation of results is done
 */
@Test
void doesItCrash() {
    double baseline = 1.0;
    DMatrixRMaj K = PerspectiveOps.pinholeToMatrix(500.0, 500, 0, 250, 250);
    DMatrixRMaj rectifiedR = CommonOps_DDRM.identity(3);
    Point2Transform2_F64 rectifiedToColor = new DoNothing2Transform2_F64();
    int disparityMin = 2;
    int disparityRange = 100;
    var alg = new DisparityToColorPointCloud();
    alg.configure(baseline, K, rectifiedR, rectifiedToColor, disparityMin, disparityRange);
    var disparity = new GrayF32(width, height);
    // Color source that claims every pixel is in bounds and returns magenta (0xFF00FF)
    var color = new DisparityToColorPointCloud.ColorImage() {

        @Override
        public boolean isInBounds(int x, int y) {
            return true;
        }

        @Override
        public int getRGB(int x, int y) {
            return 0xFF00FF;
        }
    };
    var output = new DogArray<>(Point3dRgbI_F64::new);
    alg.process(disparity, color, PointCloudWriter.wrapF64RGB(output));
    // Every pixel should produce one point, and every point should carry the magenta color
    assertEquals(width * height, output.size);
    for (int i = 0; i < output.size; i++) {
        assertEquals(0xFF00FF, output.get(i).rgb);
    }
}
Also used: GrayF32 (boofcv.struct.image.GrayF32), DMatrixRMaj (org.ejml.data.DMatrixRMaj), Point2Transform2_F64 (boofcv.struct.distort.Point2Transform2_F64), DoNothing2Transform2_F64 (boofcv.struct.distort.DoNothing2Transform2_F64), Point3dRgbI_F64 (boofcv.struct.Point3dRgbI_F64), DogArray (org.ddogleg.struct.DogArray), Test (org.junit.jupiter.api.Test)
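
For reference, a small sketch of how the packed rgb value asserted above maps to individual channels. The constructor and the rgb field are the same ones exercised elsewhere on this page, and the channel layout matches the (r << 16) | (g << 8) | b packing used in Example 4:

// 0xFF00FF packs full red and full blue with zero green, i.e. magenta
var p = new Point3dRgbI_F64(1.0, 2.0, 3.0, 0xFF00FF);
int red = (p.rgb >> 16) & 0xFF;  // 255
int green = (p.rgb >> 8) & 0xFF; // 0
int blue = p.rgb & 0xFF;         // 255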

Example 3 with Point3dRgbI_F64

Use of boofcv.struct.Point3dRgbI_F64 in project BoofCV by lessthanoptimal.

From the class TestPlyCodec, the method encode_decode_3DRGB_binary:

@Test
void encode_decode_3DRGB_binary() throws IOException {
    List<Point3dRgbI_F64> expected = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        int r = (10 * i) & 0xFF;
        int g = (28 * i) & 0xFF;
        int b = (58 * i) & 0xFF;
        int rgb = r << 16 | g << 8 | b;
        expected.add(new Point3dRgbI_F64(i * 123.45, i - 1.01, i + 2.34, rgb));
    }
    // Round-trip the cloud with both settings of the asFloat flag; the tolerance is loosened for 32-bit precision
    for (boolean asFloat : new boolean[] { true, false }) {
        DogArray<Point3dRgbI_F64> found = new DogArray<>(Point3dRgbI_F64::new);
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        PlyCodec.saveBinary(PointCloudReader.wrapF64RGB(expected), ByteOrder.BIG_ENDIAN, false, asFloat, output);
        ByteArrayInputStream input = new ByteArrayInputStream(output.toByteArray());
        PlyCodec.read(input, PointCloudWriter.wrapF64RGB(found));
        assertEquals(expected.size(), found.size);
        double tol = asFloat ? UtilEjml.TEST_F32 : UtilEjml.TEST_F64;
        for (int i = 0; i < found.size; i++) {
            assertEquals(0.0, found.get(i).distance(expected.get(i)), tol);
        }
    }
}
Also used: Point3dRgbI_F64 (boofcv.struct.Point3dRgbI_F64), ArrayList (java.util.ArrayList), DogArray (org.ddogleg.struct.DogArray), Test (org.junit.jupiter.api.Test)
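
The same saveBinary call can also write straight to a file. A minimal sketch reusing the exact argument pattern from the test above; the file name and sample points are arbitrary, and the two boolean flags are copied verbatim from the test rather than interpreted here:

List<Point3dRgbI_F64> cloud = new ArrayList<>();
cloud.add(new Point3dRgbI_F64(0.0, 0.0, 1.0, 0xFF0000));
cloud.add(new Point3dRgbI_F64(0.5, 0.0, 1.0, 0x0000FF));
try (FileOutputStream out = new FileOutputStream("cloud.ply")) {
    // Big-endian binary PLY, same flags as the round-trip test above
    PlyCodec.saveBinary(PointCloudReader.wrapF64RGB(cloud), ByteOrder.BIG_ENDIAN, false, true, out);
} catch (IOException e) {
    e.printStackTrace();
}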

Example 4 with Point3dRgbI_F64

Use of boofcv.struct.Point3dRgbI_F64 in project BoofCV by lessthanoptimal.

From the class ExampleMultiViewSparseReconstruction, the method visualizeSparseCloud:

/**
 * To visualize the results we will render a sparse point cloud along with the location of each camera in the
 * scene.
 */
public void visualizeSparseCloud() {
    checkTrue(scene.isHomogenous());
    List<Point3D_F64> cloudXyz = new ArrayList<>();
    Point4D_F64 world = new Point4D_F64();
    // NOTE: By default the colors found below are not used. Look below to see why and how to turn them on.
    //
    // Colorize the cloud by reprojecting the images. The math is straightforward, but there's a lot of
    // bookkeeping to be done because of the scene data structure. A class is provided to make this process easy
    var imageLookup = new LookUpImageFilesByIndex(imageFiles);
    var colorize = new ColorizeMultiViewStereoResults<>(new LookUpColorRgbFormats.PL_U8(), imageLookup);
    DogArray_I32 rgb = new DogArray_I32();
    rgb.resize(scene.points.size);
    colorize.processScenePoints(scene,
            (viewIdx) -> viewIdx + "", // String encodes the image's index
            (pointIdx, r, g, b) -> rgb.set(pointIdx, (r << 16) | (g << 8) | b)); // Assign the RGB color
    // Convert the structure into regular 3D points from homogeneous coordinates
    for (int i = 0; i < scene.points.size; i++) {
        scene.points.get(i).get(world);
        // A point at infinity (w == 0) can't be converted. Dropping it would put the color
        // array out of sync, so just throw it far far away instead.
        if (world.w == 0.0)
            cloudXyz.add(new Point3D_F64(0, 0, Double.MAX_VALUE));
        else
            cloudXyz.add(new Point3D_F64(world.x / world.w, world.y / world.w, world.z / world.w));
    }
    PointCloudViewer viewer = VisualizeData.createPointCloudViewer();
    viewer.setFog(true);
    // We just did a bunch of work to look up the true color of points; however, for sparse data it's easier
    // to see the structure with pseudocolor. Comment out the line below to see the true colors.
    viewer.setColorizer(new TwoAxisRgbPlane.Z_XY(1.0).fperiod(40));
    viewer.setDotSize(1);
    viewer.setTranslationStep(0.15);
    viewer.addCloud((idx, p) -> p.setTo(cloudXyz.get(idx)), rgb::get, rgb.size);
    viewer.setCameraHFov(UtilAngle.radian(60));
    SwingUtilities.invokeLater(() -> {
        // Show where the cameras are
        BoofSwingUtil.visualizeCameras(scene, viewer);
        // Size the window and show it to the user
        viewer.getComponent().setPreferredSize(new Dimension(600, 600));
        ShowImages.showWindow(viewer.getComponent(), "Refined Scene", true);
        var copy = new DogArray<>(Point3dRgbI_F64::new);
        viewer.copyCloud(copy);
        try (var out = new FileOutputStream("saved_cloud.ply")) {
            PointCloudIO.save3D(PointCloudIO.Format.PLY, PointCloudReader.wrapF64RGB(copy.toList()), true, out);
        } catch (IOException e) {
            e.printStackTrace();
        }
    });
}
Also used: Point3D_F64 (georegression.struct.point.Point3D_F64), LookUpColorRgbFormats (boofcv.core.image.LookUpColorRgbFormats), ArrayList (java.util.ArrayList), DogArray_I32 (org.ddogleg.struct.DogArray_I32), UncheckedIOException (java.io.UncheckedIOException), IOException (java.io.IOException), DogArray (org.ddogleg.struct.DogArray), ColorizeMultiViewStereoResults (boofcv.alg.mvs.ColorizeMultiViewStereoResults), FileOutputStream (java.io.FileOutputStream), PointCloudViewer (boofcv.visualize.PointCloudViewer), Point3dRgbI_F64 (boofcv.struct.Point3dRgbI_F64), Point4D_F64 (georegression.struct.point.Point4D_F64), LookUpImageFilesByIndex (boofcv.io.image.LookUpImageFilesByIndex)
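
The homogeneous-to-Euclidean conversion inside the loop above can be factored into a small helper; a sketch, where the helper name and the farAway placeholder are hypothetical:

// Convert a homogeneous point to Euclidean coordinates. A point at infinity (w == 0)
// has no finite representation, so it is pushed out to a far-away placeholder instead.
static Point3D_F64 homogeneousToEuclidean( Point4D_F64 world, double farAway ) {
    if (world.w == 0.0)
        return new Point3D_F64(0, 0, farAway);
    return new Point3D_F64(world.x / world.w, world.y / world.w, world.z / world.w);
}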

Example 5 with Point3dRgbI_F64

Use of boofcv.struct.Point3dRgbI_F64 in project BoofCV by lessthanoptimal.

From the class ExampleMultiViewDenseReconstruction, the method saveCloudToDisk:

private static void saveCloudToDisk(SparseSceneToDenseCloud<GrayU8> sparseToDense) {
    // Save the dense point cloud to disk in PLY format
    try (FileOutputStream out = new FileOutputStream("saved_cloud.ply")) {
        // Filter out points that are far away to make it easier to view in third-party viewers that auto-scale
        // You might need to adjust the threshold for your application if too many points are cut
        double distanceThreshold = 50.0;
        List<Point3D_F64> cloud = sparseToDense.getCloud();
        DogArray_I32 colorsRgb = sparseToDense.getColorRgb();
        DogArray<Point3dRgbI_F64> filtered = PointCloudUtils_F64.filter(
                (idx, p) -> p.setTo(cloud.get(idx)), colorsRgb::get, cloud.size(),
                (idx) -> cloud.get(idx).norm() <= distanceThreshold, null);
        PointCloudIO.save3D(PointCloudIO.Format.PLY, PointCloudReader.wrapF64RGB(filtered.toList()), true, out);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Also used: Point3D_F64 (georegression.struct.point.Point3D_F64), FileOutputStream (java.io.FileOutputStream), Point3dRgbI_F64 (boofcv.struct.Point3dRgbI_F64), DogArray_I32 (org.ddogleg.struct.DogArray_I32), IOException (java.io.IOException)
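
One possible refinement is to derive distanceThreshold from the data rather than hard-coding 50.0. The sketch below keys it to the median point norm; the helper name and the factor of ten are heuristic assumptions, not something the example prescribes:

// Hypothetical helper: cut off at ten times the median distance from the origin.
// Assumes a non-empty cloud.
static double pickDistanceThreshold( List<Point3D_F64> cloud ) {
    double[] norms = cloud.stream().mapToDouble(Point3D_F64::norm).sorted().toArray();
    return 10.0 * norms[norms.length / 2];
}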

Aggregations

Point3dRgbI_F64 (boofcv.struct.Point3dRgbI_F64): 8 usages
DogArray (org.ddogleg.struct.DogArray): 6 usages
Point3D_F64 (georegression.struct.point.Point3D_F64): 3 usages
ArrayList (java.util.ArrayList): 3 usages
Test (org.junit.jupiter.api.Test): 3 usages
Point3D_F32 (georegression.struct.point.Point3D_F32): 2 usages
FileOutputStream (java.io.FileOutputStream): 2 usages
IOException (java.io.IOException): 2 usages
DogArray_I32 (org.ddogleg.struct.DogArray_I32): 2 usages
PointCloudWriter (boofcv.alg.cloud.PointCloudWriter): 1 usage
ColorizeMultiViewStereoResults (boofcv.alg.mvs.ColorizeMultiViewStereoResults): 1 usage
LookUpColorRgbFormats (boofcv.core.image.LookUpColorRgbFormats): 1 usage
LookUpImageFilesByIndex (boofcv.io.image.LookUpImageFilesByIndex): 1 usage
DoNothing2Transform2_F64 (boofcv.struct.distort.DoNothing2Transform2_F64): 1 usage
Point2Transform2_F64 (boofcv.struct.distort.Point2Transform2_F64): 1 usage
GrayF32 (boofcv.struct.image.GrayF32): 1 usage
PointCloudViewer (boofcv.visualize.PointCloudViewer): 1 usage
Point4D_F64 (georegression.struct.point.Point4D_F64): 1 usage
UncheckedIOException (java.io.UncheckedIOException): 1 usage
BigDogArray_I32 (org.ddogleg.struct.BigDogArray_I32): 1 usage