Usage of boofcv.struct.Point3dRgbI_F64 in the BoofCV project (by lessthanoptimal):
the filter method of the PointCloudUtils_F64 class.
/**
 * Copies points into a new list, keeping only those accepted by the filter.
 *
 * @param accessPoint Looks up the 3D coordinate for a point index
 * @param accessColor Looks up the packed RGB color for a point index
 * @param size Total number of points to consider
 * @param filter Predicate deciding which point indexes are kept
 * @param output (Optional) storage for results. If null a new array is created.
 * @return The filtered point cloud
 */
public static DogArray<Point3dRgbI_F64> filter(AccessPointIndex<Point3D_F64> accessPoint, AccessColorIndex accessColor, int size, BoofLambdas.FilterInt filter, @Nullable DogArray<Point3dRgbI_F64> output) {
    if (output == null)
        output = new DogArray<>(Point3dRgbI_F64::new);
    output.reset();

    // Rough guess at capacity to reduce reallocation
    output.reserve(size / 5);

    for (int pointIdx = 0; pointIdx < size; pointIdx++) {
        if (!filter.keep(pointIdx))
            continue;
        // Claim storage only after the point passes the filter
        Point3dRgbI_F64 dst = output.grow();
        accessPoint.getPoint(pointIdx, dst);
        dst.rgb = accessColor.getRGB(pointIdx);
    }

    return output;
}
Usage of boofcv.struct.Point3dRgbI_F64 in the BoofCV project (by lessthanoptimal):
the doesItCrash method of the TestDisparityToColorPointCloud class.
/**
 * Smoke test: verifies the conversion runs without crashing and that every
 * output point received the color reported by the color image. Little else
 * about the results is validated.
 */
@Test
void doesItCrash() {
    // Rectified stereo geometry with an identity rotation and a simple pinhole camera
    double baseline = 1.0;
    DMatrixRMaj K = PerspectiveOps.pinholeToMatrix(500.0, 500, 0, 250, 250);
    DMatrixRMaj rectifiedR = CommonOps_DDRM.identity(3);
    Point2Transform2_F64 rectifiedToColor = new DoNothing2Transform2_F64();
    int disparityMin = 2;
    int disparityRange = 100;

    var converter = new DisparityToColorPointCloud();
    converter.configure(baseline, K, rectifiedR, rectifiedToColor, disparityMin, disparityRange);

    // Color source which is always in bounds and returns a constant magenta
    var colorSource = new DisparityToColorPointCloud.ColorImage() {
        @Override
        public boolean isInBounds(int x, int y) {
            return true;
        }

        @Override
        public int getRGB(int x, int y) {
            return 0xFF00FF;
        }
    };

    var disparityImage = new GrayF32(width, height);
    var cloud = new DogArray<>(Point3dRgbI_F64::new);
    converter.process(disparityImage, colorSource, PointCloudWriter.wrapF64RGB(cloud));

    // Every pixel should have produced exactly one point with the constant color
    assertEquals(width * height, cloud.size);
    for (var p : cloud.toList()) {
        assertEquals(0xFF00FF, p.rgb);
    }
}
Usage of boofcv.struct.Point3dRgbI_F64 in the BoofCV project (by lessthanoptimal):
the encode_decode_3DRGB_binary method of the TestPlyCodec class.
/**
 * Round-trips a small colored point cloud through the binary PLY encoder/decoder,
 * once storing coordinates as floats and once as doubles, and checks that the
 * decoded points match the originals to the appropriate precision.
 *
 * @throws IOException if encoding or decoding fails
 */
@Test
void encode_decode_3DRGB_binary() throws IOException {
    List<Point3dRgbI_F64> expected = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        int r = (10 * i) & 0xFF;
        int g = (28 * i) & 0xFF;
        int b = (58 * i) & 0xFF;
        // Pack as standard 24-bit RGB: red in bits 16-23, green in 8-15, blue in 0-7.
        // BUG FIX: green was previously shifted by 16, clobbering the red channel.
        int rgb = r << 16 | g << 8 | b;
        expected.add(new Point3dRgbI_F64(i * 123.45, i - 1.01, i + 2.34, rgb));
    }

    for (boolean asFloat : new boolean[] { true, false }) {
        DogArray<Point3dRgbI_F64> found = new DogArray<>(Point3dRgbI_F64::new);

        ByteArrayOutputStream output = new ByteArrayOutputStream();
        PlyCodec.saveBinary(PointCloudReader.wrapF64RGB(expected), ByteOrder.BIG_ENDIAN, false, asFloat, output);

        ByteArrayInputStream input = new ByteArrayInputStream(output.toByteArray());
        PlyCodec.read(input, PointCloudWriter.wrapF64RGB(found));

        assertEquals(expected.size(), found.size);
        // Float storage loses precision, so relax the tolerance in that mode
        double tol = asFloat ? UtilEjml.TEST_F32 : UtilEjml.TEST_F64;
        for (int i = 0; i < found.size; i++) {
            assertEquals(0.0, found.get(i).distance(expected.get(i)), tol);
        }
    }
}
Usage of boofcv.struct.Point3dRgbI_F64 in the BoofCV project (by lessthanoptimal):
the visualizeSparseCloud method of the ExampleMultiViewSparseReconstruction class.
/**
 * To visualize the results we will render a sparse point cloud along with the location of each camera in the
 * scene. Also demonstrates looking up each point's true color and saving the rendered cloud to disk in PLY format.
 */
public void visualizeSparseCloud() {
// The scene is expected to store points in homogeneous coordinates
checkTrue(scene.isHomogenous());
List<Point3D_F64> cloudXyz = new ArrayList<>();
Point4D_F64 world = new Point4D_F64();
// NOTE: By default the colors found below are not used. Look below to see why and how to turn them on.
//
// Colorize the cloud by reprojecting the images. The math is straight forward but there's a lot of book
// keeping that needs to be done due to the scene data structure. A class is provided to make this process easy
var imageLookup = new LookUpImageFilesByIndex(imageFiles);
var colorize = new ColorizeMultiViewStereoResults<>(new LookUpColorRgbFormats.PL_U8(), imageLookup);
// rgb[i] holds the packed 24-bit color of scene point i, kept in lock step with the point array
DogArray_I32 rgb = new DogArray_I32();
rgb.resize(scene.points.size);
colorize.processScenePoints(scene, // String encodes the image's index
(viewIdx) -> viewIdx + "", // Assign the RGB color
(pointIdx, r, g, b) -> rgb.set(pointIdx, (r << 16) | (g << 8) | b));
// Convert the structure into regular 3D points from homogeneous
for (int i = 0; i < scene.points.size; i++) {
scene.points.get(i).get(world);
// A point with w == 0 is at infinity and has no finite 3D location. It can't simply be skipped or the color
// array would be out of sync. Let's just throw it far far away then.
if (world.w == 0.0)
cloudXyz.add(new Point3D_F64(0, 0, Double.MAX_VALUE));
else
cloudXyz.add(new Point3D_F64(world.x / world.w, world.y / world.w, world.z / world.w));
}
PointCloudViewer viewer = VisualizeData.createPointCloudViewer();
viewer.setFog(true);
// We just did a bunch of work to look up the true color of points, however for sparse data it's easy to see
// the structure with pseudo color. Comment out the line below to see the true color.
viewer.setColorizer(new TwoAxisRgbPlane.Z_XY(1.0).fperiod(40));
viewer.setDotSize(1);
viewer.setTranslationStep(0.15);
viewer.addCloud((idx, p) -> p.setTo(cloudXyz.get(idx)), rgb::get, rgb.size);
viewer.setCameraHFov(UtilAngle.radian(60));
// All Swing interaction must happen on the Event Dispatch Thread
SwingUtilities.invokeLater(() -> {
// Show where the cameras are
BoofSwingUtil.visualizeCameras(scene, viewer);
// Size the window and show it to the user
viewer.getComponent().setPreferredSize(new Dimension(600, 600));
ShowImages.showWindow(viewer.getComponent(), "Refined Scene", true);
// Copy the viewer's cloud (with whatever colorization it applied) and save it as a PLY file
var copy = new DogArray<>(Point3dRgbI_F64::new);
viewer.copyCloud(copy);
try (var out = new FileOutputStream("saved_cloud.ply")) {
PointCloudIO.save3D(PointCloudIO.Format.PLY, PointCloudReader.wrapF64RGB(copy.toList()), true, out);
} catch (IOException e) {
e.printStackTrace();
}
});
}
Usage of boofcv.struct.Point3dRgbI_F64 in the BoofCV project (by lessthanoptimal):
the saveCloudToDisk method of the ExampleMultiViewDenseReconstruction class.
/**
 * Saves the dense point cloud to disk in PLY format, discarding far-away points
 * so the file is easier to inspect in 3rd-party viewers that auto scale.
 */
private static void saveCloudToDisk(SparseSceneToDenseCloud<GrayU8> sparseToDense) {
    try (FileOutputStream out = new FileOutputStream("saved_cloud.ply")) {
        // Points beyond this distance are dropped. You might need to adjust this
        // threshold for your application if too many points are cut.
        double distanceThreshold = 50.0;

        List<Point3D_F64> points = sparseToDense.getCloud();
        DogArray_I32 rgbColors = sparseToDense.getColorRgb();

        // Keep only points within the distance threshold, pairing each with its color
        DogArray<Point3dRgbI_F64> filtered = PointCloudUtils_F64.filter(
                (index, dst) -> dst.setTo(points.get(index)),
                rgbColors::get,
                points.size(),
                (index) -> points.get(index).norm() <= distanceThreshold,
                null);

        PointCloudIO.save3D(PointCloudIO.Format.PLY, PointCloudReader.wrapF64RGB(filtered.toList()), true, out);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Aggregations