Usage of boofcv.struct.distort.DoNothing2Transform2_F64 in project narchy (by automenta): class ExampleStereoTwoViewsOneCamera, method update.
/**
 * Processes the next frame: matches features against the previous frame, robustly
 * estimates camera motion, rectifies the image pair, computes dense disparity,
 * and forwards the results to the GUI.
 *
 * NOTE(review): relies on fields (prev, next, distortedPrev, distortedNext,
 * matchedFeatures, intrinsic, inliers, minDisparity, maxDisparity, gui) that are
 * set elsewhere in the class — not visible in this excerpt.
 *
 * @param inLeft current frame as a BufferedImage, passed through to the GUI
 * @return true if motion was estimated and the frame was fully processed,
 *         false if robust motion estimation failed (frame is skipped)
 */
protected boolean update(BufferedImage inLeft) {
    // ShowImages.showGrid(2, "source", prev, next);
    // Associate point features between the two images
    computeMatches(prev, next);
    // convert from pixel coordinates into normalized image coordinates
    convertToNormalizedCoordinates(matchedFeatures, intrinsic);
    // Robustly estimate camera motion
    Se3_F64 leftToRight = estimateCameraMotion(intrinsic);
    if (leftToRight == null)
        // no motion could be estimated — nothing more can be done this frame
        return false;
    drawInliers(intrinsic, inliers);
    // Rectify and remove lens distortion for stereo processing
    DMatrixRMaj rectifiedK = new DMatrixRMaj(3, 3);
    GrayU8 rectifiedLeft = distortedPrev.createSameShape();
    GrayU8 rectifiedRight = distortedNext.createSameShape();
    rectifyImages(distortedPrev, distortedNext, leftToRight, intrinsic, rectifiedLeft, rectifiedRight, rectifiedK);
    // compute disparity
    // NOTE(review): the disparity algorithm is re-created on every call; hoisting it
    // into a field would avoid the per-frame allocation — TODO confirm call frequency
    StereoDisparity<GrayS16, GrayF32> disparityAlg = FactoryStereoDisparity.regionSubpixelWta(DisparityAlgorithms.RECT_FIVE, minDisparity, maxDisparity, 3, 3, 0, 15, 0.1, GrayS16.class);
    // Apply the Laplacian across the image to add extra resistance to changes in lighting or camera gain
    GrayS16 derivLeft = new GrayS16(rectifiedLeft.width, rectifiedLeft.height);
    LaplacianEdge.process(rectifiedLeft, derivLeft);
    GrayS16 derivRight = new GrayS16(rectifiedRight.width, rectifiedRight.height);
    LaplacianEdge.process(rectifiedRight, derivRight);
    // process and return the results
    disparityAlg.process(derivLeft, derivRight);
    GrayF32 disparity = disparityAlg.getDisparity();
    // show results
    // NOTE(review): showWindow appears to open a new window on every invocation —
    // verify this method is not called per-frame in a tight loop
    BufferedImage visualized = VisualizeImageData.disparity(disparity, null, minDisparity, maxDisparity, 0);
    ShowImages.showWindow(visualized, "Disparity");
    // BufferedImage outLeft = ConvertBufferedImage.convertTo(rectifiedLeft, null);
    // BufferedImage outRight = ConvertBufferedImage.convertTo(rectifiedRight, null);
    // ShowImages.showWindow(new RectifiedPairPanel(true, outLeft, outRight), "Rectification");
    // baseline is the magnitude of the estimated translation between the two views
    double baseline = leftToRight.getT().norm();
    // identity transform: rectified images carry no residual distortion to undo
    gui.configure(baseline, rectifiedK, new DoNothing2Transform2_F64(), minDisparity, maxDisparity);
    gui.process(leftToRight, disparity, inLeft);
    return true;
}
Usage of boofcv.struct.distort.DoNothing2Transform2_F64 in project BoofCV (by lessthanoptimal): class QrPose3DUtils, method setLensDistortion.
/**
 * Specifies the transforms used to convert between pixel and normalized image
 * coordinates. If either argument is null, lens distortion is considered absent
 * and both transforms fall back to identity (do-nothing) transforms.
 *
 * @param pixelToNorm transform from distorted pixel to normalized image coordinates, or null
 * @param undistToDist transform from undistorted to distorted pixel coordinates, or null
 */
public void setLensDistortion(@Nullable Point2Transform2_F64 pixelToNorm, @Nullable Point2Transform2_F64 undistToDist) {
    boolean missing = pixelToNorm == null || undistToDist == null;
    this.pixelToNorm = missing ? new DoNothing2Transform2_F64() : pixelToNorm;
    this.undistToDist = missing ? new DoNothing2Transform2_F64() : undistToDist;
}
Usage of boofcv.struct.distort.DoNothing2Transform2_F64 in project BoofCV (by lessthanoptimal): class MicroQrPose3DUtils, method setLensDistortion.
/**
 * Sets the transforms between pixel and normalized image coordinates.
 * Both transforms must be supplied together; if either is null the pair is
 * replaced by identity (do-nothing) transforms, disabling distortion handling.
 *
 * @param pixelToNorm transform from distorted pixel to normalized image coordinates, or null
 * @param undistToDist transform from undistorted to distorted pixel coordinates, or null
 */
public void setLensDistortion(@Nullable Point2Transform2_F64 pixelToNorm, @Nullable Point2Transform2_F64 undistToDist) {
    if (pixelToNorm != null && undistToDist != null) {
        this.pixelToNorm = pixelToNorm;
        this.undistToDist = undistToDist;
    } else {
        // Either transform missing: treat the camera as distortion-free
        this.pixelToNorm = new DoNothing2Transform2_F64();
        this.undistToDist = new DoNothing2Transform2_F64();
    }
}
Usage of boofcv.struct.distort.DoNothing2Transform2_F64 in project BoofCV (by lessthanoptimal): class ExampleStereoTwoViewsOneCamera, method showPointCloud.
/**
 * Shows the computed disparity image as a colorized 3D point cloud in an
 * interactive viewer window.
 *
 * @param disparity      disparity image produced by the stereo algorithm
 * @param left           rectified left image used to color the points
 * @param motion         motion (left-to-right) between the two views; its
 *                       translation magnitude defines the stereo baseline
 * @param rectifiedK     intrinsic matrix of the rectified views
 * @param rectifiedR     rotation applied by rectification
 * @param disparityMin   minimum disparity value considered valid
 * @param disparityRange number of disparity values in the search range
 */
public static void showPointCloud(ImageGray disparity, BufferedImage left, Se3_F64 motion, DMatrixRMaj rectifiedK, DMatrixRMaj rectifiedR, int disparityMin, int disparityRange) {
    DisparityToColorPointCloud d2c = new DisparityToColorPointCloud();
    PointCloudWriter.CloudArraysF32 cloud = new PointCloudWriter.CloudArraysF32();
    // baseline is the magnitude of the translation between the two views
    double baseline = motion.getT().norm();
    // rectified images carry no residual distortion, hence the identity transform
    d2c.configure(baseline, rectifiedK, rectifiedR, new DoNothing2Transform2_F64(), disparityMin, disparityRange);
    d2c.process(disparity, UtilDisparitySwing.wrap(left), cloud);
    CameraPinhole rectifiedPinhole = PerspectiveOps.matrixToPinhole(rectifiedK, disparity.width, disparity.height, null);
    // skew the view to make the structure easier to see
    Se3_F64 cameraToWorld = SpecialEuclideanOps_F64.eulerXyz(-baseline * 5, 0, 0, 0, 0.2, 0, null);
    PointCloudViewer pcv = VisualizeData.createPointCloudViewer();
    pcv.setCameraHFov(PerspectiveOps.computeHFov(rectifiedPinhole));
    pcv.setCameraToWorld(cameraToWorld);
    pcv.addCloud(cloud.cloudXyz, cloud.cloudRgb);
    pcv.setDotSize(1);
    // FIX: setTranslationStep was previously called twice (baseline/3, then
    // baseline/10); the first call was dead code. Keep only the effective value.
    pcv.setTranslationStep(baseline / 10);
    pcv.getComponent().setPreferredSize(new Dimension(left.getWidth(), left.getHeight()));
    ShowImages.showWindow(pcv.getComponent(), "Point Cloud", true);
}
Usage of boofcv.struct.distort.DoNothing2Transform2_F64 in project BoofCV (by lessthanoptimal): class TestDisparityToColorPointCloud, method doesItCrash.
/**
 * Smoke test: runs the full disparity-to-point-cloud conversion and performs
 * only light sanity checks on the output (point count and constant color).
 */
@Test
void doesItCrash() {
    double baseline = 1.0;
    DMatrixRMaj intrinsic = PerspectiveOps.pinholeToMatrix(500.0, 500, 0, 250, 250);
    DMatrixRMaj rotation = CommonOps_DDRM.identity(3);
    Point2Transform2_F64 rectifiedToColor = new DoNothing2Transform2_F64();
    int disparityMin = 2;
    int disparityRange = 100;
    var converter = new DisparityToColorPointCloud();
    converter.configure(baseline, intrinsic, rotation, rectifiedToColor, disparityMin, disparityRange);
    var disparity = new GrayF32(width, height);
    // Color source reporting every pixel as in-bounds with a constant magenta value
    var color = new DisparityToColorPointCloud.ColorImage() {
        @Override
        public boolean isInBounds(int x, int y) {
            return true;
        }
        @Override
        public int getRGB(int x, int y) {
            return 0xFF00FF;
        }
    };
    var output = new DogArray<>(Point3dRgbI_F64::new);
    converter.process(disparity, color, PointCloudWriter.wrapF64RGB(output));
    // One point per pixel, each carrying the constant color
    assertEquals(width * height, output.size);
    for (int index = 0; index < output.size; index++) {
        assertEquals(0xFF00FF, output.get(index).rgb);
    }
}
Aggregations