Example usage of boofcv.struct.image.GrayF32 in the BoofCV project by lessthanoptimal.
Taken from the class DisplayPinholeCalibrationPanel, method undoRadialDistortion.
/**
 * Removes lens distortion from the supplied image and renders the result into
 * the {@code undistorted} buffered image for display. Does nothing when no
 * distortion model ({@code undoRadial}) has been configured.
 *
 * @param image input image whose distortion is to be removed
 */
private void undoRadialDistortion(BufferedImage image) {
	// No distortion model configured -> nothing to correct
	if (undoRadial == null)
		return;

	// Copy the input into the multi-band floating point working image
	ConvertBufferedImage.convertFrom(image, origMS, true);

	// Make the output image match the input's band count and dimensions
	if (correctedMS.getNumBands() != origMS.getNumBands())
		correctedMS.setNumberOfBands(origMS.getNumBands());
	correctedMS.reshape(origMS.width, origMS.height);

	// Apply the distortion-removal transform to each color band independently
	for (int band = 0; band < origMS.getNumBands(); band++) {
		undoRadial.apply(origMS.getBand(band), correctedMS.getBand(band));
	}

	// Render the corrected bands back into a BufferedImage for display
	undistorted = ConvertBufferedImage.checkDeclare(origMS.width, origMS.height, undistorted, BufferedImage.TYPE_INT_RGB);
	ConvertBufferedImage.convertTo(correctedMS, undistorted, true);
}
Example usage of boofcv.struct.image.GrayF32 in the BoofCV project by lessthanoptimal.
Taken from the class RenderCalibrationTargetsGraphics2D, method getGrayF32.
/**
 * Converts the rendered target into a single-band floating point image.
 *
 * @return a newly allocated {@link GrayF32} copy of the rendered image
 */
public GrayF32 getGrayF32() {
	GrayF32 output = new GrayF32(bufferred.getWidth(), bufferred.getHeight());
	ConvertBufferedImage.convertFrom(bufferred, output);
	return output;
}
Example usage of boofcv.struct.image.GrayF32 in the BoofCV project by lessthanoptimal.
Taken from the class TestDetectDescribeFusion, method checkWithOrientation.
/**
 * Runs the generic detect/describe test suite with an orientation estimator
 * included in the fusion pipeline.
 */
@Test
public void checkWithOrientation() {
	// Components being fused: detector, orientation estimator, and region descriptor
	final InterestPointDetector<GrayF32> detector = FactoryInterestPoint.fastHessian(null);
	final OrientationImage orientation = FactoryOrientationAlgs.nogradient(1.0 / 2.0, 5, GrayF32.class);
	final DescribeRegionPoint<GrayF32, BrightFeature> describe = FactoryDescribeRegionPoint.surfStable(null, GrayF32.class);

	new GenericTestsDetectDescribePoint(true, true, ImageType.single(GrayF32.class), BrightFeature.class) {
		@Override
		public DetectDescribePoint createDetDesc() {
			return new DetectDescribeFusion(detector, orientation, describe);
		}
	}.allTests();
}
Example usage of boofcv.struct.image.GrayF32 in the BoofCV project by lessthanoptimal.
Taken from the class TestDetectDescribeMultiFusion, method checkFeatureNotInBounds.
/**
 * If a feature is not in bounds make sure everything is handled correctly
 */
@Test
public void checkFeatureNotInBounds() {
	DetectorInterestPointMulti detector = new DummyDetector(2);
	DescribeRegionPoint describe = new TestDetectDescribeFusion.DummyRegionPoint();
	DetectDescribeMultiFusion alg = new DetectDescribeMultiFusion(detector, null, describe);

	alg.process(new GrayF32(2, 2));

	assertEquals(2, alg.getNumberOfSets());

	for (int setIndex = 0; setIndex < alg.getNumberOfSets(); setIndex++) {
		PointDescSet found = alg.getFeatureSet(setIndex);

		// the first set contains the out-of-bounds feature, so it ends up with one fewer
		int expectedCount = (setIndex == 0) ? setIndex + 8 : setIndex + 9;
		assertEquals(expectedCount, found.getNumberOfFeatures());

		// every surviving feature must have both a description and a location
		for (int featureIndex = 0; featureIndex < found.getNumberOfFeatures(); featureIndex++) {
			assertTrue(found.getDescription(featureIndex) != null);
			assertTrue(found.getLocation(featureIndex) != null);
		}
	}
}
Example usage of boofcv.struct.image.GrayF32 in the BoofCV project by lessthanoptimal.
Taken from the class FactoryStereoDisparity, method regionSparseWta.
/**
 * WTA (winner-take-all) algorithm that computes disparity on a sparse per-pixel basis as requested.
 *
 * @param minDisparity Minimum disparity that it will check. Must be &ge; 0 and &lt; maxDisparity
 * @param maxDisparity Maximum disparity that it will calculate. Must be &gt; 0
 * @param regionRadiusX Radius of the rectangular region along x-axis.
 * @param regionRadiusY Radius of the rectangular region along y-axis.
 * @param maxPerPixelError Maximum allowed error in a region per pixel. Set to &lt; 0 to disable.
 * @param texture Tolerance for how similar optimal region is to other region. Closer to zero is more tolerant.
 * Try 0.1
 * @param subpixelInterpolation true to turn on sub-pixel interpolation
 * @param imageType Type of input image. Only GrayU8 and GrayF32 are supported.
 * @param <T> Image type
 * @return Sparse disparity algorithm
 */
public static <T extends ImageGray<T>> StereoDisparitySparse<T> regionSparseWta(int minDisparity, int maxDisparity, int regionRadiusX, int regionRadiusY, double maxPerPixelError, double texture, boolean subpixelInterpolation, Class<T> imageType) {
	// Total allowed region error = per-pixel error scaled by the region's pixel count
	double maxError = (regionRadiusX * 2 + 1) * (regionRadiusY * 2 + 1) * maxPerPixelError;

	if (imageType == GrayU8.class) {
		DisparitySparseSelect<int[]> select = subpixelInterpolation
				? selectDisparitySparseSubpixel_S32((int) maxError, texture)
				: selectDisparitySparse_S32((int) maxError, texture);
		DisparitySparseScoreSadRect<int[], GrayU8> score =
				scoreDisparitySparseSadRect_U8(minDisparity, maxDisparity, regionRadiusX, regionRadiusY);
		return new WrapDisparitySparseSadRect(score, select);
	}

	if (imageType == GrayF32.class) {
		DisparitySparseSelect<float[]> select = subpixelInterpolation
				? selectDisparitySparseSubpixel_F32((int) maxError, texture)
				: selectDisparitySparse_F32((int) maxError, texture);
		DisparitySparseScoreSadRect<float[], GrayF32> score =
				scoreDisparitySparseSadRect_F32(minDisparity, maxDisparity, regionRadiusX, regionRadiusY);
		return new WrapDisparitySparseSadRect(score, select);
	}

	throw new RuntimeException("Image type not supported: " + imageType.getSimpleName());
}
Aggregations