Example 1 with GrayF32

Use of boofcv.struct.image.GrayF32 in project BoofCV by lessthanoptimal.

From the class DisplayPinholeCalibrationPanel, method undoRadialDistortion:

private void undoRadialDistortion(BufferedImage image) {
    // Nothing to do if no distortion-removal transform has been configured
    if (undoRadial == null)
        return;
    // Convert the input into a planar float image, one GrayF32 band per color channel
    ConvertBufferedImage.convertFrom(image, origMS, true);
    // Make sure the output has the same number of bands and the same shape as the input
    if (correctedMS.getNumBands() != origMS.getNumBands())
        correctedMS.setNumberOfBands(origMS.getNumBands());
    correctedMS.reshape(origMS.width, origMS.height);
    // Apply the distortion-removal transform to each band independently
    for (int i = 0; i < origMS.getNumBands(); i++) {
        GrayF32 in = origMS.getBand(i);
        GrayF32 out = correctedMS.getBand(i);
        undoRadial.apply(in, out);
    }
    // Convert the corrected planar image back into a BufferedImage for display
    undistorted = ConvertBufferedImage.checkDeclare(origMS.width, origMS.height, undistorted, BufferedImage.TYPE_INT_RGB);
    ConvertBufferedImage.convertTo(correctedMS, undistorted, true);
}
Also used : GrayF32(boofcv.struct.image.GrayF32)
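The band-by-band loop above is the general pattern for applying any single-band GrayF32 operation to a Planar image. A minimal sketch of the same idea, with the hypothetical names color and blurred, and a Gaussian blur from BlurImageOps standing in for the distortion-removal transform:

// Hypothetical planar color image; in practice it would be converted from a BufferedImage
Planar<GrayF32> color = new Planar<>(GrayF32.class, 640, 480, 3);
Planar<GrayF32> blurred = color.createSameShape();
GrayF32 storage = new GrayF32(color.width, color.height);
for (int band = 0; band < color.getNumBands(); band++) {
    // each band is an ordinary GrayF32, so any single-band operation can be applied to it
    BlurImageOps.gaussian(color.getBand(band), blurred.getBand(band), -1, 2, storage);
}

The sigma argument of -1 lets BoofCV derive it from the radius of 2.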

Example 2 with GrayF32

Use of boofcv.struct.image.GrayF32 in project BoofCV by lessthanoptimal.

From the class RenderCalibrationTargetsGraphics2D, method getGrayF32:

public GrayF32 getGrayF32() {
    // Convert the rendered target from a BufferedImage into a single-band float image
    GrayF32 gray = new GrayF32(bufferred.getWidth(), bufferred.getHeight());
    ConvertBufferedImage.convertFrom(bufferred, gray);
    return gray;
}
Also used : GrayF32(boofcv.struct.image.GrayF32)
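A minimal sketch of the reverse direction, assuming a hypothetical 320x240 input; ConvertBufferedImage also converts a GrayF32 back into a BufferedImage for display or saving:

// Hypothetical round trip: BufferedImage -> GrayF32 -> BufferedImage
BufferedImage input = new BufferedImage(320, 240, BufferedImage.TYPE_INT_RGB);
GrayF32 gray = new GrayF32(input.getWidth(), input.getHeight());
ConvertBufferedImage.convertFrom(input, gray);
BufferedImage output = new BufferedImage(gray.width, gray.height, BufferedImage.TYPE_INT_RGB);
ConvertBufferedImage.convertTo(gray, output);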

Example 3 with GrayF32

Use of boofcv.struct.image.GrayF32 in project BoofCV by lessthanoptimal.

From the class TestDetectDescribeFusion, method checkWithOrientation:

@Test
public void checkWithOrientation() {
    // Fast Hessian interest point detector with default configuration
    final InterestPointDetector<GrayF32> detector = FactoryInterestPoint.fastHessian(null);
    // Orientation estimator that does not require the image gradient
    final OrientationImage ori = FactoryOrientationAlgs.nogradient(1.0 / 2.0, 5, GrayF32.class);
    // Stable SURF region descriptor
    final DescribeRegionPoint<GrayF32, BrightFeature> desc = FactoryDescribeRegionPoint.surfStable(null, GrayF32.class);
    // Run the generic detect-describe contract tests against the fused implementation
    new GenericTestsDetectDescribePoint(true, true, ImageType.single(GrayF32.class), BrightFeature.class) {

        @Override
        public DetectDescribePoint createDetDesc() {
            return new DetectDescribeFusion(detector, ori, desc);
        }
    }.allTests();
}
Also used : OrientationImage(boofcv.abst.feature.orientation.OrientationImage), BrightFeature(boofcv.struct.feature.BrightFeature), GrayF32(boofcv.struct.image.GrayF32), Test(org.junit.Test)
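Outside of the test harness, the fused object is used like any other DetectDescribePoint. A minimal sketch of running it on an image, assuming the same factory calls as the test above (the image size and the loop body are hypothetical):

// Build the fused detector/descriptor as in the test above
InterestPointDetector<GrayF32> detector = FactoryInterestPoint.fastHessian(null);
OrientationImage<GrayF32> ori = FactoryOrientationAlgs.nogradient(1.0 / 2.0, 5, GrayF32.class);
DescribeRegionPoint<GrayF32, BrightFeature> desc = FactoryDescribeRegionPoint.surfStable(null, GrayF32.class);
DetectDescribePoint<GrayF32, BrightFeature> fused = new DetectDescribeFusion<>(detector, ori, desc);
// Detect and describe features in a (hypothetical) 640x480 input image
GrayF32 image = new GrayF32(640, 480);
fused.detect(image);
for (int i = 0; i < fused.getNumberOfFeatures(); i++) {
    Point2D_F64 location = fused.getLocation(i);
    BrightFeature descriptor = fused.getDescription(i);
    // use the location and descriptor, e.g. to associate features between images
}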

Example 4 with GrayF32

Use of boofcv.struct.image.GrayF32 in project BoofCV by lessthanoptimal.

From the class TestDetectDescribeMultiFusion, method checkFeatureNotInBounds:

/**
 * If a feature is not inside the image bounds, make sure everything is handled correctly
 */
@Test
public void checkFeatureNotInBounds() {
    // Dummy detector that returns two sets of features
    DetectorInterestPointMulti detector = new DummyDetector(2);
    DescribeRegionPoint describe = new TestDetectDescribeFusion.DummyRegionPoint();
    DetectDescribeMultiFusion alg = new DetectDescribeMultiFusion(detector, null, describe);
    alg.process(new GrayF32(2, 2));
    assertEquals(2, alg.getNumberOfSets());
    for (int n = 0; n < alg.getNumberOfSets(); n++) {
        PointDescSet set = alg.getFeatureSet(n);
        // one feature should not be inside the image
        if (n == 0)
            assertEquals(n + 8, set.getNumberOfFeatures());
        else
            assertEquals(n + 9, set.getNumberOfFeatures());
        // every feature that was kept must have both a description and a location
        for (int i = 0; i < set.getNumberOfFeatures(); i++) {
            assertTrue(set.getDescription(i) != null);
            assertTrue(set.getLocation(i) != null);
        }
    }
}
Also used : GrayF32(boofcv.struct.image.GrayF32), DescribeRegionPoint(boofcv.abst.feature.describe.DescribeRegionPoint), FactoryDescribeRegionPoint(boofcv.factory.feature.describe.FactoryDescribeRegionPoint), DetectorInterestPointMulti(boofcv.abst.feature.detect.interest.DetectorInterestPointMulti), FactoryInterestPoint(boofcv.factory.feature.detect.interest.FactoryInterestPoint), Test(org.junit.Test)

Example 5 with GrayF32

Use of boofcv.struct.image.GrayF32 in project BoofCV by lessthanoptimal.

From the class FactoryStereoDisparity, method regionSparseWta:

/**
 * WTA algorithm that computes disparity on a sparse per-pixel basis as requested.
 *
 * @param minDisparity Minimum disparity that it will check. Must be &ge; 0 and &lt; maxDisparity
 * @param maxDisparity Maximum disparity that it will calculate. Must be &gt; 0
 * @param regionRadiusX Radius of the rectangular region along the x-axis.
 * @param regionRadiusY Radius of the rectangular region along the y-axis.
 * @param maxPerPixelError Maximum allowed error in a region per pixel. Set to &lt; 0 to disable.
 * @param texture Tolerance for how similar the optimal region is to other regions. Closer to zero is more tolerant.
 *                Try 0.1
 * @param subpixelInterpolation true to turn on sub-pixel interpolation
 * @param imageType Type of input image.
 * @param <T> Image type
 * @return Sparse disparity algorithm
 */
public static <T extends ImageGray<T>> StereoDisparitySparse<T> regionSparseWta(int minDisparity, int maxDisparity, int regionRadiusX, int regionRadiusY, double maxPerPixelError, double texture, boolean subpixelInterpolation, Class<T> imageType) {
    // Convert the per-pixel error limit into a total error limit for the whole region
    double maxError = (regionRadiusX * 2 + 1) * (regionRadiusY * 2 + 1) * maxPerPixelError;
    if (imageType == GrayU8.class) {
        // Integer images use the S32 select and U8 score implementations
        DisparitySparseSelect<int[]> select;
        if (subpixelInterpolation)
            select = selectDisparitySparseSubpixel_S32((int) maxError, texture);
        else
            select = selectDisparitySparse_S32((int) maxError, texture);
        DisparitySparseScoreSadRect<int[], GrayU8> score = scoreDisparitySparseSadRect_U8(minDisparity, maxDisparity, regionRadiusX, regionRadiusY);
        return new WrapDisparitySparseSadRect(score, select);
    } else if (imageType == GrayF32.class) {
        // Floating-point images use the F32 select and score implementations
        DisparitySparseSelect<float[]> select;
        if (subpixelInterpolation)
            select = selectDisparitySparseSubpixel_F32((int) maxError, texture);
        else
            select = selectDisparitySparse_F32((int) maxError, texture);
        DisparitySparseScoreSadRect<float[], GrayF32> score = scoreDisparitySparseSadRect_F32(minDisparity, maxDisparity, regionRadiusX, regionRadiusY);
        return new WrapDisparitySparseSadRect(score, select);
    } else
        throw new RuntimeException("Image type not supported: " + imageType.getSimpleName());
}
Also used : GrayF32(boofcv.struct.image.GrayF32), WrapDisparitySparseSadRect(boofcv.abst.feature.disparity.WrapDisparitySparseSadRect), DisparitySparseSelect(boofcv.alg.feature.disparity.DisparitySparseSelect), GrayU8(boofcv.struct.image.GrayU8), DisparitySparseScoreSadRect(boofcv.alg.feature.disparity.DisparitySparseScoreSadRect)
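A minimal sketch of how the returned sparse disparity algorithm might be used; the rectified image pair, the disparity range, and the queried pixel are hypothetical:

// Sparse disparity for GrayF32 rectified images, disparity range [0, 60)
StereoDisparitySparse<GrayF32> sparse = FactoryStereoDisparity.regionSparseWta(
        0, 60, 3, 3, 20, 0.1, true, GrayF32.class);
// Hypothetical rectified stereo pair
GrayF32 left = new GrayF32(640, 480);
GrayF32 right = new GrayF32(640, 480);
sparse.setImages(left, right);
// Disparity is computed only at the pixels that are requested
if (sparse.process(320, 240)) {
    double disparity = sparse.getDisparity();
}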

Aggregations

GrayF32 (boofcv.struct.image.GrayF32): 530
Test (org.junit.Test): 291
BufferedImage (java.awt.image.BufferedImage): 81
ConvertBufferedImage (boofcv.io.image.ConvertBufferedImage): 76
GrayU8 (boofcv.struct.image.GrayU8): 49
Planar (boofcv.struct.image.Planar): 34
ArrayList (java.util.ArrayList): 28
ImageBorder_F32 (boofcv.core.image.border.ImageBorder_F32): 20
ImageGray (boofcv.struct.image.ImageGray): 20
File (java.io.File): 20
CameraPinholeRadial (boofcv.struct.calib.CameraPinholeRadial): 19
Se3_F64 (georegression.struct.se.Se3_F64): 18
TupleDesc_F64 (boofcv.struct.feature.TupleDesc_F64): 17
GrayS8 (boofcv.struct.image.GrayS8): 16
ListDisplayPanel (boofcv.gui.ListDisplayPanel): 14
PathLabel (boofcv.io.PathLabel): 14
Kernel2D_F32 (boofcv.struct.convolve.Kernel2D_F32): 13
GrayS16 (boofcv.struct.image.GrayS16): 13
GrayS32 (boofcv.struct.image.GrayS32): 13
Point2D_F64 (georegression.struct.point.Point2D_F64): 13