
Example 6 with FDistort

use of boofcv.abst.distort.FDistort in project BoofCV by lessthanoptimal.

the class VisualizeAverageDownSample method main.

public static void main(String[] args) {
    BufferedImage original = UtilImageIO.loadImage(UtilIO.pathExample("simple_objects.jpg"));
    Planar<GrayF32> input = new Planar<>(GrayF32.class, original.getWidth(), original.getHeight(), 3);
    ConvertBufferedImage.convertFromPlanar(original, input, true, GrayF32.class);
    Planar<GrayF32> output = new Planar<>(GrayF32.class, original.getWidth() / 3, original.getHeight() / 3, 3);
    Planar<GrayF32> output2 = new Planar<>(GrayF32.class, original.getWidth() / 3, original.getHeight() / 3, 3);
    // down sample by averaging the pixels inside each region
    AverageDownSampleOps.down(input, output);
    // down sample with bilinear interpolation and an extended image border
    new FDistort(input, output2).scaleExt().apply();
    BufferedImage outputFull = ConvertBufferedImage.convertTo_F32(output, null, true);
    BufferedImage outputFull2 = ConvertBufferedImage.convertTo_F32(output2, null, true);
    ShowImages.showWindow(original, "Original");
    ShowImages.showWindow(outputFull, "3x small average");
    ShowImages.showWindow(outputFull2, "3x small bilinear");
}
Also used : GrayF32(boofcv.struct.image.GrayF32) FDistort(boofcv.abst.distort.FDistort) Planar(boofcv.struct.image.Planar) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage)
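The same fluent builder handles transforms other than scaling. A minimal sketch, assuming the rotate(), border(), and apply() methods of FDistort behave as in the release used above; the variable names are illustrative and not taken from the example:

GrayF32 gray = new GrayF32(320, 240);
GrayF32 rotated = gray.createSameShape();
new FDistort(gray, rotated)
    // rotate the image by 45 degrees; destination pixels with no source sample are filled with zero
    .rotate(Math.PI / 4)
    .border(0)
    .apply();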

Example 7 with FDistort

use of boofcv.abst.distort.FDistort in project BoofCV by lessthanoptimal.

the class TestPyramidFloatScale method _update.

public void _update(GrayF32 input) {
    InterpolatePixelS<GrayF32> interp = FactoryInterpolation.bilinearPixelS(input, BorderType.EXTENDED);
    PyramidFloatScale<GrayF32> alg = new PyramidFloatScale<>(interp, new double[] { 3, 5 }, imageType);
    alg.process(input);
    // test the first layer
    GrayF32 expected = new GrayF32((int) Math.ceil(width / 3.0), (int) Math.ceil(height / 3.0));
    new FDistort(input, expected).scale().apply();
    GrayF32 found = alg.getLayer(0);
    BoofTesting.assertEquals(expected, found, 1e-4);
    // test the second layer; its size is defined relative to the original image, but it is resampled from the previous layer
    GrayF32 next = new GrayF32((int) Math.ceil(width / 5.0), (int) Math.ceil(height / 5.0));
    new FDistort(expected, next).scale().apply();
    found = alg.getLayer(1);
    BoofTesting.assertEquals(next, found, 1e-4);
}
Also used : GrayF32(boofcv.struct.image.GrayF32) FDistort(boofcv.abst.distort.FDistort)

Example 8 with FDistort

use of boofcv.abst.distort.FDistort in project BoofCV by lessthanoptimal.

the class ExampleStereoDisparity3D method main.

public static void main(String[] args) {
    // ------------- Compute Stereo Correspondence
    // Load camera images and stereo camera parameters
    String calibDir = UtilIO.pathExample("calibration/stereo/Bumblebee2_Chess/");
    String imageDir = UtilIO.pathExample("stereo/");
    StereoParameters param = CalibrationIO.load(new File(calibDir, "stereo.yaml"));
    // load and convert images into a BoofCV format
    BufferedImage origLeft = UtilImageIO.loadImage(imageDir, "chair01_left.jpg");
    BufferedImage origRight = UtilImageIO.loadImage(imageDir, "chair01_right.jpg");
    GrayU8 distLeft = ConvertBufferedImage.convertFrom(origLeft, (GrayU8) null);
    GrayU8 distRight = ConvertBufferedImage.convertFrom(origRight, (GrayU8) null);
    // re-scale input images
    GrayU8 scaledLeft = new GrayU8((int) (distLeft.width * scale), (int) (distLeft.height * scale));
    GrayU8 scaledRight = new GrayU8((int) (distRight.width * scale), (int) (distRight.height * scale));
    new FDistort(distLeft, scaledLeft).scaleExt().apply();
    new FDistort(distRight, scaledRight).scaleExt().apply();
    // Don't forget to adjust camera parameters for the change in scale!
    PerspectiveOps.scaleIntrinsic(param.left, scale);
    PerspectiveOps.scaleIntrinsic(param.right, scale);
    // rectify images and compute disparity
    GrayU8 rectLeft = new GrayU8(scaledLeft.width, scaledLeft.height);
    GrayU8 rectRight = new GrayU8(scaledRight.width, scaledRight.height);
    RectifyCalibrated rectAlg = ExampleStereoDisparity.rectify(scaledLeft, scaledRight, param, rectLeft, rectRight);
    // GrayU8 disparity = ExampleStereoDisparity.denseDisparity(rectLeft, rectRight, 3,minDisparity, maxDisparity);
    GrayF32 disparity = ExampleStereoDisparity.denseDisparitySubpixel(rectLeft, rectRight, 3, minDisparity, maxDisparity);
    // ------------- Convert disparity image into a 3D point cloud
    // The point cloud will be in the left camera's reference frame
    DMatrixRMaj rectK = rectAlg.getCalibrationMatrix();
    DMatrixRMaj rectR = rectAlg.getRectifiedRotation();
    // used to display the point cloud
    PointCloudViewer viewer = new PointCloudViewer(rectK, 10);
    viewer.setPreferredSize(new Dimension(rectLeft.width, rectLeft.height));
    // extract intrinsic parameters from rectified camera
    double baseline = param.getBaseline();
    double fx = rectK.get(0, 0);
    double fy = rectK.get(1, 1);
    double cx = rectK.get(0, 2);
    double cy = rectK.get(1, 2);
    // Iterate through each pixel in disparity image and compute its 3D coordinate
    Point3D_F64 pointRect = new Point3D_F64();
    Point3D_F64 pointLeft = new Point3D_F64();
    for (int y = 0; y < disparity.height; y++) {
        for (int x = 0; x < disparity.width; x++) {
            double d = disparity.unsafe_get(x, y) + minDisparity;
            // skip over pixels where no correspondence was found
            if (d >= rangeDisparity)
                continue;
            // Coordinate in rectified camera frame
            pointRect.z = baseline * fx / d;
            pointRect.x = pointRect.z * (x - cx) / fx;
            pointRect.y = pointRect.z * (y - cy) / fy;
            // rotate into the original left camera frame
            GeometryMath_F64.multTran(rectR, pointRect, pointLeft);
            // add the pixel to the viewer for display purposes and set its gray-scale value
            int v = rectLeft.unsafe_get(x, y);
            viewer.addPoint(pointLeft.x, pointLeft.y, pointLeft.z, v << 16 | v << 8 | v);
        }
    }
    // display the results. Click and drag to change the point cloud camera view
    BufferedImage visualized = VisualizeImageData.disparity(disparity, null, minDisparity, maxDisparity, 0);
    ShowImages.showWindow(visualized, "Disparity");
    ShowImages.showWindow(viewer, "Point Cloud");
}
Also used : Point3D_F64(georegression.struct.point.Point3D_F64) RectifyCalibrated(boofcv.alg.geo.rectify.RectifyCalibrated) DMatrixRMaj(org.ejml.data.DMatrixRMaj) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage) GrayF32(boofcv.struct.image.GrayF32) FDistort(boofcv.abst.distort.FDistort) PointCloudViewer(boofcv.gui.d3.PointCloudViewer) GrayU8(boofcv.struct.image.GrayU8) StereoParameters(boofcv.struct.calib.StereoParameters) File(java.io.File)
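The inner loop applies the standard rectified-stereo relation z = baseline * fx / d, then rotates the point back into the original left camera frame. A minimal stand-alone sketch of that depth relation; the helper name is chosen here for illustration and is not part of the BoofCV example:

// Hypothetical helper: depth from disparity for a rectified stereo pair,
// where fx is the focal length in pixels, baseline the camera separation, and d the disparity in pixels
static double disparityToDepth(double fx, double baseline, double d) {
    return baseline * fx / d;
}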

Example 9 with FDistort

use of boofcv.abst.distort.FDistort in project BoofCV by lessthanoptimal.

the class GenericFiducialDetectorChecks method checkStability.

/**
 * See if the stability estimation is reasonable.  First detect targets in the full-sized image.  Then shrink it
 * by 20% and see if the instability increases.  The instability should always increase for smaller objects with
 * the same orientation since the geometry is worse.
 */
@Test
public void checkStability() {
    for (ImageType type : types) {
        ImageBase image = loadImage(type);
        FiducialDetector detector = createDetector(type);
        detector.setLensDistortion(loadDistortion(true), image.width, image.height);
        detector.detect(image);
        assertTrue(detector.totalFound() >= 1);
        long[] foundIds = new long[detector.totalFound()];
        double[] location = new double[detector.totalFound()];
        double[] orientation = new double[detector.totalFound()];
        FiducialStability results = new FiducialStability();
        for (int i = 0; i < detector.totalFound(); i++) {
            detector.computeStability(i, 0.2, results);
            foundIds[i] = detector.getId(i);
            location[i] = results.location;
            orientation[i] = results.orientation;
        }
        ImageBase shrunk = image.createSameShape();
        new FDistort(image, shrunk).affine(0.8, 0, 0, 0.8, 0, 0).apply();
        detector.detect(shrunk);
        assertTrue(detector.totalFound() == foundIds.length);
        for (int i = 0; i < detector.totalFound(); i++) {
            detector.computeStability(i, 0.2, results);
            long id = detector.getId(i);
            boolean matched = false;
            for (int j = 0; j < foundIds.length; j++) {
                if (foundIds[j] == id) {
                    matched = true;
                    assertTrue(location[j] < results.location);
                    assertTrue(orientation[j] < results.orientation);
                    break;
                }
            }
            assertTrue(matched);
        }
    }
}
Also used : FDistort(boofcv.abst.distort.FDistort) ImageBase(boofcv.struct.image.ImageBase) ImageType(boofcv.struct.image.ImageType) Test(org.junit.Test)
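The affine(0.8, 0, 0, 0.8, 0, 0) call above shrinks about the image origin. A minimal sketch, derived here rather than taken from the test, of shrinking about the image center under the same forward-affine convention (output = A * input + t) that Example 10 below relies on:

// Shrink about the image center: output = s * input + (1 - s) * center
double s = 0.8;
double tx = (1 - s) * image.width / 2.0;
double ty = (1 - s) * image.height / 2.0;
new FDistort(image, shrunk).affine(s, 0, 0, s, tx, ty).border(0).apply();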

Example 10 with FDistort

use of boofcv.abst.distort.FDistort in project BoofCV by lessthanoptimal.

the class TestSparseFlowObjectTracker method checkMotion.

protected void checkMotion(double tranX, double tranY, double rot) {
    GrayU8 frame0 = new GrayU8(320, 240);
    GrayU8 frame1 = new GrayU8(320, 240);
    ImageMiscOps.fillUniform(frame0, rand, 0, 256);
    double c = Math.cos(rot);
    double s = Math.sin(rot);
    // render frame1 as frame0 warped by the rotation and translation being tested
    new FDistort(frame0, frame1).affine(c, -s, s, c, tranX, tranY).apply();
    SfotConfig config = new SfotConfig();
    ImageGradient<GrayU8, GrayS16> gradient = FactoryDerivative.sobel(GrayU8.class, GrayS16.class);
    SparseFlowObjectTracker<GrayU8, GrayS16> alg = new SparseFlowObjectTracker<>(config, GrayU8.class, GrayS16.class, gradient);
    RectangleRotate_F64 region0 = new RectangleRotate_F64(120, 140, 30, 40, 0.1);
    RectangleRotate_F64 region1 = new RectangleRotate_F64();
    alg.init(frame0, region0);
    assertTrue(alg.update(frame1, region1));
    // the region's center should move under the same affine transform
    double expectedX = c * region0.cx - s * region0.cy + tranX;
    double expectedY = s * region0.cx + c * region0.cy + tranY;
    double expectedYaw = UtilAngle.bound(region0.theta + rot);
    assertEquals(expectedX, region1.cx, 0.5);
    assertEquals(expectedY, region1.cy, 0.5);
    assertEquals(expectedYaw, region1.theta, 0.01);
}
Also used : FDistort(boofcv.abst.distort.FDistort) GrayS16(boofcv.struct.image.GrayS16) GrayU8(boofcv.struct.image.GrayU8) RectangleRotate_F64(boofcv.struct.RectangleRotate_F64)
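For a concrete sense of the expected values, a hypothetical invocation; the arguments are chosen here for illustration and are not taken from the test class:

// e.g. checkMotion(5, -3, 0.05):
//   c = cos(0.05) = 0.99875, s = sin(0.05) = 0.04998 (approximately)
//   expectedX = 0.99875 * 120 - 0.04998 * 140 + 5 = 117.85 (approx)
//   expectedY = 0.04998 * 120 + 0.99875 * 140 - 3 = 142.82 (approx)
//   expectedYaw = UtilAngle.bound(0.1 + 0.05) = 0.15
checkMotion(5, -3, 0.05);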

Aggregations

FDistort (boofcv.abst.distort.FDistort): 23 usages
BufferedImage (java.awt.image.BufferedImage): 11 usages
ConvertBufferedImage (boofcv.io.image.ConvertBufferedImage): 9 usages
GrayF32 (boofcv.struct.image.GrayF32): 8 usages
GrayU8 (boofcv.struct.image.GrayU8): 3 usages
Kernel1D (boofcv.struct.convolve.Kernel1D): 2 usages
Kernel2D (boofcv.struct.convolve.Kernel2D): 2 usages
ImageGray (boofcv.struct.image.ImageGray): 2 usages
Point2D_F64 (georegression.struct.point.Point2D_F64): 2 usages
File (java.io.File): 2 usages
DMatrixRMaj (org.ejml.data.DMatrixRMaj): 2 usages
Estimate1ofEpipolar (boofcv.abst.geo.Estimate1ofEpipolar): 1 usage
SteerableKernel (boofcv.alg.filter.kernel.SteerableKernel): 1 usage
SteerableKernel_F32 (boofcv.alg.filter.kernel.impl.SteerableKernel_F32): 1 usage
SteerableKernel_I32 (boofcv.alg.filter.kernel.impl.SteerableKernel_I32): 1 usage
RectifyCalibrated (boofcv.alg.geo.rectify.RectifyCalibrated): 1 usage
ListDisplayPanel (boofcv.gui.ListDisplayPanel): 1 usage
PanelGridPanel (boofcv.gui.PanelGridPanel): 1 usage
PointCloudViewer (boofcv.gui.d3.PointCloudViewer): 1 usage
AnimatePanel (boofcv.gui.image.AnimatePanel): 1 usage