
Example 1 with Quadrilateral_F64

Use of georegression.struct.shapes.Quadrilateral_F64 in the BoofCV project by lessthanoptimal.

From the class TestBaseDetectFiducialSquare, method checkFindKnown:

private void checkFindKnown(CameraPinholeRadial intrinsic, double tol) {
    GrayU8 pattern = createPattern(6 * 20, false);
    Quadrilateral_F64 where = new Quadrilateral_F64(50, 50, 130, 60, 140, 150, 40, 140);
    // Quadrilateral_F64 where = new Quadrilateral_F64(50,50,  100,50,  100,150,  50,150);
    GrayU8 image = new GrayU8(width, height);
    ImageMiscOps.fill(image, 255);
    render(pattern, where, image);
    Dummy dummy = new Dummy();
    dummy.configure(new LensDistortionRadialTangential(intrinsic), width, height, false);
    dummy.process(image);
    assertEquals(1, dummy.detected.size());
    Quadrilateral_F64 found = dummy.getFound().get(0).distortedPixels;
    // System.out.println("found "+found);
    // System.out.println("where "+where);
    checkMatch(where, found.a, tol);
    checkMatch(where, found.b, tol);
    checkMatch(where, found.c, tol);
    checkMatch(where, found.d, tol);
    // see if the undistorted image is as expected
    checkPattern(dummy.detected.get(0));
}
Also used : LensDistortionRadialTangential(boofcv.alg.distort.radtan.LensDistortionRadialTangential), Quadrilateral_F64(georegression.struct.shapes.Quadrilateral_F64), GrayU8(boofcv.struct.image.GrayU8)
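
For reference, here is a minimal standalone sketch (the class name QuadCornersSketch is invented for illustration) of how the eight-argument constructor used for "where" maps onto the four corner fields a, b, c and d that checkMatch() compares against the detected corners:

import georegression.struct.point.Point2D_F64;
import georegression.struct.shapes.Quadrilateral_F64;

public class QuadCornersSketch {
    public static void main(String[] args) {
        // the eight doubles are consumed in pairs and assigned to corners a, b, c, d in order
        Quadrilateral_F64 where = new Quadrilateral_F64(50, 50, 130, 60, 140, 150, 40, 140);
        // each corner is a Point2D_F64 with public x and y fields
        Point2D_F64 a = where.a;
        System.out.printf("a=(%.0f,%.0f) b=(%.0f,%.0f) c=(%.0f,%.0f) d=(%.0f,%.0f)%n",
                a.x, a.y, where.b.x, where.b.y, where.c.x, where.c.y, where.d.x, where.d.y);
        // average of the four corners, a rough stand-in for the geometric center
        double cx = (where.a.x + where.b.x + where.c.x + where.d.x) / 4.0;
        double cy = (where.a.y + where.b.y + where.c.y + where.d.y) / 4.0;
        System.out.println("approximate center: " + cx + ", " + cy);
    }
}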

Example 2 with Quadrilateral_F64

Use of georegression.struct.shapes.Quadrilateral_F64 in the BoofCV project by lessthanoptimal.

From the class ExampleVideoMosaic, method main:

public static void main(String[] args) {
    // Configure the feature detector
    ConfigPointDetector configDetector = new ConfigPointDetector();
    configDetector.type = PointDetectorTypes.SHI_TOMASI;
    configDetector.general.maxFeatures = 300;
    configDetector.general.radius = 3;
    configDetector.general.threshold = 1;
    // Use a KLT tracker
    PointTracker<GrayF32> tracker = FactoryPointTracker.klt(4, configDetector, 3, GrayF32.class, GrayF32.class);
    // This estimates the 2D image motion
    // An Affine2D_F64 model also works quite well.
    ImageMotion2D<GrayF32, Homography2D_F64> motion2D = FactoryMotion2D.createMotion2D(220, 3, 2, 30, 0.6, 0.5, false, tracker, new Homography2D_F64());
    // wrap it so it outputs color images while estimating motion from gray
    ImageMotion2D<Planar<GrayF32>, Homography2D_F64> motion2DColor = new PlToGrayMotion2D<>(motion2D, GrayF32.class);
    // This fuses the images together
    StitchingFromMotion2D<Planar<GrayF32>, Homography2D_F64> stitch = FactoryMotion2D.createVideoStitch(0.5, motion2DColor, ImageType.pl(3, GrayF32.class));
    // Load an image sequence
    MediaManager media = DefaultMediaManager.INSTANCE;
    String fileName = UtilIO.pathExample("mosaic/airplane01.mjpeg");
    SimpleImageSequence<Planar<GrayF32>> video = media.openVideo(fileName, ImageType.pl(3, GrayF32.class));
    Planar<GrayF32> frame = video.next();
    // shrink the input image and center it
    Homography2D_F64 shrink = new Homography2D_F64(0.5, 0, frame.width / 4, 0, 0.5, frame.height / 4, 0, 0, 1);
    shrink = shrink.invert(null);
    // The mosaic will be larger in terms of pixels but the image will be scaled down.
    // To change this into stabilization just make it the same size as the input with no shrink.
    stitch.configure(frame.width, frame.height, shrink);
    // process the first frame
    stitch.process(frame);
    // Create the GUI for displaying the results + input image
    ImageGridPanel gui = new ImageGridPanel(1, 2);
    gui.setImage(0, 0, new BufferedImage(frame.width, frame.height, BufferedImage.TYPE_INT_RGB));
    gui.setImage(0, 1, new BufferedImage(frame.width, frame.height, BufferedImage.TYPE_INT_RGB));
    gui.setPreferredSize(new Dimension(3 * frame.width, frame.height * 2));
    ShowImages.showWindow(gui, "Example Mosaic", true);
    boolean enlarged = false;
    // process the video sequence one frame at a time
    while (video.hasNext()) {
        frame = video.next();
        if (!stitch.process(frame))
            throw new RuntimeException("You should handle failures");
        // if the current image is close to the image border, recenter the mosaic
        Quadrilateral_F64 corners = stitch.getImageCorners(frame.width, frame.height, null);
        if (nearBorder(corners.a, stitch) || nearBorder(corners.b, stitch) || nearBorder(corners.c, stitch) || nearBorder(corners.d, stitch)) {
            stitch.setOriginToCurrent();
            // only enlarge the image once
            if (!enlarged) {
                enlarged = true;
                // double the image size and shift it over to keep it centered
                int widthOld = stitch.getStitchedImage().width;
                int heightOld = stitch.getStitchedImage().height;
                int widthNew = widthOld * 2;
                int heightNew = heightOld * 2;
                int tranX = (widthNew - widthOld) / 2;
                int tranY = (heightNew - heightOld) / 2;
                Homography2D_F64 newToOldStitch = new Homography2D_F64(1, 0, -tranX, 0, 1, -tranY, 0, 0, 1);
                stitch.resizeStitchImage(widthNew, heightNew, newToOldStitch);
                gui.setImage(0, 1, new BufferedImage(widthNew, heightNew, BufferedImage.TYPE_INT_RGB));
            }
            corners = stitch.getImageCorners(frame.width, frame.height, null);
        }
        // display the mosaic
        ConvertBufferedImage.convertTo(frame, gui.getImage(0, 0), true);
        ConvertBufferedImage.convertTo(stitch.getStitchedImage(), gui.getImage(0, 1), true);
        // draw a red quadrilateral around the current frame in the mosaic
        Graphics2D g2 = gui.getImage(0, 1).createGraphics();
        g2.setColor(Color.RED);
        g2.drawLine((int) corners.a.x, (int) corners.a.y, (int) corners.b.x, (int) corners.b.y);
        g2.drawLine((int) corners.b.x, (int) corners.b.y, (int) corners.c.x, (int) corners.c.y);
        g2.drawLine((int) corners.c.x, (int) corners.c.y, (int) corners.d.x, (int) corners.d.y);
        g2.drawLine((int) corners.d.x, (int) corners.d.y, (int) corners.a.x, (int) corners.a.y);
        gui.repaint();
        // throttle the speed just in case it's on a fast computer
        BoofMiscOps.pause(50);
    }
}
Also used : PlToGrayMotion2D(boofcv.abst.sfm.d2.PlToGrayMotion2D), Quadrilateral_F64(georegression.struct.shapes.Quadrilateral_F64), Homography2D_F64(georegression.struct.homography.Homography2D_F64), BufferedImage(java.awt.image.BufferedImage), ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage), ConfigPointDetector(boofcv.abst.feature.detect.interest.ConfigPointDetector), GrayF32(boofcv.struct.image.GrayF32), MediaManager(boofcv.io.MediaManager), DefaultMediaManager(boofcv.io.wrapper.DefaultMediaManager), Planar(boofcv.struct.image.Planar), ImageGridPanel(boofcv.gui.image.ImageGridPanel)
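
The loop above calls nearBorder(), a private helper of ExampleVideoMosaic that is not included in this snippet. A minimal sketch of what such a check could look like as a method inside the example class, assuming a fixed border tolerance of 10 pixels (the tolerance value is an assumption):

private static boolean nearBorder(Point2D_F64 p, StitchingFromMotion2D<?, ?> stitch) {
    // how close (in pixels) a corner may get to the stitched image border before recentering
    int r = 10;
    if (p.x < r || p.y < r)
        return true;
    if (p.x >= stitch.getStitchedImage().width - r)
        return true;
    if (p.y >= stitch.getStitchedImage().height - r)
        return true;
    return false;
}

Because the check is applied to all four corners of the current frame's footprint in the mosaic, the origin is reset before any part of the frame can leave the stitched image.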

Example 3 with Quadrilateral_F64

Use of georegression.struct.shapes.Quadrilateral_F64 in the BoofCV project by lessthanoptimal.

From the class SquareBase_to_FiducialDetector, method getCenter:

/**
 * Returns the intersection of the two lines defined by opposing corners. This should also be the
 * geometric center of the square.
 *
 * @param which Fiducial's index
 * @param location (output) Storage for the center point. Modified.
 */
@Override
public void getCenter(int which, Point2D_F64 location) {
    Quadrilateral_F64 q = alg.getFound().get(which).distortedPixels;
    // Ideally the intersection would be computed in undistorted pixels, where it is exactly the
    // geometric center of the square. Since distorted pixels are used here the result is only approximate.
    UtilLine2D_F64.convert(q.a, q.c, line02);
    UtilLine2D_F64.convert(q.b, q.d, line13);
    Intersection2D_F64.intersection(line02, line13, location);
}
Also used : Quadrilateral_F64(georegression.struct.shapes.Quadrilateral_F64)
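
The fields line02 and line13 above are preallocated LineGeneral2D_F64 storage owned by the detector class. A self-contained sketch of the same diagonal-intersection idea on a standalone quadrilateral (class name and coordinates invented for illustration):

import georegression.geometry.UtilLine2D_F64;
import georegression.metric.Intersection2D_F64;
import georegression.struct.line.LineGeneral2D_F64;
import georegression.struct.point.Point2D_F64;
import georegression.struct.shapes.Quadrilateral_F64;

public class QuadDiagonalCenterSketch {
    public static void main(String[] args) {
        // axis-aligned rectangle with corners a=(0,0), b=(100,0), c=(100,50), d=(0,50)
        Quadrilateral_F64 q = new Quadrilateral_F64(0, 0, 100, 0, 100, 50, 0, 50);

        // lines through the opposing corner pairs a-c and b-d
        LineGeneral2D_F64 line02 = new LineGeneral2D_F64();
        LineGeneral2D_F64 line13 = new LineGeneral2D_F64();
        UtilLine2D_F64.convert(q.a, q.c, line02);
        UtilLine2D_F64.convert(q.b, q.d, line13);

        // their intersection is the center returned by getCenter() above
        Point2D_F64 center = new Point2D_F64();
        Intersection2D_F64.intersection(line02, line13, center);
        System.out.println("center = " + center.x + ", " + center.y);  // expect 50.0, 25.0
    }
}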

Example 4 with Quadrilateral_F64

Use of georegression.struct.shapes.Quadrilateral_F64 in the BoofCV project by lessthanoptimal.

From the class ExampleTrackerObjectQuad, method main:

public static void main(String[] args) {
    MediaManager media = DefaultMediaManager.INSTANCE;
    String fileName = UtilIO.pathExample("tracking/wildcat_robot.mjpeg");
    // Create the tracker. Comment/Uncomment to change the tracker.
    TrackerObjectQuad tracker = FactoryTrackerObjectQuad.circulant(null, GrayU8.class);
    // FactoryTrackerObjectQuad.sparseFlow(null,GrayU8.class,null);
    // FactoryTrackerObjectQuad.tld(null,GrayU8.class);
    // FactoryTrackerObjectQuad.meanShiftComaniciu2003(new ConfigComaniciu2003(), ImageType.pl(3, GrayU8.class));
    // FactoryTrackerObjectQuad.meanShiftComaniciu2003(new ConfigComaniciu2003(true),ImageType.pl(3,GrayU8.class));
    // Mean-shift likelihood will fail in this video, but is excellent at tracking objects with
    // a single unique color. See ExampleTrackerMeanShiftLikelihood
    // FactoryTrackerObjectQuad.meanShiftLikelihood(30,5,255, MeanShiftLikelihoodType.HISTOGRAM,ImageType.pl(3,GrayU8.class));
    SimpleImageSequence video = media.openVideo(fileName, tracker.getImageType());
    // specify the target's initial location and initialize with the first frame
    Quadrilateral_F64 location = new Quadrilateral_F64(211.0, 162.0, 326.0, 153.0, 335.0, 258.0, 215.0, 249.0);
    ImageBase frame = video.next();
    tracker.initialize(frame, location);
    // For displaying the results
    var gui = new TrackerObjectQuadPanel(null);
    gui.setPreferredSize(new Dimension(frame.getWidth(), frame.getHeight()));
    gui.setImageUI((BufferedImage) video.getGuiImage());
    gui.setTarget(location, true);
    ShowImages.showWindow(gui, "Tracking Results", true);
    // Track the object across each video frame and display the results
    long previous = 0;
    while (video.hasNext()) {
        frame = video.next();
        boolean visible = tracker.process(frame, location);
        gui.setImageUI((BufferedImage) video.getGuiImage());
        gui.setTarget(location, visible);
        gui.repaint();
        // shoot for a specific frame rate
        long time = System.currentTimeMillis();
        BoofMiscOps.pause(Math.max(0, 80 - (time - previous)));
        previous = time;
    }
}
Also used : SimpleImageSequence(boofcv.io.image.SimpleImageSequence), Quadrilateral_F64(georegression.struct.shapes.Quadrilateral_F64), MediaManager(boofcv.io.MediaManager), DefaultMediaManager(boofcv.io.wrapper.DefaultMediaManager), FactoryTrackerObjectQuad(boofcv.factory.tracker.FactoryTrackerObjectQuad), TrackerObjectQuad(boofcv.abst.tracker.TrackerObjectQuad), TrackerObjectQuadPanel(boofcv.gui.tracker.TrackerObjectQuadPanel), ImageBase(boofcv.struct.image.ImageBase)
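
Note that tracker.process() updates the Quadrilateral_F64 passed in as location in place and returns only whether the target is still visible, which is why the same object can be handed to gui.setTarget() on every frame. A small standalone sketch (class and helper names invented) of one way to read the updated corners back, here reduced to an axis-aligned bounding box:

import georegression.struct.shapes.Quadrilateral_F64;

public class TrackRegionSketch {
    // axis-aligned bounding box of the tracked quadrilateral, handy for logging or cropping
    static void printBounds(Quadrilateral_F64 q) {
        double x0 = Math.min(Math.min(q.a.x, q.b.x), Math.min(q.c.x, q.d.x));
        double y0 = Math.min(Math.min(q.a.y, q.b.y), Math.min(q.c.y, q.d.y));
        double x1 = Math.max(Math.max(q.a.x, q.b.x), Math.max(q.c.x, q.d.x));
        double y1 = Math.max(Math.max(q.a.y, q.b.y), Math.max(q.c.y, q.d.y));
        System.out.printf("bounds: (%.1f, %.1f) to (%.1f, %.1f)%n", x0, y0, x1, y1);
    }

    public static void main(String[] args) {
        // same initial target region as in the example above
        Quadrilateral_F64 location = new Quadrilateral_F64(211.0, 162.0, 326.0, 153.0, 335.0, 258.0, 215.0, 249.0);
        printBounds(location);  // would be called again after each tracker.process(frame, location)
    }
}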

Example 5 with Quadrilateral_F64

Use of georegression.struct.shapes.Quadrilateral_F64 in the BoofCV project by lessthanoptimal.

From the class TestRefinePolygonToGrayLine, method optimize_line_perfect:

public void optimize_line_perfect(boolean black, Class imageType) {
    renderDistortedRectangles(black, imageType);
    RefinePolygonToGrayLine alg = createAlg(4, imageType);
    Quadrilateral_F64 input = new Quadrilateral_F64(x0, y0, x0, y1, x1, y1, x1, y0);
    LineGeneral2D_F64 found = new LineGeneral2D_F64();
    alg.setImage(image);
    assertTrue(alg.optimize(input.a, input.b, found));
    assertTrue(Distance2D_F64.distance(found, input.a) <= 1e-4);
    assertTrue(Distance2D_F64.distance(found, input.b) <= 1e-4);
}
Also used : LineGeneral2D_F64(georegression.struct.line.LineGeneral2D_F64), Quadrilateral_F64(georegression.struct.shapes.Quadrilateral_F64)
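
The assertions above depend on Distance2D_F64 measuring the distance from a point to a LineGeneral2D_F64. A standalone sketch of that check (class name and concrete coordinates invented), using the same corner ordering as the test and a line through corners a and b:

import georegression.geometry.UtilLine2D_F64;
import georegression.metric.Distance2D_F64;
import georegression.struct.line.LineGeneral2D_F64;
import georegression.struct.shapes.Quadrilateral_F64;

public class LineFitCheckSketch {
    public static void main(String[] args) {
        // axis-aligned rectangle built the same way as in the test: (x0,y0), (x0,y1), (x1,y1), (x1,y0)
        double x0 = 20, y0 = 30, x1 = 80, y1 = 90;
        Quadrilateral_F64 input = new Quadrilateral_F64(x0, y0, x0, y1, x1, y1, x1, y0);

        // line through corners a and b, i.e. the left edge of the rectangle
        LineGeneral2D_F64 line = new LineGeneral2D_F64();
        UtilLine2D_F64.convert(input.a, input.b, line);

        // both corners lie on the line, so the distances should be essentially zero
        System.out.println("d(a) = " + Distance2D_F64.distance(line, input.a));
        System.out.println("d(b) = " + Distance2D_F64.distance(line, input.b));
    }
}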

Aggregations

Quadrilateral_F64 (georegression.struct.shapes.Quadrilateral_F64): 16
GrayF32 (boofcv.struct.image.GrayF32): 4
Point2D_F64 (georegression.struct.point.Point2D_F64): 4
Test (org.junit.jupiter.api.Test): 4
FDistort (boofcv.abst.distort.FDistort): 2
MediaManager (boofcv.io.MediaManager): 2
ConvertBufferedImage (boofcv.io.image.ConvertBufferedImage): 2
DefaultMediaManager (boofcv.io.wrapper.DefaultMediaManager): 2
Polygon2D_I32 (georegression.struct.shapes.Polygon2D_I32): 2
BufferedImage (java.awt.image.BufferedImage): 2
ConfigPointDetector (boofcv.abst.feature.detect.interest.ConfigPointDetector): 1
PlToGrayMotion2D (boofcv.abst.sfm.d2.PlToGrayMotion2D): 1
TrackerObjectQuad (boofcv.abst.tracker.TrackerObjectQuad): 1
LensDistortionNarrowFOV (boofcv.alg.distort.LensDistortionNarrowFOV): 1
LensDistortionBrown (boofcv.alg.distort.brown.LensDistortionBrown): 1
LensDistortionRadialTangential (boofcv.alg.distort.radtan.LensDistortionRadialTangential): 1
FoundFiducial (boofcv.alg.fiducial.square.FoundFiducial): 1
ConfigFiducialBinary (boofcv.factory.fiducial.ConfigFiducialBinary): 1
ConfigPolygonDetector (boofcv.factory.shape.ConfigPolygonDetector): 1
FactoryShapeDetector (boofcv.factory.shape.FactoryShapeDetector): 1