Search in sources:

Example 1 with SimpleImageSequence

use of boofcv.io.image.SimpleImageSequence in project BoofCV by lessthanoptimal.

The class ExampleBackgroundRemovalMoving, method main:

public static void main(String[] args) {
    // Example with a moving camera. Highlights why motion estimation is sometimes required.
    String videoPath = UtilIO.pathExample("tracking/chipmunk.mjpeg");
    // Camera has a bit of jitter in it. Static kinda works but motion reduces false positives
    // String videoPath = UtilIO.pathExample("background/horse_jitter.mp4");

    // Comment/Uncomment to switch input image type
    ImageType imageType = ImageType.single(GrayF32.class);
    // ImageType imageType = ImageType.il(3, InterleavedF32.class);
    // ImageType imageType = ImageType.il(3, InterleavedU8.class);

    // Set up the point feature detector which feeds the KLT tracker
    ConfigPointDetector detectorConfig = new ConfigPointDetector();
    detectorConfig.type = PointDetectorTypes.SHI_TOMASI;
    detectorConfig.general.maxFeatures = 300;
    detectorConfig.general.radius = 6;
    detectorConfig.general.threshold = 10;

    // KLT point tracker used to estimate the camera's 2D image motion
    PointTracker pointTracker = FactoryPointTracker.klt(4, detectorConfig, 3, GrayF32.class, null);
    // Estimates the global 2D image motion as a homography between frames
    ImageMotion2D<GrayF32, Homography2D_F64> motionEstimator = FactoryMotion2D.createMotion2D(500, 0.5, 3, 100, 0.6, 0.5, false, pointTracker, new Homography2D_F64());

    ConfigBackgroundBasic basicConfig = new ConfigBackgroundBasic(30, 0.005f);

    // Configuration for Gaussian model. Note that the threshold changes depending on the number of image bands
    // 12 = gray scale and 40 = color
    ConfigBackgroundGaussian gaussianConfig = new ConfigBackgroundGaussian(12, 0.001f);
    gaussianConfig.initialVariance = 64;
    gaussianConfig.minimumDifference = 5;

    // Note that GMM doesn't interpolate the input image. Making it harder to model object edges.
    // However it runs faster because of this.
    ConfigBackgroundGmm gmmConfig = new ConfigBackgroundGmm();
    gmmConfig.initialVariance = 1600;
    gmmConfig.significantWeight = 1e-1f;

    // Comment/Uncomment to switch background mode
    BackgroundModelMoving model = FactoryBackgroundModel.movingBasic(basicConfig, new PointTransformHomography_F32(), imageType);
    // FactoryBackgroundModel.movingGaussian(gaussianConfig, new PointTransformHomography_F32(), imageType);
    // FactoryBackgroundModel.movingGmm(gmmConfig,new PointTransformHomography_F32(), imageType);
    model.setUnknownValue(1);

    MediaManager media = DefaultMediaManager.INSTANCE;
    SimpleImageSequence video = media.openVideo(videoPath, model.getImageType());
    // media.openCamera(null,640,480,model.getImageType());

    // ====== Initialize Images
    // storage for segmented image. Background = 0, Foreground = 1
    GrayU8 segmented = new GrayU8(video.getWidth(), video.getHeight());
    // Gray scale image that's the input for motion estimation
    GrayF32 gray = new GrayF32(segmented.width, segmented.height);

    // coordinate frames
    Homography2D_F32 firstToCurrent32 = new Homography2D_F32();
    Homography2D_F32 homeToWorld = new Homography2D_F32();
    homeToWorld.a13 = gray.width / 2;
    homeToWorld.a23 = gray.height / 2;

    // Create a background image twice the size of the input image. Tell it that the home is in the center
    model.initialize(gray.width * 2, gray.height * 2, homeToWorld);

    BufferedImage visualized = new BufferedImage(segmented.width, segmented.height, BufferedImage.TYPE_INT_RGB);
    ImageGridPanel gui = new ImageGridPanel(1, 2);
    gui.setImages(visualized, visualized);
    ShowImages.showWindow(gui, "Detections", true);

    double fps = 0;
    // exponential smoothing factor for the FPS estimate
    double alpha = 0.01;

    while (video.hasNext()) {
        ImageBase frame = video.next();
        long timeBefore = System.nanoTime();

        // Estimate motion in gray scale, then segment/update using the original frame
        GConvertImage.convert(frame, gray);
        if (!motionEstimator.process(gray)) {
            throw new RuntimeException("Should handle this scenario");
        }
        Homography2D_F64 firstToCurrent64 = motionEstimator.getFirstToCurrent();
        ConvertMatrixData.convert(firstToCurrent64, firstToCurrent32);
        model.segment(firstToCurrent32, frame, segmented);
        model.updateBackground(firstToCurrent32, frame);

        long timeAfter = System.nanoTime();
        fps = (1.0 - alpha) * fps + alpha * (1.0 / ((timeAfter - timeBefore) / 1e9));

        VisualizeBinaryData.renderBinary(segmented, false, visualized);
        gui.setImage(0, 0, (BufferedImage) video.getGuiImage());
        gui.setImage(0, 1, visualized);
        gui.repaint();
        System.out.println("FPS = " + fps);
        BoofMiscOps.sleep(5);
    }
}
Also used : ConfigBackgroundBasic(boofcv.factory.background.ConfigBackgroundBasic) BackgroundModelMoving(boofcv.alg.background.BackgroundModelMoving) SimpleImageSequence(boofcv.io.image.SimpleImageSequence) PointTransformHomography_F32(boofcv.alg.distort.PointTransformHomography_F32) Homography2D_F32(georegression.struct.homography.Homography2D_F32) Homography2D_F64(georegression.struct.homography.Homography2D_F64) BufferedImage(java.awt.image.BufferedImage) ImageType(boofcv.struct.image.ImageType) ConfigPointDetector(boofcv.abst.feature.detect.interest.ConfigPointDetector) ConfigBackgroundGaussian(boofcv.factory.background.ConfigBackgroundGaussian) GrayF32(boofcv.struct.image.GrayF32) MediaManager(boofcv.io.MediaManager) DefaultMediaManager(boofcv.io.wrapper.DefaultMediaManager) ConfigBackgroundGmm(boofcv.factory.background.ConfigBackgroundGmm) GrayU8(boofcv.struct.image.GrayU8) ImageGridPanel(boofcv.gui.image.ImageGridPanel) PointTracker(boofcv.abst.tracker.PointTracker) FactoryPointTracker(boofcv.factory.tracker.FactoryPointTracker) ImageBase(boofcv.struct.image.ImageBase)

Example 2 with SimpleImageSequence

use of boofcv.io.image.SimpleImageSequence in project BoofCV by lessthanoptimal.

The class ExampleTrackerObjectQuad, method main:

public static void main(String[] args) {
    MediaManager media = DefaultMediaManager.INSTANCE;
    String videoPath = UtilIO.pathExample("tracking/wildcat_robot.mjpeg");

    // Create the tracker. Comment/Uncomment to change the tracker.
    TrackerObjectQuad tracker = FactoryTrackerObjectQuad.circulant(null, GrayU8.class);
    // FactoryTrackerObjectQuad.sparseFlow(null,GrayU8.class,null);
    // FactoryTrackerObjectQuad.tld(null,GrayU8.class);
    // FactoryTrackerObjectQuad.meanShiftComaniciu2003(new ConfigComaniciu2003(), ImageType.pl(3, GrayU8.class));
    // FactoryTrackerObjectQuad.meanShiftComaniciu2003(new ConfigComaniciu2003(true),ImageType.pl(3,GrayU8.class));
    // Mean-shift likelihood will fail in this video, but is excellent at tracking objects with
    // a single unique color. See ExampleTrackerMeanShiftLikelihood
    // FactoryTrackerObjectQuad.meanShiftLikelihood(30,5,255, MeanShiftLikelihoodType.HISTOGRAM,ImageType.pl(3,GrayU8.class));

    SimpleImageSequence video = media.openVideo(videoPath, tracker.getImageType());

    // specify the target's initial location and initialize with the first frame
    Quadrilateral_F64 target = new Quadrilateral_F64(211.0, 162.0, 326.0, 153.0, 335.0, 258.0, 215.0, 249.0);
    ImageBase frame = video.next();
    tracker.initialize(frame, target);

    // Panel which renders the tracking results
    TrackerObjectQuadPanel gui = new TrackerObjectQuadPanel(null);
    gui.setPreferredSize(new Dimension(frame.getWidth(), frame.getHeight()));
    gui.setImageUI((BufferedImage) video.getGuiImage());
    gui.setTarget(target, true);
    ShowImages.showWindow(gui, "Tracking Results", true);

    // Track the object across each video frame and display the results
    long previousTime = 0;
    while (video.hasNext()) {
        frame = video.next();
        boolean visible = tracker.process(frame, target);

        gui.setImageUI((BufferedImage) video.getGuiImage());
        gui.setTarget(target, visible);
        gui.repaint();

        // throttle the loop so it runs at roughly the intended frame rate
        long currentTime = System.currentTimeMillis();
        BoofMiscOps.pause(Math.max(0, 80 - (currentTime - previousTime)));
        previousTime = currentTime;
    }
}
Also used : SimpleImageSequence(boofcv.io.image.SimpleImageSequence) Quadrilateral_F64(georegression.struct.shapes.Quadrilateral_F64) MediaManager(boofcv.io.MediaManager) DefaultMediaManager(boofcv.io.wrapper.DefaultMediaManager) FactoryTrackerObjectQuad(boofcv.factory.tracker.FactoryTrackerObjectQuad) TrackerObjectQuad(boofcv.abst.tracker.TrackerObjectQuad) TrackerObjectQuadPanel(boofcv.gui.tracker.TrackerObjectQuadPanel) ImageBase(boofcv.struct.image.ImageBase)

Example 3 with SimpleImageSequence

use of boofcv.io.image.SimpleImageSequence in project BoofCV by lessthanoptimal.

The class ExampleBackgroundRemovalStationary, method main:

public static void main(String[] args) {
    String videoPath = UtilIO.pathExample("background/street_intersection.mp4");
    // String videoPath = UtilIO.pathExample("background/rubixfire.mp4"); // dynamic background
    // String videoPath = UtilIO.pathExample("background/horse_jitter.mp4"); // degraded performance because of jitter
    // String videoPath = UtilIO.pathExample("tracking/chipmunk.mjpeg"); // Camera moves. Stationary will fail here

    // Comment/Uncomment to switch input image type
    ImageType imageType = ImageType.single(GrayF32.class);
    // ImageType imageType = ImageType.il(3, InterleavedF32.class);
    // ImageType imageType = ImageType.il(3, InterleavedU8.class);
    // ConfigBackgroundGmm configGmm = new ConfigBackgroundGmm();

    // Comment/Uncomment to switch algorithms
    BackgroundModelStationary model = FactoryBackgroundModel.stationaryBasic(new ConfigBackgroundBasic(35, 0.005f), imageType);
    // FactoryBackgroundModel.stationaryGmm(configGmm, imageType);

    MediaManager media = DefaultMediaManager.INSTANCE;
    SimpleImageSequence video = media.openVideo(videoPath, model.getImageType());
    // media.openCamera(null,640,480,model.getImageType());

    // Declare storage for segmented image. 1 = moving foreground and 0 = background
    GrayU8 segmented = new GrayU8(video.getWidth(), video.getHeight());

    BufferedImage visualized = new BufferedImage(segmented.width, segmented.height, BufferedImage.TYPE_INT_RGB);
    ImageGridPanel gui = new ImageGridPanel(1, 2);
    gui.setImages(visualized, visualized);
    ShowImages.showWindow(gui, "Static Scene: Background Segmentation", true);

    double fps = 0;
    // exponential smoothing factor for the FPS estimate
    double alpha = 0.01;

    while (video.hasNext()) {
        ImageBase frame = video.next();

        long timeBefore = System.nanoTime();
        model.updateBackground(frame, segmented);
        long timeAfter = System.nanoTime();

        fps = (1.0 - alpha) * fps + alpha * (1.0 / ((timeAfter - timeBefore) / 1e9));

        VisualizeBinaryData.renderBinary(segmented, false, visualized);
        gui.setImage(0, 0, (BufferedImage) video.getGuiImage());
        gui.setImage(0, 1, visualized);
        gui.repaint();
        System.out.println("FPS = " + fps);
        BoofMiscOps.sleep(5);
    }
    System.out.println("done!");
}
Also used : ConfigBackgroundBasic(boofcv.factory.background.ConfigBackgroundBasic) SimpleImageSequence(boofcv.io.image.SimpleImageSequence) BufferedImage(java.awt.image.BufferedImage) ImageType(boofcv.struct.image.ImageType) BackgroundModelStationary(boofcv.alg.background.BackgroundModelStationary) MediaManager(boofcv.io.MediaManager) DefaultMediaManager(boofcv.io.wrapper.DefaultMediaManager) GrayU8(boofcv.struct.image.GrayU8) ImageGridPanel(boofcv.gui.image.ImageGridPanel) ImageBase(boofcv.struct.image.ImageBase)

Example 4 with SimpleImageSequence

use of boofcv.io.image.SimpleImageSequence in project BoofCV by lessthanoptimal.

The class DemonstrationBase, method openWebcam:

/**
 * Prompts the user to select a webcam and, if successful, starts streaming from it.
 *
 * <p>Uses {@code startingProcess} as a re-entrancy guard so rapid repeated requests are
 * ignored. On any failure or cancellation the guard is cleared again so future open
 * requests are accepted — previously a failed camera open left the guard set, causing
 * every subsequent request to be rejected as "spamming" (see openVideo()'s failure path,
 * which this now mirrors).</p>
 */
public void openWebcam() {
    synchronized (lockStartingProcess) {
        if (startingProcess) {
            System.out.println("Ignoring webcam request. Detected spamming");
            return;
        }
        startingProcess = true;
    }
    // Webcam input only supports a single stream; subclasses with more must override
    synchronized (inputStreams) {
        if (inputStreams.size() != 1)
            throw new IllegalArgumentException("Input streams not equal to 1. Override openImage()");
    }
    stopAllInputProcessing();
    // Let the user select and configure the webcam. If canceled it will return null
    OpenWebcamDialog.Selection s = OpenWebcamDialog.showDialog(window);
    if (s == null) {
        // user canceled: clear the guard so the next request isn't rejected
        synchronized (lockStartingProcess) {
            startingProcess = false;
        }
        return;
    }
    boolean success = false;
    synchronized (inputStreams) {
        inputMethod = InputMethod.WEBCAM;
        inputFilePath = null;
        // default to no delay in processing for a real time stream
        streamPeriod = 0;
        CacheSequenceStream cache = inputStreams.get(0);
        SimpleImageSequence sequence = media.openCamera(s.camera.getName(), s.width, s.height, cache.getImageType());
        if (sequence != null) {
            cache.reset();
            cache.setSequence(sequence);
            if (threadProcess != null)
                throw new RuntimeException("There was still an active stream thread!");
            setInputName("Webcam");
            handleInputChange(0, inputMethod, sequence.getWidth(), sequence.getHeight());
            threadProcess = new SynchronizedStreamsThread();
            threadPool.execute(threadProcess);
            success = true;
        }
    }
    if (!success) {
        // Failure path consistent with openVideo(): revert the input state and clear
        // the in-progress guard before telling the user. Locks are taken separately
        // (never nested) to match the lock discipline used elsewhere in this class.
        synchronized (inputStreams) {
            inputMethod = InputMethod.NONE;
        }
        synchronized (lockStartingProcess) {
            startingProcess = false;
        }
        showRejectDiaglog("Can't open webcam");
    }
}
Also used : SimpleImageSequence(boofcv.io.image.SimpleImageSequence) OpenWebcamDialog(boofcv.io.webcamcapture.OpenWebcamDialog)

Example 5 with SimpleImageSequence

use of boofcv.io.image.SimpleImageSequence in project BoofCV by lessthanoptimal.

The class DemonstrationBase, method openVideo:

/**
 * Opens the specified video files, one per input stream, and starts processing them.
 *
 * <p>Before invoking this function make sure waitingToOpenImage is false AND that the
 * previous input has been stopped.</p>
 *
 * <p>Uses {@code startingProcess} as a re-entrancy guard against rapid repeated requests.
 * On failure the input state is reverted and the guard cleared; on success the guard is
 * presumably cleared by the stream thread — confirm in SynchronizedStreamsThread.</p>
 *
 * @param reopen NOTE(review): not referenced in this body — presumably consumed by an
 *               override or a newer revision; confirm before removing
 * @param filePaths one video file per input stream; count must equal inputStreams.size()
 */
protected void openVideo(boolean reopen, String... filePaths) {
    // Reject the request if another open is already in progress
    synchronized (lockStartingProcess) {
        if (startingProcess) {
            System.out.println("Ignoring video request. Detected spamming");
            return;
        }
        startingProcess = true;
    }
    // Sanity check: one file path is required for every declared input stream
    synchronized (inputStreams) {
        if (inputStreams.size() != filePaths.length)
            throw new IllegalArgumentException("inputStreams.size() != filePaths.length. Override openVideo(). " + inputStreams.size() + " != " + filePaths.length);
    }
    inputFileSet = filePaths;
    stopAllInputProcessing();
    streamPaused = false;
    // Open every video; abort the whole request if any single one fails
    boolean failed = false;
    for (int which = 0; which < filePaths.length; which++) {
        CacheSequenceStream cache = inputStreams.get(which);
        SimpleImageSequence sequence = media.openVideo(filePaths[which], cache.getImageType());
        if (sequence == null) {
            failed = true;
            System.out.println("Can't find file. " + filePaths[which]);
            break;
        }
        configureVideo(which, sequence);
        synchronized (inputStreams) {
            cache.reset();
            cache.setSequence(sequence);
        }
    }
    if (!failed) {
        // Display the first file's name and switch the input state over to video
        setInputName(new File(filePaths[0]).getName());
        synchronized (inputStreams) {
            inputMethod = InputMethod.VIDEO;
            // pause ~33 ms between frames, i.e. roughly 30 FPS playback
            streamPeriod = 33;
            if (threadProcess != null)
                throw new RuntimeException("There was still an active stream thread!");
            threadProcess = new SynchronizedStreamsThread();
        }
        // Notify listeners of each stream's dimensions before processing begins
        for (int i = 0; i < inputStreams.size(); i++) {
            CacheSequenceStream stream = inputStreams.get(i);
            // load the first image to get the size then reset
            // so that it starts processing at the first image
            int width = 0, height = 0;
            if (stream.hasNext()) {
                stream.cacheNext();
                width = stream.getWidth();
                height = stream.getHeight();
                stream.reset();
            }
            handleInputChange(i, inputMethod, width, height);
        }
        threadPool.execute(threadProcess);
    } else {
        // Revert the input state and clear the in-progress guard so the next
        // open request is not rejected as spam
        synchronized (inputStreams) {
            inputMethod = InputMethod.NONE;
            inputFilePath = null;
        }
        synchronized (lockStartingProcess) {
            startingProcess = false;
        }
        showRejectDiaglog("Can't open file");
    }
}
Also used : SimpleImageSequence(boofcv.io.image.SimpleImageSequence) File(java.io.File)

Aggregations

SimpleImageSequence (boofcv.io.image.SimpleImageSequence)7 DefaultMediaManager (boofcv.io.wrapper.DefaultMediaManager)4 GrayU8 (boofcv.struct.image.GrayU8)4 ImageBase (boofcv.struct.image.ImageBase)4 MediaManager (boofcv.io.MediaManager)3 ImageType (boofcv.struct.image.ImageType)3 PointTracker (boofcv.abst.tracker.PointTracker)2 ConfigBackgroundBasic (boofcv.factory.background.ConfigBackgroundBasic)2 FactoryPointTracker (boofcv.factory.tracker.FactoryPointTracker)2 ImageGridPanel (boofcv.gui.image.ImageGridPanel)2 BufferedImage (java.awt.image.BufferedImage)2 File (java.io.File)2 BoofVerbose (boofcv.BoofVerbose)1 ConfigPointDetector (boofcv.abst.feature.detect.interest.ConfigPointDetector)1 SceneStructureMetric (boofcv.abst.geo.bundle.SceneStructureMetric)1 PointTrack (boofcv.abst.tracker.PointTrack)1 TrackerObjectQuad (boofcv.abst.tracker.TrackerObjectQuad)1 BackgroundModelMoving (boofcv.alg.background.BackgroundModelMoving)1 BackgroundModelStationary (boofcv.alg.background.BackgroundModelStationary)1 PointCloudReader (boofcv.alg.cloud.PointCloudReader)1