
Example 1 with PkltConfig

Use of boofcv.alg.tracker.klt.PkltConfig in the project BoofCV by lessthanoptimal.

From the class ExamplePointFeatureTracker, method createKLT:

/**
 * A simple way to create a Kanade-Lucas-Tomasi (KLT) tracker.
 */
public void createKLT() {
    PkltConfig config = new PkltConfig();
    // radius of the template tracked around each feature
    config.templateRadius = 3;
    // scale factor of each pyramid layer, from full resolution down to 1/8
    config.pyramidScaling = new int[] { 1, 2, 4, 8 };
    // corner detector: up to 600 features, non-maximum radius 6, minimum intensity threshold 1
    tracker = FactoryPointTracker.klt(config, new ConfigGeneralDetector(600, 6, 1), imageType, derivType);
}
Also used: PkltConfig(boofcv.alg.tracker.klt.PkltConfig), ConfigGeneralDetector(boofcv.abst.feature.detect.interest.ConfigGeneralDetector)
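
For context, here is a minimal sketch of how the tracker field configured above is typically driven each frame, using the same PointTracker calls that Example 2 below demonstrates in full. The method name trackFrame, the GrayF32 image type, and the respawn threshold of 100 are illustrative assumptions, not part of the original example.

public void trackFrame(GrayF32 frame) {
    // update existing tracks against the new image ('frame' and GrayF32 are assumed here)
    tracker.process(frame);
    List<PointTrack> tracks = tracker.getActiveTracks(null);
    // spawn replacement features when too few tracks survive
    if (tracks.size() < 100) {
        tracker.spawnTracks();
        tracks = tracker.getActiveTracks(null);
    }
    for (PointTrack t : tracks)
        System.out.println("track at (" + t.x + ", " + t.y + ")");
}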

Example 2 with PkltConfig

Use of boofcv.alg.tracker.klt.PkltConfig in the project BoofCV by lessthanoptimal.

From the class ExampleTrackingKlt, method main:

public static void main(String[] args) {
    // tune the tracker for the image size and visual appearance
    ConfigGeneralDetector configDetector = new ConfigGeneralDetector(-1, 8, 1);
    PkltConfig configKlt = new PkltConfig(3, new int[] { 1, 2, 4, 8 });
    PointTracker<GrayF32> tracker = FactoryPointTracker.klt(configKlt, configDetector, GrayF32.class, null);
    // Open a webcam at a resolution close to 640x480
    Webcam webcam = UtilWebcamCapture.openDefault(640, 480);
    // Create the panel used to display the image and feature tracks
    ImagePanel gui = new ImagePanel();
    gui.setPreferredSize(webcam.getViewSize());
    ShowImages.showWindow(gui, "KLT Tracker", true);
    int minimumTracks = 100;
    while (true) {
        BufferedImage image = webcam.getImage();
        GrayF32 gray = ConvertBufferedImage.convertFrom(image, (GrayF32) null);
        tracker.process(gray);
        List<PointTrack> tracks = tracker.getActiveTracks(null);
        // Spawn new tracks if too few survive, then lower the threshold so
        // spawning is not triggered again until half of the new tracks are lost
        if (tracks.size() < minimumTracks) {
            tracker.spawnTracks();
            tracks = tracker.getActiveTracks(null);
            minimumTracks = tracks.size() / 2;
        }
        // Draw the tracks
        Graphics2D g2 = image.createGraphics();
        for (PointTrack t : tracks) {
            VisualizeFeatures.drawPoint(g2, (int) t.x, (int) t.y, Color.RED);
        }
        g2.dispose();
        gui.setImageUI(image);
    }
}
Also used: GrayF32(boofcv.struct.image.GrayF32), PointTrack(boofcv.abst.feature.tracker.PointTrack), PkltConfig(boofcv.alg.tracker.klt.PkltConfig), ConfigGeneralDetector(boofcv.abst.feature.detect.interest.ConfigGeneralDetector), Webcam(com.github.sarxos.webcam.Webcam), BufferedImage(java.awt.image.BufferedImage), ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage), ImagePanel(boofcv.gui.image.ImagePanel)

Example 3 with PkltConfig

Use of boofcv.alg.tracker.klt.PkltConfig in the project BoofCV by lessthanoptimal.

From the class VisualizeStereoVisualOdometryApp, method createStereoDepth:

private StereoVisualOdometry<I> createStereoDepth(int whichAlg) {
    Class derivType = GImageDerivativeOps.getDerivativeType(imageType);
    StereoDisparitySparse<I> disparity = FactoryStereoDisparity.regionSparseWta(2, 150, 3, 3, 30, -1, true, imageType);
    PkltConfig kltConfig = new PkltConfig();
    kltConfig.templateRadius = 3;
    kltConfig.pyramidScaling = new int[] { 1, 2, 4, 8 };
    // whichAlg == 0: stereo depth VO driven by a two-pass KLT point tracker
    if (whichAlg == 0) {
        ConfigGeneralDetector configDetector = new ConfigGeneralDetector(600, 3, 1);
        PointTrackerTwoPass<I> tracker = FactoryPointTrackerTwoPass.klt(kltConfig, configDetector, imageType, derivType);
        return FactoryVisualOdometry.stereoDepth(1.5, 120, 2, 200, 50, false, disparity, tracker, imageType);
    } else if (whichAlg == 1) {
        // whichAlg == 1: stereo depth VO with a Shi-Tomasi detector, BRIEF descriptors, and greedy association
        ConfigGeneralDetector configExtract = new ConfigGeneralDetector(600, 3, 1);
        GeneralFeatureDetector detector = FactoryPointTracker.createShiTomasi(configExtract, derivType);
        DescribeRegionPoint describe = FactoryDescribeRegionPoint.brief(null, imageType);
        ScoreAssociateHamming_B score = new ScoreAssociateHamming_B();
        AssociateDescription2D<TupleDesc_B> associate = new AssociateDescTo2D<>(FactoryAssociation.greedy(score, 150, true));
        PointTrackerTwoPass tracker = FactoryPointTrackerTwoPass.dda(detector, describe, associate, null, 1, imageType);
        return FactoryVisualOdometry.stereoDepth(1.5, 80, 3, 200, 50, false, disparity, tracker, imageType);
    } else if (whichAlg == 2) {
        // whichAlg == 2: stereo depth VO with the combined Shi-Tomasi/SURF/KLT tracker
        PointTracker<I> tracker = FactoryPointTracker.combined_ST_SURF_KLT(new ConfigGeneralDetector(600, 3, 0), kltConfig, 50, null, null, imageType, derivType);
        PointTrackerTwoPass<I> twopass = new PointTrackerToTwoPass<>(tracker);
        return FactoryVisualOdometry.stereoDepth(1.5, 80, 3, 200, 50, false, disparity, twopass, imageType);
    } else if (whichAlg == 3) {
        // whichAlg == 3: dual-tracker PnP with an independent KLT tracker per camera plus SURF descriptors
        ConfigGeneralDetector configDetector = new ConfigGeneralDetector(600, 3, 1);
        PointTracker<I> trackerLeft = FactoryPointTracker.klt(kltConfig, configDetector, imageType, derivType);
        PointTracker<I> trackerRight = FactoryPointTracker.klt(kltConfig, configDetector, imageType, derivType);
        DescribeRegionPoint describe = FactoryDescribeRegionPoint.surfFast(null, imageType);
        return FactoryVisualOdometry.stereoDualTrackerPnP(90, 2, 1.5, 1.5, 200, 50, trackerLeft, trackerRight, describe, imageType);
    } else if (whichAlg == 4) {
        // whichAlg == 4: quad PnP using a multi-detect/describe pipeline (Shi-Tomasi detector, SURF descriptors)
        // GeneralFeatureIntensity intensity =
        // FactoryIntensityPoint.hessian(HessianBlobIntensity.Type.TRACE,defaultType);
        GeneralFeatureIntensity intensity = FactoryIntensityPoint.shiTomasi(1, false, imageType);
        NonMaxSuppression nonmax = FactoryFeatureExtractor.nonmax(new ConfigExtract(2, 50, 0, true, false, true));
        GeneralFeatureDetector general = new GeneralFeatureDetector(intensity, nonmax);
        general.setMaxFeatures(600);
        DetectorInterestPointMulti detector = new GeneralToInterestMulti(general, 2, imageType, derivType);
        // DescribeRegionPoint describe = FactoryDescribeRegionPoint.brief(new ConfigBrief(true),defaultType);
        // DescribeRegionPoint describe = FactoryDescribeRegionPoint.pixelNCC(5,5,defaultType);
        DescribeRegionPoint describe = FactoryDescribeRegionPoint.surfFast(null, imageType);
        DetectDescribeMulti detDescMulti = new DetectDescribeMultiFusion(detector, null, describe);
        return FactoryVisualOdometry.stereoQuadPnP(1.5, 0.5, 75, Double.MAX_VALUE, 300, 50, detDescMulti, imageType);
    } else {
        throw new RuntimeException("Unknown selection");
    }
}
Also used: NonMaxSuppression(boofcv.abst.feature.detect.extract.NonMaxSuppression), GeneralToInterestMulti(boofcv.abst.feature.detect.interest.GeneralToInterestMulti), PkltConfig(boofcv.alg.tracker.klt.PkltConfig), FactoryDescribeRegionPoint(boofcv.factory.feature.describe.FactoryDescribeRegionPoint), DescribeRegionPoint(boofcv.abst.feature.describe.DescribeRegionPoint), ConfigGeneralDetector(boofcv.abst.feature.detect.interest.ConfigGeneralDetector), DetectDescribeMultiFusion(boofcv.abst.feature.detdesc.DetectDescribeMultiFusion), DetectorInterestPointMulti(boofcv.abst.feature.detect.interest.DetectorInterestPointMulti), ScoreAssociateHamming_B(boofcv.abst.feature.associate.ScoreAssociateHamming_B), FactoryPointTrackerTwoPass(boofcv.factory.feature.tracker.FactoryPointTrackerTwoPass), PointTrackerTwoPass(boofcv.abst.feature.tracker.PointTrackerTwoPass), ConfigExtract(boofcv.abst.feature.detect.extract.ConfigExtract), DetectDescribeMulti(boofcv.abst.feature.detdesc.DetectDescribeMulti), GeneralFeatureDetector(boofcv.alg.feature.detect.interest.GeneralFeatureDetector), PointTrackerToTwoPass(boofcv.abst.feature.tracker.PointTrackerToTwoPass), AssociateDescription2D(boofcv.abst.feature.associate.AssociateDescription2D), PointTracker(boofcv.abst.feature.tracker.PointTracker), FactoryPointTracker(boofcv.factory.feature.tracker.FactoryPointTracker), GeneralFeatureIntensity(boofcv.abst.feature.detect.intensity.GeneralFeatureIntensity)
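
For reference, a StereoVisualOdometry instance returned by createStereoDepth is normally driven per stereo pair roughly as sketched below. The helper method, the GrayF32 image type, and the calibration/image arguments are assumptions for illustration, not code from this class.

public static void updateStereoVO(StereoVisualOdometry<GrayF32> visualOdometry,
                                  StereoParameters calibration,
                                  GrayF32 left, GrayF32 right) {
    // set (or reset) the stereo calibration; in a real application this is done once before the frame loop
    visualOdometry.setCalibration(calibration);
    // process() returns false when the motion estimate could not be updated for this pair
    if (!visualOdometry.process(left, right)) {
        System.out.println("Visual odometry failed on this stereo pair");
        return;
    }
    // pose of the left camera in the world frame
    Se3_F64 leftToWorld = visualOdometry.getCameraToWorld();
    System.out.println("Camera location: " + leftToWorld.getT());
}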

Example 4 with PkltConfig

Use of boofcv.alg.tracker.klt.PkltConfig in the project BoofCV by lessthanoptimal.

From the class FactoryPointTracker, method combined:

/**
 * Creates a tracker that is a hybrid between KLT and Detect-Describe-Associate (DDA) trackers.
 *
 * @see CombinedTrackerScalePoint
 *
 * @param detector Feature detector and describer.
 * @param associate Association algorithm.
 * @param kltConfig Configuration for KLT tracker
 * @param reactivateThreshold Tracks are reactivated after this many have been dropped.  Try 10% of maxMatches
 * @param imageType Input image type.
 * @return Feature tracker
 */
public static <I extends ImageGray<I>, D extends ImageGray<D>, Desc extends TupleDesc> PointTracker<I> combined(DetectDescribePoint<I, Desc> detector, AssociateDescription<Desc> associate, PkltConfig kltConfig, int reactivateThreshold, Class<I> imageType) {
    Class<D> derivType = GImageDerivativeOps.getDerivativeType(imageType);
    if (kltConfig == null) {
        kltConfig = new PkltConfig();
    }
    CombinedTrackerScalePoint<I, D, Desc> tracker = FactoryTrackerAlg.combined(detector, associate, kltConfig, imageType, derivType);
    return new PointTrackerCombined<>(tracker, reactivateThreshold, imageType, derivType);
}
Also used: PkltConfig(boofcv.alg.tracker.klt.PkltConfig)
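
As a rough sketch of calling this factory method, the snippet below wires a SURF detector/descriptor and a greedy Euclidean associator into the hybrid tracker. The concrete choices here (FactoryDetectDescribe.surfStable, BrightFeature descriptors, a reactivation threshold of 60, GrayF32 images) are assumptions made for illustration; the javadoc above only specifies the parameter roles.

public static PointTracker<GrayF32> createCombinedTracker() {
    // detector + descriptor in one object; SURF with default configurations (an assumed choice)
    DetectDescribePoint<GrayF32, BrightFeature> detDesc =
            FactoryDetectDescribe.surfStable(null, null, null, GrayF32.class);
    // associate descriptors by Euclidean distance with backwards validation
    ScoreAssociation<BrightFeature> score = FactoryAssociation.scoreEuclidean(BrightFeature.class, true);
    AssociateDescription<BrightFeature> associate = FactoryAssociation.greedy(score, Double.MAX_VALUE, true);
    // same KLT pyramid configuration used by the other examples on this page
    PkltConfig kltConfig = new PkltConfig(3, new int[] { 1, 2, 4, 8 });
    // reactivateThreshold of 60 is roughly 10% of a 600-feature budget, per the javadoc guidance
    return FactoryPointTracker.combined(detDesc, associate, kltConfig, 60, GrayF32.class);
}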

Example 5 with PkltConfig

Use of boofcv.alg.tracker.klt.PkltConfig in the project BoofCV by lessthanoptimal.

From the class TestMonoOverhead_to_MonocularPlaneVisualOdometry, method createAlgorithm:

protected MonocularPlaneVisualOdometry<GrayU8> createAlgorithm() {
    PkltConfig config = new PkltConfig();
    config.pyramidScaling = new int[] { 1, 2, 4, 8 };
    config.templateRadius = 3;
    ConfigGeneralDetector configDetector = new ConfigGeneralDetector(600, 3, 1);
    PointTracker<GrayU8> tracker = FactoryPointTracker.klt(config, configDetector, GrayU8.class, GrayS16.class);
    double cellSize = 0.015;
    double ransacTol = 0.2;
    return FactoryVisualOdometry.monoPlaneOverhead(cellSize, 25, 0.5, ransacTol, 300, 2, 30, 0.5, 0.3, tracker, ImageType.single(GrayU8.class));
}
Also used: PkltConfig(boofcv.alg.tracker.klt.PkltConfig), ConfigGeneralDetector(boofcv.abst.feature.detect.interest.ConfigGeneralDetector), GrayU8(boofcv.struct.image.GrayU8)
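
For context, the MonocularPlaneVisualOdometry instance created here is normally used along the lines sketched below. The helper method and its arguments are assumptions for illustration, not part of the test.

public static void updatePlanarVO(MonocularPlaneVisualOdometry<GrayU8> vo,
                                  MonoPlaneParameters calibration,
                                  GrayU8 frame) {
    // camera intrinsics plus the plane-to-camera transform; in a real application set once before the frame loop
    vo.setCalibration(calibration);
    // process() returns false when the motion could not be estimated for this frame
    if (!vo.process(frame)) {
        System.out.println("Motion estimate failed for this frame");
        return;
    }
    Se3_F64 cameraToWorld = vo.getCameraToWorld();
    System.out.println("Camera location: " + cameraToWorld.getT());
}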

Aggregations

PkltConfig (boofcv.alg.tracker.klt.PkltConfig): 18 usages
ConfigGeneralDetector (boofcv.abst.feature.detect.interest.ConfigGeneralDetector): 16 usages
GrayU8 (boofcv.struct.image.GrayU8): 7 usages
Vector3D_F64 (georegression.struct.point.Vector3D_F64): 4 usages
Se3_F64 (georegression.struct.se.Se3_F64): 4 usages
DescribeRegionPoint (boofcv.abst.feature.describe.DescribeRegionPoint): 3 usages
PointTracker (boofcv.abst.feature.tracker.PointTracker): 3 usages
FactoryDescribeRegionPoint (boofcv.factory.feature.describe.FactoryDescribeRegionPoint): 3 usages
FactoryPointTracker (boofcv.factory.feature.tracker.FactoryPointTracker): 3 usages
MediaManager (boofcv.io.MediaManager): 3 usages
DefaultMediaManager (boofcv.io.wrapper.DefaultMediaManager): 3 usages
GrayF32 (boofcv.struct.image.GrayF32): 3 usages
GrayU16 (boofcv.struct.image.GrayU16): 3 usages
BufferedImage (java.awt.image.BufferedImage): 3 usages
AssociateDescription2D (boofcv.abst.feature.associate.AssociateDescription2D): 2 usages
ScoreAssociateHamming_B (boofcv.abst.feature.associate.ScoreAssociateHamming_B): 2 usages
PointTrack (boofcv.abst.feature.tracker.PointTrack): 2 usages
PointTrackerToTwoPass (boofcv.abst.feature.tracker.PointTrackerToTwoPass): 2 usages
PointTrackerTwoPass (boofcv.abst.feature.tracker.PointTrackerTwoPass): 2 usages
GeneralFeatureDetector (boofcv.alg.feature.detect.interest.GeneralFeatureDetector): 2 usages