Search in sources :

Example 1 with ConfigDisparityBMBest5

use of boofcv.factory.disparity.ConfigDisparityBMBest5 in project BoofCV by lessthanoptimal.

From the class DemoThreeViewControls, method addDisparityControls.

/** Builds the dense disparity control panel (block-matching Best5 + SGM configs) and registers the change listener. */
private void addDisparityControls() {
    var blockMatch = new ConfigDisparityBMBest5();
    var sgm = new ConfigDisparitySGM();
    var speckle = new ConfigSpeckleFilter();

    // Both algorithms search the same disparity interval [0, 200)
    blockMatch.disparityMin = 0;
    sgm.disparityMin = 0;
    blockMatch.disparityRange = 200;
    sgm.disparityRange = 200;

    // Block matching: 9x9 square region scored with a 7x7 block Census transform
    blockMatch.regionRadiusX = 4;
    blockMatch.regionRadiusY = 4;
    blockMatch.errorType = DisparityError.CENSUS;
    blockMatch.configCensus.variant = CensusVariants.BLOCK_7_7;

    controlDisparity = new ControlPanelDisparityDense(blockMatch, sgm, speckle, GrayU8.class);
    controlDisparity.setListener(this::handleStereoChanged);
}
Also used : ConfigDisparityBMBest5(boofcv.factory.disparity.ConfigDisparityBMBest5) ControlPanelDisparityDense(boofcv.gui.controls.ControlPanelDisparityDense) ConfigDisparitySGM(boofcv.factory.disparity.ConfigDisparitySGM) GrayU8(boofcv.struct.image.GrayU8) ConfigSpeckleFilter(boofcv.abst.disparity.ConfigSpeckleFilter)

Example 2 with ConfigDisparityBMBest5

use of boofcv.factory.disparity.ConfigDisparityBMBest5 in project BoofCV by lessthanoptimal.

From the class ExampleTrifocalStereoUncalibrated, method computeStereoCloud.

/**
 * Rectifies the stereo pair, removes lens distortion, computes a dense disparity image,
 * and displays the rectified pair, the disparity map, and the resulting point cloud.
 *
 * @param distortedLeft Gray left image with lens distortion
 * @param distortedRight Gray right image with lens distortion
 * @param colorLeft Color left image with lens distortion
 * @param colorRight Color right image with lens distortion
 * @param intrinsicLeft Intrinsic parameters of the left camera
 * @param intrinsicRight Intrinsic parameters of the right camera
 * @param leftToRight Transform from the left to the right camera frame
 * @param minDisparity Minimum disparity value that will be considered
 * @param rangeDisparity Number of disparity values that will be considered
 */
public static void computeStereoCloud(GrayU8 distortedLeft, GrayU8 distortedRight, Planar<GrayU8> colorLeft, Planar<GrayU8> colorRight, CameraPinholeBrown intrinsicLeft, CameraPinholeBrown intrinsicRight, Se3_F64 leftToRight, int minDisparity, int rangeDisparity) {
    // drawInliers(origLeft, origRight, intrinsic, inliers);
    // Rectify and remove lens distortion for stereo processing
    var rectifiedK = new DMatrixRMaj(3, 3);
    var rectifiedR = new DMatrixRMaj(3, 3);
    // rectify the colored images
    Planar<GrayU8> rectColorLeft = colorLeft.createSameShape();
    // FIX: shape the right buffer from the right image (was colorLeft — a copy-paste slip
    // that breaks when the two images differ in size)
    Planar<GrayU8> rectColorRight = colorRight.createSameShape();
    GrayU8 rectMask = new GrayU8(colorLeft.width, colorLeft.height);
    rectifyImages(colorLeft, colorRight, leftToRight, intrinsicLeft, intrinsicRight, rectColorLeft, rectColorRight, rectMask, rectifiedK, rectifiedR);
    // A negative focal length indicates rectification produced a degenerate geometry
    if (rectifiedK.get(0, 0) < 0)
        throw new RuntimeException("Egads");
    System.out.println("Rectified K");
    rectifiedK.print();
    System.out.println("Rectified R");
    rectifiedR.print();
    // Convert the rectified color images into gray for disparity computation
    GrayU8 rectifiedLeft = distortedLeft.createSameShape();
    GrayU8 rectifiedRight = distortedRight.createSameShape();
    ConvertImage.average(rectColorLeft, rectifiedLeft);
    ConvertImage.average(rectColorRight, rectifiedRight);
    // compute disparity using block matching with a Census error
    var config = new ConfigDisparityBMBest5();
    config.errorType = DisparityError.CENSUS;
    config.disparityMin = minDisparity;
    config.disparityRange = rangeDisparity;
    config.subpixel = true;
    config.regionRadiusX = config.regionRadiusY = 6;
    config.validateRtoL = 1;
    config.texture = 0.2;
    StereoDisparity<GrayU8, GrayF32> disparityAlg = FactoryStereoDisparity.blockMatchBest5(config, GrayU8.class, GrayF32.class);
    // process and return the results
    disparityAlg.process(rectifiedLeft, rectifiedRight);
    GrayF32 disparity = disparityAlg.getDisparity();
    // Zero out disparity pixels which lie outside the original image bounds
    RectifyImageOps.applyMask(disparity, rectMask, 0);
    // show results
    BufferedImage visualized = VisualizeImageData.disparity(disparity, null, rangeDisparity, 0);
    BufferedImage outLeft = ConvertBufferedImage.convertTo(rectColorLeft, null, true);
    BufferedImage outRight = ConvertBufferedImage.convertTo(rectColorRight, null, true);
    ShowImages.showWindow(new RectifiedPairPanel(true, outLeft, outRight), "Rectification", true);
    ShowImages.showWindow(visualized, "Disparity", true);
    showPointCloud(disparity, outLeft, leftToRight, rectifiedK, rectifiedR, minDisparity, rangeDisparity);
}
Also used : GrayF32(boofcv.struct.image.GrayF32) ConfigDisparityBMBest5(boofcv.factory.disparity.ConfigDisparityBMBest5) DMatrixRMaj(org.ejml.data.DMatrixRMaj) GrayU8(boofcv.struct.image.GrayU8) RectifiedPairPanel(boofcv.gui.stereo.RectifiedPairPanel) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage)

Example 3 with ConfigDisparityBMBest5

use of boofcv.factory.disparity.ConfigDisparityBMBest5 in project BoofCV by lessthanoptimal.

From the class ExampleMultiBaselineStereo, method main.

/**
 * Multi-baseline stereo demo: computes disparity between a chosen "center" view and every
 * 3D-connected neighbor view, fuses the results into a single disparity image, then
 * converts the fused disparity into a colorized point cloud for display.
 */
public static void main(String[] args) {
    // Compute a sparse reconstruction. This will give us intrinsic and extrinsic for all views
    var example = new ExampleMultiViewSparseReconstruction();
    // Specifies the "center" frame to use
    int centerViewIdx = 15;
    example.compute("tree_snow_01.mp4", true);
    // example.compute("ditch_02.mp4", true);
    // example.compute("holiday_display_01.mp4", true);
    // example.compute("log_building_02.mp4", true);
    // example.compute("drone_park_01.mp4", false);
    // example.compute("stone_sign.mp4", true);
    // We need a way to load images based on their ID. In this particular case the ID encodes the array index.
    var imageLookup = new LookUpImageFilesByIndex(example.imageFiles);
    // Next we tell it which view to use as the "center", which acts as the common view for all disparity images.
    // The process of selecting the best views to use as centers is a problem all it's own. To keep things
    // simple we just pick a frame.
    SceneWorkingGraph.View center = example.working.getAllViews().get(centerViewIdx);
    // The final scene refined by bundle adjustment is created by the Working graph. However the 3D relationship
    // between views is contained in the pairwise graph. A View in the working graph has a reference to the view
    // in the pairwise graph. Using that we will find all connected views that have a 3D relationship
    var pairedViewIdxs = new DogArray_I32();
    var sbaIndexToImageID = new TIntObjectHashMap<String>();
    // This relationship between pairwise and working graphs might seem (and is) a bit convoluted. The Pairwise
    // graph is the initial crude sketch of what might be connected. The working graph is an intermediate
    // data structure for computing the metric scene. SBA is a refinement of the working graph.
    // Iterate through all connected views in the pairwise graph and mark their indexes in the working graph
    center.pview.connections.forEach((m) -> {
        // if there isn't a 3D relationship just skip it
        if (!m.is3D)
            return;
        String connectedID = m.other(center.pview).id;
        SceneWorkingGraph.View connected = example.working.views.get(connectedID);
        // Make sure the pairwise view exists in the working graph too
        if (connected == null)
            return;
        // Add this view to the index to name/ID lookup table
        sbaIndexToImageID.put(connected.index, connectedID);
        // Note that this view is one which acts as the second image in the stereo pair
        pairedViewIdxs.add(connected.index);
    });
    // Add the center camera image to the ID look up table
    sbaIndexToImageID.put(centerViewIdx, center.pview.id);
    // Configure the stereo disparity algorithm which is used
    var configDisparity = new ConfigDisparityBMBest5();
    configDisparity.validateRtoL = 1;
    configDisparity.texture = 0.5;
    configDisparity.regionRadiusX = configDisparity.regionRadiusY = 4;
    configDisparity.disparityRange = 120;
    // This is the actual MBS algorithm mentioned previously. It selects the best disparity for each pixel
    // in the original image using a median filter.
    var multiBaseline = new MultiBaselineStereoIndependent<>(imageLookup, ImageType.SB_U8);
    multiBaseline.setStereoDisparity(FactoryStereoDisparity.blockMatchBest5(configDisparity, GrayU8.class, GrayF32.class));
    // Print out verbose debugging and profile information
    multiBaseline.setVerbose(System.out, null);
    multiBaseline.setVerboseProfiling(System.out);
    // Improve stereo by removing small regions, which tends to be noise. Consider adjusting the region size.
    multiBaseline.setDisparitySmoother(FactoryStereoDisparity.removeSpeckle(null, GrayF32.class));
    // Print out debugging information from the smoother
    // Objects.requireNonNull(multiBaseline.getDisparitySmoother()).setVerbose(System.out,null);
    // Creates a list where you can switch between different images/visualizations
    var listDisplay = new ListDisplayPanel();
    listDisplay.setPreferredSize(new Dimension(1000, 300));
    ShowImages.showWindow(listDisplay, "Intermediate Results", true);
    // We will display intermediate results as they come in
    multiBaseline.setListener((leftView, rightView, rectLeft, rectRight, disparity, mask, parameters, rect) -> {
        // Visualize the rectified stereo pair. You can interact with this window and verify
        // that the y-axis is aligned
        var rectified = new RectifiedPairPanel(true);
        rectified.setImages(ConvertBufferedImage.convertTo(rectLeft, null), ConvertBufferedImage.convertTo(rectRight, null));
        // Cleans up the disparity image by zeroing out pixels that are outside the original image bounds
        RectifyImageOps.applyMask(disparity, mask, 0);
        // Display the colorized disparity
        BufferedImage colorized = VisualizeImageData.disparity(disparity, null, parameters.disparityRange, 0);
        SwingUtilities.invokeLater(() -> {
            listDisplay.addItem(rectified, "Rectified " + leftView + " " + rightView);
            listDisplay.addImage(colorized, leftView + " " + rightView);
        });
    });
    // Process the images and compute a single combined disparity image
    if (!multiBaseline.process(example.scene, center.index, pairedViewIdxs, sbaIndexToImageID::get)) {
        throw new RuntimeException("Failed to fuse stereo views");
    }
    // Extract the point cloud from the fused disparity image
    GrayF32 fusedDisparity = multiBaseline.getFusedDisparity();
    DisparityParameters fusedParam = multiBaseline.getFusedParam();
    BufferedImage colorizedDisp = VisualizeImageData.disparity(fusedDisparity, null, fusedParam.disparityRange, 0);
    ShowImages.showWindow(colorizedDisp, "Fused Disparity");
    // Now compute the point cloud it represents and the color of each pixel.
    // For the fused image, instead of being in rectified image coordinates it's in the original image coordinates
    // this makes extracting color much easier.
    var cloud = new DogArray<>(Point3D_F64::new);
    var cloudRgb = new DogArray_I32(cloud.size);
    // Load the center image in color
    var colorImage = new InterleavedU8(1, 1, 3);
    imageLookup.loadImage(center.pview.id, colorImage);
    // Since the fused image is in the original (i.e. distorted) pixel coordinates and is not rectified,
    // that needs to be taken in account by undistorting the image to create the point cloud.
    CameraPinholeBrown intrinsic = BundleAdjustmentOps.convert(example.scene.cameras.get(center.cameraIdx).model, colorImage.width, colorImage.height, null);
    Point2Transform2_F64 pixel_to_norm = new LensDistortionBrown(intrinsic).distort_F64(true, false);
    MultiViewStereoOps.disparityToCloud(fusedDisparity, fusedParam, new PointToPixelTransform_F64(pixel_to_norm), (pixX, pixY, x, y, z) -> {
        cloud.grow().setTo(x, y, z);
        cloudRgb.add(colorImage.get24(pixX, pixY));
    });
    // Configure the point cloud viewer
    PointCloudViewer pcv = VisualizeData.createPointCloudViewer();
    pcv.setCameraHFov(UtilAngle.radian(70));
    pcv.setTranslationStep(0.15);
    pcv.addCloud(cloud.toList(), cloudRgb.data);
    // pcv.setColorizer(new SingleAxisRgb.Z().fperiod(30.0));
    JComponent viewer = pcv.getComponent();
    viewer.setPreferredSize(new Dimension(600, 600));
    ShowImages.showWindow(viewer, "Point Cloud", true);
    System.out.println("Done");
}
Also used : Point3D_F64(georegression.struct.point.Point3D_F64) InterleavedU8(boofcv.struct.image.InterleavedU8) ListDisplayPanel(boofcv.gui.ListDisplayPanel) CameraPinholeBrown(boofcv.struct.calib.CameraPinholeBrown) ConfigDisparityBMBest5(boofcv.factory.disparity.ConfigDisparityBMBest5) LensDistortionBrown(boofcv.alg.distort.brown.LensDistortionBrown) RectifiedPairPanel(boofcv.gui.stereo.RectifiedPairPanel) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage) PointCloudViewer(boofcv.visualize.PointCloudViewer) PointToPixelTransform_F64(boofcv.struct.distort.PointToPixelTransform_F64) LookUpImageFilesByIndex(boofcv.io.image.LookUpImageFilesByIndex) GrayU8(boofcv.struct.image.GrayU8) SceneWorkingGraph(boofcv.alg.structure.SceneWorkingGraph) MultiBaselineStereoIndependent(boofcv.alg.mvs.MultiBaselineStereoIndependent) Point2Transform2_F64(boofcv.struct.distort.Point2Transform2_F64) DogArray_I32(org.ddogleg.struct.DogArray_I32) DogArray(org.ddogleg.struct.DogArray) GrayF32(boofcv.struct.image.GrayF32) TIntObjectHashMap(gnu.trove.map.hash.TIntObjectHashMap) DisparityParameters(boofcv.alg.mvs.DisparityParameters)

Example 4 with ConfigDisparityBMBest5

use of boofcv.factory.disparity.ConfigDisparityBMBest5 in project BoofCV by lessthanoptimal.

From the class ExampleStereoTwoViewsOneCamera, method main.

/**
 * Stereo from a single moving camera: estimates the motion between two photos from matched
 * features, rectifies the pair, computes dense disparity, and displays the rectified images,
 * the disparity map, and a 3D point cloud.
 */
public static void main(String[] args) {
    // Where the example calibration file and input photos live
    String calibrationDir = UtilIO.pathExample("calibration/mono/Sony_DSC-HX5V_Chess/");
    String imageDirectory = UtilIO.pathExample("stereo/");

    // Intrinsic parameters of the single camera used for both shots
    CameraPinholeBrown intrinsic = CalibrationIO.load(new File(calibrationDir, "intrinsic.yaml"));

    // The camera translated from left to right between these two photos
    BufferedImage bufferedLeft = UtilImageIO.loadImage(imageDirectory, "mono_wall_01.jpg");
    BufferedImage bufferedRight = UtilImageIO.loadImage(imageDirectory, "mono_wall_02.jpg");

    // Gray scale versions, still containing lens distortion
    GrayU8 distortedLeft = ConvertBufferedImage.convertFrom(bufferedLeft, (GrayU8) null);
    GrayU8 distortedRight = ConvertBufferedImage.convertFrom(bufferedRight, (GrayU8) null);

    // Find feature correspondences between the two views
    List<AssociatedPair> matchedFeatures = ExampleComputeFundamentalMatrix.computeMatches(bufferedLeft, bufferedRight);

    // Switch from pixel coordinates into normalized image coordinates
    List<AssociatedPair> matchedCalibrated = convertToNormalizedCoordinates(matchedFeatures, intrinsic);

    // Robustly estimate the motion between the two views
    List<AssociatedPair> inliers = new ArrayList<>();
    Se3_F64 leftToRight = estimateCameraMotion(intrinsic, matchedCalibrated, inliers);
    drawInliers(bufferedLeft, bufferedRight, intrinsic, inliers);

    // Rectify and remove lens distortion for stereo processing
    var rectifiedK = new DMatrixRMaj(3, 3);
    var rectifiedR = new DMatrixRMaj(3, 3);
    GrayU8 rectifiedLeft = distortedLeft.createSameShape();
    GrayU8 rectifiedRight = distortedRight.createSameShape();
    GrayU8 rectifiedMask = distortedLeft.createSameShape();
    rectifyImages(distortedLeft, distortedRight, leftToRight, intrinsic, intrinsic, rectifiedLeft, rectifiedRight, rectifiedMask, rectifiedK, rectifiedR);

    // Configure and run block-matching disparity with a Census error
    var config = new ConfigDisparityBMBest5();
    config.errorType = DisparityError.CENSUS;
    config.disparityMin = disparityMin;
    config.disparityRange = disparityRange;
    config.subpixel = true;
    config.regionRadiusX = config.regionRadiusY = 5;
    config.maxPerPixelError = 20;
    config.validateRtoL = 1;
    config.texture = 0.1;
    StereoDisparity<GrayU8, GrayF32> disparityAlg = FactoryStereoDisparity.blockMatchBest5(config, GrayU8.class, GrayF32.class);
    disparityAlg.process(rectifiedLeft, rectifiedRight);
    GrayF32 disparity = disparityAlg.getDisparity();

    // Zero out disparity pixels which fall outside the original image bounds
    RectifyImageOps.applyMask(disparity, rectifiedMask, 0);

    // Visualize the rectified pair, the disparity map, and the point cloud
    BufferedImage visualized = VisualizeImageData.disparity(disparity, null, disparityRange, 0);
    BufferedImage outLeft = ConvertBufferedImage.convertTo(rectifiedLeft, null);
    BufferedImage outRight = ConvertBufferedImage.convertTo(rectifiedRight, null);
    ShowImages.showWindow(new RectifiedPairPanel(true, outLeft, outRight), "Rectification", true);
    ShowImages.showWindow(visualized, "Disparity", true);
    showPointCloud(disparity, outLeft, leftToRight, rectifiedK, rectifiedR, disparityMin, disparityRange);
    System.out.println("Total found " + matchedCalibrated.size());
    System.out.println("Total Inliers " + inliers.size());
}
Also used : AssociatedPair(boofcv.struct.geo.AssociatedPair) CameraPinholeBrown(boofcv.struct.calib.CameraPinholeBrown) ConfigDisparityBMBest5(boofcv.factory.disparity.ConfigDisparityBMBest5) ArrayList(java.util.ArrayList) DMatrixRMaj(org.ejml.data.DMatrixRMaj) RectifiedPairPanel(boofcv.gui.stereo.RectifiedPairPanel) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage) File(java.io.File) Se3_F64(georegression.struct.se.Se3_F64)

Example 5 with ConfigDisparityBMBest5

use of boofcv.factory.disparity.ConfigDisparityBMBest5 in project BoofCV by lessthanoptimal.

From the class TestMultiBaselineStereoIndependent, method simulate_constant_disparity.

/**
 * The plane being viewed and the camera's image plane are parallel causing the disparity to have a constant value,
 * making it easy to check for correctness.
 *
 * @param tolFilled What fraction of fused image should be filled
 * @param tolCorrect Out of the filled pixels what fraction need to have the correct disparity
 */
void simulate_constant_disparity(Se3_F64 world_to_view1, Se3_F64 world_to_view2, double tolFilled, double tolCorrect) {
    // Each camera is different.
    List<CameraPinholeBrown> listIntrinsic = new ArrayList<>();
    listIntrinsic.add(new CameraPinholeBrown().fsetK(150, 140, 0, 105, 100, 210, 200).fsetRadial(0.02, -0.003));
    listIntrinsic.add(new CameraPinholeBrown().fsetK(151, 142, 0, 107.5, 102.5, 215, 205).fsetRadial(0.03, -0.001));
    listIntrinsic.add(new CameraPinholeBrown().fsetK(149, 141, 0, 102.5, 107.5, 205, 215).fsetRadial(0.001, 0.003));
    // Create the scene. This will be used as input into MultiViewToFusedDisparity and in the simulator
    var scene = new SceneStructureMetric(true);
    scene.initialize(3, 3, 0);
    scene.setCamera(0, true, listIntrinsic.get(0));
    scene.setCamera(1, true, listIntrinsic.get(1));
    scene.setCamera(2, true, listIntrinsic.get(2));
    // All views are looking head on at the target. The second and third views have been offset to ensure full
    // coverage and that it's incorporating all the views, otherwise there would be large gaps
    scene.setView(0, 0, true, eulerXyz(0, 0, 0, 0.0, 0, 0, null));
    scene.setView(1, 1, true, world_to_view1);
    scene.setView(2, 2, true, world_to_view2);
    var lookup = new MockLookUp();
    var alg = new MultiBaselineStereoIndependent<>(lookup, ImageType.SB_F32);
    // Not mocking disparity because of how complex that would be to pull off. This makes it a bit of an inexact
    // science to ensure fill in
    var configDisp = new ConfigDisparityBMBest5();
    configDisp.errorType = DisparityError.SAD;
    configDisp.texture = 0.05;
    configDisp.disparityMin = 20;
    configDisp.disparityRange = 80;
    alg.stereoDisparity = FactoryStereoDisparity.blockMatchBest5(configDisp, GrayF32.class, GrayF32.class);
    // Textured target that stereo will work well on
    var texture = new GrayF32(100, 100);
    ImageMiscOps.fillUniform(texture, rand, 50, 255);
    SimulatePlanarWorld sim = new SimulatePlanarWorld();
    sim.addSurface(eulerXyz(0, 0, 2, 0, Math.PI, 0, null), 3, texture);
    // Render each view of the simulated scene and record the SBA index -> view ID mapping
    List<GrayF32> images = new ArrayList<>();
    TIntObjectMap<String> sbaIndexToViewID = new TIntObjectHashMap<>();
    for (int i = 0; i < listIntrinsic.size(); i++) {
        sbaIndexToViewID.put(i, i + "");
        sim.setCamera(listIntrinsic.get(i));
        sim.setWorldToCamera(scene.motions.get(i).motion);
        images.add(sim.render().clone());
        if (visualize)
            ShowImages.showWindow(images.get(images.size() - 1), "Frame " + i);
    }
    lookup.images = images;
    // Fuse disparity with view 0 as the center and views 1,2 as its stereo partners
    assertTrue(alg.process(scene, 0, DogArray_I32.array(1, 2), sbaIndexToViewID::get));
    GrayF32 found = alg.getFusedDisparity();
    assertEquals(listIntrinsic.get(0).width, found.width);
    assertEquals(listIntrinsic.get(0).height, found.height);
    if (visualize) {
        ShowImages.showWindow(VisualizeImageData.disparity(found, null, 100, 0x00FF00), "Disparity");
        BoofMiscOps.sleep(60_000);
    }
    DisparityParameters param = alg.getFusedParam();
    // Check the results. Since the target fills the view and is a known constant Z away we can check that here.
    // however since a real disparity algorithm is being used its inputs will not be perfect
    int totalFilled = 0;
    int totalCorrect = 0;
    for (int y = 0; y < found.height; y++) {
        for (int x = 0; x < found.width; x++) {
            float d = found.get(x, y);
            assertTrue(d >= 0);
            // disparityRange is the sentinel for "invalid/unfilled" pixels
            if (d >= param.disparityRange)
                continue;
            // Recover depth from disparity: Z = baseline * fx / (d + disparityMin)
            double Z = param.baseline * param.pinhole.fx / (d + param.disparityMin);
            if (Math.abs(Z - 2.0) <= 0.1)
                totalCorrect++;
            totalFilled++;
        }
    }
    int N = found.width * found.height;
    assertTrue(N * tolFilled <= totalFilled);
    assertTrue(totalFilled * tolCorrect <= totalCorrect);
}
Also used : CameraPinholeBrown(boofcv.struct.calib.CameraPinholeBrown) ConfigDisparityBMBest5(boofcv.factory.disparity.ConfigDisparityBMBest5) ArrayList(java.util.ArrayList) SceneStructureMetric(boofcv.abst.geo.bundle.SceneStructureMetric) SimulatePlanarWorld(boofcv.simulation.SimulatePlanarWorld) GrayF32(boofcv.struct.image.GrayF32) TIntObjectHashMap(gnu.trove.map.hash.TIntObjectHashMap)

Aggregations

ConfigDisparityBMBest5 (boofcv.factory.disparity.ConfigDisparityBMBest5)6 CameraPinholeBrown (boofcv.struct.calib.CameraPinholeBrown)4 GrayF32 (boofcv.struct.image.GrayF32)4 RectifiedPairPanel (boofcv.gui.stereo.RectifiedPairPanel)3 ConvertBufferedImage (boofcv.io.image.ConvertBufferedImage)3 GrayU8 (boofcv.struct.image.GrayU8)3 TIntObjectHashMap (gnu.trove.map.hash.TIntObjectHashMap)3 BufferedImage (java.awt.image.BufferedImage)3 ArrayList (java.util.ArrayList)3 SceneStructureMetric (boofcv.abst.geo.bundle.SceneStructureMetric)2 DMatrixRMaj (org.ejml.data.DMatrixRMaj)2 ConfigSpeckleFilter (boofcv.abst.disparity.ConfigSpeckleFilter)1 LensDistortionBrown (boofcv.alg.distort.brown.LensDistortionBrown)1 DisparityParameters (boofcv.alg.mvs.DisparityParameters)1 MultiBaselineStereoIndependent (boofcv.alg.mvs.MultiBaselineStereoIndependent)1 SceneWorkingGraph (boofcv.alg.structure.SceneWorkingGraph)1 ConfigDisparitySGM (boofcv.factory.disparity.ConfigDisparitySGM)1 ListDisplayPanel (boofcv.gui.ListDisplayPanel)1 ControlPanelDisparityDense (boofcv.gui.controls.ControlPanelDisparityDense)1 LookUpImageFilesByIndex (boofcv.io.image.LookUpImageFilesByIndex)1