Example 36 with TIntObjectHashMap

Use of gnu.trove.map.hash.TIntObjectHashMap in project OsmAnd-tools by osmandapp.

The class BinaryInspector, method printTransportDetailInfo.

private void printTransportDetailInfo(VerboseInfo verbose, BinaryMapIndexReader index, TransportIndex p) throws IOException {
    SearchRequest<TransportStop> sr = BinaryMapIndexReader.buildSearchTransportRequest(
            MapUtils.get31TileNumberX(verbose.lonleft), MapUtils.get31TileNumberX(verbose.lonright),
            MapUtils.get31TileNumberY(verbose.lattop), MapUtils.get31TileNumberY(verbose.latbottom),
            -1, null);
    List<TransportStop> stops = index.searchTransportIndex(sr);
    Map<Long, TransportRoute> rs = new LinkedHashMap<>();
    List<String> lrs = new ArrayList<>();
    println("\nStops:");
    for (TransportStop s : stops) {
        lrs.clear();
        for (int pnt : s.getReferencesToRoutes()) {
            TransportRoute route;
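            // Fetch each route from the index only once, caching it by id for later reuse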
            if (!rs.containsKey((long) pnt)) {
                TIntObjectHashMap<TransportRoute> pts = index.getTransportRoutes(new int[] { pnt });
                route = pts.valueCollection().iterator().next();
                rs.put((long) pnt, route);
            } else {
                route = rs.get((long) pnt);
            }
            if (route != null) {
                // lrs.add(route.getRef() + " " + route.getName(verbose.lang));
                lrs.add(route.getRef() + " " + route.getType());
            }
        }
        if (s.getDeletedRoutesIds() != null) {
            for (long l : s.getDeletedRoutesIds()) {
                lrs.add(" -" + (l / 2));
            }
        }
        String exitsString = s.getExitsString();
        println("  " + s.getName(verbose.lang) + ": " + lrs + " " + s.getLocation() + exitsString + " " + s.getId());
    }
    println("\nRoutes:");
    for (TransportRoute st : rs.values()) {
        List<String> stopsString = new ArrayList<>();
        for (TransportStop stop : st.getForwardStops()) {
            stopsString.add(stop.getName(verbose.lang));
        }
        Map<String, String> tags = st.getTags();
        StringBuilder tagString = new StringBuilder();
        if (tags != null) {
            for (Map.Entry<String, String> tag : tags.entrySet()) {
                tagString.append(tag.getKey()).append(":").append(tag.getValue()).append(" ");
            }
        }
        println("  " + st.getRef() + " " + st.getType() + " " + st.getName(verbose.lang) + ": " + stopsString + " " + tagString);
        if (verbose.vtransportschedule) {
            TransportSchedule sc = st.getSchedule();
            if (sc != null) {
                StringBuilder bld = new StringBuilder();
                int[] tripIntervalsList = sc.getTripIntervals();
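                // Trip times are stored as deltas from the previous trip, so accumulate
                // them to recover absolute departure times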
                int prevTime = 0;
                for (int i : tripIntervalsList) {
                    i = i + prevTime;
                    String tm = TransportRoutePlanner.formatTransportTime(i);
                    bld.append(tm);
                    prevTime = i;
                }
                println("   " + bld.toString());
                bld = new StringBuilder();
                int atm = tripIntervalsList[0];
                int[] avgStopIntervals = sc.getAvgStopIntervals();
                int[] avgWaitIntervals = sc.getAvgWaitIntervals();
                for (int k = 0; k < st.getForwardStops().size(); k++) {
                    TransportStop stp = st.getForwardStops().get(k);
                    if (k == 0) {
                        bld.append(String.format("%6.6s %s, ", stp.getName(), TransportRoutePlanner.formatTransportTime(atm)));
                    } else {
                        atm += avgStopIntervals[k - 1];
                        if (avgWaitIntervals.length > k && avgWaitIntervals[k] > 0) {
                            bld.append(String.format("%6.6s %s - %s, ", stp.getName(), TransportRoutePlanner.formatTransportTime(atm), TransportRoutePlanner.formatTransportTime(avgWaitIntervals[k] + atm)));
                        } else {
                            bld.append(String.format("%6.6s %s, ", stp.getName(), TransportRoutePlanner.formatTransportTime(atm)));
                        }
                    }
                }
                // %3.3s
                println("   " + bld.toString());
            }
        }
    }
}
Also used: TIntArrayList(gnu.trove.list.array.TIntArrayList) ArrayList(java.util.ArrayList) TLongArrayList(gnu.trove.list.array.TLongArrayList) LinkedHashMap(java.util.LinkedHashMap) TransportRoute(net.osmand.data.TransportRoute) TransportSchedule(net.osmand.data.TransportSchedule) TransportStop(net.osmand.data.TransportStop) TIntObjectHashMap(gnu.trove.map.hash.TIntObjectHashMap) Map(java.util.Map) HashMap(java.util.HashMap)
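
The core Trove pattern above is a lookup keyed by a primitive int, with the fetched value pulled out of valueCollection(). A minimal standalone sketch of that pattern, assuming trove4j is on the classpath; the route ids and names below are made up for illustration.

import gnu.trove.map.hash.TIntObjectHashMap;

public class TroveLookupSketch {
    public static void main(String[] args) {
        // Primitive int keys, so no Integer boxing on put/get
        TIntObjectHashMap<String> routesById = new TIntObjectHashMap<>();
        routesById.put(101, "Bus 12");
        routesById.put(102, "Tram 3");

        // valueCollection() exposes stored values as a java.util.Collection;
        // the example above extracts the single fetched route the same way
        String first = routesById.valueCollection().iterator().next();
        System.out.println(first);

        // Like java.util.Map, get() returns null for a missing key
        System.out.println(routesById.get(999));
    }
}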

Example 37 with TIntObjectHashMap

Use of gnu.trove.map.hash.TIntObjectHashMap in project BoofCV by lessthanoptimal.

The class TestGenerateStereoPairGraphFromScene, method allTogether.

/**
 * Simple test where viewA and viewB share only very distant points and should have a low score. A and C
 * share much closer points and should have a good score.
 */
@Test
void allTogether() {
    var viewToId = new TIntObjectHashMap<String>();
    var scene = new SceneStructureMetric(false);
    scene.initialize(3, 3, 10);
    for (int i = 0; i < 3; i++) {
        viewToId.put(i, "" + (i + 1));
        scene.setView(i, i, true, new Se3_F64());
        scene.setCamera(i, true, new CameraPinhole(200, 200, 0, 150, 150, 300, 300));
        scene.motions.get(i).motion.T.x = i;
    }
    // Create the points far away
    for (int i = 0; i < 5; i++) {
        scene.setPoint(i, 0, 0, 1e4);
        scene.points.get(i).views.setTo(0, 1);
    }
    // Add the up close points
    for (int i = 5; i < 10; i++) {
        scene.setPoint(i, 0, 0, 4);
        scene.points.get(i).views.setTo(0, 2);
    }
    var alg = new GenerateStereoPairGraphFromScene();
    alg.process(viewToId, scene);
    StereoPairGraph found = alg.getStereoGraph();
    assertEquals(3, found.vertexes.size());
    StereoPairGraph.Vertex vA = Objects.requireNonNull(found.vertexes.get("1"));
    StereoPairGraph.Vertex vB = Objects.requireNonNull(found.vertexes.get("2"));
    StereoPairGraph.Vertex vC = Objects.requireNonNull(found.vertexes.get("3"));
    assertEquals(0, vA.indexSba);
    assertEquals(1, vB.indexSba);
    assertEquals(2, vC.indexSba);
    assertEquals(2, vA.pairs.size());
    assertEquals(1, vB.pairs.size());
    assertEquals(1, vC.pairs.size());
    double quality_a_b = vA.pairs.get(0).quality3D;
    double quality_a_c = vA.pairs.get(1).quality3D;
    assertTrue(quality_a_c > quality_a_b * 2);
    assertTrue(quality_a_b > 0.0 && quality_a_b <= 1.0);
    assertTrue(quality_a_c > 0.0 && quality_a_c <= 1.0);
}
Also used: SceneStructureMetric(boofcv.abst.geo.bundle.SceneStructureMetric) TIntObjectHashMap(gnu.trove.map.hash.TIntObjectHashMap) StereoPairGraph(boofcv.alg.mvs.StereoPairGraph) CameraPinhole(boofcv.struct.calib.CameraPinhole) Se3_F64(georegression.struct.se.Se3_F64) Test(org.junit.jupiter.api.Test)
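
The viewToId map above is iterated internally by the algorithm. A short sketch, assuming trove4j, of walking a TIntObjectHashMap without boxing the keys: forEachEntry takes a TIntObjectProcedure whose execute(int, V) returns true to continue iterating.

import gnu.trove.map.hash.TIntObjectHashMap;

public class ViewIdIterationSketch {
    public static void main(String[] args) {
        TIntObjectHashMap<String> viewToId = new TIntObjectHashMap<>();
        for (int i = 0; i < 3; i++) {
            viewToId.put(i, "" + (i + 1)); // same index-to-id convention as the test
        }
        // forEachEntry visits each (int, V) pair; returning true keeps iterating
        viewToId.forEachEntry((viewIndex, id) -> {
            System.out.println("view " + viewIndex + " -> id " + id);
            return true;
        });
    }
}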

Example 38 with TIntObjectHashMap

Use of gnu.trove.map.hash.TIntObjectHashMap in project BoofCV by lessthanoptimal.

The class TestMultiBaselineStereoIndependent, method simulate_constant_disparity.

/**
 * The plane being viewed and the camera's image plane are parallel, causing the disparity to have a constant
 * value, which makes it easy to check for correctness.
 *
 * @param tolFilled What fraction of the fused image should be filled
 * @param tolCorrect Out of the filled pixels, what fraction need to have the correct disparity
 */
void simulate_constant_disparity(Se3_F64 world_to_view1, Se3_F64 world_to_view2, double tolFilled, double tolCorrect) {
    // Each camera is different.
    List<CameraPinholeBrown> listIntrinsic = new ArrayList<>();
    listIntrinsic.add(new CameraPinholeBrown().fsetK(150, 140, 0, 105, 100, 210, 200).fsetRadial(0.02, -0.003));
    listIntrinsic.add(new CameraPinholeBrown().fsetK(151, 142, 0, 107.5, 102.5, 215, 205).fsetRadial(0.03, -0.001));
    listIntrinsic.add(new CameraPinholeBrown().fsetK(149, 141, 0, 102.5, 107.5, 205, 215).fsetRadial(0.001, 0.003));
    // Create the scene. This will be used as input into MultiViewToFusedDisparity and in the simulator
    var scene = new SceneStructureMetric(true);
    scene.initialize(3, 3, 0);
    scene.setCamera(0, true, listIntrinsic.get(0));
    scene.setCamera(1, true, listIntrinsic.get(1));
    scene.setCamera(2, true, listIntrinsic.get(2));
    // All views look head-on at the target. The second and third views are offset to ensure full coverage
    // and that all views are being incorporated; otherwise there would be large gaps
    scene.setView(0, 0, true, eulerXyz(0, 0, 0, 0.0, 0, 0, null));
    scene.setView(1, 1, true, world_to_view1);
    scene.setView(2, 2, true, world_to_view2);
    var lookup = new MockLookUp();
    var alg = new MultiBaselineStereoIndependent<>(lookup, ImageType.SB_F32);
    // Not mocking disparity because of how complex that would be to pull off. This makes it a bit of an inexact
    // science to ensure fill-in
    var configDisp = new ConfigDisparityBMBest5();
    configDisp.errorType = DisparityError.SAD;
    configDisp.texture = 0.05;
    configDisp.disparityMin = 20;
    configDisp.disparityRange = 80;
    alg.stereoDisparity = FactoryStereoDisparity.blockMatchBest5(configDisp, GrayF32.class, GrayF32.class);
    // Textured target that stereo will work well on
    var texture = new GrayF32(100, 100);
    ImageMiscOps.fillUniform(texture, rand, 50, 255);
    SimulatePlanarWorld sim = new SimulatePlanarWorld();
    sim.addSurface(eulerXyz(0, 0, 2, 0, Math.PI, 0, null), 3, texture);
    List<GrayF32> images = new ArrayList<>();
    TIntObjectMap<String> sbaIndexToViewID = new TIntObjectHashMap<>();
    for (int i = 0; i < listIntrinsic.size(); i++) {
        sbaIndexToViewID.put(i, i + "");
        sim.setCamera(listIntrinsic.get(i));
        sim.setWorldToCamera(scene.motions.get(i).motion);
        images.add(sim.render().clone());
        if (visualize)
            ShowImages.showWindow(images.get(images.size() - 1), "Frame " + i);
    }
    lookup.images = images;
    assertTrue(alg.process(scene, 0, DogArray_I32.array(1, 2), sbaIndexToViewID::get));
    GrayF32 found = alg.getFusedDisparity();
    assertEquals(listIntrinsic.get(0).width, found.width);
    assertEquals(listIntrinsic.get(0).height, found.height);
    if (visualize) {
        ShowImages.showWindow(VisualizeImageData.disparity(found, null, 100, 0x00FF00), "Disparity");
        BoofMiscOps.sleep(60_000);
    }
    DisparityParameters param = alg.getFusedParam();
    // Check the results. Since the target fills the view and is a known constant Z away, we can check that here.
    // However, since a real disparity algorithm is being used, the results will not be perfect
    int totalFilled = 0;
    int totalCorrect = 0;
    for (int y = 0; y < found.height; y++) {
        for (int x = 0; x < found.width; x++) {
            float d = found.get(x, y);
            assertTrue(d >= 0);
            if (d >= param.disparityRange)
                continue;
            double Z = param.baseline * param.pinhole.fx / (d + param.disparityMin);
            if (Math.abs(Z - 2.0) <= 0.1)
                totalCorrect++;
            totalFilled++;
        }
    }
    int N = found.width * found.height;
    assertTrue(N * tolFilled <= totalFilled);
    assertTrue(totalFilled * tolCorrect <= totalCorrect);
}
Also used: CameraPinholeBrown(boofcv.struct.calib.CameraPinholeBrown) ConfigDisparityBMBest5(boofcv.factory.disparity.ConfigDisparityBMBest5) ArrayList(java.util.ArrayList) SceneStructureMetric(boofcv.abst.geo.bundle.SceneStructureMetric) SimulatePlanarWorld(boofcv.simulation.SimulatePlanarWorld) GrayF32(boofcv.struct.image.GrayF32) TIntObjectHashMap(gnu.trove.map.hash.TIntObjectHashMap)
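
The correctness check above recovers depth from disparity with Z = baseline * fx / (d + disparityMin). A tiny self-contained sketch of that arithmetic; the numbers are made up for illustration and chosen so the depth lands on the simulated target's 2 m plane (the test reads the real values from DisparityParameters).

public class DisparityDepthSketch {
    public static void main(String[] args) {
        double baseline = 0.5;   // distance between rectified cameras, meters
        double fx = 150.0;       // focal length, pixels
        int disparityMin = 20;   // search starts here, so the stored d is an offset
        float d = 17.5f;         // value read from the fused disparity image

        // Z = baseline * fx / (d + disparityMin) = 0.5 * 150 / 37.5 = 2.0
        double Z = baseline * fx / (d + disparityMin);
        System.out.printf("depth = %.3f m%n", Z); // prints depth = 2.000 m
    }
}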

Example 39 with TIntObjectHashMap

Use of gnu.trove.map.hash.TIntObjectHashMap in project BoofCV by lessthanoptimal.

The class TestMultiBaselineStereoIndependent, method handleOneCameraManyViews.

/**
 * In this scene there is only one camera for several views
 */
@Test
void handleOneCameraManyViews() {
    var scene = new SceneStructureMetric(true);
    scene.initialize(1, 3, 0);
    scene.setCamera(0, true, new CameraPinholeBrown().fsetK(30, 30, 0, 25, 25, 50, 50));
    for (int i = 0; i < 3; i++) {
        scene.setView(i, 0, true, eulerXyz(i, 0, 0, 0, 0, 0, null));
    }
    var alg = new MultiBaselineStereoIndependent<>(ImageType.SB_F32);
    var configDisp = new ConfigDisparityBMBest5();
    configDisp.errorType = DisparityError.SAD;
    configDisp.disparityRange = 5;
    alg.stereoDisparity = FactoryStereoDisparity.blockMatchBest5(configDisp, GrayF32.class, GrayF32.class);
    List<GrayF32> images = new ArrayList<>();
    TIntObjectMap<String> sbaIndexToViewID = new TIntObjectHashMap<>();
    for (int i = 0; i < 3; i++) {
        images.add(new GrayF32(50, 50));
        sbaIndexToViewID.put(i, i + "");
    }
    alg.lookUpImages = new MockLookUp(images);
    // Override so that it will always be happy
    alg.performFusion = new MultiBaselineDisparityMedian() {

        @Override
        public boolean process(GrayF32 disparity) {
            return true;
        }
    };
    // just see if it blows up
    assertTrue(alg.process(scene, 0, DogArray_I32.array(1, 2), sbaIndexToViewID::get));
}
Also used: CameraPinholeBrown(boofcv.struct.calib.CameraPinholeBrown) ConfigDisparityBMBest5(boofcv.factory.disparity.ConfigDisparityBMBest5) ArrayList(java.util.ArrayList) SceneStructureMetric(boofcv.abst.geo.bundle.SceneStructureMetric) GrayF32(boofcv.struct.image.GrayF32) TIntObjectHashMap(gnu.trove.map.hash.TIntObjectHashMap) Test(org.junit.jupiter.api.Test)
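
The test above keeps the pipeline happy by subclassing MultiBaselineDisparityMedian inline and overriding the one method it exercises. A generic sketch of that stub-by-subclass pattern, with Fuser as a hypothetical stand-in for the BoofCV class:

// Fuser is a hypothetical stand-in for MultiBaselineDisparityMedian
class Fuser {
    public boolean process(float[] disparity) {
        return disparity != null && disparity.length > 0;
    }
}

public class StubSketch {
    public static void main(String[] args) {
        // Override just the method under test so it always reports success
        Fuser alwaysHappy = new Fuser() {
            @Override
            public boolean process(float[] disparity) {
                return true;
            }
        };
        System.out.println(alwaysHappy.process(null)); // prints true
    }
}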

Example 40 with TIntObjectHashMap

Use of gnu.trove.map.hash.TIntObjectHashMap in project BoofCV by lessthanoptimal.

The class ExampleMultiBaselineStereo, method main.

public static void main(String[] args) {
    // Compute a sparse reconstruction. This will give us intrinsic and extrinsic for all views
    var example = new ExampleMultiViewSparseReconstruction();
    // Specifies the "center" frame to use
    int centerViewIdx = 15;
    example.compute("tree_snow_01.mp4", true);
    // example.compute("ditch_02.mp4", true);
    // example.compute("holiday_display_01.mp4"", true);
    // example.compute("log_building_02.mp4"", true);
    // example.compute("drone_park_01.mp4", false);
    // example.compute("stone_sign.mp4", true);
    // We need a way to load images based on their ID. In this particular case the ID encodes the array index.
    var imageLookup = new LookUpImageFilesByIndex(example.imageFiles);
    // Next we tell it which view to use as the "center", which acts as the common view for all disparity images.
    // The process of selecting the best views to use as centers is a problem all its own. To keep things simple
    // we just pick a frame.
    SceneWorkingGraph.View center = example.working.getAllViews().get(centerViewIdx);
    // The final scene refined by bundle adjustment is created from the working graph. However, the 3D relationship
    // between views is contained in the pairwise graph. A view in the working graph has a reference to the view
    // in the pairwise graph. Using that, we will find all connected views that have a 3D relationship
    var pairedViewIdxs = new DogArray_I32();
    var sbaIndexToImageID = new TIntObjectHashMap<String>();
    // This relationship between pairwise and working graphs might seem (and is) a bit convoluted. The Pairwise
    // graph is the initial crude sketch of what might be connected. The working graph is an intermediate
    // data structure for computing the metric scene. SBA is a refinement of the working graph.
    // Iterate through all connected views in the pairwise graph and mark their indexes in the working graph
    center.pview.connections.forEach((m) -> {
        // if there isn't a 3D relationship just skip it
        if (!m.is3D)
            return;
        String connectedID = m.other(center.pview).id;
        SceneWorkingGraph.View connected = example.working.views.get(connectedID);
        // Make sure the pairwise view exists in the working graph too
        if (connected == null)
            return;
        // Add this view to the index to name/ID lookup table
        sbaIndexToImageID.put(connected.index, connectedID);
        // Note that this view is one which acts as the second image in the stereo pair
        pairedViewIdxs.add(connected.index);
    });
    // Add the center camera image to the ID look up table
    sbaIndexToImageID.put(centerViewIdx, center.pview.id);
    // Configure the stereo disparity algorithm which is used
    var configDisparity = new ConfigDisparityBMBest5();
    configDisparity.validateRtoL = 1;
    configDisparity.texture = 0.5;
    configDisparity.regionRadiusX = configDisparity.regionRadiusY = 4;
    configDisparity.disparityRange = 120;
    // This is the actual MBS algorithm mentioned previously. It selects the best disparity for each pixel
    // in the original image using a median filter.
    var multiBaseline = new MultiBaselineStereoIndependent<>(imageLookup, ImageType.SB_U8);
    multiBaseline.setStereoDisparity(FactoryStereoDisparity.blockMatchBest5(configDisparity, GrayU8.class, GrayF32.class));
    // Print out verbose debugging and profile information
    multiBaseline.setVerbose(System.out, null);
    multiBaseline.setVerboseProfiling(System.out);
    // Improve stereo by removing small regions, which tends to be noise. Consider adjusting the region size.
    multiBaseline.setDisparitySmoother(FactoryStereoDisparity.removeSpeckle(null, GrayF32.class));
    // Print out debugging information from the smoother
    // Objects.requireNonNull(multiBaseline.getDisparitySmoother()).setVerbose(System.out,null);
    // Creates a list where you can switch between different images/visualizations
    var listDisplay = new ListDisplayPanel();
    listDisplay.setPreferredSize(new Dimension(1000, 300));
    ShowImages.showWindow(listDisplay, "Intermediate Results", true);
    // We will display intermediate results as they come in
    multiBaseline.setListener((leftView, rightView, rectLeft, rectRight, disparity, mask, parameters, rect) -> {
        // Visualize the rectified stereo pair. You can interact with this window and verify
        // that the y-axis is aligned
        var rectified = new RectifiedPairPanel(true);
        rectified.setImages(ConvertBufferedImage.convertTo(rectLeft, null), ConvertBufferedImage.convertTo(rectRight, null));
        // Cleans up the disparity image by zeroing out pixels that are outside the original image bounds
        RectifyImageOps.applyMask(disparity, mask, 0);
        // Display the colorized disparity
        BufferedImage colorized = VisualizeImageData.disparity(disparity, null, parameters.disparityRange, 0);
        SwingUtilities.invokeLater(() -> {
            listDisplay.addItem(rectified, "Rectified " + leftView + " " + rightView);
            listDisplay.addImage(colorized, leftView + " " + rightView);
        });
    });
    // Process the images and compute a single combined disparity image
    if (!multiBaseline.process(example.scene, center.index, pairedViewIdxs, sbaIndexToImageID::get)) {
        throw new RuntimeException("Failed to fuse stereo views");
    }
    // Extract the point cloud from the fused disparity image
    GrayF32 fusedDisparity = multiBaseline.getFusedDisparity();
    DisparityParameters fusedParam = multiBaseline.getFusedParam();
    BufferedImage colorizedDisp = VisualizeImageData.disparity(fusedDisparity, null, fusedParam.disparityRange, 0);
    ShowImages.showWindow(colorizedDisp, "Fused Disparity");
    // Now compute the point cloud it represents and the color of each pixel.
    // For the fused image, instead of being in rectified image coordinates it's in the original image coordinates;
    // this makes extracting color much easier.
    var cloud = new DogArray<>(Point3D_F64::new);
    var cloudRgb = new DogArray_I32(cloud.size);
    // Load the center image in color
    var colorImage = new InterleavedU8(1, 1, 3);
    imageLookup.loadImage(center.pview.id, colorImage);
    // Since the fused image is in the original (i.e. distorted) pixel coordinates and is not rectified,
    // that needs to be taken into account by undistorting the image to create the point cloud.
    CameraPinholeBrown intrinsic = BundleAdjustmentOps.convert(example.scene.cameras.get(center.cameraIdx).model, colorImage.width, colorImage.height, null);
    Point2Transform2_F64 pixel_to_norm = new LensDistortionBrown(intrinsic).distort_F64(true, false);
    MultiViewStereoOps.disparityToCloud(fusedDisparity, fusedParam, new PointToPixelTransform_F64(pixel_to_norm), (pixX, pixY, x, y, z) -> {
        cloud.grow().setTo(x, y, z);
        cloudRgb.add(colorImage.get24(pixX, pixY));
    });
    // Configure the point cloud viewer
    PointCloudViewer pcv = VisualizeData.createPointCloudViewer();
    pcv.setCameraHFov(UtilAngle.radian(70));
    pcv.setTranslationStep(0.15);
    pcv.addCloud(cloud.toList(), cloudRgb.data);
    // pcv.setColorizer(new SingleAxisRgb.Z().fperiod(30.0));
    JComponent viewer = pcv.getComponent();
    viewer.setPreferredSize(new Dimension(600, 600));
    ShowImages.showWindow(viewer, "Point Cloud", true);
    System.out.println("Done");
}
Also used: Point3D_F64(georegression.struct.point.Point3D_F64) InterleavedU8(boofcv.struct.image.InterleavedU8) ListDisplayPanel(boofcv.gui.ListDisplayPanel) CameraPinholeBrown(boofcv.struct.calib.CameraPinholeBrown) ConfigDisparityBMBest5(boofcv.factory.disparity.ConfigDisparityBMBest5) LensDistortionBrown(boofcv.alg.distort.brown.LensDistortionBrown) RectifiedPairPanel(boofcv.gui.stereo.RectifiedPairPanel) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage) PointCloudViewer(boofcv.visualize.PointCloudViewer) PointToPixelTransform_F64(boofcv.struct.distort.PointToPixelTransform_F64) LookUpImageFilesByIndex(boofcv.io.image.LookUpImageFilesByIndex) GrayU8(boofcv.struct.image.GrayU8) SceneWorkingGraph(boofcv.alg.structure.SceneWorkingGraph) MultiBaselineStereoIndependent(boofcv.alg.mvs.MultiBaselineStereoIndependent) Point2Transform2_F64(boofcv.struct.distort.Point2Transform2_F64) DogArray_I32(org.ddogleg.struct.DogArray_I32) DogArray(org.ddogleg.struct.DogArray) GrayF32(boofcv.struct.image.GrayF32) TIntObjectHashMap(gnu.trove.map.hash.TIntObjectHashMap) DisparityParameters(boofcv.alg.mvs.DisparityParameters)
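
The example above hands sbaIndexToImageID::get to process() as the index-to-name lookup. A small sketch, assuming trove4j, of why that works: TIntObjectMap.get(int) binds as a method reference to any single-method functional type that takes an int and returns the value type. IntFunction here is an illustrative stand-in for whatever functional interface BoofCV actually declares, and the frame name is made up.

import gnu.trove.map.TIntObjectMap;
import gnu.trove.map.hash.TIntObjectHashMap;
import java.util.function.IntFunction;

public class IndexToIdSketch {
    // IntFunction is a hypothetical stand-in for the lookup type process() expects
    static String describe(int index, IntFunction<String> indexToId) {
        return "sba index " + index + " -> image " + indexToId.apply(index);
    }

    public static void main(String[] args) {
        TIntObjectMap<String> sbaIndexToImageID = new TIntObjectHashMap<>();
        sbaIndexToImageID.put(15, "frame_0015"); // illustrative id
        // get(int) matches IntFunction<String>.apply(int) as a method reference
        System.out.println(describe(15, sbaIndexToImageID::get));
    }
}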

Aggregations

TIntObjectHashMap (gnu.trove.map.hash.TIntObjectHashMap): 43 usages
ArrayList (java.util.ArrayList): 15
LinkedList (java.util.LinkedList): 13
PeakResultPoint (uk.ac.sussex.gdsc.smlm.results.PeakResultPoint): 12
List (java.util.List): 11
Coordinate (uk.ac.sussex.gdsc.core.match.Coordinate): 10
Ticker (uk.ac.sussex.gdsc.core.logging.Ticker): 9
MemoryPeakResults (uk.ac.sussex.gdsc.smlm.results.MemoryPeakResults): 9
TIntHashSet (gnu.trove.set.hash.TIntHashSet): 8
IJ (ij.IJ): 8
Prefs (ij.Prefs): 8
PlugIn (ij.plugin.PlugIn): 8
TextWindow (ij.text.TextWindow): 8
Arrays (java.util.Arrays): 8
ArrayBlockingQueue (java.util.concurrent.ArrayBlockingQueue): 8
AtomicReference (java.util.concurrent.atomic.AtomicReference): 8
ImageJUtils (uk.ac.sussex.gdsc.core.ij.ImageJUtils): 8
ExtendedGenericDialog (uk.ac.sussex.gdsc.core.ij.gui.ExtendedGenericDialog): 8
MathUtils (uk.ac.sussex.gdsc.core.utils.MathUtils): 8
TIntProcedure (gnu.trove.procedure.TIntProcedure): 7