Example 46 with Mat

Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath: class TestOpenCVTools, method testPercentiles.

@Test
public void testPercentiles() {
    int[] minValues = { -2, 0, 1 };
    int[] maxValues = { 1, 10, 101 };
    opencv_core.setRNGSeed(100);
    for (int min : minValues) {
        for (int max : maxValues) {
            // Values min..max inclusive, shuffled so the percentile code cannot rely on ordering
            var values = IntStream.range(min, max + 1).asDoubleStream().toArray();
            var stats = new DescriptiveStatistics(values);
            var mat = new Mat(values);
            opencv_core.randShuffle(mat);
            assertEquals(stats.getPercentile(50), OpenCVTools.median(mat));
            assertEquals((min + max) / 2.0, OpenCVTools.median(mat));
            assertEquals(max, OpenCVTools.maximum(mat));
            assertEquals(min, OpenCVTools.minimum(mat));
            assertArrayEquals(new double[] { min, stats.getPercentile(50), max }, OpenCVTools.percentiles(mat, 1e-9, 50, 100));
            // Pad with NaNs; the statistics should be unchanged, since NaNs must be ignored
            double[] newValues = new double[values.length + 30];
            Arrays.fill(newValues, Double.NaN);
            System.arraycopy(values, 0, newValues, 0, values.length);
            mat.close();
            mat = new Mat(newValues);
            opencv_core.randShuffle(mat);
            assertEquals(stats.getPercentile(50), OpenCVTools.median(mat));
            assertEquals((min + max) / 2.0, OpenCVTools.median(mat));
            assertEquals(max, OpenCVTools.maximum(mat));
            assertEquals(min, OpenCVTools.minimum(mat));
            assertArrayEquals(new double[] { min, stats.getPercentile(50), max }, OpenCVTools.percentiles(mat, 1e-9, 50, 100));
            mat.close();
        }
    }
}
Also used: DescriptiveStatistics (org.apache.commons.math3.stat.descriptive.DescriptiveStatistics), Mat (org.bytedeco.opencv.opencv_core.Mat), Test (org.junit.jupiter.api.Test), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)
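
As a quick usage sketch (not taken from the qupath sources), the same API can be called directly on a Mat built from a double array; the package for OpenCVTools is assumed here to be qupath.opencv.tools, and new Mat(double...) yields a single-column CV_64F Mat:

import org.bytedeco.opencv.opencv_core.Mat;
import qupath.opencv.tools.OpenCVTools; // package assumed

public class PercentileSketch {
    public static void main(String[] args) {
        // Mat is AutoCloseable via JavaCPP's Pointer
        try (Mat mat = new Mat(new double[] { 4, 8, 15, 16, 23, 42 })) {
            // Even-length sample, so the median is interpolated (expected 15.5,
            // matching the (min + max) / 2.0 assertion in the test above)
            System.out.println(OpenCVTools.median(mat));
            // Arbitrary percentiles, mirroring the percentiles(mat, ...) call in the test
            System.out.println(java.util.Arrays.toString(OpenCVTools.percentiles(mat, 25, 50, 75)));
        }
    }
}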

Example 47 with Mat

Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath: class PixelClassifierTraining, method updateTrainingData.

private synchronized ClassifierTrainingData updateTrainingData(Map<PathClass, Integer> labelMap, Collection<ImageData<BufferedImage>> imageDataCollection) throws IOException {
    if (imageDataCollection.isEmpty()) {
        resetTrainingData();
        return null;
    }
    Map<PathClass, Integer> labels = new LinkedHashMap<>();
    boolean hasLockedAnnotations = false;
    if (labelMap == null) {
        Set<PathClass> pathClasses = new TreeSet<>((p1, p2) -> p1.toString().compareTo(p2.toString()));
        for (var imageData : imageDataCollection) {
            // Get labels for all annotations
            Collection<PathObject> annotations = imageData.getHierarchy().getAnnotationObjects();
            for (var annotation : annotations) {
                if (isTrainableAnnotation(annotation, true)) {
                    var pathClass = annotation.getPathClass();
                    pathClasses.add(pathClass);
                    // We only use boundary classes for areas
                    if (annotation.getROI().isArea()) {
                        var boundaryClass = boundaryStrategy.getBoundaryClass(pathClass);
                        if (boundaryClass != null)
                            pathClasses.add(boundaryClass);
                    }
                } else if (isTrainableAnnotation(annotation, false))
                    hasLockedAnnotations = true;
            }
        }
        int lab = 0;
        for (PathClass pathClass : pathClasses) {
            labels.put(pathClass, lab);
            lab++;
        }
    } else {
        labels.putAll(labelMap);
    }
    List<Mat> allFeatures = new ArrayList<>();
    List<Mat> allTargets = new ArrayList<>();
    for (var imageData : imageDataCollection) {
        // Get features & targets for all the tiles that we need
        var featureServer = getFeatureServer(imageData);
        if (featureServer != null) {
            var tiles = featureServer.getTileRequestManager().getAllTileRequests();
            for (var tile : tiles) {
                var tileFeatures = getTileFeatures(tile.getRegionRequest(), featureServer, boundaryStrategy, labels);
                if (tileFeatures != null) {
                    allFeatures.add(tileFeatures.getFeatures());
                    allTargets.add(tileFeatures.getTargets());
                }
            }
        } else {
            logger.warn("Unable to generate features for {}", imageData);
        }
    }
    // We need at least two classes for anything very meaningful to happen
    int nTargets = labels.size();
    if (nTargets <= 1) {
        logger.warn("Unlocked annotations for at least two classes are required to train a classifier!");
        if (hasLockedAnnotations)
            logger.warn("Image contains annotations that *could* be used for training, except they are currently locked. Please unlock them if they should be used.");
        resetTrainingData();
        return null;
    }
    if (matTraining == null)
        matTraining = new Mat();
    if (matTargets == null)
        matTargets = new Mat();
    opencv_core.vconcat(new MatVector(allFeatures.toArray(Mat[]::new)), matTraining);
    opencv_core.vconcat(new MatVector(allTargets.toArray(Mat[]::new)), matTargets);
    logger.debug("Training data: {} x {}, Target data: {} x {}", matTraining.rows(), matTraining.cols(), matTargets.rows(), matTargets.cols());
    if (matTraining.rows() == 0) {
        logger.warn("No training data found - if you have training annotations, check the features are compatible with the current image.");
        return null;
    }
    return new ClassifierTrainingData(labels, matTraining, matTargets);
}
Also used: Mat (org.bytedeco.opencv.opencv_core.Mat), ArrayList (java.util.ArrayList), LinkedHashMap (java.util.LinkedHashMap), PathClass (qupath.lib.objects.classes.PathClass), PathObject (qupath.lib.objects.PathObject), TreeSet (java.util.TreeSet), MatVector (org.bytedeco.opencv.opencv_core.MatVector)
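
The pivotal step above is opencv_core.vconcat, which stacks the per-tile feature and target Mats row-wise into the single matrices used for training. A minimal sketch of just that step, with placeholder 3-column Mats standing in for real per-tile features:

import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.MatVector;

public class VConcatSketch {
    public static void main(String[] args) {
        // Two 'tiles' of features; row counts may differ, but column count and type must match
        Mat tile1 = new Mat(2, 3, opencv_core.CV_32F);
        Mat tile2 = new Mat(4, 3, opencv_core.CV_32F);
        Mat training = new Mat();
        // Vertical concatenation: the result is (2 + 4) x 3
        opencv_core.vconcat(new MatVector(tile1, tile2), training);
        System.out.println(training.rows() + " x " + training.cols()); // 6 x 3
        tile1.close();
        tile2.close();
        training.close();
    }
}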

Example 48 with Mat

Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath: class WandToolCV, method createShape.

@Override
protected Geometry createShape(MouseEvent e, double x, double y, boolean useTiles, Geometry addToShape) {
    GeometryFactory factory = getGeometryFactory();
    if (addToShape != null && pLast != null && pLast.distanceSq(x, y) < 2)
        return null;
    long startTime = System.currentTimeMillis();
    QuPathViewer viewer = getViewer();
    if (viewer == null)
        return null;
    double downsample = Math.max(1, Math.round(viewer.getDownsampleFactor() * 4)) / 4.0;
    var regionStore = viewer.getImageRegionStore();
    // Paint the image as it is currently being viewed
    var type = wandType.get();
    boolean doGray = type == WandType.GRAY;
    BufferedImage imgTemp = doGray ? imgGray : imgBGR;
    int nChannels = doGray ? 1 : 3;
    Graphics2D g2d = imgTemp.createGraphics();
    g2d.setColor(Color.BLACK);
    g2d.setClip(0, 0, w, w);
    g2d.fillRect(0, 0, w, w);
    double xStart = Math.round(x - w * downsample * 0.5);
    double yStart = Math.round(y - w * downsample * 0.5);
    bounds.setFrame(xStart, yStart, w * downsample, w * downsample);
    g2d.scale(1.0 / downsample, 1.0 / downsample);
    g2d.translate(-xStart, -yStart);
    regionStore.paintRegion(viewer.getServer(), g2d, bounds, viewer.getZPosition(), viewer.getTPosition(), downsample, null, null, viewer.getImageDisplay());
    // regionStore.paintRegionCompletely(viewer.getServer(), g2d, bounds, viewer.getZPosition(), viewer.getTPosition(), viewer.getDownsampleFactor(), null, viewer.getImageDisplay(), 250);
    // Optionally include the overlay information when using the wand
    float opacity = viewer.getOverlayOptions().getOpacity();
    if (opacity > 0 && getWandUseOverlays()) {
        ImageRegion region = ImageRegion.createInstance((int) bounds.getX() - 1, (int) bounds.getY() - 1, (int) bounds.getWidth() + 2, (int) bounds.getHeight() + 2, viewer.getZPosition(), viewer.getTPosition());
        if (opacity < 1)
            g2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, opacity));
        for (PathOverlay overlay : viewer.getOverlayLayers().toArray(PathOverlay[]::new)) {
            if (!(overlay instanceof HierarchyOverlay))
                overlay.paintOverlay(g2d, region, downsample, viewer.getImageData(), true);
        }
    }
    // Ensure we have Mats & the correct channel number
    if (mat != null && (mat.channels() != nChannels || mat.depth() != opencv_core.CV_8U)) {
        mat.close();
        mat = null;
    }
    if (mat == null || mat.isNull() || mat.empty())
        mat = new Mat(w, w, CV_8UC(nChannels));
    // if (matMask == null)
    // matMask = new Mat(w+2, w+2, CV_8U);
    // if (matSelected == null)
    // matSelected = new Mat(w+2, w+2, CV_8U);
    // Put pixels into an OpenCV image
    byte[] buffer = ((DataBufferByte) imgTemp.getRaster().getDataBuffer()).getData();
    ByteBuffer matBuffer = mat.createBuffer();
    matBuffer.put(buffer);
    // mat.put(0, 0, buffer);
    // opencv_imgproc.cvtColor(mat, mat, opencv_imgproc.COLOR_BGR2Lab);
    // blurSigma = 4;
    boolean doSimpleSelection = e.isShortcutDown() && !e.isShiftDown();
    if (doSimpleSelection) {
        matMask.put(Scalar.ZERO);
        // opencv_imgproc.circle(matMask, seed, radius, Scalar.ONE);
        opencv_imgproc.floodFill(mat, matMask, seed, Scalar.ONE, null, Scalar.ZERO, Scalar.ZERO, 4 | (2 << 8) | opencv_imgproc.FLOODFILL_MASK_ONLY | opencv_imgproc.FLOODFILL_FIXED_RANGE);
        subtractPut(matMask, Scalar.ONE);
    } else {
        double blurSigma = Math.max(0.5, getWandSigmaPixels());
        int size = (int) Math.ceil(blurSigma * 2) * 2 + 1;
        blurSize.width(size);
        blurSize.height(size);
        // Smooth a little
        opencv_imgproc.GaussianBlur(mat, mat, blurSize, blurSigma);
        // Choose mat to threshold (may be adjusted)
        Mat matThreshold = mat;
        // Apply color transform if required
        if (type == WandType.LAB_DISTANCE) {
            mat.convertTo(matFloat, opencv_core.CV_32F, 1.0 / 255.0, 0.0);
            opencv_imgproc.cvtColor(matFloat, matFloat, opencv_imgproc.COLOR_BGR2Lab);
            double max = 0;
            double mean = 0;
            try (FloatIndexer idx = matFloat.createIndexer()) {
                int k = w / 2;
                double v1 = idx.get(k, k, 0);
                double v2 = idx.get(k, k, 1);
                double v3 = idx.get(k, k, 2);
                double meanScale = 1.0 / (w * w);
                for (int row = 0; row < w; row++) {
                    for (int col = 0; col < w; col++) {
                        double L = idx.get(row, col, 0) - v1;
                        double A = idx.get(row, col, 1) - v2;
                        double B = idx.get(row, col, 2) - v3;
                        double dist = Math.sqrt(L * L + A * A + B * B);
                        if (dist > max)
                            max = dist;
                        mean += dist * meanScale;
                        idx.put(row, col, 0, (float) dist);
                    }
                }
            }
            if (matThreshold == null)
                matThreshold = new Mat();
            opencv_core.extractChannel(matFloat, matThreshold, 0);
            // There are various ways we might choose a threshold now...
            // Here, we use a multiple of the mean. Since values are 'distances'
            // they are all >= 0
            matThreshold.convertTo(matThreshold, opencv_core.CV_8U, 255.0 / max, 0);
            threshold.put(mean * getWandSensitivity());
            // //			OpenCVTools.matToImagePlus(matThreshold, "Before").show();
            // // Apply local Otsu threshold
            // opencv_imgproc.threshold(matThreshold, matThreshold,
            // 0,
            // 255, opencv_imgproc.THRESH_BINARY + opencv_imgproc.THRESH_OTSU);
            // threshold.put(Scalar.ZERO);
            nChannels = 1;
        } else {
            // Base threshold on local standard deviation
            meanStdDev(matThreshold, mean, stddev);
            DoubleBuffer stddevBuffer = stddev.createBuffer();
            double[] stddev2 = new double[nChannels];
            stddevBuffer.get(stddev2);
            double scale = 1.0 / getWandSensitivity();
            if (scale < 0)
                scale = 0.01;
            for (int i = 0; i < stddev2.length; i++) stddev2[i] = stddev2[i] * scale;
            threshold.put(stddev2);
        }
        // Limit maximum radius by pen
        int radius = (int) Math.round(w / 2 * QuPathPenManager.getPenManager().getPressure());
        if (radius == 0)
            return null;
        matMask.put(Scalar.ZERO);
        opencv_imgproc.circle(matMask, seed, radius, Scalar.ONE);
        opencv_imgproc.floodFill(matThreshold, matMask, seed, Scalar.ONE, null, threshold, threshold, 4 | (2 << 8) | opencv_imgproc.FLOODFILL_MASK_ONLY | opencv_imgproc.FLOODFILL_FIXED_RANGE);
        subtractPut(matMask, Scalar.ONE);
        if (strel == null)
            strel = opencv_imgproc.getStructuringElement(opencv_imgproc.MORPH_ELLIPSE, new Size(5, 5));
        opencv_imgproc.morphologyEx(matMask, matMask, opencv_imgproc.MORPH_CLOSE, strel);
    }
    MatVector contours = new MatVector();
    if (contourHierarchy == null)
        contourHierarchy = new Mat();
    opencv_imgproc.findContours(matMask, contours, contourHierarchy, opencv_imgproc.RETR_EXTERNAL, opencv_imgproc.CHAIN_APPROX_SIMPLE);
    // logger.trace("Contours: " + contours.size());
    List<Coordinate> coords = new ArrayList<>();
    List<Geometry> geometries = new ArrayList<>();
    for (Mat contour : contours.get()) {
        // Discard single pixels / lines
        if (contour.size().height() <= 2)
            continue;
        // Start from an empty coordinate list for each contour
        coords.clear();
        // Create a polygon geometry
        try (IntIndexer idxrContours = contour.createIndexer()) {
            for (long r = 0; r < idxrContours.size(0); r++) {
                int px = idxrContours.get(r, 0L, 0L);
                int py = idxrContours.get(r, 0L, 1L);
                double xx = (px - w / 2 - 1); // * downsample + x;
                double yy = (py - w / 2 - 1); // * downsample + y;
                coords.add(new Coordinate(xx, yy));
            }
        }
        if (coords.size() > 1) {
            // Ensure closed
            if (!coords.get(coords.size() - 1).equals(coords.get(0)))
                coords.add(coords.get(0));
            // Exclude single pixels
            var polygon = factory.createPolygon(coords.toArray(Coordinate[]::new));
            if (coords.size() > 5 || polygon.getArea() > 1)
                geometries.add(polygon);
        }
    }
    contours.close();
    if (geometries.isEmpty())
        return null;
    // Handle the fact that OpenCV contours are defined using the 'pixel center' by dilating the boundary
    var geometry = geometries.size() == 1 ? geometries.get(0) : GeometryCombiner.combine(geometries);
    geometry = geometry.buffer(0.5);
    // Transform to map to integer pixel locations in the full-resolution image
    var transform = new AffineTransformation().scale(downsample, downsample).translate(x, y);
    geometry = transform.transform(geometry);
    geometry = GeometryTools.roundCoordinates(geometry);
    geometry = GeometryTools.constrainToBounds(geometry, 0, 0, viewer.getServerWidth(), viewer.getServerHeight());
    if (geometry.getArea() <= 1)
        return null;
    long endTime = System.currentTimeMillis();
    logger.trace(getClass().getSimpleName() + " time: " + (endTime - startTime));
    if (pLast == null)
        pLast = new Point2D.Double(x, y);
    else
        pLast.setLocation(x, y);
    return geometry;
}
Also used: Mat (org.bytedeco.opencv.opencv_core.Mat), GeometryFactory (org.locationtech.jts.geom.GeometryFactory), Size (org.bytedeco.opencv.opencv_core.Size), ArrayList (java.util.ArrayList), ImageRegion (qupath.lib.regions.ImageRegion), FloatIndexer (org.bytedeco.javacpp.indexer.FloatIndexer), DataBufferByte (java.awt.image.DataBufferByte), BufferedImage (java.awt.image.BufferedImage), AffineTransformation (org.locationtech.jts.geom.util.AffineTransformation), MatVector (org.bytedeco.opencv.opencv_core.MatVector), IntIndexer (org.bytedeco.javacpp.indexer.IntIndexer), QuPathViewer (qupath.lib.gui.viewer.QuPathViewer), DoubleBuffer (java.nio.DoubleBuffer), ByteBuffer (java.nio.ByteBuffer), Point (org.bytedeco.opencv.opencv_core.Point), Graphics2D (java.awt.Graphics2D), HierarchyOverlay (qupath.lib.gui.viewer.overlays.HierarchyOverlay), Geometry (org.locationtech.jts.geom.Geometry), Coordinate (org.locationtech.jts.geom.Coordinate), PathOverlay (qupath.lib.gui.viewer.overlays.PathOverlay)
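
The flood-fill calls above pack several options into a single flags argument: the low byte sets 4-connectivity, bits 8-15 carry the value (2) written into the mask, FLOODFILL_MASK_ONLY leaves the image itself untouched, and FLOODFILL_FIXED_RANGE compares pixels against the seed rather than against their neighbours. A standalone sketch of the same call on a synthetic grayscale image (sizes and tolerances are illustrative only):

import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Point;
import org.bytedeco.opencv.opencv_core.Scalar;

public class FloodFillFlagsSketch {
    public static void main(String[] args) {
        int w = 64;
        // A flat grayscale image; any seed will flood the whole frame
        Mat image = new Mat(w, w, opencv_core.CV_8UC1, new Scalar(128.0));
        // The mask must be 2 pixels larger than the image in each dimension
        Mat mask = new Mat(w + 2, w + 2, opencv_core.CV_8UC1, new Scalar(0.0));
        Point seed = new Point(w / 2, w / 2);
        Scalar tol = new Scalar(10.0);
        // Low bits: 4-connectivity; bits 8-15: the value (2) written into the mask
        int flags = 4 | (2 << 8) | opencv_imgproc.FLOODFILL_MASK_ONLY | opencv_imgproc.FLOODFILL_FIXED_RANGE;
        opencv_imgproc.floodFill(image, mask, seed, Scalar.ONE, null, tol, tol, flags);
        System.out.println("Mask pixels set: " + opencv_core.countNonZero(mask));
        image.close();
        mask.close();
    }
}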

Example 49 with Mat

Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath: class DetectCytokeratinCV, method updateArea.

private static void updateArea(final MatVector contours, final Mat hierarchy, final Area area, int row, int depth) {
    // Each hierarchy record is [next, previous, first child, parent], with -1 where absent
    try (IntIndexer indexer = hierarchy.createIndexer()) {
        while (row >= 0) {
            int[] data = new int[4];
            indexer.get(0, row, data);
            Mat contour = contours.get(row);
            // Don't include isolated pixels - otherwise add or remove, as required
            if (contour.rows() > 2) {
                Path2D path = getContour(contour);
                if (depth % 2 == 0)
                    area.add(new Area(path));
                else
                    area.subtract(new Area(path));
            }
            // Deal with any sub-contours
            if (data[2] >= 0)
                updateArea(contours, hierarchy, area, data[2], depth + 1);
            // Move to the next contour at this hierarchy level
            row = data[0];
        }
    }
}
Also used: Mat (org.bytedeco.opencv.opencv_core.Mat), Area (java.awt.geom.Area), Path2D (java.awt.geom.Path2D), IntIndexer (org.bytedeco.javacpp.indexer.IntIndexer)
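
The traversal above relies on the layout of the hierarchy Mat produced by findContours: with RETR_TREE, each contour's record holds the four indices [next, previous, first child, parent], with -1 where no such contour exists. A hypothetical helper (not part of qupath) that dumps those records:

import org.bytedeco.javacpp.indexer.IntIndexer;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.MatVector;

public class HierarchySketch {
    // Print the [next, previous, firstChild, parent] record for each contour
    static void printHierarchy(Mat binary) {
        MatVector contours = new MatVector();
        Mat hierarchy = new Mat();
        opencv_imgproc.findContours(binary, contours, hierarchy,
                opencv_imgproc.RETR_TREE, opencv_imgproc.CHAIN_APPROX_SIMPLE);
        // The hierarchy is stored as a 1 x n Mat with 4 int channels
        try (IntIndexer idx = hierarchy.createIndexer()) {
            for (long i = 0; i < contours.size(); i++) {
                int[] rec = new int[4];
                idx.get(0, i, rec);
                System.out.printf("contour %d: next=%d prev=%d child=%d parent=%d%n",
                        i, rec[0], rec[1], rec[2], rec[3]);
            }
        }
        contours.close();
        hierarchy.close();
    }
}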

Example 50 with Mat

Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath: class DetectCytokeratinCV, method getArea.

/**
 * Get an Area object corresponding to contours in a binary image from OpenCV.
 * @param mat the binary image in which to find contours
 * @return the corresponding Area, or null if the image is empty or contains no contours
 */
private static Area getArea(final Mat mat) {
    if (mat.empty())
        return null;
    // Identify all contours
    MatVector contours = new MatVector();
    Mat hierarchy = new Mat();
    opencv_imgproc.findContours(mat, contours, hierarchy, opencv_imgproc.RETR_TREE, opencv_imgproc.CHAIN_APPROX_SIMPLE);
    if (contours.empty()) {
        contours.close();
        hierarchy.close();
        return null;
    }
    Area area = new Area();
    updateArea(contours, hierarchy, area, 0, 0);
    contours.close();
    hierarchy.close();
    return area;
}
Also used: Mat (org.bytedeco.opencv.opencv_core.Mat), Area (java.awt.geom.Area), MatVector (org.bytedeco.opencv.opencv_core.MatVector)
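
The add/subtract alternation in updateArea works because RETR_TREE nests outer boundaries at even depths and holes at odd depths. A hypothetical driver, assuming getArea were accessible, that would yield a ring-shaped Area from a filled square containing a square hole:

import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Point;
import org.bytedeco.opencv.opencv_core.Scalar;

public class GetAreaSketch {
    public static void main(String[] args) {
        // Square with a square hole: findContours with RETR_TREE yields the outer
        // boundary at depth 0 (added) and the hole at depth 1 (subtracted)
        Mat mat = new Mat(100, 100, opencv_core.CV_8UC1, Scalar.ZERO);
        opencv_imgproc.rectangle(mat, new Point(10, 10), new Point(90, 90),
                Scalar.WHITE, -1, opencv_imgproc.LINE_8, 0);
        opencv_imgproc.rectangle(mat, new Point(30, 30), new Point(70, 70),
                Scalar.ZERO, -1, opencv_imgproc.LINE_8, 0);
        // Area ring = getArea(mat); // would return the ring between the two squares
        mat.close();
    }
}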

Aggregations

Mat (org.bytedeco.opencv.opencv_core.Mat): 101 uses
Point (org.bytedeco.opencv.opencv_core.Point): 23 uses
MatVector (org.bytedeco.opencv.opencv_core.MatVector): 20 uses
ArrayList (java.util.ArrayList): 16 uses
PointerScope (org.bytedeco.javacpp.PointerScope): 16 uses
Size (org.bytedeco.opencv.opencv_core.Size): 15 uses
BufferedImage (java.awt.image.BufferedImage): 13 uses
FloatIndexer (org.bytedeco.javacpp.indexer.FloatIndexer): 13 uses
Scalar (org.bytedeco.opencv.opencv_core.Scalar): 13 uses
Test (org.junit.jupiter.api.Test): 12 uses
IntIndexer (org.bytedeco.javacpp.indexer.IntIndexer): 10 uses
DoubleIndexer (org.bytedeco.javacpp.indexer.DoubleIndexer): 9 uses
UByteIndexer (org.bytedeco.javacpp.indexer.UByteIndexer): 8 uses
opencv_core (org.bytedeco.opencv.global.opencv_core): 7 uses
IOException (java.io.IOException): 6 uses
Arrays (java.util.Arrays): 6 uses
Collections (java.util.Collections): 6 uses
List (java.util.List): 6 uses
Collectors (java.util.stream.Collectors): 6 uses
UShortIndexer (org.bytedeco.javacpp.indexer.UShortIndexer): 6 uses