Search in sources:

Example 1 with PathOverlay

Use of qupath.lib.gui.viewer.overlays.PathOverlay in the project qupath (by qupath).

From the class WandToolCV, the method createShape:

/**
 * Create a JTS {@link Geometry} for the wand tool by flood-filling the pixels
 * currently displayed around the clicked location.
 * <p>
 * The method paints the on-screen image (and, optionally, overlay layers) into a
 * small fixed-size buffer centered on the click, copies it into an OpenCV Mat,
 * flood-fills from the center seed using a sensitivity-derived threshold, extracts
 * contours from the resulting mask, and transforms them back into full-resolution
 * image coordinates.
 * <p>
 * NOTE(review): relies on many fields of the enclosing class that are not visible
 * here — presumably {@code w} is the side length of the square sampling buffer,
 * {@code seed} is its center point, and {@code imgGray}/{@code imgBGR}/{@code mat}/
 * {@code matMask} are reusable buffers; confirm against the full class.
 *
 * @param e the mouse event triggering the call (modifier keys select the fill mode)
 * @param x click x-coordinate in full-resolution image space
 * @param y click y-coordinate in full-resolution image space
 * @param useTiles unused here (part of the overridden signature)
 * @param addToShape existing geometry being extended, or null; used only to skip
 *                   near-duplicate drag events
 * @return the new geometry in image coordinates, or null if nothing was selected
 */
@Override
protected Geometry createShape(MouseEvent e, double x, double y, boolean useTiles, Geometry addToShape) {
    GeometryFactory factory = getGeometryFactory();
    // Skip if we are extending a shape and the pointer moved less than ~sqrt(2) pixels
    // since the last call — avoids redundant work during a drag
    if (addToShape != null && pLast != null && pLast.distanceSq(x, y) < 2)
        return null;
    long startTime = System.currentTimeMillis();
    QuPathViewer viewer = getViewer();
    if (viewer == null)
        return null;
    // Snap the viewer's downsample to the nearest quarter (minimum 1)
    double downsample = Math.max(1, Math.round(viewer.getDownsampleFactor() * 4)) / 4.0;
    var regionStore = viewer.getImageRegionStore();
    // Paint the image as it is currently being viewed
    var type = wandType.get();
    boolean doGray = type == WandType.GRAY;
    BufferedImage imgTemp = doGray ? imgGray : imgBGR;
    int nChannels = doGray ? 1 : 3;
    // Clear the sampling buffer to black before painting the region into it
    Graphics2D g2d = imgTemp.createGraphics();
    g2d.setColor(Color.BLACK);
    g2d.setClip(0, 0, w, w);
    g2d.fillRect(0, 0, w, w);
    // Center the w-by-w window (in downsampled units) on the clicked location
    double xStart = Math.round(x - w * downsample * 0.5);
    double yStart = Math.round(y - w * downsample * 0.5);
    bounds.setFrame(xStart, yStart, w * downsample, w * downsample);
    // Map full-resolution image coordinates into the buffer's pixel coordinates
    g2d.scale(1.0 / downsample, 1.0 / downsample);
    g2d.translate(-xStart, -yStart);
    regionStore.paintRegion(viewer.getServer(), g2d, bounds, viewer.getZPosition(), viewer.getTPosition(), downsample, null, null, viewer.getImageDisplay());
    // regionStore.paintRegionCompletely(viewer.getServer(), g2d, bounds, viewer.getZPosition(), viewer.getTPosition(), viewer.getDownsampleFactor(), null, viewer.getImageDisplay(), 250);
    // Optionally include the overlay information when using the wand
    float opacity = viewer.getOverlayOptions().getOpacity();
    if (opacity > 0 && getWandUseOverlays()) {
        // Expand the region by 1 pixel on each side so overlay edges are included
        ImageRegion region = ImageRegion.createInstance((int) bounds.getX() - 1, (int) bounds.getY() - 1, (int) bounds.getWidth() + 2, (int) bounds.getHeight() + 2, viewer.getZPosition(), viewer.getTPosition());
        if (opacity < 1)
            g2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, opacity));
        // HierarchyOverlay is excluded — presumably because objects are handled separately
        for (PathOverlay overlay : viewer.getOverlayLayers().toArray(PathOverlay[]::new)) {
            if (!(overlay instanceof HierarchyOverlay))
                overlay.paintOverlay(g2d, region, downsample, viewer.getImageData(), true);
        }
    }
    // Ensure we have Mats & the correct channel number
    if (mat != null && (mat.channels() != nChannels || mat.depth() != opencv_core.CV_8U)) {
        mat.close();
        mat = null;
    }
    if (mat == null || mat.isNull() || mat.empty())
        mat = new Mat(w, w, CV_8UC(nChannels));
    // if (matMask == null)
    // matMask = new Mat(w+2, w+2, CV_8U);
    // if (matSelected == null)
    // matSelected = new Mat(w+2, w+2, CV_8U);
    // Put pixels into an OpenCV image
    byte[] buffer = ((DataBufferByte) imgTemp.getRaster().getDataBuffer()).getData();
    ByteBuffer matBuffer = mat.createBuffer();
    matBuffer.put(buffer);
    // mat.put(0, 0, buffer);
    // opencv_imgproc.cvtColor(mat, mat, opencv_imgproc.COLOR_BGR2Lab);
    // blurSigma = 4;
    // Shortcut (Ctrl/Cmd) without Shift: flood-fill with zero tolerance (exact-value selection)
    boolean doSimpleSelection = e.isShortcutDown() && !e.isShiftDown();
    if (doSimpleSelection) {
        matMask.put(Scalar.ZERO);
        // opencv_imgproc.circle(matMask, seed, radius, Scalar.ONE);
        // Flags: 4-connectivity | mask fill value 2 (2 << 8) | write mask only | fixed range
        opencv_imgproc.floodFill(mat, matMask, seed, Scalar.ONE, null, Scalar.ZERO, Scalar.ZERO, 4 | (2 << 8) | opencv_imgproc.FLOODFILL_MASK_ONLY | opencv_imgproc.FLOODFILL_FIXED_RANGE);
        // Subtract 1 so filled pixels (value 2) become 1 and everything else saturates to 0
        subtractPut(matMask, Scalar.ONE);
    } else {
        double blurSigma = Math.max(0.5, getWandSigmaPixels());
        // Kernel size: odd, covering ~±2 sigma
        int size = (int) Math.ceil(blurSigma * 2) * 2 + 1;
        blurSize.width(size);
        blurSize.height(size);
        // Smooth a little
        opencv_imgproc.GaussianBlur(mat, mat, blurSize, blurSigma);
        // Choose mat to threshold (may be adjusted)
        Mat matThreshold = mat;
        // Apply color transform if required
        if (type == WandType.LAB_DISTANCE) {
            // Convert to CIELAB and replace channel 0 with the Euclidean distance
            // from each pixel to the Lab value at the window center
            mat.convertTo(matFloat, opencv_core.CV_32F, 1.0 / 255.0, 0.0);
            opencv_imgproc.cvtColor(matFloat, matFloat, opencv_imgproc.COLOR_BGR2Lab);
            double max = 0;
            double mean = 0;
            try (FloatIndexer idx = matFloat.createIndexer()) {
                int k = w / 2;
                // Lab values at the center pixel (the seed)
                double v1 = idx.get(k, k, 0);
                double v2 = idx.get(k, k, 1);
                double v3 = idx.get(k, k, 2);
                double meanScale = 1.0 / (w * w);
                for (int row = 0; row < w; row++) {
                    for (int col = 0; col < w; col++) {
                        double L = idx.get(row, col, 0) - v1;
                        double A = idx.get(row, col, 1) - v2;
                        double B = idx.get(row, col, 2) - v3;
                        double dist = Math.sqrt(L * L + A * A + B * B);
                        if (dist > max)
                            max = dist;
                        mean += dist * meanScale;
                        idx.put(row, col, 0, (float) dist);
                    }
                }
            }
            // NOTE(review): matThreshold was assigned mat above and cannot be null here;
            // this check appears to be dead code — confirm before removing
            if (matThreshold == null)
                matThreshold = new Mat();
            opencv_core.extractChannel(matFloat, matThreshold, 0);
            // There are various ways we might choose a threshold now...
            // Here, we use a multiple of the mean. Since values are 'distances'
            // they are all >= 0
            matThreshold.convertTo(matThreshold, opencv_core.CV_8U, 255.0 / max, 0);
            threshold.put(mean * getWandSensitivity());
            // //			OpenCVTools.matToImagePlus(matThreshold, "Before").show();
            // // Apply local Otsu threshold
            // opencv_imgproc.threshold(matThreshold, matThreshold,
            // 0,
            // 255, opencv_imgproc.THRESH_BINARY + opencv_imgproc.THRESH_OTSU);
            // threshold.put(Scalar.ZERO);
            nChannels = 1;
        } else {
            // Base threshold on local standard deviation
            meanStdDev(matThreshold, mean, stddev);
            DoubleBuffer stddevBuffer = stddev.createBuffer();
            double[] stddev2 = new double[nChannels];
            stddevBuffer.get(stddev2);
            // Higher sensitivity -> lower scale -> tighter tolerance
            double scale = 1.0 / getWandSensitivity();
            // NOTE(review): guards against a negative sensitivity only; scale == 0 is
            // impossible from a finite division — confirm whether <= 0 was intended
            if (scale < 0)
                scale = 0.01;
            for (int i = 0; i < stddev2.length; i++) stddev2[i] = stddev2[i] * scale;
            threshold.put(stddev2);
        }
        // Limit maximum radius by pen
        int radius = (int) Math.round(w / 2 * QuPathPenManager.getPenManager().getPressure());
        if (radius == 0)
            return null;
        // Seed the mask with a circle (value 1) to constrain the fill radius
        matMask.put(Scalar.ZERO);
        opencv_imgproc.circle(matMask, seed, radius, Scalar.ONE);
        // Flags: 4-connectivity | mask fill value 2 (2 << 8) | write mask only | fixed range
        opencv_imgproc.floodFill(matThreshold, matMask, seed, Scalar.ONE, null, threshold, threshold, 4 | (2 << 8) | opencv_imgproc.FLOODFILL_MASK_ONLY | opencv_imgproc.FLOODFILL_FIXED_RANGE);
        // Subtract 1: filled pixels (2) -> 1, circle boundary (1) -> 0
        subtractPut(matMask, Scalar.ONE);
        // Morphological closing with a 5x5 ellipse to smooth jagged mask edges
        if (strel == null)
            strel = opencv_imgproc.getStructuringElement(opencv_imgproc.MORPH_ELLIPSE, new Size(5, 5));
        opencv_imgproc.morphologyEx(matMask, matMask, opencv_imgproc.MORPH_CLOSE, strel);
    }
    // Trace the mask outline(s) as polygons
    MatVector contours = new MatVector();
    if (contourHierarchy == null)
        contourHierarchy = new Mat();
    opencv_imgproc.findContours(matMask, contours, contourHierarchy, opencv_imgproc.RETR_EXTERNAL, opencv_imgproc.CHAIN_APPROX_SIMPLE);
    // logger.trace("Contours: " + contours.size());
    List<Coordinate> coords = new ArrayList<>();
    List<Geometry> geometries = new ArrayList<>();
    for (Mat contour : contours.get()) {
        // Discard single pixels / lines
        if (contour.size().height() <= 2)
            continue;
        // Create a polygon geometry
        try (IntIndexer idxrContours = contour.createIndexer()) {
            for (long r = 0; r < idxrContours.size(0); r++) {
                int px = idxrContours.get(r, 0L, 0L);
                int py = idxrContours.get(r, 0L, 1L);
                // Shift to coordinates relative to the window center
                // (the -1 accounts for the 1-pixel mask border used by floodFill)
                // * downsample + x;
                double xx = (px - w / 2 - 1);
                // * downsample + y;
                double yy = (py - w / 2 - 1);
                coords.add(new Coordinate(xx, yy));
            }
        }
        if (coords.size() > 1) {
            // Ensure closed
            if (!coords.get(coords.size() - 1).equals(coords.get(0)))
                coords.add(coords.get(0));
            // Exclude single pixels
            var polygon = factory.createPolygon(coords.toArray(Coordinate[]::new));
            if (coords.size() > 5 || polygon.getArea() > 1)
                geometries.add(polygon);
        }
    }
    contours.close();
    if (geometries.isEmpty())
        return null;
    // Handle the fact that OpenCV contours are defined using the 'pixel center' by dilating the boundary
    var geometry = geometries.size() == 1 ? geometries.get(0) : GeometryCombiner.combine(geometries);
    geometry = geometry.buffer(0.5);
    // Transform to map to integer pixel locations in the full-resolution image
    var transform = new AffineTransformation().scale(downsample, downsample).translate(x, y);
    geometry = transform.transform(geometry);
    geometry = GeometryTools.roundCoordinates(geometry);
    // Clip to the image bounds so the ROI never extends outside the server
    geometry = GeometryTools.constrainToBounds(geometry, 0, 0, viewer.getServerWidth(), viewer.getServerHeight());
    if (geometry.getArea() <= 1)
        return null;
    long endTime = System.currentTimeMillis();
    logger.trace(getClass().getSimpleName() + " time: " + (endTime - startTime));
    // Remember the location to suppress near-duplicate events on the next call
    if (pLast == null)
        pLast = new Point2D.Double(x, y);
    else
        pLast.setLocation(x, y);
    return geometry;
}
Also used : Mat(org.bytedeco.opencv.opencv_core.Mat) GeometryFactory(org.locationtech.jts.geom.GeometryFactory) Size(org.bytedeco.opencv.opencv_core.Size) ArrayList(java.util.ArrayList) ImageRegion(qupath.lib.regions.ImageRegion) FloatIndexer(org.bytedeco.javacpp.indexer.FloatIndexer) DataBufferByte(java.awt.image.DataBufferByte) BufferedImage(java.awt.image.BufferedImage) AffineTransformation(org.locationtech.jts.geom.util.AffineTransformation) MatVector(org.bytedeco.opencv.opencv_core.MatVector) IntIndexer(org.bytedeco.javacpp.indexer.IntIndexer) QuPathViewer(qupath.lib.gui.viewer.QuPathViewer) DoubleBuffer(java.nio.DoubleBuffer) ByteBuffer(java.nio.ByteBuffer) Point(org.bytedeco.opencv.opencv_core.Point) Graphics2D(java.awt.Graphics2D) HierarchyOverlay(qupath.lib.gui.viewer.overlays.HierarchyOverlay) Geometry(org.locationtech.jts.geom.Geometry) Coordinate(org.locationtech.jts.geom.Coordinate) PathOverlay(qupath.lib.gui.viewer.overlays.PathOverlay)

Example 2 with PathOverlay

Use of qupath.lib.gui.viewer.overlays.PathOverlay in the project qupath (by qupath).

From the class QuPathViewer, the method paintViewer:

/**
 * Paint the viewer: the (buffered) image region, all overlay layers, and the
 * currently selected object(s) with optional bounds and ROI editing handles.
 * <p>
 * The base image is rendered into a reusable {@code imgBuffer} only when
 * {@code imageUpdated} or {@code locationUpdated} is set; otherwise only overlays
 * are repainted on top of the existing buffer. Listeners are notified if the
 * visible region shape changed since the last paint.
 * <p>
 * NOTE(review): depends on many fields of the enclosing class not visible here
 * ({@code imgBuffer}, {@code transform}, {@code lastVisibleShape}, {@code roiEditor},
 * {@code overlayOptions}, ...); descriptions of their semantics are inferred and
 * should be confirmed against the full class.
 *
 * @param g the graphics context to paint into
 * @param w the width of the paintable area, in component pixels
 * @param h the height of the paintable area, in component pixels
 */
protected void paintViewer(Graphics g, int w, int h) {
    ImageServer<BufferedImage> server = getServer();
    // No image open: just fill with the background color and record the repaint
    if (server == null) {
        g.setColor(background);
        g.fillRect(0, 0, w, h);
        updateRepaintTimestamp();
        return;
    }
    // // Get dimensions
    // int w = getWidth();
    // int h = getHeight();
    // Determine whether the clip covers the full component, or only part of it
    Rectangle clip = g.getClipBounds();
    boolean clipFull;
    if (clip == null) {
        clip = new Rectangle(0, 0, w, h);
        g.setClip(0, 0, w, h);
        clipFull = true;
    } else
        clipFull = clip.x == 0 && clip.y == 0 && clip.width == w && clip.height == h;
    // Ensure we have a sufficiently-large buffer
    if (imgBuffer == null || imgBuffer.getWidth() != w || imgBuffer.getHeight() != h) {
        // Create buffered images & buffers for RGB pixel values
        imgBuffer = createBufferedImage(w, h);
        imgBuffer.setAccelerationPriority(1f);
        logger.trace("New buffered image created: {}", imgBuffer);
        // imgVolatile = createVolatileImage(w, h);
        imageUpdated = true;
        // If the size changed, ensure the AffineTransform is up-to-date
        updateAffineTransform();
    }
    // Get the displayed region
    Shape shapeRegion = getDisplayedRegionShape();
    // The visible shape must have changed if there wasn't one previously...
    // Otherwise check if it has changed & update accordingly
    // This will be used to notify listeners soon
    boolean shapeChanged = lastVisibleShape == null || !lastVisibleShape.equals(shapeRegion);
    long t1 = System.currentTimeMillis();
    // Only repaint the image if this is requested, otherwise only overlays need to be repainted
    if (imageUpdated || locationUpdated) {
        // || imgVolatile.contentsLost()) {
        // Set flags that image no longer requiring an update
        // By setting them early, they might still be reset during this run... in which case we don't want to thwart the re-run
        imageUpdated = false;
        locationUpdated = false;
        // updateBufferedImage(imgVolatile, shapeRegion, w, h);
        updateBufferedImage(imgBuffer, shapeRegion, w, h);
    }
    // if (imageUpdated || locationUpdated) {
    // updateBufferedImage(imgVolatile, shapeRegion, w, h);
    // //			updateBufferedImage(imgBuffer, shapeRegion, w, h);
    // //			logger.info("INITIAL Image drawing time: " + (System.currentTimeMillis() - t1));
    // imgVolatile.createGraphics().drawImage(imgBuffer, 0, 0, this);
    // }
    // while (imgVolatile.contentsLost()) {
    // imgVolatile.createGraphics().drawImage(imgBuffer, 0, 0, this);
    // }
    // Store the last shape visible
    lastVisibleShape = shapeRegion;
    // Draw the image from the buffer
    // The call to super.paintComponent is delayed until here to try to stop occasional flickering on Apple's Java 6
    g.setColor(background);
    if (clipFull)
        paintFinalImage(g, imgBuffer, this);
    else
        // Partial clip: copy only the clipped sub-rectangle of the buffer
        // g2d.drawImage(imgBuffer, 0, 0, getWidth(), getHeight(), this);
        g.drawImage(imgBuffer, clip.x, clip.y, clip.x + clip.width, clip.y + clip.height, clip.x, clip.y, clip.x + clip.width, clip.y + clip.height, null);
    if (logger.isTraceEnabled()) {
        long t2 = System.currentTimeMillis();
        logger.trace("Final image drawing time: {}", (t2 - t1));
    }
    // Really useful only for debugging graphics
    // Without a Graphics2D we cannot paint overlays; notify listeners and bail out
    if (!(g instanceof Graphics2D)) {
        imageUpdated = false;
        // Notify any listeners of shape changes
        if (shapeChanged)
            fireVisibleRegionChangedEvent(lastVisibleShape);
        return;
    }
    double downsample = getDownsampleFactor();
    float opacity = overlayOptions.getOpacity();
    // Work on a copy so transform/composite changes don't leak into the caller's Graphics
    Graphics2D g2d = (Graphics2D) g.create();
    // Apply required transform to the graphics object (rotation, scaling, shifting...)
    g2d.transform(transform);
    Composite previousComposite = g2d.getComposite();
    boolean paintCompletely = thumbnailIsFullImage || !doFasterRepaint;
    // var regionBounds = AwtTools.getImageRegion(clip, getZPosition(), getTPosition());
    if (opacity > 0 || PathPrefs.alwaysPaintSelectedObjectsProperty().get()) {
        if (opacity < 1) {
            AlphaComposite composite = AlphaComposite.getInstance(AlphaComposite.SRC_OVER, opacity);
            g2d.setComposite(composite);
        }
        Color color = getSuggestedOverlayColor();
        // Paint the overlay layers
        var imageData = this.imageDataProperty.get();
        for (PathOverlay overlay : allOverlayLayers.toArray(PathOverlay[]::new)) {
            logger.trace("Painting overlay: {}", overlay);
            if (overlay instanceof AbstractOverlay)
                ((AbstractOverlay) overlay).setPreferredOverlayColor(color);
            // overlay.paintOverlay(g2d, regionBounds, downsample, null, paintCompletely);
            overlay.paintOverlay(g2d, getServerBounds(), downsample, imageData, paintCompletely);
        }
    // if (hierarchyOverlay != null) {
    // hierarchyOverlay.setPreferredOverlayColor(color);
    // hierarchyOverlay.paintOverlay(g2d, getServerBounds(), downsampleFactor, null, paintCompletely);
    // }
    }
    // Paint the selected object
    PathObjectHierarchy hierarchy = getHierarchy();
    PathObject mainSelectedObject = getSelectedObject();
    Rectangle2D boundsRect = null;
    boolean useSelectedColor = PathPrefs.useSelectedColorProperty().get();
    boolean paintSelectedBounds = PathPrefs.paintSelectedBoundsProperty().get();
    // Snapshot the selection into an array to avoid concurrent modification during painting
    for (PathObject selectedObject : hierarchy.getSelectionModel().getSelectedObjects().toArray(new PathObject[0])) {
        // TODO: Simplify this...
        // Only paint objects with a ROI on the currently-visible z-slice and timepoint
        if (selectedObject != null && selectedObject.hasROI() && selectedObject.getROI().getZ() == getZPosition() && selectedObject.getROI().getT() == getTPosition()) {
            if (!selectedObject.isDetection()) {
                // Ensure a selected ROI can be seen clearly
                if (previousComposite != null)
                    g2d.setComposite(previousComposite);
                g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
            }
            Rectangle boundsDisplayed = shapeRegion.getBounds();
            ROI pathROI = selectedObject.getROI();
            // if ((PathPrefs.getPaintSelectedBounds() || (selectedObject.isDetection() && !PathPrefs.getUseSelectedColor())) && !(pathROI instanceof RectangleROI)) {
            // Draw a bounding indicator (convex hull for point ROIs, rectangle otherwise)
            if (pathROI != null && (paintSelectedBounds || (!useSelectedColor)) && !(pathROI instanceof RectangleROI) && !pathROI.isEmpty()) {
                Shape boundsShape = null;
                if (pathROI.isPoint()) {
                    var hull = pathROI.getConvexHull();
                    if (hull != null)
                        boundsShape = hull.getShape();
                }
                if (boundsShape == null) {
                    boundsRect = AwtTools.getBounds2D(pathROI, boundsRect);
                    boundsShape = boundsRect;
                }
                // Tried to match to pixel boundaries... but resulted in too much jiggling
                // boundsShape.setFrame(
                // Math.round(boundsShape.getX()/downsampleFactor)*downsampleFactor-downsampleFactor,
                // Math.round(boundsShape.getY()/downsampleFactor)*downsampleFactor-downsampleFactor,
                // Math.round(boundsShape.getWidth()/downsampleFactor)*downsampleFactor+2*downsampleFactor,
                // Math.round(boundsShape.getHeight()/downsampleFactor)*downsampleFactor+2*downsampleFactor);
                // boundsShape.setFrame(boundsShape.getX()-downsampleFactor, boundsShape.getY()-downsampleFactor, boundsShape.getWidth()+2*downsampleFactor, boundsShape.getHeight()+2*downsampleFactor);
                PathHierarchyPaintingHelper.paintShape(boundsShape, g2d, getSuggestedOverlayColor(), PathHierarchyPaintingHelper.getCachedStroke(Math.max(downsample, 1) * 2), null);
            // boundsShape.setFrame(boundsShape.getX()+downsampleFactor, boundsShape.getY()-downsampleFactor, boundsShape.getWidth(), boundsShape.getHeight());
            // PathHierarchyPaintingHelper.paintShape(boundsShape, g2d, new Color(1f, 1f, 1f, 0.75f), PathHierarchyPaintingHelper.getCachedStroke(Math.max(downsampleFactor, 1)*2), null, downsampleFactor);
            }
            // in a cached way
            if ((selectedObject.isDetection() && PathPrefs.useSelectedColorProperty().get()) || !PathObjectTools.hierarchyContainsObject(hierarchy, selectedObject))
                PathHierarchyPaintingHelper.paintObject(selectedObject, false, g2d, boundsDisplayed, overlayOptions, getHierarchy().getSelectionModel(), downsample);
            // Paint ROI handles, if required
            if (selectedObject == mainSelectedObject && roiEditor.hasROI()) {
                Stroke strokeThick = PathHierarchyPaintingHelper.getCachedStroke(PathPrefs.annotationStrokeThicknessProperty().get() * downsample);
                // Prefer the configured 'selected' color; fall back to the object's own display color
                Color color = useSelectedColor ? ColorToolsAwt.getCachedColor(PathPrefs.colorSelectedObjectProperty().get()) : null;
                if (color == null)
                    color = ColorToolsAwt.getCachedColor(ColorToolsFX.getDisplayedColorARGB(selectedObject));
                g2d.setStroke(strokeThick);
                // Draw ROI handles using adaptive size
                double maxHandleSize = getMaxROIHandleSize();
                double minHandleSize = downsample;
                PathHierarchyPaintingHelper.paintHandles(roiEditor, g2d, minHandleSize, maxHandleSize, color, ColorToolsAwt.getTranslucentColor(color));
            }
        }
    }
    // Notify any listeners of shape changes
    if (shapeChanged)
        fireVisibleRegionChangedEvent(lastVisibleShape);
    updateRepaintTimestamp();
}
Also used : AbstractOverlay(qupath.lib.gui.viewer.overlays.AbstractOverlay) PathObjectHierarchy(qupath.lib.objects.hierarchy.PathObjectHierarchy) Stroke(java.awt.Stroke) Shape(java.awt.Shape) AlphaComposite(java.awt.AlphaComposite) Composite(java.awt.Composite) AlphaComposite(java.awt.AlphaComposite) Color(java.awt.Color) Rectangle(java.awt.Rectangle) Rectangle2D(java.awt.geom.Rectangle2D) RectangleROI(qupath.lib.roi.RectangleROI) ROI(qupath.lib.roi.interfaces.ROI) BufferedImage(java.awt.image.BufferedImage) Graphics2D(java.awt.Graphics2D) PathObject(qupath.lib.objects.PathObject) RectangleROI(qupath.lib.roi.RectangleROI) PathOverlay(qupath.lib.gui.viewer.overlays.PathOverlay)

Aggregations

Graphics2D (java.awt.Graphics2D)2 BufferedImage (java.awt.image.BufferedImage)2 PathOverlay (qupath.lib.gui.viewer.overlays.PathOverlay)2 AlphaComposite (java.awt.AlphaComposite)1 Color (java.awt.Color)1 Composite (java.awt.Composite)1 Rectangle (java.awt.Rectangle)1 Shape (java.awt.Shape)1 Stroke (java.awt.Stroke)1 Rectangle2D (java.awt.geom.Rectangle2D)1 DataBufferByte (java.awt.image.DataBufferByte)1 ByteBuffer (java.nio.ByteBuffer)1 DoubleBuffer (java.nio.DoubleBuffer)1 ArrayList (java.util.ArrayList)1 FloatIndexer (org.bytedeco.javacpp.indexer.FloatIndexer)1 IntIndexer (org.bytedeco.javacpp.indexer.IntIndexer)1 Mat (org.bytedeco.opencv.opencv_core.Mat)1 MatVector (org.bytedeco.opencv.opencv_core.MatVector)1 Point (org.bytedeco.opencv.opencv_core.Point)1 Size (org.bytedeco.opencv.opencv_core.Size)1