Search in sources:

Example 36 with ROI

Use of qupath.lib.roi.interfaces.ROI in project qupath by qupath.

The class ExtractRegionCommand, method run.

@Override
public void run() {
    QuPathViewer viewer = qupath.getViewer();
    ImageServer<BufferedImage> server = null;
    if (viewer != null)
        server = viewer.getServer();
    if (server == null)
        return;
    List<String> unitOptions = new ArrayList<>();
    unitOptions.add(PIXELS_UNIT);
    String unit = server.getPixelCalibration().getPixelWidthUnit();
    if (unit.equals(server.getPixelCalibration().getPixelHeightUnit()) && !unit.equals(PixelCalibration.PIXEL))
        unitOptions.add(unit);
    if (!unitOptions.contains(resolutionUnit))
        resolutionUnit = PIXELS_UNIT;
    ParameterList params = new ParameterList()
            .addDoubleParameter("resolution", "Resolution", resolution, null, "Resolution at which the image will be exported, defined as the 'pixel size' in Resolution units")
            .addChoiceParameter("resolutionUnit", "Resolution unit", resolutionUnit, unitOptions, "Units defining the export resolution; if 'pixels' then the resolution is the same as a downsample value")
            .addBooleanParameter("includeROI", "Include ROI", includeROI, "Include the primary object defining the exported region as an active ROI in ImageJ")
            .addBooleanParameter("includeOverlay", "Include overlay", includeOverlay, "Include any objects overlapping the exported region as ROIs on an ImageJ overlay")
            .addBooleanParameter("doTransforms", "Apply color transforms", doTransforms, "Optionally apply any color transforms when sending the pixels to ImageJ")
            .addBooleanParameter("doZ", "All z-slices", doZ, "Optionally include all slices of a z-stack")
            .addBooleanParameter("doT", "All timepoints", doT, "Optionally include all timepoints of a time series");
    // params.setHiddenParameters(unitOptions.size() <= 1, "resolutionUnit");
    params.setHiddenParameters(server.nZSlices() == 1, "doZ");
    params.setHiddenParameters(server.nTimepoints() == 1, "doT");
    if (!Dialogs.showParameterDialog("Send region to ImageJ", params))
        return;
    // Parse values
    resolution = params.getDoubleParameterValue("resolution");
    resolutionUnit = (String) params.getChoiceParameterValue("resolutionUnit");
    includeROI = params.getBooleanParameterValue("includeROI");
    includeOverlay = params.getBooleanParameterValue("includeOverlay");
    doTransforms = params.getBooleanParameterValue("doTransforms");
    doZ = params.getBooleanParameterValue("doZ");
    doT = params.getBooleanParameterValue("doT");
    // Calculate downsample
    double downsample = resolution;
    if (!resolutionUnit.equals(PIXELS_UNIT))
        downsample = resolution / (server.getPixelCalibration().getPixelHeight().doubleValue() / 2.0 + server.getPixelCalibration().getPixelWidth().doubleValue() / 2.0);
    // Color transforms are (currently) only applied for brightfield images - for fluorescence we always provide everything as unchanged as possible
    List<ChannelDisplayInfo> selectedChannels = new ArrayList<>(viewer.getImageDisplay().selectedChannels());
    List<ChannelDisplayInfo> channels = doTransforms && !selectedChannels.isEmpty() ? selectedChannels : null;
    if (channels != null)
        server = ChannelDisplayTransformServer.createColorTransformServer(server, channels);
    // Loop through all selected objects
    Collection<PathObject> pathObjects = viewer.getHierarchy().getSelectionModel().getSelectedObjects();
    if (pathObjects.isEmpty())
        pathObjects = Collections.singletonList(viewer.getHierarchy().getRootObject());
    List<ImagePlus> imps = new ArrayList<>();
    for (PathObject pathObject : pathObjects) {
        if (Thread.currentThread().isInterrupted() || IJ.escapePressed())
            return;
        int width, height;
        if (pathObject == null || !pathObject.hasROI()) {
            width = server.getWidth();
            height = server.getHeight();
        } else {
            Rectangle bounds = AwtTools.getBounds(pathObject.getROI());
            width = bounds.width;
            height = bounds.height;
        }
        RegionRequest region;
        ROI roi = pathObject == null ? null : pathObject.getROI();
        if (roi == null || PathObjectTools.hasPointROI(pathObject)) {
            region = RegionRequest.createInstance(server.getPath(), downsample, 0, 0, server.getWidth(), server.getHeight(), viewer.getZPosition(), viewer.getTPosition());
        } else
            region = RegionRequest.createInstance(server.getPath(), downsample, roi);
        // region = RegionRequest.createInstance(server.getPath(), downsample, pathObject.getROI(), viewer.getZPosition(), viewer.getTPosition());
        // Minimum size has been removed (v0.2.0-m4); returned regions should be at least 1x1 pixels
        // if (region.getWidth() / downsample < 8 || region.getHeight() / downsample < 8) {
        // DisplayHelpers.showErrorMessage("Send region to ImageJ", "The width & height of the extracted image must both be >= 8 pixels");
        // continue;
        // }
        // Calculate required z-slices and time-points
        int zStart = doZ ? 0 : region.getZ();
        int zEnd = doZ ? server.nZSlices() : region.getZ() + 1;
        int tStart = doT ? 0 : region.getT();
        int tEnd = doT ? server.nTimepoints() : region.getT() + 1;
        long nZ = zEnd - zStart;
        long nT = tEnd - tStart;
        int bytesPerPixel = server.isRGB() ? 4 : server.getPixelType().getBytesPerPixel() * server.nChannels();
        double memory = ((long) width * height * nZ * nT * bytesPerPixel) / (downsample * downsample);
        // TODO: Perform calculation based on actual amount of available memory
        long availableMemory = GeneralTools.estimateAvailableMemory();
        if (memory >= availableMemory * 0.95) {
            logger.error("Cannot extract region {} - estimated size is too large (approx. {} MB)", pathObject, GeneralTools.formatNumber(memory / (1024.0 * 1024.0), 2));
            Dialogs.showErrorMessage("Send region to ImageJ error", "Selected region is too large to extract - please select a smaller region or use a higher downsample factor");
            continue;
        }
        if (memory / 1024 / 1024 > 100) {
            if (pathObjects.size() == 1 && !Dialogs.showYesNoDialog("Send region to ImageJ", String.format("Attempting to extract this region is likely to require > %.2f MB - are you sure you want to continue?", memory / 1024 / 1024)))
                return;
        }
        // We should switch to the event dispatch thread when interacting with ImageJ
        try {
            ImagePlus imp;
            PathObjectHierarchy hierarchy = viewer.getHierarchy();
            OverlayOptions options = viewer.getOverlayOptions();
            if (zEnd - zStart > 1 || tEnd - tStart > 1) {
                // TODO: Handle overlays
                imp = IJTools.extractHyperstack(server, region, zStart, zEnd, tStart, tEnd);
                if (includeROI && roi != null) {
                    Roi roiIJ = IJTools.convertToIJRoi(roi, imp.getCalibration(), region.getDownsample());
                    imp.setRoi(roiIJ);
                }
                if (includeOverlay) {
                    Overlay overlay = new Overlay();
                    for (int t = tStart; t < tEnd; t++) {
                        for (int z = zStart; z < zEnd; z++) {
                            RegionRequest request2 = RegionRequest.createInstance(region.getPath(), region.getDownsample(), region.getX(), region.getY(), region.getWidth(), region.getHeight(), z, t);
                            var regionPredicate = PathObjectTools.createImageRegionPredicate(request2);
                            Overlay temp = IJExtension.extractOverlay(hierarchy, request2, options, p -> p != pathObject && regionPredicate.test(p));
                            if (overlay == null)
                                overlay = temp;
                            for (int i = 0; i < temp.size(); i++) {
                                Roi roiIJ = temp.get(i);
                                roiIJ.setPosition(-1, z + 1, t + 1);
                                overlay.add(roiIJ);
                            }
                        }
                    }
                    if (overlay != null && overlay.size() > 0)
                        imp.setOverlay(overlay);
                }
            } else if (includeOverlay)
                imp = IJExtension.extractROIWithOverlay(server, pathObject, hierarchy, region, includeROI, options).getImage();
            else
                imp = IJExtension.extractROIWithOverlay(server, pathObject, null, region, includeROI, options).getImage();
            // Set display ranges if we can
            if (viewer != null && imp instanceof CompositeImage) {
                var availableChannels = viewer.getImageDisplay().availableChannels().stream().filter(c -> c instanceof SingleChannelDisplayInfo).map(c -> (SingleChannelDisplayInfo) c).collect(Collectors.toList());
                CompositeImage impComp = (CompositeImage) imp;
                if (availableChannels.size() == imp.getNChannels()) {
                    for (int c = 0; c < availableChannels.size(); c++) {
                        var channel = availableChannels.get(c);
                        imp.setPosition(c + 1, 1, 1);
                        impComp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
                    }
                    imp.setPosition(1);
                }
            } else if (selectedChannels.size() == 1 && imp.getType() != ImagePlus.COLOR_RGB) {
                // Setting the display range for non-RGB images can give unexpected results (changing pixel values)
                var channel = selectedChannels.get(0);
                imp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
            }
            imps.add(imp);
        } catch (IOException e) {
            Dialogs.showErrorMessage("Send region to ImageJ", e);
            return;
        }
    }
    // Show all the images we've got
    if (!imps.isEmpty()) {
        SwingUtilities.invokeLater(() -> {
            boolean batchMode = Interpreter.batchMode;
            // Try to start an ImageJ instance, and return if this fails
            try {
                ImageJ ij = IJExtension.getImageJInstance();
                if (ij == null)
                    return;
                ij.setVisible(true);
                // Make sure we aren't in batch mode, so that image will display
                Interpreter.batchMode = false;
                for (ImagePlus imp : imps) {
                    imp.show();
                }
            } finally {
                Interpreter.batchMode = batchMode;
            }
        });
    }
}
Also used : CompositeImage(ij.CompositeImage) Rectangle(java.awt.Rectangle) ImageServer(qupath.lib.images.servers.ImageServer) IJTools(qupath.imagej.tools.IJTools) LoggerFactory(org.slf4j.LoggerFactory) PathObjectHierarchy(qupath.lib.objects.hierarchy.PathObjectHierarchy) ArrayList(java.util.ArrayList) Dialogs(qupath.lib.gui.dialogs.Dialogs) SwingUtilities(javax.swing.SwingUtilities) ParameterList(qupath.lib.plugins.parameters.ParameterList) Overlay(ij.gui.Overlay) QuPathGUI(qupath.lib.gui.QuPathGUI) Logger(org.slf4j.Logger) Interpreter(ij.macro.Interpreter) BufferedImage(java.awt.image.BufferedImage) GeneralTools(qupath.lib.common.GeneralTools) RegionRequest(qupath.lib.regions.RegionRequest) Collection(java.util.Collection) ChannelDisplayTransformServer(qupath.lib.gui.images.servers.ChannelDisplayTransformServer) AwtTools(qupath.lib.awt.common.AwtTools) ChannelDisplayInfo(qupath.lib.display.ChannelDisplayInfo) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) OverlayOptions(qupath.lib.gui.viewer.OverlayOptions) PathObjectTools(qupath.lib.objects.PathObjectTools) PathObject(qupath.lib.objects.PathObject) ImageJ(ij.ImageJ) QuPathViewer(qupath.lib.gui.viewer.QuPathViewer) ROI(qupath.lib.roi.interfaces.ROI) ImagePlus(ij.ImagePlus) List(java.util.List) IJ(ij.IJ) PixelCalibration(qupath.lib.images.servers.PixelCalibration) SingleChannelDisplayInfo(qupath.lib.display.SingleChannelDisplayInfo) Collections(java.util.Collections) Roi(ij.gui.Roi)
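For readers skimming this example, the downsample logic above can be summarised in a small standalone helper. This is an illustrative sketch only (the method name is an assumption, not QuPath API): when the resolution unit is 'pixels' the value is used directly as the downsample factor, otherwise the requested pixel size is divided by the mean of the calibrated pixel width and height.

// Sketch, not part of QuPath: the downsample calculation performed in run() above.
static double toDownsample(PixelCalibration cal, double resolution, boolean resolutionInPixels) {
    if (resolutionInPixels)
        return resolution; // 'pixels' unit: the requested resolution is already a downsample factor
    // Calibrated unit: divide the requested pixel size by the mean calibrated pixel size
    double meanPixelSize = (cal.getPixelWidth().doubleValue() + cal.getPixelHeight().doubleValue()) / 2.0;
    return resolution / meanPixelSize;
}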

Example 37 with ROI

Use of qupath.lib.roi.interfaces.ROI in project qupath by qupath.

The class QP, method makeInverseAnnotation.

/**
 * Make an annotation, for which the ROI is obtained by subtracting the ROIs of the specified objects from the closest
 * common ancestor ROI (or entire image if the closest ancestor is the root).
 * <p>
 * If an inverted annotation can be created, it is added to the hierarchy and set as selected.
 *
 * @param imageData the image data containing the annotations
 * @param pathObjects the annotations to invert
 * @return true if an inverted annotation is added to the hierarchy, false otherwise.
 */
public static boolean makeInverseAnnotation(final ImageData<?> imageData, Collection<PathObject> pathObjects) {
    if (imageData == null)
        return false;
    var map = pathObjects.stream().filter(p -> p.hasROI() && p.getROI().isArea()).collect(Collectors.groupingBy(p -> p.getROI().getImagePlane()));
    if (map.isEmpty()) {
        logger.warn("No area annotations available - cannot create inverse ROI!");
        return false;
    }
    if (map.size() > 1) {
        logger.error("Cannot merge annotations from different image planes!");
        return false;
    }
    ImagePlane plane = map.keySet().iterator().next();
    List<PathObject> pathObjectList = map.get(plane);
    PathObjectHierarchy hierarchy = imageData.getHierarchy();
    // Try to get the best candidate parent
    Collection<PathObject> parentSet = pathObjectList.stream().map(p -> p.getParent()).collect(Collectors.toCollection(HashSet::new));
    PathObject parent;
    if (parentSet.size() > 1) {
        parentSet.clear();
        boolean firstTime = true;
        for (PathObject temp : pathObjectList) {
            if (firstTime)
                parentSet.addAll(PathObjectTools.getAncestorList(temp));
            else
                parentSet.retainAll(PathObjectTools.getAncestorList(temp));
            firstTime = false;
        }
        List<PathObject> parents = new ArrayList<>(parentSet);
        Collections.sort(parents, Comparator.comparingInt(PathObject::getLevel).reversed().thenComparingDouble(p -> p.hasROI() ? p.getROI().getArea() : Double.MAX_VALUE));
        parent = parents.get(0);
    } else
        parent = parentSet.iterator().next();
    // Get the parent area
    Geometry geometryParent;
    if (parent == null || parent.isRootObject() || !parent.hasROI())
        geometryParent = GeometryTools.createRectangle(0, 0, imageData.getServer().getWidth(), imageData.getServer().getHeight());
    else
        geometryParent = parent.getROI().getGeometry();
    // Get the parent area to use
    var union = GeometryTools.union(pathObjectList.stream().map(p -> p.getROI().getGeometry()).collect(Collectors.toList()));
    var geometry = geometryParent.difference(union);
    // Create the new ROI
    ROI shapeNew = GeometryTools.geometryToROI(geometry, plane);
    PathObject pathObjectNew = PathObjects.createAnnotationObject(shapeNew);
    parent.addPathObject(pathObjectNew);
    hierarchy.fireHierarchyChangedEvent(parent);
    hierarchy.getSelectionModel().setSelectedObject(pathObjectNew);
    return true;
}
Also used : FeatureExtractors(qupath.opencv.ml.objects.features.FeatureExtractors) Arrays(java.util.Arrays) ServerTools(qupath.lib.images.servers.ServerTools) PathTileObject(qupath.lib.objects.PathTileObject) IJTools(qupath.imagej.tools.IJTools) GroovyCV(qupath.opencv.tools.GroovyCV) CommandLinePluginRunner(qupath.lib.plugins.CommandLinePluginRunner) ImageRegion(qupath.lib.regions.ImageRegion) Map(java.util.Map) PixelClassifierTools(qupath.opencv.ml.pixel.PixelClassifierTools) Path(java.nio.file.Path) ColorTools(qupath.lib.common.ColorTools) Member(java.lang.reflect.Member) PathObjects(qupath.lib.objects.PathObjects) Set(java.util.Set) PathAnnotationObject(qupath.lib.objects.PathAnnotationObject) Project(qupath.lib.projects.Project) Stream(java.util.stream.Stream) ColorModels(qupath.lib.analysis.heatmaps.ColorModels) ShapeFeatures(qupath.lib.analysis.features.ObjectMeasurements.ShapeFeatures) DensityMaps(qupath.lib.analysis.heatmaps.DensityMaps) PathObjectPredicates(qupath.lib.objects.PathObjectPredicates) GeoJsonExportOptions(qupath.lib.io.PathIO.GeoJsonExportOptions) GsonTools(qupath.lib.io.GsonTools) PathObjectHierarchy(qupath.lib.objects.hierarchy.PathObjectHierarchy) Constructor(java.lang.reflect.Constructor) PathPlugin(qupath.lib.plugins.PathPlugin) Projects(qupath.lib.projects.Projects) ArrayList(java.util.ArrayList) ROIs(qupath.lib.roi.ROIs) ImageOps(qupath.opencv.ops.ImageOps) LinkedHashSet(java.util.LinkedHashSet) PathClassifierTools(qupath.lib.classifiers.PathClassifierTools) ObjectArrays(com.google.common.collect.ObjectArrays) Files(java.nio.file.Files) GeneralTools(qupath.lib.common.GeneralTools) RegionRequest(qupath.lib.regions.RegionRequest) TileExporter(qupath.lib.images.writers.TileExporter) DistanceTools(qupath.lib.analysis.DistanceTools) IOException(java.io.IOException) Padding(qupath.lib.regions.Padding) Field(java.lang.reflect.Field) DelaunayTools(qupath.lib.analysis.DelaunayTools) File(java.io.File) PathObjectTools(qupath.lib.objects.PathObjectTools) ROI(qupath.lib.roi.interfaces.ROI) PixelClassifier(qupath.lib.classifiers.pixel.PixelClassifier) Paths(java.nio.file.Paths) TMAGrid(qupath.lib.objects.hierarchy.TMAGrid) UriResource(qupath.lib.io.UriResource) ImageServerMetadata(qupath.lib.images.servers.ImageServerMetadata) PathIO(qupath.lib.io.PathIO) RunSavedClassifierWorkflowStep(qupath.lib.plugins.workflow.RunSavedClassifierWorkflowStep) CreateObjectOptions(qupath.opencv.ml.pixel.PixelClassifierTools.CreateObjectOptions) ImageServer(qupath.lib.images.servers.ImageServer) LoggerFactory(org.slf4j.LoggerFactory) Scanner(java.util.Scanner) PathObjectFilter(qupath.lib.objects.PathObjectFilter) OpenCVMLClassifier(qupath.opencv.ml.objects.OpenCVMLClassifier) BufferedImageTools(qupath.lib.awt.common.BufferedImageTools) URI(java.net.URI) ImageServers(qupath.lib.images.servers.ImageServers) Method(java.lang.reflect.Method) ImageType(qupath.lib.images.ImageData.ImageType) ObjectMeasurements(qupath.lib.analysis.features.ObjectMeasurements) BufferedImage(java.awt.image.BufferedImage) PixelClassifiers(qupath.opencv.ml.pixel.PixelClassifiers) Predicate(java.util.function.Predicate) ImageServerProvider(qupath.lib.images.servers.ImageServerProvider) Collection(java.util.Collection) DensityMapBuilder(qupath.lib.analysis.heatmaps.DensityMaps.DensityMapBuilder) UriUpdater(qupath.lib.io.UriUpdater) Collectors(java.util.stream.Collectors) FileNotFoundException(java.io.FileNotFoundException) PathObject(qupath.lib.objects.PathObject) 
PathDetectionObject(qupath.lib.objects.PathDetectionObject) List(java.util.List) ProjectIO(qupath.lib.projects.ProjectIO) ContourTracing(qupath.lib.analysis.images.ContourTracing) PathObjectClassifier(qupath.lib.classifiers.PathObjectClassifier) Modifier(java.lang.reflect.Modifier) DnnTools(qupath.opencv.dnn.DnnTools) ImagePlane(qupath.lib.regions.ImagePlane) Geometry(org.locationtech.jts.geom.Geometry) Pattern(java.util.regex.Pattern) PathCellObject(qupath.lib.objects.PathCellObject) PathClassTools(qupath.lib.objects.classes.PathClassTools) ImageChannel(qupath.lib.images.servers.ImageChannel) OpenCVTools(qupath.opencv.tools.OpenCVTools) HashMap(java.util.HashMap) PathClassFactory(qupath.lib.objects.classes.PathClassFactory) ColorTransforms(qupath.lib.images.servers.ColorTransforms) CellTools(qupath.lib.objects.CellTools) HashSet(java.util.HashSet) ColorDeconvolutionStains(qupath.lib.color.ColorDeconvolutionStains) ObjectClassifiers(qupath.lib.classifiers.object.ObjectClassifiers) GeometryTools(qupath.lib.roi.GeometryTools) NoSuchElementException(java.util.NoSuchElementException) WeakHashMap(java.util.WeakHashMap) ImageData(qupath.lib.images.ImageData) RoiTools(qupath.lib.roi.RoiTools) Logger(org.slf4j.Logger) ProjectImageEntry(qupath.lib.projects.ProjectImageEntry) ImageWriterTools(qupath.lib.images.writers.ImageWriterTools) PixelType(qupath.lib.images.servers.PixelType) PathClass(qupath.lib.objects.classes.PathClass) PointIO(qupath.lib.io.PointIO) TMACoreObject(qupath.lib.objects.TMACoreObject) ObjectClassifier(qupath.lib.classifiers.object.ObjectClassifier) Comparator(java.util.Comparator) Collections(java.util.Collections)
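A hedged usage sketch for makeInverseAnnotation: assuming a QuPath scripting context in which the QP static methods are available (getCurrentImageData() is the usual accessor, but treat the exact entry point as an assumption rather than a verbatim QuPath example), the inverse of the currently selected area annotations could be created as follows.

// Usage sketch (assumptions noted above)
ImageData<BufferedImage> imageData = getCurrentImageData();   // assumed QP scripting accessor
Collection<PathObject> selected = imageData.getHierarchy().getSelectionModel().getSelectedObjects();
// Returns false if no area ROIs are selected, or if the selection spans more than one image plane
boolean success = makeInverseAnnotation(imageData, selected);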

Example 38 with ROI

Use of qupath.lib.roi.interfaces.ROI in project qupath by qupath.

The class SmoothFeaturesPlugin, method smoothMeasurements.

/**
 * Using the centroids of the ROIs within PathObjects, 'smooth' measurements by computing a weighted average of the corresponding
 * measurements of nearby objects, with weights determined by centroid distance.
 *
 * @param pathObjects the objects whose measurements should be smoothed
 * @param measurements the names of the measurements to smooth
 * @param fwhmPixels full width at half maximum of the Gaussian weighting, in pixels
 * @param fwhmString text representation of the FWHM, used when naming the smoothed measurements (may be null, in which case the pixel value is used)
 * @param withinClass if true, only smooth using objects that share the same base classification
 * @param useLegacyNames if true, use the previous naming scheme for the smoothed measurements
 */
// public static Set<String> smoothMeasurements(List<PathObject> pathObjects, List<String> measurements, double fwhmPixels) {
public static void smoothMeasurements(List<PathObject> pathObjects, List<String> measurements, double fwhmPixels, String fwhmString, boolean withinClass, boolean useLegacyNames) {
    if (measurements.isEmpty() || pathObjects.size() <= 1)
        // Collections.emptySet();
        return;
    if (fwhmString == null)
        fwhmString = String.format("%.2f px", fwhmPixels);
    double fwhmPixels2 = fwhmPixels * fwhmPixels;
    double sigmaPixels = fwhmPixels / Math.sqrt(8 * Math.log(2));
    double sigma2 = 2 * sigmaPixels * sigmaPixels;
    double maxDist = sigmaPixels * 3;
    // Maximum separation
    double maxDistSq = maxDist * maxDist;
    int nObjects = pathObjects.size();
    // int counter = 0;
    // Sort by x-coordinate - this gives us a method of breaking early
    Collections.sort(pathObjects, new Comparator<PathObject>() {

        @Override
        public int compare(PathObject o1, PathObject o2) {
            double x1 = o1.getROI().getCentroidX();
            double x2 = o2.getROI().getCentroidX();
            // System.out.println(String.format("(%.2f, %.2f) vs (%.2f, %.2f)", o1.getROI().getCentroidX(), o1.getROI().getCentroidY(), o2.getROI().getCentroidX(), o2.getROI().getCentroidY()));				}
            return Double.compare(x1, x2);
        // if (x1 > x2)
        // return 1;
        // if (x2 < x1)
        // return -1;
        // System.out.println(x1 + " vs. " + x2);
        // System.out.println(String.format("(%.2f, %.2f) vs (%.2f, %.2f)", o1.getROI().getCentroidX(), o1.getROI().getCentroidY(), o2.getROI().getCentroidX(), o2.getROI().getCentroidY()));
        // return 0;
        // return (int)Math.signum(o1.getROI().getCentroidX() - o2.getROI().getCentroidX());
        }
    });
    // Create a LUT for distances - calculating exp every time is expensive
    double[] distanceWeights = new double[(int) (maxDist + .5) + 1];
    for (int i = 0; i < distanceWeights.length; i++) {
        distanceWeights[i] = Math.exp(-(i * i) / sigma2);
    }
    System.currentTimeMillis();
    float[] xCentroids = new float[nObjects];
    float[] yCentroids = new float[nObjects];
    PathClass[] pathClasses = new PathClass[nObjects];
    int[] nearbyDetectionCounts = new int[nObjects];
    float[][] measurementsWeighted = new float[nObjects][measurements.size()];
    float[][] measurementDenominators = new float[nObjects][measurements.size()];
    float[][] measurementValues = new float[nObjects][measurements.size()];
    for (int i = 0; i < nObjects; i++) {
        PathObject pathObject = pathObjects.get(i);
        if (withinClass)
            pathClasses[i] = pathObject.getPathClass() == null ? null : pathObject.getPathClass().getBaseClass();
        ROI roi = pathObject.getROI();
        xCentroids[i] = (float) roi.getCentroidX();
        yCentroids[i] = (float) roi.getCentroidY();
        MeasurementList measurementList = pathObject.getMeasurementList();
        int ind = 0;
        for (String name : measurements) {
            float value = (float) measurementList.getMeasurementValue(name);
            // Used to cache values
            measurementValues[i][ind] = value;
            // Based on distances and measurements
            measurementsWeighted[i][ind] = value;
            // Based on distances alone
            measurementDenominators[i][ind] = 1;
            ind++;
        }
    }
    String prefix, postfix, denomName, countsName;
    // Use previous syntax for naming smoothed measurements
    if (useLegacyNames) {
        prefix = "";
        postfix = String.format(" - Smoothed (FWHM %s)", fwhmString);
        denomName = String.format("Smoothed denominator (local density, FWHM %s)", fwhmString);
        countsName = String.format("Nearby detection counts (radius %s)", fwhmString);
    } else {
        prefix = String.format("Smoothed: %s: ", fwhmString);
        postfix = "";
        // prefix + "Weighted density";
        denomName = null;
        countsName = prefix + "Nearby detection counts";
    // denomName = prefix + "Denominator (local density)";
    // countsName = prefix + "Nearby detection counts";
    }
    // Loop through objects, computing predominant class based on distance weighting
    for (int i = 0; i < nObjects; i++) {
        // Extract the current class index
        PathObject pathObject = pathObjects.get(i);
        PathClass pathClass = pathClasses[i];
        MeasurementList measurementList = pathObject.getMeasurementList();
        float[] mValues = measurementValues[i];
        float[] mWeighted = measurementsWeighted[i];
        float[] mDenominator = measurementDenominators[i];
        // Compute centroid distances
        double xi = xCentroids[i];
        double yi = yCentroids[i];
        for (int j = i + 1; j < nObjects; j++) {
            double xj = xCentroids[j];
            double yj = yCentroids[j];
            // Break early if we are already too far away
            if (Math.abs(xj - xi) > maxDist) {
                break;
            }
            double distSq = (xj - xi) * (xj - xi) + (yj - yi) * (yj - yi);
            // // Check if we are close enough to have an influence
            if (distSq > maxDistSq || Double.isNaN(distSq))
                continue;
            // Check if the class is ok, if check needed
            if (withinClass && pathClass != pathClasses[j])
                continue;
            // Update the counts, if close enough
            if (distSq < fwhmPixels2) {
                nearbyDetectionCounts[i]++;
                nearbyDetectionCounts[j]++;
            }
            // Update the class weights for both objects currently being tested
            // Compute weight based on centroid distances
            // double weight = Math.exp(-distSq/sigma2);
            // * pathObjects.get(j).getClassProbability();
            double weight = distanceWeights[(int) (Math.sqrt(distSq) + .5)];
            float[] temp = measurementValues[j];
            float[] tempWeighted = measurementsWeighted[j];
            float[] tempDenominator = measurementDenominators[j];
            for (int ind = 0; ind < measurements.size(); ind++) {
                float tempVal = temp[ind];
                if (Float.isNaN(tempVal))
                    continue;
                mWeighted[ind] += tempVal * weight;
                mDenominator[ind] += weight;
                float tempVal2 = mValues[ind];
                if (Float.isNaN(tempVal2))
                    continue;
                tempWeighted[ind] += tempVal2 * weight;
                tempDenominator[ind] += weight;
            }
        }
        // Store the measurements
        int ind = 0;
        float maxDenominator = Float.NEGATIVE_INFINITY;
        for (String name : measurements) {
            // if (name.contains(" - Smoothed (FWHM ") || name.startsWith("Smoothed denominator (local density, ") || name.startsWith("Nearby detection counts"))
            // continue;
            float denominator = mDenominator[ind];
            if (denominator > maxDenominator)
                maxDenominator = denominator;
            String nameToAdd = prefix + name + postfix;
            measurementList.putMeasurement(nameToAdd, mWeighted[ind] / denominator);
            // measurementsAdded.add(nameToAdd);
            // measurementList.putMeasurement(name + " - weighted sum", mWeighted[ind]); // TODO: Support optionally providing weighted sums
            // measurementList.addMeasurement(name + " - smoothed", mWeighted[ind] / mDenominator[ind]);
            ind++;
        }
        if (pathObject instanceof PathDetectionObject && denomName != null) {
            measurementList.putMeasurement(denomName, maxDenominator);
        // measurementsAdded.add(denomName);
        }
        if (pathObject instanceof PathDetectionObject && countsName != null) {
            measurementList.putMeasurement(countsName, nearbyDetectionCounts[i]);
        // measurementsAdded.add(countsName);
        }
        measurementList.close();
    }
    System.currentTimeMillis();
// return measurementsAdded;
}
Also used : PathDetectionObject(qupath.lib.objects.PathDetectionObject) MeasurementList(qupath.lib.measurements.MeasurementList) ROI(qupath.lib.roi.interfaces.ROI) PathClass(qupath.lib.objects.classes.PathClass) PathObject(qupath.lib.objects.PathObject)
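The numerical core of smoothMeasurements is the conversion of the FWHM into a Gaussian sigma and the precomputed distance-weight lookup table. Below is a minimal standalone sketch of just that weighting step; the helper name is an assumption, but the arithmetic mirrors the code above.

// Sketch: Gaussian distance weights as built in smoothMeasurements above.
// sigma = FWHM / sqrt(8 * ln 2); weight(d) = exp(-d^2 / (2 * sigma^2)), cut off at 3 * sigma.
static double[] buildDistanceWeights(double fwhmPixels) {
    double sigma = fwhmPixels / Math.sqrt(8 * Math.log(2));
    double sigma2 = 2 * sigma * sigma;
    double maxDist = sigma * 3;
    double[] weights = new double[(int) (maxDist + 0.5) + 1];
    for (int i = 0; i < weights.length; i++)
        weights[i] = Math.exp(-(i * (double) i) / sigma2); // weight at integer distance i
    return weights;
}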

Example 39 with ROI

Use of qupath.lib.roi.interfaces.ROI in project qupath by qupath.

The class IntensityFeaturesPlugin, method processObject.

static boolean processObject(final PathObject pathObject, final ParameterList params, final ImageData<BufferedImage> imageData) throws IOException {
    // Determine amount to downsample
    var server = imageData.getServer();
    var stains = imageData.getColorDeconvolutionStains();
    PixelCalibration cal = server.getPixelCalibration();
    double downsample = calculateDownsample(cal, params);
    if (downsample <= 0) {
        logger.warn("Effective downsample must be > 0 (requested value {})", downsample);
    }
    // Determine region shape
    RegionType regionType = (RegionType) params.getChoiceParameterValue("region");
    // Try to get ROI
    boolean useROI = regionType == RegionType.ROI || regionType == RegionType.NUCLEUS;
    ROI roi = null;
    if (regionType == RegionType.NUCLEUS) {
        if (pathObject instanceof PathCellObject)
            roi = ((PathCellObject) pathObject).getNucleusROI();
    } else
        roi = pathObject.getROI();
    // pathROI = ((PathCellObject)pathObject).getNucleusROI();
    if (roi == null)
        return false;
    // Create a map - this is useful for occasions when tiling is needed
    Map<FeatureColorTransform, List<FeatureComputer>> map = new LinkedHashMap<>();
    if (server.isRGB()) {
        for (FeatureColorTransform transform : FeatureColorTransformEnum.values()) {
            List<FeatureComputer> list = new ArrayList<>();
            map.put(transform, list);
            for (FeatureComputerBuilder builder : builders) {
                list.add(builder.build());
            }
        }
    } else {
        for (FeatureColorTransform transform : getBasicChannelTransforms(server.nChannels())) {
            List<FeatureComputer> list = new ArrayList<>();
            map.put(transform, list);
            for (FeatureComputerBuilder builder : builders) {
                list.add(builder.build());
            }
        }
    }
    String prefix = getDiameterString(server, params);
    // Create tiled ROIs, if required
    ImmutableDimension sizePreferred = ImmutableDimension.getInstance((int) (2000 * downsample), (int) (2000 * downsample));
    // ImmutableDimension sizePreferred = new ImmutableDimension((int)(200*downsample), (int)(200*downsample));
    Collection<? extends ROI> rois = RoiTools.computeTiledROIs(roi, sizePreferred, sizePreferred, false, 0);
    if (rois.size() > 1)
        logger.info("Splitting {} into {} tiles for intensity measurements", roi, rois.size());
    for (ROI pathROI : rois) {
        if (Thread.currentThread().isInterrupted()) {
            logger.warn("Measurement skipped - thread interrupted!");
            return false;
        }
        // Get bounds
        RegionRequest region;
        if (useROI) {
            region = RegionRequest.createInstance(server.getPath(), downsample, pathROI);
        } else {
            ImmutableDimension size = getPreferredTileSizePixels(server, params);
            // RegionRequest region = RegionRequest.createInstance(server.getPath(), downsample, (int)(pathROI.getCentroidX() + .5) - size.width/2, (int)(pathROI.getCentroidY() + .5) - size.height/2, size.width, size.height, pathROI.getT(), pathROI.getZ());
            // Try to align with pixel boundaries according to the downsample being used - otherwise, interpolation can cause some strange, pattern artefacts
            int xStart = (int) (Math.round(pathROI.getCentroidX() / downsample) * downsample) - size.width / 2;
            int yStart = (int) (Math.round(pathROI.getCentroidY() / downsample) * downsample) - size.height / 2;
            int width = Math.min(server.getWidth(), xStart + size.width) - xStart;
            int height = Math.min(server.getHeight(), yStart + size.height) - yStart;
            region = RegionRequest.createInstance(server.getPath(), downsample, xStart, yStart, width, height, pathROI.getT(), pathROI.getZ());
        }
        // // Check image large enough to do *anything* of value
        // if (region.getWidth() / downsample < 1 || region.getHeight() / downsample < 1) {
        // logger.trace("Requested region is too small! {}", region);
        // return false;
        // }
        // System.out.println(bounds);
        // System.out.println("Size: " + size);
        BufferedImage img = server.readBufferedImage(region);
        if (img == null) {
            logger.error("Could not read image - unable to compute intensity features for {}", pathObject);
            return false;
        }
        // Create mask ROI if necessary
        // If we just have 1 pixel, we want to use it so that the mean/min/max measurements are valid (even if nothing else is)
        byte[] maskBytes = null;
        if (useROI && img.getWidth() * img.getHeight() > 1) {
            BufferedImage imgMask = BufferedImageTools.createROIMask(img.getWidth(), img.getHeight(), pathROI, region);
            maskBytes = ((DataBufferByte) imgMask.getRaster().getDataBuffer()).getData();
        }
        boolean isRGB = server.isRGB();
        List<FeatureColorTransform> transforms;
        if (isRGB)
            transforms = Arrays.asList(FeatureColorTransformEnum.values());
        else
            transforms = getBasicChannelTransforms(server.nChannels());
        int w = img.getWidth();
        int h = img.getHeight();
        int[] rgbBuffer = isRGB ? img.getRGB(0, 0, w, h, null, 0, w) : null;
        float[] pixels = null;
        for (FeatureColorTransform transform : transforms) {
            // Check if the color transform is requested
            if (params.containsKey(transform.getKey()) && Boolean.TRUE.equals(params.getBooleanParameterValue(transform.getKey()))) {
                // Transform the pixels
                pixels = transform.getTransformedPixels(img, rgbBuffer, stains, pixels);
                // Create the simple image
                SimpleModifiableImage pixelImage = SimpleImages.createFloatImage(pixels, w, h);
                // Apply any arbitrary mask
                if (maskBytes != null) {
                    for (int i = 0; i < pixels.length; i++) {
                        if (maskBytes[i] == (byte) 0)
                            pixelImage.setValue(i % w, i / w, Float.NaN);
                    }
                } else if (regionType == RegionType.CIRCLE) {
                    // Apply circular tile mask
                    double cx = (w - 1) / 2;
                    double cy = (h - 1) / 2;
                    double radius = Math.max(w, h) * .5;
                    double distThreshold = radius * radius;
                    for (int y = 0; y < h; y++) {
                        for (int x = 0; x < w; x++) {
                            if ((cx - x) * (cx - x) + (cy - y) * (cy - y) > distThreshold)
                                pixelImage.setValue(x, y, Float.NaN);
                        }
                    }
                }
                // Do the computations
                for (FeatureComputer computer : map.get(transform)) {
                    computer.updateFeatures(pixelImage, transform, params);
                }
            }
        }
    }
    // Add measurements to the parent object
    for (Entry<FeatureColorTransform, List<FeatureComputer>> entry : map.entrySet()) {
        String name = prefix + ": " + entry.getKey().getName(imageData, false) + ":";
        for (FeatureComputer computer : entry.getValue()) computer.addMeasurements(pathObject, name, params);
    }
    pathObject.getMeasurementList().close();
    // Lock any measurements that require it
    if (pathObject instanceof PathAnnotationObject)
        ((PathAnnotationObject) pathObject).setLocked(true);
    else if (pathObject instanceof TMACoreObject)
        ((TMACoreObject) pathObject).setLocked(true);
    return true;
}
Also used : ArrayList(java.util.ArrayList) BufferedImage(java.awt.image.BufferedImage) LinkedHashMap(java.util.LinkedHashMap) HaralickFeatureComputer(qupath.lib.analysis.features.HaralickFeatureComputer) SimpleModifiableImage(qupath.lib.analysis.images.SimpleModifiableImage) PathAnnotationObject(qupath.lib.objects.PathAnnotationObject) MeasurementList(qupath.lib.measurements.MeasurementList) ParameterList(qupath.lib.plugins.parameters.ParameterList) List(java.util.List) TMACoreObject(qupath.lib.objects.TMACoreObject) PixelCalibration(qupath.lib.images.servers.PixelCalibration) ROI(qupath.lib.roi.interfaces.ROI) ImmutableDimension(qupath.lib.geom.ImmutableDimension) RegionRequest(qupath.lib.regions.RegionRequest) PathCellObject(qupath.lib.objects.PathCellObject)
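One detail worth highlighting from processObject is how square tile regions (the non-ROI case) are aligned with the downsample grid before pixels are read, to avoid interpolation artefacts. A small sketch of that snapping step follows; the helper is an assumption, not QuPath API.

// Sketch: snap a tile's top-left corner to a multiple of the downsample, centred on a ROI centroid.
static int snapToDownsampleGrid(double centroid, double downsample, int tileSize) {
    return (int) (Math.round(centroid / downsample) * downsample) - tileSize / 2;
}
// Used as in processObject above (pathROI, size and downsample assumed to be in scope):
// int xStart = snapToDownsampleGrid(pathROI.getCentroidX(), downsample, size.width);
// int yStart = snapToDownsampleGrid(pathROI.getCentroidY(), downsample, size.height);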

Example 40 with ROI

Use of qupath.lib.roi.interfaces.ROI in project qupath by qupath.

The class RefineAnnotationsPlugin, method getTasks.

@Override
protected Collection<Runnable> getTasks(final PluginRunner<T> runner) {
    Collection<? extends PathObject> parentObjects = getParentObjects(runner);
    if (parentObjects == null || parentObjects.isEmpty())
        return Collections.emptyList();
    // Add a single task, to avoid multithreading - which may complicate setting parents
    List<Runnable> tasks = new ArrayList<>(1);
    PathObjectHierarchy hierarchy = getHierarchy(runner);
    double minFragmentSize;
    double maxHoleSize, maxHoleSizeTemp;
    ImageServer<T> server = getServer(runner);
    PixelCalibration cal = server.getPixelCalibration();
    if (cal.hasPixelSizeMicrons()) {
        double pixelAreaMicrons = cal.getPixelWidthMicrons() * cal.getPixelHeightMicrons();
        minFragmentSize = params.getDoubleParameterValue("minFragmentSizeMicrons") / pixelAreaMicrons;
        maxHoleSizeTemp = params.getDoubleParameterValue("maxHoleSizeMicrons") / pixelAreaMicrons;
    } else {
        minFragmentSize = params.getDoubleParameterValue("minFragmentSizePixels");
        maxHoleSizeTemp = params.getDoubleParameterValue("maxHoleSizePixels");
    }
    // Handle negative values
    if (maxHoleSizeTemp < 0)
        maxHoleSize = Double.POSITIVE_INFINITY;
    else
        maxHoleSize = maxHoleSizeTemp;
    // Want to reset selection
    PathObject selected = hierarchy.getSelectionModel().getSelectedObject();
    Collection<PathObject> previousSelection = new ArrayList<>(hierarchy.getSelectionModel().getSelectedObjects());
    tasks.add(() -> {
        List<PathObject> toRemove = new ArrayList<>();
        Map<PathROIObject, ROI> toUpdate = new HashMap<>();
        for (PathObject pathObject : parentObjects) {
            ROI roiOrig = pathObject.getROI();
            if (roiOrig == null || !roiOrig.isArea())
                continue;
            ROI roiUpdated = RoiTools.removeSmallPieces(roiOrig, minFragmentSize, maxHoleSize);
            if (roiUpdated == null || roiUpdated.isEmpty())
                toRemove.add(pathObject);
            else if (roiOrig != roiUpdated && pathObject instanceof PathROIObject) {
                toUpdate.put((PathROIObject) pathObject, roiUpdated);
            }
        }
        if (toRemove.isEmpty() && toUpdate.isEmpty())
            return;
        hierarchy.getSelectionModel().clearSelection();
        if (!toRemove.isEmpty())
            hierarchy.removeObjects(toRemove, true);
        if (!toUpdate.isEmpty()) {
            hierarchy.removeObjects(toUpdate.keySet(), true);
            toUpdate.forEach((p, r) -> p.setROI(r));
            hierarchy.addPathObjects(toUpdate.keySet());
        }
        hierarchy.getSelectionModel().selectObjects(previousSelection);
        hierarchy.getSelectionModel().setSelectedObject(selected, true);
    });
    return tasks;
}
Also used : PathObjectHierarchy(qupath.lib.objects.hierarchy.PathObjectHierarchy) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) PixelCalibration(qupath.lib.images.servers.PixelCalibration) PathROIObject(qupath.lib.objects.PathROIObject) ROI(qupath.lib.roi.interfaces.ROI) PathObject(qupath.lib.objects.PathObject)
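The only unit handling in getTasks is the conversion of the minimum-fragment and maximum-hole thresholds from square microns to pixel counts. A one-method sketch of that conversion is shown below; the helper name is an assumption, but the arithmetic matches the code above.

// Sketch: convert an area threshold in square microns to an equivalent pixel count,
// using the calibrated pixel width and height as in getTasks above.
static double areaMicronsToPixels(PixelCalibration cal, double areaMicrons) {
    double pixelAreaMicrons = cal.getPixelWidthMicrons() * cal.getPixelHeightMicrons();
    return areaMicrons / pixelAreaMicrons;
}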

Aggregations

ROI (qupath.lib.roi.interfaces.ROI): 87
PathObject (qupath.lib.objects.PathObject): 61
ArrayList (java.util.ArrayList): 31
BufferedImage (java.awt.image.BufferedImage): 24
PathObjectHierarchy (qupath.lib.objects.hierarchy.PathObjectHierarchy): 24
IOException (java.io.IOException): 20
RegionRequest (qupath.lib.regions.RegionRequest): 19
List (java.util.List): 17
Collectors (java.util.stream.Collectors): 17
RectangleROI (qupath.lib.roi.RectangleROI): 17
Logger (org.slf4j.Logger): 16
LoggerFactory (org.slf4j.LoggerFactory): 16
PolygonROI (qupath.lib.roi.PolygonROI): 16
PathAnnotationObject (qupath.lib.objects.PathAnnotationObject): 15
Point2D (java.awt.geom.Point2D): 14
Collection (java.util.Collection): 14
Collections (java.util.Collections): 14
Geometry (org.locationtech.jts.geom.Geometry): 14
PathClass (qupath.lib.objects.classes.PathClass): 14
ImagePlane (qupath.lib.regions.ImagePlane): 13