Example 6 with ImageServer

Use of qupath.lib.images.servers.ImageServer in project qupath by qupath.

The class ExtractRegionCommand, method run().

@Override
public void run() {
    QuPathViewer viewer = qupath.getViewer();
    ImageServer<BufferedImage> server = null;
    if (viewer != null)
        server = viewer.getServer();
    if (server == null)
        return;
    List<String> unitOptions = new ArrayList<>();
    unitOptions.add(PIXELS_UNIT);
    String unit = server.getPixelCalibration().getPixelWidthUnit();
    if (unit.equals(server.getPixelCalibration().getPixelHeightUnit()) && !unit.equals(PixelCalibration.PIXEL))
        unitOptions.add(unit);
    if (!unitOptions.contains(resolutionUnit))
        resolutionUnit = PIXELS_UNIT;
    ParameterList params = new ParameterList()
            .addDoubleParameter("resolution", "Resolution", resolution, null,
                    "Resolution at which the image will be exported, defined as the 'pixel size' in Resolution units")
            .addChoiceParameter("resolutionUnit", "Resolution unit", resolutionUnit, unitOptions,
                    "Units defining the export resolution; if 'pixels' then the resolution is the same as a downsample value")
            .addBooleanParameter("includeROI", "Include ROI", includeROI,
                    "Include the primary object defining the exported region as an active ROI in ImageJ")
            .addBooleanParameter("includeOverlay", "Include overlay", includeOverlay,
                    "Include any objects overlapping the exported region as ROIs on an ImageJ overlay")
            .addBooleanParameter("doTransforms", "Apply color transforms", doTransforms,
                    "Optionally apply any color transforms when sending the pixels to ImageJ")
            .addBooleanParameter("doZ", "All z-slices", doZ,
                    "Optionally include all slices of a z-stack")
            .addBooleanParameter("doT", "All timepoints", doT,
                    "Optionally include all timepoints of a time series");
    // params.setHiddenParameters(unitOptions.size() <= 1, "resolutionUnit");
    params.setHiddenParameters(server.nZSlices() == 1, "doZ");
    params.setHiddenParameters(server.nTimepoints() == 1, "doT");
    if (!Dialogs.showParameterDialog("Send region to ImageJ", params))
        return;
    // Parse values
    resolution = params.getDoubleParameterValue("resolution");
    resolutionUnit = (String) params.getChoiceParameterValue("resolutionUnit");
    includeROI = params.getBooleanParameterValue("includeROI");
    includeOverlay = params.getBooleanParameterValue("includeOverlay");
    doTransforms = params.getBooleanParameterValue("doTransforms");
    doZ = params.getBooleanParameterValue("doZ");
    doT = params.getBooleanParameterValue("doT");
    // Calculate downsample
    double downsample = resolution;
    if (!resolutionUnit.equals(PIXELS_UNIT))
        downsample = resolution / (server.getPixelCalibration().getPixelHeight().doubleValue() / 2.0 + server.getPixelCalibration().getPixelWidth().doubleValue() / 2.0);
    // Color transforms are (currently) only applied for brightfield images - for fluorescence we always provide everything as unchanged as possible
    List<ChannelDisplayInfo> selectedChannels = new ArrayList<>(viewer.getImageDisplay().selectedChannels());
    List<ChannelDisplayInfo> channels = doTransforms && !selectedChannels.isEmpty() ? selectedChannels : null;
    if (channels != null)
        server = ChannelDisplayTransformServer.createColorTransformServer(server, channels);
    // Loop through all selected objects
    Collection<PathObject> pathObjects = viewer.getHierarchy().getSelectionModel().getSelectedObjects();
    if (pathObjects.isEmpty())
        pathObjects = Collections.singletonList(viewer.getHierarchy().getRootObject());
    List<ImagePlus> imps = new ArrayList<>();
    for (PathObject pathObject : pathObjects) {
        if (Thread.currentThread().isInterrupted() || IJ.escapePressed())
            return;
        int width, height;
        if (pathObject == null || !pathObject.hasROI()) {
            width = server.getWidth();
            height = server.getHeight();
        } else {
            Rectangle bounds = AwtTools.getBounds(pathObject.getROI());
            width = bounds.width;
            height = bounds.height;
        }
        RegionRequest region;
        ROI roi = pathObject == null ? null : pathObject.getROI();
        if (roi == null || PathObjectTools.hasPointROI(pathObject)) {
            region = RegionRequest.createInstance(server.getPath(), downsample, 0, 0, server.getWidth(), server.getHeight(), viewer.getZPosition(), viewer.getTPosition());
        } else
            region = RegionRequest.createInstance(server.getPath(), downsample, roi);
        // region = RegionRequest.createInstance(server.getPath(), downsample, pathObject.getROI(), viewer.getZPosition(), viewer.getTPosition());
        // Minimum size has been removed (v0.2.0-m4); returned regions should be at least 1x1 pixels
        // if (region.getWidth() / downsample < 8 || region.getHeight() / downsample < 8) {
        // DisplayHelpers.showErrorMessage("Send region to ImageJ", "The width & height of the extracted image must both be >= 8 pixels");
        // continue;
        // }
        // Calculate required z-slices and time-points
        int zStart = doZ ? 0 : region.getZ();
        int zEnd = doZ ? server.nZSlices() : region.getZ() + 1;
        int tStart = doT ? 0 : region.getT();
        int tEnd = doT ? server.nTimepoints() : region.getT() + 1;
        long nZ = zEnd - zStart;
        long nT = tEnd - tStart;
        int bytesPerPixel = server.isRGB() ? 4 : server.getPixelType().getBytesPerPixel() * server.nChannels();
        double memory = ((long) width * height * nZ * nT * bytesPerPixel) / (downsample * downsample);
        // TODO: Perform calculation based on actual amount of available memory
        long availableMemory = GeneralTools.estimateAvailableMemory();
        if (memory >= availableMemory * 0.95) {
            logger.error("Cannot extract region {} - estimated size is too large (approx. {} MB)", pathObject, GeneralTools.formatNumber(memory / (1024.0 * 1024.0), 2));
            Dialogs.showErrorMessage("Send region to ImageJ error", "Selected region is too large to extract - please selected a smaller region or use a higher downsample factor");
            continue;
        }
        if (memory / 1024 / 1024 > 100) {
            if (pathObjects.size() == 1 && !Dialogs.showYesNoDialog("Send region to ImageJ", String.format("Attempting to extract this region is likely to require > %.2f MB - are you sure you want to continue?", memory / 1024 / 1024)))
                return;
        }
        // We should switch to the event dispatch thread when interacting with ImageJ
        try {
            ImagePlus imp;
            PathObjectHierarchy hierarchy = viewer.getHierarchy();
            OverlayOptions options = viewer.getOverlayOptions();
            if (zEnd - zStart > 1 || tEnd - tStart > 1) {
                // TODO: Handle overlays
                imp = IJTools.extractHyperstack(server, region, zStart, zEnd, tStart, tEnd);
                if (includeROI && roi != null) {
                    Roi roiIJ = IJTools.convertToIJRoi(roi, imp.getCalibration(), region.getDownsample());
                    imp.setRoi(roiIJ);
                }
                if (includeOverlay) {
                    Overlay overlay = new Overlay();
                    for (int t = tStart; t < tEnd; t++) {
                        for (int z = zStart; z < zEnd; z++) {
                            RegionRequest request2 = RegionRequest.createInstance(region.getPath(), region.getDownsample(), region.getX(), region.getY(), region.getWidth(), region.getHeight(), z, t);
                            var regionPredicate = PathObjectTools.createImageRegionPredicate(request2);
                            Overlay temp = IJExtension.extractOverlay(hierarchy, request2, options, p -> p != pathObject && regionPredicate.test(p));
                            if (overlay == null)
                                overlay = temp;
                            for (int i = 0; i < temp.size(); i++) {
                                Roi roiIJ = temp.get(i);
                                roiIJ.setPosition(-1, z + 1, t + 1);
                                overlay.add(roiIJ);
                            }
                        }
                    }
                    if (overlay != null && overlay.size() > 0)
                        imp.setOverlay(overlay);
                }
            } else if (includeOverlay)
                imp = IJExtension.extractROIWithOverlay(server, pathObject, hierarchy, region, includeROI, options).getImage();
            else
                imp = IJExtension.extractROIWithOverlay(server, pathObject, null, region, includeROI, options).getImage();
            // Set display ranges if we can
            if (viewer != null && imp instanceof CompositeImage) {
                var availableChannels = viewer.getImageDisplay().availableChannels().stream().filter(c -> c instanceof SingleChannelDisplayInfo).map(c -> (SingleChannelDisplayInfo) c).collect(Collectors.toList());
                CompositeImage impComp = (CompositeImage) imp;
                if (availableChannels.size() == imp.getNChannels()) {
                    for (int c = 0; c < availableChannels.size(); c++) {
                        var channel = availableChannels.get(c);
                        imp.setPosition(c + 1, 1, 1);
                        impComp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
                    }
                    imp.setPosition(1);
                }
            } else if (selectedChannels.size() == 1 && imp.getType() != ImagePlus.COLOR_RGB) {
                // Setting the display range for non-RGB images can give unexpected results (changing pixel values)
                var channel = selectedChannels.get(0);
                imp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
            }
            imps.add(imp);
        } catch (IOException e) {
            Dialogs.showErrorMessage("Send region to ImageJ", e);
            return;
        }
    }
    // Show all the images we've got
    if (!imps.isEmpty()) {
        SwingUtilities.invokeLater(() -> {
            boolean batchMode = Interpreter.batchMode;
            // Try to start an ImageJ instance, and return if this fails
            try {
                ImageJ ij = IJExtension.getImageJInstance();
                if (ij == null)
                    return;
                ij.setVisible(true);
                // Make sure we aren't in batch mode, so that image will display
                Interpreter.batchMode = false;
                for (ImagePlus imp : imps) {
                    imp.show();
                }
            } finally {
                Interpreter.batchMode = batchMode;
            }
        });
    }
}
Also used: CompositeImage (ij.CompositeImage), Rectangle (java.awt.Rectangle), ImageServer (qupath.lib.images.servers.ImageServer), IJTools (qupath.imagej.tools.IJTools), LoggerFactory (org.slf4j.LoggerFactory), PathObjectHierarchy (qupath.lib.objects.hierarchy.PathObjectHierarchy), ArrayList (java.util.ArrayList), Dialogs (qupath.lib.gui.dialogs.Dialogs), SwingUtilities (javax.swing.SwingUtilities), ParameterList (qupath.lib.plugins.parameters.ParameterList), Overlay (ij.gui.Overlay), QuPathGUI (qupath.lib.gui.QuPathGUI), Logger (org.slf4j.Logger), Interpreter (ij.macro.Interpreter), BufferedImage (java.awt.image.BufferedImage), GeneralTools (qupath.lib.common.GeneralTools), RegionRequest (qupath.lib.regions.RegionRequest), Collection (java.util.Collection), ChannelDisplayTransformServer (qupath.lib.gui.images.servers.ChannelDisplayTransformServer), AwtTools (qupath.lib.awt.common.AwtTools), ChannelDisplayInfo (qupath.lib.display.ChannelDisplayInfo), IOException (java.io.IOException), Collectors (java.util.stream.Collectors), OverlayOptions (qupath.lib.gui.viewer.OverlayOptions), PathObjectTools (qupath.lib.objects.PathObjectTools), PathObject (qupath.lib.objects.PathObject), ImageJ (ij.ImageJ), QuPathViewer (qupath.lib.gui.viewer.QuPathViewer), ROI (qupath.lib.roi.interfaces.ROI), ImagePlus (ij.ImagePlus), List (java.util.List), IJ (ij.IJ), PixelCalibration (qupath.lib.images.servers.PixelCalibration), SingleChannelDisplayInfo (qupath.lib.display.SingleChannelDisplayInfo), Collections (java.util.Collections), Roi (ij.gui.Roi)
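
For completeness, here is a minimal sketch (not part of the QuPath source above) of the region-reading step on its own, without the dialog: it builds a RegionRequest for a ROI at a chosen downsample and reads the pixels. It assumes a QuPath version in which ImageServer#readBufferedImage(RegionRequest) is available (renamed readRegion in later releases); server, roi and downsample stand in for the values computed in the command.

import java.awt.image.BufferedImage;
import java.io.IOException;

import qupath.lib.images.servers.ImageServer;
import qupath.lib.regions.RegionRequest;
import qupath.lib.roi.interfaces.ROI;

class RegionReadingSketch {

    // Read the pixels for a ROI at the requested downsample factor
    static BufferedImage readRegionForRoi(ImageServer<BufferedImage> server, ROI roi, double downsample) throws IOException {
        // Same call used in the command above: the request captures path, downsample, bounds, z and t
        RegionRequest request = RegionRequest.createInstance(server.getPath(), downsample, roi);
        return server.readBufferedImage(request); // readRegion(request) in newer QuPath releases
    }
}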

Example 7 with ImageServer

Use of qupath.lib.images.servers.ImageServer in project qupath by qupath.

The class PixelClassifierUI, method promptToCreateObjects().

/**
 * Prompt the user to create objects directly from the pixels of an {@link ImageServer}.
 * Often, the {@link ImageServer} has been created by applying a {@link PixelClassifier}.
 *
 * @param imageData the {@link ImageData} to which objects should be added
 * @param classifier the {@link PixelClassifier} used to generate objects
 * @param classifierName the name of the classifier; if not null and the command runs to completion, it will be logged in the history
 * 						 workflow of the {@link ImageData} for later scripting.
 * @return true if changes were made, false otherwise
 */
public static boolean promptToCreateObjects(ImageData<BufferedImage> imageData, PixelClassifier classifier, String classifierName) {
    Objects.requireNonNull(imageData);
    Objects.requireNonNull(classifier);
    // Check what is selected
    List<SelectionChoice> choices = buildChoiceList(imageData.getHierarchy(), SelectionChoice.FULL_IMAGE, SelectionChoice.CURRENT_SELECTION, SelectionChoice.ANNOTATIONS, SelectionChoice.TMA);
    SelectionChoice defaultChoice;
    if (choices.contains(SelectionChoice.CURRENT_SELECTION))
        defaultChoice = SelectionChoice.CURRENT_SELECTION;
    else if (choices.contains(SelectionChoice.ANNOTATIONS))
        defaultChoice = SelectionChoice.ANNOTATIONS;
    else
        defaultChoice = choices.get(0);
    var parentChoice = Dialogs.showChoiceDialog("Pixel classifier", "Choose parent objects", choices, defaultChoice);
    if (parentChoice == null)
        return false;
    var outputObjectTypes = Arrays.asList("Annotation", "Detection");
    // To avoid confusing the user unnecessarily, if we *only* have ignored classes then set default for includeIgnored to true
    var labels = classifier.getMetadata().getClassificationLabels();
    boolean allIgnored = !labels.isEmpty() && labels.values().stream().allMatch(p -> p == null || PathClassTools.isIgnoredClass(p));
    boolean includeIgnored = allIgnored;
    var cal = imageData.getServer().getPixelCalibration();
    var units = cal.unitsMatch2D() ? cal.getPixelWidthUnit() + "^2" : cal.getPixelWidthUnit() + "x" + cal.getPixelHeightUnit();
    ParameterList params;
    if (lastCreateObjectParams != null) {
        params = lastCreateObjectParams.duplicate();
        params.setHiddenParameters(false, params.getKeyValueParameters(true).keySet().toArray(String[]::new));
        ((BooleanParameter) params.getParameters().get("includeIgnored")).setValue(includeIgnored);
    } else {
        params = new ParameterList()
                .addChoiceParameter("objectType", "New object type", "Annotation", outputObjectTypes,
                        "Define the type of objects that will be created")
                .addDoubleParameter("minSize", "Minimum object size", 0, units,
                        "Minimum size of a region to keep (smaller regions will be dropped)")
                .addDoubleParameter("minHoleSize", "Minimum hole size", 0, units,
                        "Minimum size of a hole to keep (smaller holes will be filled)")
                .addBooleanParameter("doSplit", "Split objects", false,
                        "Split multi-part regions into separate objects")
                .addBooleanParameter("clearExisting", "Delete existing objects", false,
                        "Delete any existing objects within the selected object before adding new objects (or entire image if no object is selected)")
                .addBooleanParameter("includeIgnored", "Create objects for ignored classes", includeIgnored,
                        "Create objects for classifications that are usually ignored (e.g. \"Ignore*\", \"Region*\")")
                .addBooleanParameter("selectNew", "Set new objects to selected", false,
                        "Set the newly-created objects to be selected");
    }
    if (!Dialogs.showParameterDialog("Create objects", params))
        return false;
    boolean createDetections = params.getChoiceParameterValue("objectType").equals("Detection");
    boolean doSplit = params.getBooleanParameterValue("doSplit");
    includeIgnored = params.getBooleanParameterValue("includeIgnored");
    double minSize = params.getDoubleParameterValue("minSize");
    double minHoleSize = params.getDoubleParameterValue("minHoleSize");
    boolean clearExisting = params.getBooleanParameterValue("clearExisting");
    boolean selectNew = params.getBooleanParameterValue("selectNew");
    lastCreateObjectParams = params;
    parentChoice.handleSelection(imageData);
    List<CreateObjectOptions> options = new ArrayList<>();
    if (doSplit)
        options.add(CreateObjectOptions.SPLIT);
    if (clearExisting)
        options.add(CreateObjectOptions.DELETE_EXISTING);
    if (includeIgnored)
        options.add(CreateObjectOptions.INCLUDE_IGNORED);
    else if (allIgnored) {
        Dialogs.showErrorMessage(title, "Cannot create objects - all class names have an asterisk to show they should be 'ignored'!");
        return false;
    }
    if (selectNew)
        options.add(CreateObjectOptions.SELECT_NEW);
    var optionsArray = options.toArray(CreateObjectOptions[]::new);
    String optionsString = "";
    if (!options.isEmpty())
        optionsString = ", " + options.stream().map(o -> "\"" + o.name() + "\"").collect(Collectors.joining(", "));
    try {
        if (createDetections) {
            if (PixelClassifierTools.createDetectionsFromPixelClassifier(imageData, classifier, minSize, minHoleSize, optionsArray)) {
                if (classifierName != null) {
                    imageData.getHistoryWorkflow().addStep(new DefaultScriptableWorkflowStep("Pixel classifier create detections", String.format("createDetectionsFromPixelClassifier(\"%s\", %s, %s)", classifierName, minSize, minHoleSize + optionsString)));
                }
                return true;
            }
        } else {
            if (PixelClassifierTools.createAnnotationsFromPixelClassifier(imageData, classifier, minSize, minHoleSize, optionsArray)) {
                if (classifierName != null) {
                    imageData.getHistoryWorkflow().addStep(new DefaultScriptableWorkflowStep("Pixel classifier create annotations", String.format("createAnnotationsFromPixelClassifier(\"%s\", %s, %s)", classifierName, minSize, minHoleSize + optionsString)));
                }
                return true;
            }
        }
    } catch (IOException e) {
        Dialogs.showErrorMessage(title, e);
    }
    return false;
}
Also used: Button (javafx.scene.control.Button), CreateObjectOptions (qupath.opencv.ml.pixel.PixelClassifierTools.CreateObjectOptions), Arrays (java.util.Arrays), ImageServer (qupath.lib.images.servers.ImageServer), PathTileObject (qupath.lib.objects.PathTileObject), BooleanBinding (javafx.beans.binding.BooleanBinding), CheckMenuItem (javafx.scene.control.CheckMenuItem), LoggerFactory (org.slf4j.LoggerFactory), Side (javafx.geometry.Side), ImageWriter (qupath.lib.images.writers.ImageWriter), ParameterList (qupath.lib.plugins.parameters.ParameterList), ComboBox (javafx.scene.control.ComboBox), ContextMenu (javafx.scene.control.ContextMenu), Map (java.util.Map), PixelClassifierTools (qupath.opencv.ml.pixel.PixelClassifierTools), Pane (javafx.scene.layout.Pane), MenuItem (javafx.scene.control.MenuItem), BufferedImage (java.awt.image.BufferedImage), Collectors (java.util.stream.Collectors), PathAnnotationObject (qupath.lib.objects.PathAnnotationObject), PathDetectionObject (qupath.lib.objects.PathDetectionObject), PathObject (qupath.lib.objects.PathObject), Objects (java.util.Objects), List (java.util.List), BooleanProperty (javafx.beans.property.BooleanProperty), Project (qupath.lib.projects.Project), DefaultScriptableWorkflowStep (qupath.lib.plugins.workflow.DefaultScriptableWorkflowStep), GuiTools (qupath.lib.gui.tools.GuiTools), RegionFilter (qupath.lib.gui.viewer.RegionFilter), BorderPane (javafx.scene.layout.BorderPane), StringProperty (javafx.beans.property.StringProperty), PathCellObject (qupath.lib.objects.PathCellObject), SaveResourcePaneBuilder (qupath.process.gui.commands.ui.SaveResourcePaneBuilder), ObjectExpression (javafx.beans.binding.ObjectExpression), PathClassTools (qupath.lib.objects.classes.PathClassTools), PathObjectHierarchy (qupath.lib.objects.hierarchy.PathObjectHierarchy), Bindings (javafx.beans.binding.Bindings), ArrayList (java.util.ArrayList), StandardRegionFilters (qupath.lib.gui.viewer.RegionFilter.StandardRegionFilters), LinkedHashMap (java.util.LinkedHashMap), Dialogs (qupath.lib.gui.dialogs.Dialogs), Tooltip (javafx.scene.control.Tooltip), ImageData (qupath.lib.images.ImageData), Logger (org.slf4j.Logger), StringExpression (javafx.beans.binding.StringExpression), ImageWriterTools (qupath.lib.images.writers.ImageWriterTools), Commands (qupath.lib.gui.commands.Commands), IOException (java.io.IOException), TMACoreObject (qupath.lib.objects.TMACoreObject), OverlayOptions (qupath.lib.gui.viewer.OverlayOptions), SimpleBooleanProperty (javafx.beans.property.SimpleBooleanProperty), PixelClassifier (qupath.lib.classifiers.pixel.PixelClassifier), BooleanParameter (qupath.lib.plugins.parameters.BooleanParameter), Collections (java.util.Collections), PaneTools (qupath.lib.gui.tools.PaneTools)
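
A minimal non-interactive sketch (not part of the QuPath source above) of the same object-creation step: it calls the createAnnotationsFromPixelClassifier method used by the command directly, skipping the dialog. The threshold values and the choice of options are illustrative.

import java.awt.image.BufferedImage;
import java.io.IOException;

import qupath.lib.classifiers.pixel.PixelClassifier;
import qupath.lib.images.ImageData;
import qupath.opencv.ml.pixel.PixelClassifierTools;
import qupath.opencv.ml.pixel.PixelClassifierTools.CreateObjectOptions;

class CreateAnnotationsSketch {

    // Create annotations from a pixel classifier without showing a parameter dialog
    static boolean createAnnotations(ImageData<BufferedImage> imageData, PixelClassifier classifier) throws IOException {
        double minSize = 100;     // illustrative: minimum region size to keep
        double minHoleSize = 25;  // illustrative: minimum hole size to keep
        var options = new CreateObjectOptions[] { CreateObjectOptions.SPLIT, CreateObjectOptions.DELETE_EXISTING };
        return PixelClassifierTools.createAnnotationsFromPixelClassifier(imageData, classifier, minSize, minHoleSize, options);
    }
}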

Example 8 with ImageServer

Use of qupath.lib.images.servers.ImageServer in project qupath by qupath.

The class ObservableMeasurementTableData, method updateMeasurementList().

/**
 * Update the entire measurement list for the current objects.
 * @see #setImageData(ImageData, Collection)
 */
public synchronized void updateMeasurementList() {
    // PathPrefs.setAllredMinPercentagePositive(0);
    builderMap.clear();
    // Add the image name
    if (!PathPrefs.maskImageNamesProperty().get())
        builderMap.put("Image", new ImageNameMeasurementBuilder(imageData));
    // Check if we have any annotations / TMA cores
    boolean containsDetections = false;
    boolean containsAnnotations = false;
    // boolean containsParentAnnotations = false;
    boolean containsTMACores = false;
    boolean containsRoot = false;
    List<PathObject> pathObjectListCopy = new ArrayList<>(list);
    for (PathObject temp : pathObjectListCopy) {
        if (temp instanceof PathAnnotationObject) {
            // if (temp.hasChildren())
            // containsParentAnnotations = true;
            containsAnnotations = true;
        } else if (temp instanceof TMACoreObject) {
            containsTMACores = true;
        } else if (temp instanceof PathDetectionObject) {
            containsDetections = true;
        } else if (temp.isRootObject())
            containsRoot = true;
    }
    boolean detectionsAnywhere = imageData == null ? containsDetections : !imageData.getHierarchy().getDetectionObjects().isEmpty();
    // Include the object displayed name
    // if (containsDetections || containsAnnotations || containsTMACores)
    builderMap.put("Name", new ObjectNameMeasurementBuilder());
    // Include the class
    if (containsAnnotations || containsDetections) {
        builderMap.put("Class", new PathClassMeasurementBuilder());
        // Get the name of the containing TMA core if we have anything other than cores
        if (imageData != null && imageData.getHierarchy().getTMAGrid() != null) {
            builderMap.put("TMA core", new TMACoreNameMeasurementBuilder());
        }
        // Get the name of the first parent object
        builderMap.put("Parent", new ParentNameMeasurementBuilder());
    }
    // Include the TMA missing status, if appropriate
    if (containsTMACores) {
        builderMap.put("Missing", new MissingTMACoreMeasurementBuilder());
    }
    if (containsAnnotations || containsDetections) {
        builderMap.put("ROI", new ROINameMeasurementBuilder());
    }
    // Add centroids
    if (containsAnnotations || containsDetections || containsTMACores) {
        // ROICentroidMeasurementBuilder builder = new ROICentroidMeasurementBuilder(imageData, CentroidType.X);
        // builderMap.put("Centroid X", builder);
        // builder = new ROICentroidMeasurementBuilder(imageData, CentroidType.Y);
        // builderMap.put("Centroid Y", builder);
        ROICentroidMeasurementBuilder builder = new ROICentroidMeasurementBuilder(imageData, CentroidType.X);
        builderMap.put(builder.getName(), builder);
        builder = new ROICentroidMeasurementBuilder(imageData, CentroidType.Y);
        builderMap.put(builder.getName(), builder);
    }
    // If we have metadata, store it
    Set<String> metadataNames = new LinkedHashSet<>();
    metadataNames.addAll(builderMap.keySet());
    for (PathObject pathObject : pathObjectListCopy) {
        if (pathObject instanceof MetadataStore) {
            metadataNames.addAll(((MetadataStore) pathObject).getMetadataKeys());
        }
    }
    // Ensure we have suitable builders
    for (String name : metadataNames) {
        if (!builderMap.containsKey(name))
            builderMap.put(name, new StringMetadataMeasurementBuilder(name));
    }
    // Get all the 'built-in' feature measurements, stored in the measurement list
    Collection<String> features = PathClassifierTools.getAvailableFeatures(pathObjectListCopy);
    // Add derived measurements if we don't have only detections
    if (containsAnnotations || containsTMACores || containsRoot) {
        if (detectionsAnywhere) {
            var builder = new ObjectTypeCountMeasurementBuilder(PathDetectionObject.class);
            builderMap.put(builder.getName(), builder);
            features.add(builder.getName());
        }
        // Here, we allow TMA cores to act like annotations
        manager = new DerivedMeasurementManager(getImageData(), containsAnnotations || containsTMACores);
        for (MeasurementBuilder<?> builder2 : manager.getMeasurementBuilders()) {
            builderMap.put(builder2.getName(), builder2);
            features.add(builder2.getName());
        }
    }
    // If we have an annotation, add shape features
    if (containsAnnotations) {
        boolean anyPoints = false;
        boolean anyAreas = false;
        boolean anyLines = false;
        @SuppressWarnings("unused") boolean anyPolygons = false;
        for (PathObject pathObject : pathObjectListCopy) {
            if (!pathObject.isAnnotation())
                continue;
            ROI roi = pathObject.getROI();
            if (roi == null)
                continue;
            if (roi.isPoint())
                anyPoints = true;
            if (roi.isArea())
                anyAreas = true;
            if (roi.isLine())
                anyLines = true;
            if (pathObject.getROI() instanceof PolygonROI)
                anyPolygons = true;
        }
        // Add point count, if needed
        if (anyPoints) {
            MeasurementBuilder<?> builder = new NumPointsMeasurementBuilder();
            builderMap.put(builder.getName(), builder);
            features.add(builder.getName());
        }
        // Add spatial measurements, if needed
        if (anyAreas) {
            MeasurementBuilder<?> builder = new AreaMeasurementBuilder(imageData);
            builderMap.put(builder.getName(), builder);
            features.add(builder.getName());
            builder = new PerimeterMeasurementBuilder(imageData);
            builderMap.put(builder.getName(), builder);
            features.add(builder.getName());
        }
        if (anyLines) {
            MeasurementBuilder<?> builder = new LineLengthMeasurementBuilder(imageData);
            builderMap.put(builder.getName(), builder);
            features.add(builder.getName());
        }
    // if (anyPolygons) {
    // MeasurementBuilder<?> builder = new MaxDiameterMeasurementBuilder(imageData);
    // builderMap.put(builder.getName(), builder);
    // features.add(builder.getName());
    // 
    // builder = new MinDiameterMeasurementBuilder(imageData);
    // builderMap.put(builder.getName(), builder);
    // features.add(builder.getName());
    // }
    }
    if (containsAnnotations || containsTMACores || containsRoot) {
        var pixelClassifier = getPixelLayer(imageData);
        if (pixelClassifier instanceof ImageServer<?>) {
            ImageServer<BufferedImage> server = (ImageServer<BufferedImage>) pixelClassifier;
            if (server.getMetadata().getChannelType() == ImageServerMetadata.ChannelType.CLASSIFICATION || server.getMetadata().getChannelType() == ImageServerMetadata.ChannelType.PROBABILITY) {
                var pixelManager = new PixelClassificationMeasurementManager(server);
                for (String name : pixelManager.getMeasurementNames()) {
                    // String nameLive = name + " (live)";
                    String nameLive = "(Live) " + name;
                    builderMap.put(nameLive, new PixelClassifierMeasurementBuilder(pixelManager, name));
                    features.add(nameLive);
                }
            }
        }
    }
    // Update all the lists, if necessary
    boolean changes = false;
    if (metadataNames.size() != metadataList.size() || !metadataNames.containsAll(metadataList)) {
        changes = metadataList.setAll(metadataNames);
    }
    if (features.size() != measurementList.size() || !features.containsAll(measurementList))
        changes = measurementList.setAll(features);
    if (changes) {
        if (metadataList.isEmpty())
            fullList.setAll(measurementList);
        else {
            fullList.setAll(metadataList);
            fullList.addAll(measurementList);
        }
    }
}
Also used: LinkedHashSet (java.util.LinkedHashSet), ArrayList (java.util.ArrayList), BufferedImage (java.awt.image.BufferedImage), PolygonROI (qupath.lib.roi.PolygonROI), PathAnnotationObject (qupath.lib.objects.PathAnnotationObject), ImageServer (qupath.lib.images.servers.ImageServer), PathDetectionObject (qupath.lib.objects.PathDetectionObject), TMACoreObject (qupath.lib.objects.TMACoreObject), ROI (qupath.lib.roi.interfaces.ROI), MetadataStore (qupath.lib.objects.MetadataStore), PathObject (qupath.lib.objects.PathObject), PixelClassificationMeasurementManager (qupath.opencv.ml.pixel.PixelClassificationMeasurementManager)
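
As a usage sketch (not part of the snippet above): the table model is normally populated via setImageData, the @see reference in the javadoc, after which the computed columns can be read back. The package and the accessor names getAllNames and getStringValue below are assumed from typical QuPath usage rather than taken from this snippet.

import java.awt.image.BufferedImage;

import qupath.lib.gui.measure.ObservableMeasurementTableData; // package assumed
import qupath.lib.images.ImageData;
import qupath.lib.objects.PathObject;

class MeasurementTableSketch {

    // Print every computed measurement column for each annotation
    static void printAnnotationMeasurements(ImageData<BufferedImage> imageData) {
        var model = new ObservableMeasurementTableData();
        var annotations = imageData.getHierarchy().getAnnotationObjects();
        // setImageData(...) stores the objects and refreshes the measurement list
        model.setImageData(imageData, annotations);
        for (String column : model.getAllNames()) {                    // accessor name assumed
            for (PathObject annotation : annotations) {
                System.out.println(column + "\t" + model.getStringValue(annotation, column)); // accessor name assumed
            }
        }
    }
}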

Example 9 with ImageServer

Use of qupath.lib.images.servers.ImageServer in project qupath by qupath.

The class ScriptCommand, method run().

@Override
public void run() {
    try {
        if (projectPath != null && !projectPath.toLowerCase().endsWith(ProjectIO.getProjectExtension()))
            throw new IOException("Project file must end with '.qpproj'");
        if (scriptCommand == null) {
            if (scriptFile == null || scriptFile.equals("") || !scriptFile.endsWith(".groovy"))
                throw new IOException("File must be a valid script file (.groovy): " + scriptFile);
        } else if (scriptFile != null) {
            throw new IllegalArgumentException("Either a script file or a script command may be provided, but not both!");
        }
        // Ensure we have a tile cache set
        createTileCache();
        // Set classloader to include any available extensions
        var extensionClassLoader = new ExtensionClassLoader();
        extensionClassLoader.refresh();
        ImageServerProvider.setServiceLoader(ServiceLoader.load(ImageServerBuilder.class, extensionClassLoader));
        Thread.currentThread().setContextClassLoader(extensionClassLoader);
        // Unfortunately necessary to force initialization (including GsonTools registration of some classes)
        QP.getCoreClasses();
        ImageData<BufferedImage> imageData;
        if (projectPath != null && !projectPath.equals("")) {
            String path = QuPath.getEncodedPath(projectPath);
            Project<BufferedImage> project = ProjectIO.loadProject(new File(path), BufferedImage.class);
            for (var entry : project.getImageList()) {
                if (imagePath != null && !imagePath.equals("") && !imagePath.equals(entry.getImageName()))
                    continue;
                logger.info("Running script for {}", entry.getImageName());
                imageData = entry.readImageData();
                try {
                    Object result = runScript(project, imageData);
                    if (result != null)
                        logger.info("Script result: {}", result);
                    if (save)
                        entry.saveImageData(imageData);
                } catch (Exception e) {
                    logger.error("Error running script for image: " + entry.getImageName(), e);
                    // Otherwise, try to recover and continue processing images
                    if (imagePath != null && imagePath.equals(entry.getImageName()))
                        throw new RuntimeException(e);
                } finally {
                    imageData.getServer().close();
                }
            }
        } else if (imagePath != null && !imagePath.equals("")) {
            String path = QuPath.getEncodedPath(imagePath);
            URI uri = GeneralTools.toURI(path);
            ImageServer<BufferedImage> server = ImageServers.buildServer(uri, parseArgs(serverArgs));
            imageData = new ImageData<>(server);
            Object result = runScript(null, imageData);
            if (result != null)
                logger.info("Script result: {}", result);
            server.close();
        } else {
            Object result = runScript(null, null);
            if (result != null)
                logger.info("Script result: {}", result);
        }
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new RuntimeException(e);
    }
}
Also used: ImageServer (qupath.lib.images.servers.ImageServer), ImageServerBuilder (qupath.lib.images.servers.ImageServerBuilder), ExtensionClassLoader (qupath.lib.gui.ExtensionClassLoader), IOException (java.io.IOException), URI (java.net.URI), BufferedImage (java.awt.image.BufferedImage), ScriptException (javax.script.ScriptException), ImageData (qupath.lib.images.ImageData), File (java.io.File)
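
The same calls can be used headlessly outside the command. A minimal sketch (not part of the QuPath source above; the image path is a placeholder): build a server from a URI, wrap it in ImageData, and make sure the server is closed when done, as the command does explicitly.

import java.awt.image.BufferedImage;
import java.net.URI;

import qupath.lib.common.GeneralTools;
import qupath.lib.images.ImageData;
import qupath.lib.images.servers.ImageServer;
import qupath.lib.images.servers.ImageServers;

class HeadlessImageSketch {

    static void process(String imagePath) throws Exception {
        URI uri = GeneralTools.toURI(imagePath);
        // try-with-resources closes the server, mirroring server.close() in the command above
        try (ImageServer<BufferedImage> server = ImageServers.buildServer(uri)) {
            ImageData<BufferedImage> imageData = new ImageData<>(server);
            // ... run the script or analysis against imageData ...
        }
    }
}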

Example 10 with ImageServer

Use of qupath.lib.images.servers.ImageServer in project qupath by qupath.

The class TestPixelClassifierTools, method checkCreateObjects().

private void checkCreateObjects(ImageServer<BufferedImage> server, int[] hist, Map<PathClass, Integer> classificationLabelsReverse) throws IOException {
    var hierarchy = new PathObjectHierarchy();
    boolean success = PixelClassifierTools.createObjectsFromPredictions(server, hierarchy, Collections.singleton(hierarchy.getRootObject()), r -> PathObjects.createAnnotationObject(r), 0, 0);
    assertTrue(success);
    // Recall that we have an object for zero as well
    var annotations = new ArrayList<>(hierarchy.getAnnotationObjects());
    assertEquals(hist.length, annotations.size());
    // Check areas for all our annotations
    Collections.sort(annotations, Comparator.comparingInt(a -> classificationLabelsReverse.get(a.getPathClass())));
    for (var annotation : annotations) {
        int label = classificationLabelsReverse.get(annotation.getPathClass());
        var roi = annotation.getROI();
        double area = roi.getArea();
        if (printAreas)
            logger.debug(hist[label] + ": \t" + area);
        assertEquals(hist[label], area);
        var geom = roi.getGeometry();
        if (alwaysCheckValidity || geom.getNumPoints() < MAX_POINTS_FOR_VALIDITY) {
            var error = new IsValidOp(geom).getValidationError();
            if (error != null)
                logger.warn("{}", error);
            assertNull(error);
        }
        assertEquals(hist[label], geom.getArea());
    }
}
Also used: CreateObjectOptions (qupath.opencv.ml.pixel.PixelClassifierTools.CreateObjectOptions), RoiLabeling (qupath.imagej.processing.RoiLabeling), Arrays (java.util.Arrays), ImageServer (qupath.lib.images.servers.ImageServer), ByteProcessor (ij.process.ByteProcessor), LoggerFactory (org.slf4j.LoggerFactory), HashMap (java.util.HashMap), PathClassFactory (qupath.lib.objects.classes.PathClassFactory), PathObjectHierarchy (qupath.lib.objects.hierarchy.PathObjectHierarchy), DataBufferByte (java.awt.image.DataBufferByte), ArrayList (java.util.ArrayList), LinkedHashMap (java.util.LinkedHashMap), ImageStatistics (ij.process.ImageStatistics), ChannelType (qupath.lib.images.servers.ImageServerMetadata.ChannelType), Map (java.util.Map), ImageIO (javax.imageio.ImageIO), ImageServers (qupath.lib.images.servers.ImageServers), Path (java.nio.file.Path), MethodSource (org.junit.jupiter.params.provider.MethodSource), IsValidOp (org.locationtech.jts.operation.valid.IsValidOp), Logger (org.slf4j.Logger), BufferedImage (java.awt.image.BufferedImage), PathObjects (qupath.lib.objects.PathObjects), PathClass (qupath.lib.objects.classes.PathClass), IOException (java.io.IOException), UUID (java.util.UUID), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest), Assertions (org.junit.jupiter.api.Assertions), WrappedBufferedImageServer (qupath.lib.images.servers.WrappedBufferedImageServer), Comparator (java.util.Comparator), Collections (java.util.Collections), ImageServerMetadata (qupath.lib.images.servers.ImageServerMetadata)
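
For comparison, a sketch (not taken from the test above) of the same prediction-to-objects call configured to create detections rather than annotations. The minimum-size values are illustrative, and PathObjects.createDetectionObject is assumed as the detection counterpart of createAnnotationObject.

import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.Collections;

import qupath.lib.images.servers.ImageServer;
import qupath.lib.objects.PathObjects;
import qupath.lib.objects.hierarchy.PathObjectHierarchy;
import qupath.opencv.ml.pixel.PixelClassifierTools;

class CreateDetectionsSketch {

    // Build detection objects from a classification/probability prediction server
    static boolean createDetections(ImageServer<BufferedImage> server) throws IOException {
        var hierarchy = new PathObjectHierarchy();
        return PixelClassifierTools.createObjectsFromPredictions(
                server, hierarchy, Collections.singleton(hierarchy.getRootObject()),
                r -> PathObjects.createDetectionObject(r),  // assumed detection counterpart to createAnnotationObject
                10.0,   // illustrative minimum object area
                10.0);  // illustrative minimum hole area
    }
}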

Aggregations

BufferedImage (java.awt.image.BufferedImage): 17
ImageServer (qupath.lib.images.servers.ImageServer): 17
IOException (java.io.IOException): 15
Logger (org.slf4j.Logger): 14
LoggerFactory (org.slf4j.LoggerFactory): 14
ArrayList (java.util.ArrayList): 13
PathObject (qupath.lib.objects.PathObject): 13
Collectors (java.util.stream.Collectors): 12
Collections (java.util.Collections): 11
ROI (qupath.lib.roi.interfaces.ROI): 11
Collection (java.util.Collection): 10
List (java.util.List): 10
Map (java.util.Map): 10
ImageData (qupath.lib.images.ImageData): 10
PathObjectTools (qupath.lib.objects.PathObjectTools): 10
RegionRequest (qupath.lib.regions.RegionRequest): 10
Arrays (java.util.Arrays): 9
LinkedHashMap (java.util.LinkedHashMap): 9
PathObjectHierarchy (qupath.lib.objects.hierarchy.PathObjectHierarchy): 9
HashSet (java.util.HashSet): 8