
Example 16 with ParameterList

Use of qupath.lib.plugins.parameters.ParameterList in project qupath by qupath.

The class SubcellularDetection, method getDefaultParameterList.

@Override
public ParameterList getDefaultParameterList(final ImageData<BufferedImage> imageData) {
    ParameterList params = new ParameterList().addTitleParameter("Detection parameters");
    for (String name : new ImageWrapper(imageData).getChannelNames(true, true)) {
        params.addDoubleParameter("detection[" + name + "]", "Detection threshold (" + name + ")", -1.0, "", "Intensity threshold for detection - if < 0, no detection will be applied to this channel");
    }
    params.addBooleanParameter("doSmoothing", "Smooth before detection", false, "Apply 3x3 smoothing filter to reduce noise prior to detection");
    params.addBooleanParameter("splitByIntensity", "Split by intensity", false, "Attempt to split merged spots based on intensity peaks");
    params.addBooleanParameter("splitByShape", "Split by shape", false, "Attempt to split merged spots according to shape (i.e. looking for rounder spots)");
    params.addTitleParameter("Spot & cluster parameters");
    boolean hasMicrons = imageData.getServer().getPixelCalibration().hasPixelSizeMicrons();
    if (!hasMicrons) {
        params.addEmptyParameter("Subcellular detection works best if the pixel size information is available in " + GeneralTools.micrometerSymbol() + "!");
        params.addEmptyParameter("Because this information is missing, the following values are in pixels.");
        params.addEmptyParameter("If you change the pixel sizes in the image, restart this command to see the changes.");
    }
    params.addDoubleParameter("spotSizeMicrons", "Expected spot size", 1, GeneralTools.micrometerSymbol() + "^2", "Estimated area of a single spot - used to estimate total spot counts");
    params.addDoubleParameter("minSpotSizeMicrons", "Min spot size", 0.5, GeneralTools.micrometerSymbol() + "^2", "Minimum spot area - smaller spots will be excluded");
    params.addDoubleParameter("maxSpotSizeMicrons", "Max spot size", 2.0, GeneralTools.micrometerSymbol() + "^2", "Maximum spot area - larger spots will be counted as clusters");
    params.addDoubleParameter("spotSizePixels", "Expected spot size", 1, "px^2", "Estimated area of a single spot - used to estimate total spot counts");
    params.addDoubleParameter("minSpotSizePixels", "Min spot size", 1, "px^2", "Minimum spot area - smaller spots will be excluded");
    params.addDoubleParameter("maxSpotSizePixels", "Max spot size", 4.0, "px^2", "Maximum spot area - larger spots will be counted as clusters");
    params.addBooleanParameter("includeClusters", "Include clusters", true, "Store anything larger than 'Max spot size' as a cluster, instead of ignoring it");
    params.setHiddenParameters(!hasMicrons, "spotSizeMicrons", "minSpotSizeMicrons", "maxSpotSizeMicrons");
    params.setHiddenParameters(hasMicrons, "spotSizePixels", "minSpotSizePixels", "maxSpotSizePixels");
    return params;
}
Also used : ParameterList(qupath.lib.plugins.parameters.ParameterList)
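For orientation, here is a minimal, self-contained sketch of the pattern shown above, using only the builder and getter methods that appear in these examples. The channel names and the standalone main method are hypothetical and not part of the QuPath source.

import java.util.List;

import qupath.lib.plugins.parameters.ParameterList;

public class ParameterListSketch {

    public static void main(String[] args) {
        // Hypothetical channel names standing in for ImageWrapper.getChannelNames(true, true)
        List<String> channelNames = List.of("DAPI", "FITC");

        // Build the list the same way getDefaultParameterList() does above
        ParameterList params = new ParameterList().addTitleParameter("Detection parameters");
        for (String name : channelNames) {
            params.addDoubleParameter("detection[" + name + "]", "Detection threshold (" + name + ")", -1.0, "",
                    "Intensity threshold for detection - if < 0, no detection will be applied to this channel");
        }
        params.addBooleanParameter("doSmoothing", "Smooth before detection", false,
                "Apply 3x3 smoothing filter to reduce noise prior to detection");

        // Values are later read back by key, as the detection runnable does
        double threshold = params.getDoubleParameterValue("detection[DAPI]");
        boolean smooth = params.getBooleanParameterValue("doSmoothing");
        System.out.println("Threshold: " + threshold + ", smoothing: " + smooth);
    }
}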

Example 17 with ParameterList

Use of qupath.lib.plugins.parameters.ParameterList in project qupath by qupath.

The class SubcellularDetection, method addRunnableTasks.

@Override
protected void addRunnableTasks(final ImageData<BufferedImage> imageData, final PathObject parentObject, List<Runnable> tasks) {
    final ParameterList params = getParameterList(imageData);
    tasks.add(new SubcellularDetectionRunnable(imageData, parentObject, params));
}
Also used : ParameterList(qupath.lib.plugins.parameters.ParameterList)
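The SubcellularDetectionRunnable itself is not shown in this example. A hypothetical runnable following the same pattern, where the task captures the resolved ParameterList and reads the user's values when it executes, might look like this (the class name is an assumption; the parameter keys are taken from Example 16):

import qupath.lib.plugins.parameters.ParameterList;

class MyDetectionRunnable implements Runnable {

    private final ParameterList params;

    MyDetectionRunnable(ParameterList params) {
        this.params = params;
    }

    @Override
    public void run() {
        // Read the settings chosen by the user (keys as defined in getDefaultParameterList)
        boolean doSmoothing = params.getBooleanParameterValue("doSmoothing");
        double spotSize = params.getDoubleParameterValue("spotSizePixels");
        // ...perform the actual detection using these settings...
        System.out.println("Smoothing: " + doSmoothing + ", expected spot size: " + spotSize + " px^2");
    }
}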

Example 18 with ParameterList

Use of qupath.lib.plugins.parameters.ParameterList in project qupath by qupath.

The class ExtractRegionCommand, method run.

@Override
public void run() {
    QuPathViewer viewer = qupath.getViewer();
    ImageServer<BufferedImage> server = null;
    if (viewer != null)
        server = viewer.getServer();
    if (server == null)
        return;
    List<String> unitOptions = new ArrayList<>();
    unitOptions.add(PIXELS_UNIT);
    String unit = server.getPixelCalibration().getPixelWidthUnit();
    if (unit.equals(server.getPixelCalibration().getPixelHeightUnit()) && !unit.equals(PixelCalibration.PIXEL))
        unitOptions.add(unit);
    if (!unitOptions.contains(resolutionUnit))
        resolutionUnit = PIXELS_UNIT;
    ParameterList params = new ParameterList()
            .addDoubleParameter("resolution", "Resolution", resolution, null, "Resolution at which the image will be exported, defined as the 'pixel size' in Resolution units")
            .addChoiceParameter("resolutionUnit", "Resolution unit", resolutionUnit, unitOptions, "Units defining the export resolution; if 'pixels' then the resolution is the same as a downsample value")
            .addBooleanParameter("includeROI", "Include ROI", includeROI, "Include the primary object defining the exported region as an active ROI in ImageJ")
            .addBooleanParameter("includeOverlay", "Include overlay", includeOverlay, "Include any objects overlapping the exported region as ROIs on an ImageJ overlay")
            .addBooleanParameter("doTransforms", "Apply color transforms", doTransforms, "Optionally apply any color transforms when sending the pixels to ImageJ")
            .addBooleanParameter("doZ", "All z-slices", doZ, "Optionally include all slices of a z-stack")
            .addBooleanParameter("doT", "All timepoints", doT, "Optionally include all timepoints of a time series");
    // params.setHiddenParameters(unitOptions.size() <= 1, "resolutionUnit");
    params.setHiddenParameters(server.nZSlices() == 1, "doZ");
    params.setHiddenParameters(server.nTimepoints() == 1, "doT");
    if (!Dialogs.showParameterDialog("Send region to ImageJ", params))
        return;
    // Parse values
    resolution = params.getDoubleParameterValue("resolution");
    resolutionUnit = (String) params.getChoiceParameterValue("resolutionUnit");
    includeROI = params.getBooleanParameterValue("includeROI");
    includeOverlay = params.getBooleanParameterValue("includeOverlay");
    doTransforms = params.getBooleanParameterValue("doTransforms");
    doZ = params.getBooleanParameterValue("doZ");
    doT = params.getBooleanParameterValue("doT");
    // Calculate downsample
    double downsample = resolution;
    if (!resolutionUnit.equals(PIXELS_UNIT))
        downsample = resolution / (server.getPixelCalibration().getPixelHeight().doubleValue() / 2.0 + server.getPixelCalibration().getPixelWidth().doubleValue() / 2.0);
    // Color transforms are (currently) only applied for brightfield images - for fluorescence we always provide everything as unchanged as possible
    List<ChannelDisplayInfo> selectedChannels = new ArrayList<>(viewer.getImageDisplay().selectedChannels());
    List<ChannelDisplayInfo> channels = doTransforms && !selectedChannels.isEmpty() ? selectedChannels : null;
    if (channels != null)
        server = ChannelDisplayTransformServer.createColorTransformServer(server, channels);
    // Loop through all selected objects
    Collection<PathObject> pathObjects = viewer.getHierarchy().getSelectionModel().getSelectedObjects();
    if (pathObjects.isEmpty())
        pathObjects = Collections.singletonList(viewer.getHierarchy().getRootObject());
    List<ImagePlus> imps = new ArrayList<>();
    for (PathObject pathObject : pathObjects) {
        if (Thread.currentThread().isInterrupted() || IJ.escapePressed())
            return;
        int width, height;
        if (pathObject == null || !pathObject.hasROI()) {
            width = server.getWidth();
            height = server.getHeight();
        } else {
            Rectangle bounds = AwtTools.getBounds(pathObject.getROI());
            width = bounds.width;
            height = bounds.height;
        }
        RegionRequest region;
        ROI roi = pathObject == null ? null : pathObject.getROI();
        if (roi == null || PathObjectTools.hasPointROI(pathObject)) {
            region = RegionRequest.createInstance(server.getPath(), downsample, 0, 0, server.getWidth(), server.getHeight(), viewer.getZPosition(), viewer.getTPosition());
        } else
            region = RegionRequest.createInstance(server.getPath(), downsample, roi);
        // region = RegionRequest.createInstance(server.getPath(), downsample, pathObject.getROI(), viewer.getZPosition(), viewer.getTPosition());
        // Minimum size has been removed (v0.2.0-m4); returned regions should be at least 1x1 pixels
        // if (region.getWidth() / downsample < 8 || region.getHeight() / downsample < 8) {
        // DisplayHelpers.showErrorMessage("Send region to ImageJ", "The width & height of the extracted image must both be >= 8 pixels");
        // continue;
        // }
        // Calculate required z-slices and time-points
        int zStart = doZ ? 0 : region.getZ();
        int zEnd = doZ ? server.nZSlices() : region.getZ() + 1;
        int tStart = doT ? 0 : region.getT();
        int tEnd = doT ? server.nTimepoints() : region.getT() + 1;
        long nZ = zEnd - zStart;
        long nT = tEnd - tStart;
        int bytesPerPixel = server.isRGB() ? 4 : server.getPixelType().getBytesPerPixel() * server.nChannels();
        double memory = ((long) width * height * nZ * nT * bytesPerPixel) / (downsample * downsample);
        // TODO: Perform calculation based on actual amount of available memory
        long availableMemory = GeneralTools.estimateAvailableMemory();
        if (memory >= availableMemory * 0.95) {
            logger.error("Cannot extract region {} - estimated size is too large (approx. {} MB)", pathObject, GeneralTools.formatNumber(memory / (1024.0 * 1024.0), 2));
            Dialogs.showErrorMessage("Send region to ImageJ error", "Selected region is too large to extract - please selected a smaller region or use a higher downsample factor");
            continue;
        }
        if (memory / 1024 / 1024 > 100) {
            if (pathObjects.size() == 1 && !Dialogs.showYesNoDialog("Send region to ImageJ", String.format("Attempting to extract this region is likely to require > %.2f MB - are you sure you want to continue?", memory / 1024 / 1024)))
                return;
        }
        // We should switch to the event dispatch thread when interacting with ImageJ
        try {
            ImagePlus imp;
            PathObjectHierarchy hierarchy = viewer.getHierarchy();
            OverlayOptions options = viewer.getOverlayOptions();
            if (zEnd - zStart > 1 || tEnd - tStart > 1) {
                // TODO: Handle overlays
                imp = IJTools.extractHyperstack(server, region, zStart, zEnd, tStart, tEnd);
                if (includeROI && roi != null) {
                    Roi roiIJ = IJTools.convertToIJRoi(roi, imp.getCalibration(), region.getDownsample());
                    imp.setRoi(roiIJ);
                }
                if (includeOverlay) {
                    Overlay overlay = new Overlay();
                    for (int t = tStart; t < tEnd; t++) {
                        for (int z = zStart; z < zEnd; z++) {
                            RegionRequest request2 = RegionRequest.createInstance(region.getPath(), region.getDownsample(), region.getX(), region.getY(), region.getWidth(), region.getHeight(), z, t);
                            var regionPredicate = PathObjectTools.createImageRegionPredicate(request2);
                            Overlay temp = IJExtension.extractOverlay(hierarchy, request2, options, p -> p != pathObject && regionPredicate.test(p));
                            if (overlay == null)
                                overlay = temp;
                            for (int i = 0; i < temp.size(); i++) {
                                Roi roiIJ = temp.get(i);
                                roiIJ.setPosition(-1, z + 1, t + 1);
                                overlay.add(roiIJ);
                            }
                        }
                    }
                    if (overlay != null && overlay.size() > 0)
                        imp.setOverlay(overlay);
                }
            } else if (includeOverlay)
                imp = IJExtension.extractROIWithOverlay(server, pathObject, hierarchy, region, includeROI, options).getImage();
            else
                imp = IJExtension.extractROIWithOverlay(server, pathObject, null, region, includeROI, options).getImage();
            // Set display ranges if we can
            if (viewer != null && imp instanceof CompositeImage) {
                var availableChannels = viewer.getImageDisplay().availableChannels().stream().filter(c -> c instanceof SingleChannelDisplayInfo).map(c -> (SingleChannelDisplayInfo) c).collect(Collectors.toList());
                CompositeImage impComp = (CompositeImage) imp;
                if (availableChannels.size() == imp.getNChannels()) {
                    for (int c = 0; c < availableChannels.size(); c++) {
                        var channel = availableChannels.get(c);
                        imp.setPosition(c + 1, 1, 1);
                        impComp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
                    }
                    imp.setPosition(1);
                }
            } else if (selectedChannels.size() == 1 && imp.getType() != ImagePlus.COLOR_RGB) {
                // Setting the display range for non-RGB images can give unexpected results (changing pixel values)
                var channel = selectedChannels.get(0);
                imp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
            }
            imps.add(imp);
        } catch (IOException e) {
            Dialogs.showErrorMessage("Send region to ImageJ", e);
            return;
        }
    }
    // Show all the images we've got
    if (!imps.isEmpty()) {
        SwingUtilities.invokeLater(() -> {
            boolean batchMode = Interpreter.batchMode;
            // Try to start an ImageJ instance, and return if this fails
            try {
                ImageJ ij = IJExtension.getImageJInstance();
                if (ij == null)
                    return;
                ij.setVisible(true);
                // Make sure we aren't in batch mode, so that image will display
                Interpreter.batchMode = false;
                for (ImagePlus imp : imps) {
                    imp.show();
                }
            } finally {
                Interpreter.batchMode = batchMode;
            }
        });
    }
}
Also used : CompositeImage(ij.CompositeImage) Rectangle(java.awt.Rectangle) ImageServer(qupath.lib.images.servers.ImageServer) IJTools(qupath.imagej.tools.IJTools) LoggerFactory(org.slf4j.LoggerFactory) PathObjectHierarchy(qupath.lib.objects.hierarchy.PathObjectHierarchy) ArrayList(java.util.ArrayList) Dialogs(qupath.lib.gui.dialogs.Dialogs) SwingUtilities(javax.swing.SwingUtilities) ParameterList(qupath.lib.plugins.parameters.ParameterList) Overlay(ij.gui.Overlay) QuPathGUI(qupath.lib.gui.QuPathGUI) Logger(org.slf4j.Logger) Interpreter(ij.macro.Interpreter) BufferedImage(java.awt.image.BufferedImage) GeneralTools(qupath.lib.common.GeneralTools) RegionRequest(qupath.lib.regions.RegionRequest) Collection(java.util.Collection) ChannelDisplayTransformServer(qupath.lib.gui.images.servers.ChannelDisplayTransformServer) AwtTools(qupath.lib.awt.common.AwtTools) ChannelDisplayInfo(qupath.lib.display.ChannelDisplayInfo) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) OverlayOptions(qupath.lib.gui.viewer.OverlayOptions) PathObjectTools(qupath.lib.objects.PathObjectTools) PathObject(qupath.lib.objects.PathObject) ImageJ(ij.ImageJ) QuPathViewer(qupath.lib.gui.viewer.QuPathViewer) ROI(qupath.lib.roi.interfaces.ROI) ImagePlus(ij.ImagePlus) List(java.util.List) IJ(ij.IJ) PixelCalibration(qupath.lib.images.servers.PixelCalibration) SingleChannelDisplayInfo(qupath.lib.display.SingleChannelDisplayInfo) Collections(java.util.Collections) Roi(ij.gui.Roi)
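The dialog-driven flow in this command reduces to a reusable pattern: build a ParameterList, hide options that do not apply, prompt with Dialogs.showParameterDialog, then read the chosen values back by key. A hedged, stand-alone sketch of just that pattern follows; the promptForResolution helper, its defaults, and its unit choices are hypothetical, not part of ExtractRegionCommand.

import java.util.List;

import qupath.lib.gui.dialogs.Dialogs;
import qupath.lib.plugins.parameters.ParameterList;

public class ExportDialogSketch {

    // Hypothetical helper: returns the chosen resolution, or -1 if the user cancelled
    public static double promptForResolution(boolean isZStack) {
        ParameterList params = new ParameterList()
                .addDoubleParameter("resolution", "Resolution", 1.0, null,
                        "Resolution at which the image will be exported")
                .addChoiceParameter("resolutionUnit", "Resolution unit", "Pixels",
                        List.of("Pixels", "µm"), "Units defining the export resolution")
                .addBooleanParameter("doZ", "All z-slices", false,
                        "Optionally include all slices of a z-stack");

        // Hide options that don't apply to the current image, as the command does for doZ/doT
        params.setHiddenParameters(!isZStack, "doZ");

        if (!Dialogs.showParameterDialog("Send region to ImageJ", params))
            return -1;

        // Read back the values the user chose
        double resolution = params.getDoubleParameterValue("resolution");
        String unit = (String) params.getChoiceParameterValue("resolutionUnit");
        boolean doZ = params.getBooleanParameterValue("doZ");
        System.out.println("Export at " + resolution + " " + unit + (doZ ? " (all z-slices)" : ""));
        return resolution;
    }
}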

Example 19 with ParameterList

Use of qupath.lib.plugins.parameters.ParameterList in project qupath by qupath.

The class PixelClassifierUI, method promptToCreateObjects.

/**
 * Prompt the user to create objects directly from the pixels of an {@link ImageServer}.
 * Often, the {@link ImageServer} has been created by applying a {@link PixelClassifier}.
 *
 * @param imageData the {@link ImageData} to which objects should be added
 * @param classifier the {@link PixelClassifier} used to generate objects
 * @param classifierName the name of the classifier; if not null and the command runs to completion, it will be logged in the history
 * 						 workflow of the {@link ImageData} for later scripting.
 * @return true if changes were made, false otherwise
 */
public static boolean promptToCreateObjects(ImageData<BufferedImage> imageData, PixelClassifier classifier, String classifierName) {
    Objects.requireNonNull(imageData);
    Objects.requireNonNull(classifier);
    // Check what is selected
    List<SelectionChoice> choices = buildChoiceList(imageData.getHierarchy(), SelectionChoice.FULL_IMAGE, SelectionChoice.CURRENT_SELECTION, SelectionChoice.ANNOTATIONS, SelectionChoice.TMA);
    SelectionChoice defaultChoice;
    if (choices.contains(SelectionChoice.CURRENT_SELECTION))
        defaultChoice = SelectionChoice.CURRENT_SELECTION;
    else if (choices.contains(SelectionChoice.ANNOTATIONS))
        defaultChoice = SelectionChoice.ANNOTATIONS;
    else
        defaultChoice = choices.get(0);
    var parentChoice = Dialogs.showChoiceDialog("Pixel classifier", "Choose parent objects", choices, defaultChoice);
    if (parentChoice == null)
        return false;
    var outputObjectTypes = Arrays.asList("Annotation", "Detection");
    // To avoid confusing the user unnecessarily, if we *only* have ignored classes then set default for includeIgnored to true
    var labels = classifier.getMetadata().getClassificationLabels();
    boolean allIgnored = !labels.isEmpty() && labels.values().stream().allMatch(p -> p == null || PathClassTools.isIgnoredClass(p));
    boolean includeIgnored = allIgnored;
    var cal = imageData.getServer().getPixelCalibration();
    var units = cal.unitsMatch2D() ? cal.getPixelWidthUnit() + "^2" : cal.getPixelWidthUnit() + "x" + cal.getPixelHeightUnit();
    ParameterList params;
    if (lastCreateObjectParams != null) {
        params = lastCreateObjectParams.duplicate();
        params.setHiddenParameters(false, params.getKeyValueParameters(true).keySet().toArray(String[]::new));
        ((BooleanParameter) params.getParameters().get("includeIgnored")).setValue(includeIgnored);
    } else {
        params = new ParameterList()
                .addChoiceParameter("objectType", "New object type", "Annotation", outputObjectTypes, "Define the type of objects that will be created")
                .addDoubleParameter("minSize", "Minimum object size", 0, units, "Minimum size of a region to keep (smaller regions will be dropped)")
                .addDoubleParameter("minHoleSize", "Minimum hole size", 0, units, "Minimum size of a hole to keep (smaller holes will be filled)")
                .addBooleanParameter("doSplit", "Split objects", false, "Split multi-part regions into separate objects")
                .addBooleanParameter("clearExisting", "Delete existing objects", false, "Delete any existing objects within the selected object before adding new objects (or entire image if no object is selected)")
                .addBooleanParameter("includeIgnored", "Create objects for ignored classes", includeIgnored, "Create objects for classifications that are usually ignored (e.g. \"Ignore*\", \"Region*\")")
                .addBooleanParameter("selectNew", "Set new objects to selected", false, "Set the newly-created objects to be selected");
    }
    if (!Dialogs.showParameterDialog("Create objects", params))
        return false;
    boolean createDetections = params.getChoiceParameterValue("objectType").equals("Detection");
    boolean doSplit = params.getBooleanParameterValue("doSplit");
    includeIgnored = params.getBooleanParameterValue("includeIgnored");
    double minSize = params.getDoubleParameterValue("minSize");
    double minHoleSize = params.getDoubleParameterValue("minHoleSize");
    boolean clearExisting = params.getBooleanParameterValue("clearExisting");
    boolean selectNew = params.getBooleanParameterValue("selectNew");
    lastCreateObjectParams = params;
    parentChoice.handleSelection(imageData);
    List<CreateObjectOptions> options = new ArrayList<>();
    if (doSplit)
        options.add(CreateObjectOptions.SPLIT);
    if (clearExisting)
        options.add(CreateObjectOptions.DELETE_EXISTING);
    if (includeIgnored)
        options.add(CreateObjectOptions.INCLUDE_IGNORED);
    else if (allIgnored) {
        Dialogs.showErrorMessage(title, "Cannot create objects - all class names have an asterisk to show they should be 'ignored'!");
        return false;
    }
    if (selectNew)
        options.add(CreateObjectOptions.SELECT_NEW);
    var optionsArray = options.toArray(CreateObjectOptions[]::new);
    String optionsString = "";
    if (!options.isEmpty())
        optionsString = ", " + options.stream().map(o -> "\"" + o.name() + "\"").collect(Collectors.joining(", "));
    try {
        if (createDetections) {
            if (PixelClassifierTools.createDetectionsFromPixelClassifier(imageData, classifier, minSize, minHoleSize, optionsArray)) {
                if (classifierName != null) {
                    imageData.getHistoryWorkflow().addStep(new DefaultScriptableWorkflowStep("Pixel classifier create detections", String.format("createDetectionsFromPixelClassifier(\"%s\", %s, %s)", classifierName, minSize, minHoleSize + optionsString)));
                }
                return true;
            }
        } else {
            if (PixelClassifierTools.createAnnotationsFromPixelClassifier(imageData, classifier, minSize, minHoleSize, optionsArray)) {
                if (classifierName != null) {
                    imageData.getHistoryWorkflow().addStep(new DefaultScriptableWorkflowStep("Pixel classifier create annotations", String.format("createAnnotationsFromPixelClassifier(\"%s\", %s, %s)", classifierName, minSize, minHoleSize + optionsString)));
                }
                return true;
            }
        }
    } catch (IOException e) {
        Dialogs.showErrorMessage(title, e);
    }
    return false;
}
Also used : Button(javafx.scene.control.Button) CreateObjectOptions(qupath.opencv.ml.pixel.PixelClassifierTools.CreateObjectOptions) Arrays(java.util.Arrays) ImageServer(qupath.lib.images.servers.ImageServer) PathTileObject(qupath.lib.objects.PathTileObject) BooleanBinding(javafx.beans.binding.BooleanBinding) CheckMenuItem(javafx.scene.control.CheckMenuItem) LoggerFactory(org.slf4j.LoggerFactory) Side(javafx.geometry.Side) ImageWriter(qupath.lib.images.writers.ImageWriter) ParameterList(qupath.lib.plugins.parameters.ParameterList) ComboBox(javafx.scene.control.ComboBox) ContextMenu(javafx.scene.control.ContextMenu) Map(java.util.Map) PixelClassifierTools(qupath.opencv.ml.pixel.PixelClassifierTools) Pane(javafx.scene.layout.Pane) MenuItem(javafx.scene.control.MenuItem) BufferedImage(java.awt.image.BufferedImage) Collectors(java.util.stream.Collectors) PathAnnotationObject(qupath.lib.objects.PathAnnotationObject) PathDetectionObject(qupath.lib.objects.PathDetectionObject) PathObject(qupath.lib.objects.PathObject) Objects(java.util.Objects) List(java.util.List) BooleanProperty(javafx.beans.property.BooleanProperty) Project(qupath.lib.projects.Project) DefaultScriptableWorkflowStep(qupath.lib.plugins.workflow.DefaultScriptableWorkflowStep) GuiTools(qupath.lib.gui.tools.GuiTools) RegionFilter(qupath.lib.gui.viewer.RegionFilter) BorderPane(javafx.scene.layout.BorderPane) StringProperty(javafx.beans.property.StringProperty) PathCellObject(qupath.lib.objects.PathCellObject) SaveResourcePaneBuilder(qupath.process.gui.commands.ui.SaveResourcePaneBuilder) ObjectExpression(javafx.beans.binding.ObjectExpression) PathClassTools(qupath.lib.objects.classes.PathClassTools) PathObjectHierarchy(qupath.lib.objects.hierarchy.PathObjectHierarchy) Bindings(javafx.beans.binding.Bindings) ArrayList(java.util.ArrayList) StandardRegionFilters(qupath.lib.gui.viewer.RegionFilter.StandardRegionFilters) LinkedHashMap(java.util.LinkedHashMap) Dialogs(qupath.lib.gui.dialogs.Dialogs) Tooltip(javafx.scene.control.Tooltip) ImageData(qupath.lib.images.ImageData) Logger(org.slf4j.Logger) StringExpression(javafx.beans.binding.StringExpression) ImageWriterTools(qupath.lib.images.writers.ImageWriterTools) Commands(qupath.lib.gui.commands.Commands) IOException(java.io.IOException) TMACoreObject(qupath.lib.objects.TMACoreObject) OverlayOptions(qupath.lib.gui.viewer.OverlayOptions) SimpleBooleanProperty(javafx.beans.property.SimpleBooleanProperty) PixelClassifier(qupath.lib.classifiers.pixel.PixelClassifier) BooleanParameter(qupath.lib.plugins.parameters.BooleanParameter) Collections(java.util.Collections) PaneTools(qupath.lib.gui.tools.PaneTools)
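A notable detail here is the reuse of lastCreateObjectParams: the previous ParameterList is duplicated, all of its parameters are unhidden, and one default is refreshed before the dialog is shown again. A minimal sketch of that reuse pattern, with a hypothetical cached field and a reduced parameter set, could look like this:

import qupath.lib.plugins.parameters.BooleanParameter;
import qupath.lib.plugins.parameters.ParameterList;

public class ReuseParamsSketch {

    // Hypothetical cache of the parameters the user last confirmed
    private static ParameterList lastParams;

    static ParameterList buildParams(boolean includeIgnoredDefault) {
        ParameterList params;
        if (lastParams != null) {
            // Reuse previous values, unhide everything and refresh one default
            params = lastParams.duplicate();
            params.setHiddenParameters(false, params.getKeyValueParameters(true).keySet().toArray(String[]::new));
            ((BooleanParameter) params.getParameters().get("includeIgnored")).setValue(includeIgnoredDefault);
        } else {
            params = new ParameterList()
                    .addBooleanParameter("doSplit", "Split objects", false,
                            "Split multi-part regions into separate objects")
                    .addBooleanParameter("includeIgnored", "Create objects for ignored classes", includeIgnoredDefault,
                            "Create objects for classifications that are usually ignored");
        }
        return params;
    }
}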

Example 20 with ParameterList

Use of qupath.lib.plugins.parameters.ParameterList in project qupath by qupath.

The class HaralickFeaturesPlugin, method addRunnableTasks.

@Override
protected void addRunnableTasks(final ImageData<BufferedImage> imageData, final PathObject parentObject, List<Runnable> tasks) {
    final ParameterList params = getParameterList(imageData);
    final ImageServer<BufferedImage> server = imageData.getServer();
    tasks.add(new HaralickRunnable(server, parentObject, params, imageData.getColorDeconvolutionStains()));
}
Also used : ParameterList(qupath.lib.plugins.parameters.ParameterList) BufferedImage(java.awt.image.BufferedImage)
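Examples 17 and 20 share the same structure: the plugin resolves one ParameterList per image via getParameterList(imageData) and hands it to a Runnable created for each parent object. A hypothetical stand-alone illustration of that fan-out is sketched below; the class, method, and the "magnification" parameter key are assumptions for illustration, not part of HaralickFeaturesPlugin.

import java.util.ArrayList;
import java.util.List;

import qupath.lib.objects.PathObject;
import qupath.lib.plugins.parameters.ParameterList;

public class FeatureTaskSketch {

    // Assumes the supplied ParameterList contains a double parameter under the key "magnification"
    static List<Runnable> buildTasks(List<PathObject> parentObjects, ParameterList params) {
        List<Runnable> tasks = new ArrayList<>();
        for (PathObject parent : parentObjects) {
            // Each task captures the same user-chosen settings
            tasks.add(() -> {
                double magnification = params.getDoubleParameterValue("magnification"); // hypothetical key
                System.out.println("Computing features for " + parent + " at x" + magnification);
            });
        }
        return tasks;
    }
}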

Aggregations

ParameterList (qupath.lib.plugins.parameters.ParameterList): 53
BufferedImage (java.awt.image.BufferedImage): 15
ArrayList (java.util.ArrayList): 13
IOException (java.io.IOException): 11
List (java.util.List): 9
Collectors (java.util.stream.Collectors): 8
BorderPane (javafx.scene.layout.BorderPane): 8
PathObject (qupath.lib.objects.PathObject): 8
Logger (org.slf4j.Logger): 7
LoggerFactory (org.slf4j.LoggerFactory): 7
GeneralTools (qupath.lib.common.GeneralTools): 7
Dialogs (qupath.lib.gui.dialogs.Dialogs): 7
ImageServer (qupath.lib.images.servers.ImageServer): 7
Insets (javafx.geometry.Insets): 6
Tooltip (javafx.scene.control.Tooltip): 6
Collection (java.util.Collection): 5
Map (java.util.Map): 5
SimpleStringProperty (javafx.beans.property.SimpleStringProperty): 5
ContextMenu (javafx.scene.control.ContextMenu): 5
Label (javafx.scene.control.Label): 5