Search in sources :

Example 21 with ImageData

use of qupath.lib.images.ImageData in project qupath by qupath.

From the class PixelClassifierPane, the method doClassification:

/**
 * Train the currently selected pixel classifier model using annotations from the
 * available training image(s), update the class-proportion pie chart, log a rough
 * accuracy estimate, and install a live prediction overlay in the viewer.
 * Runs on the calling thread; shows an error notification and returns early if
 * no classifier is selected or training data cannot be created.
 */
private void doClassification() {
    // We can proceed without a current image so long as *some* viewer has one open,
    // since training data may come from other open images
    var imageData = qupath.getImageData();
    if (imageData == null) {
        if (!qupath.getViewers().stream().anyMatch(v -> v.getImageData() != null)) {
            logger.debug("doClassification() called, but no images are open");
            return;
        }
    }
    var model = selectedClassifier.get();
    if (model == null) {
        Dialogs.showErrorNotification("Pixel classifier", "No classifier selected!");
        return;
    }
    ClassifierTrainingData trainingData;
    try {
        var trainingImages = getTrainingImageData();
        if (trainingImages.size() > 1)
            logger.info("Creating training data from {} images", trainingImages.size());
        trainingData = helper.createTrainingData(trainingImages);
    } catch (Exception e) {
        logger.error("Error when updating training data", e);
        return;
    }
    if (trainingData == null) {
        resetPieChart();
        return;
    }
    // Ensure we seed the RNG for reproducibility of the train/test shuffle
    opencv_core.setRNGSeed(rngSeed);
    // Optionally limit the number of training samples we use
    // TODO: Prevent training K nearest neighbor with a huge number of samples (very slow!)
    var actualMaxSamples = this.maxSamples;
    var trainData = trainingData.getTrainData();
    if (actualMaxSamples > 0 && trainData.getNTrainSamples() > actualMaxSamples)
        trainData.setTrainTestSplit(actualMaxSamples, true);
    else
        trainData.shuffleTrainTest();
    // Apply normalization, if we need to
    FeaturePreprocessor preprocessor = normalization.build(trainData.getTrainSamples(), false);
    if (preprocessor.doesSomething()) {
        preprocessingOp = ImageOps.ML.preprocessor(preprocessor);
    } else
        preprocessingOp = null;
    var labels = trainingData.getLabelMap();
    // Using getTrainNormCatResponses() causes confusion if classes are not represented
    var targets = trainData.getTrainResponses();
    IntBuffer buffer = targets.createBuffer();
    int n = (int) targets.total();
    // Count samples per label, both for the pie chart and optional reweighting
    var rawCounts = new int[labels.size()];
    for (int i = 0; i < n; i++) {
        rawCounts[buffer.get(i)] += 1;
    }
    Map<PathClass, Integer> counts = new LinkedHashMap<>();
    for (var entry : labels.entrySet()) {
        counts.put(entry.getKey(), rawCounts[entry.getValue()]);
    }
    updatePieChart(counts);
    Mat weights = null;
    if (reweightSamples) {
        // Weight each sample inversely to its class frequency (n/count) so that
        // under-represented classes contribute proportionally more during training
        weights = new Mat(n, 1, opencv_core.CV_32FC1);
        FloatIndexer bufferWeights = weights.createIndexer();
        float[] weightArray = new float[rawCounts.length];
        for (int i = 0; i < weightArray.length; i++) {
            int c = rawCounts[i];
            weightArray[i] = c == 0 ? 1 : (float) n / c;
        }
        for (int i = 0; i < n; i++) {
            int label = buffer.get(i);
            bufferWeights.put(i, weightArray[label]);
        }
        bufferWeights.release();
    }
    // Create TrainData in an appropriate format (e.g. labels or one-hot encoding)
    var trainSamples = trainData.getTrainSamples();
    var trainResponses = trainData.getTrainResponses();
    preprocessor.apply(trainSamples, false);
    trainData = model.createTrainData(trainSamples, trainResponses, weights, false);
    logger.info("Training data: {} x {}, Target data: {} x {}", trainSamples.rows(), trainSamples.cols(), trainResponses.rows(), trainResponses.cols());
    model.train(trainData);
    // Calculate accuracy using whatever we can, as a rough guide to progress
    var test = trainData.getTestSamples();
    String testSet = "HELD-OUT TRAINING SET";
    if (test.empty()) {
        // No held-out samples; fall back to evaluating on the training set itself
        // (buffer already refers to the training responses)
        test = trainSamples;
        testSet = "TRAINING SET";
    } else {
        preprocessor.apply(test, false);
        buffer = trainData.getTestNormCatResponses().createBuffer();
    }
    var testResults = new Mat();
    model.predict(test, testResults, null);
    IntBuffer bufferResults = testResults.createBuffer();
    int nTest = testResults.rows();
    int nCorrect = 0;
    for (int i = 0; i < nTest; i++) {
        if (bufferResults.get(i) == buffer.get(i))
            nCorrect++;
    }
    // Fix: divide by the number of samples actually evaluated (nTest), not the full
    // training count n - using n under-reported accuracy whenever a held-out split existed
    logger.info("Current accuracy on the {}: {} %", testSet, GeneralTools.formatNumber(nCorrect * 100.0 / nTest, 1));
    if (model instanceof RTreesClassifier) {
        var trees = (RTreesClassifier) model;
        // Feature names require an image; only log importance when one is available
        if (trees.hasFeatureImportance() && imageData != null)
            logVariableImportance(trees, helper.getFeatureOp().getChannels(imageData).stream().map(c -> c.getName()).collect(Collectors.toList()));
    }
    trainData.close();
    var featureCalculator = helper.getFeatureOp();
    if (preprocessingOp != null)
        featureCalculator = featureCalculator.appendOps(preprocessingOp);
    // TODO: Check whether the input size should be derived from the feature calculator
    // rather than hard-coded
    int inputWidth = 512;
    int inputHeight = 512;
    var cal = helper.getResolution();
    var channelType = ImageServerMetadata.ChannelType.CLASSIFICATION;
    if (model.supportsProbabilities()) {
        channelType = selectedOutputType.get();
    }
    // Channels are needed for probability output (and work for classification as well).
    // Invert the label map (class -> index) into (index -> class), warning on collisions.
    var labels2 = new TreeMap<Integer, PathClass>();
    for (var entry : labels.entrySet()) {
        var previous = labels2.put(entry.getValue(), entry.getKey());
        if (previous != null)
            logger.warn("Duplicate label found! {} matches with {} and {}, only the latter will be used", entry.getValue(), previous, entry.getKey());
    }
    var channels = PathClassifierTools.classificationLabelsToChannels(labels2, true);
    PixelClassifierMetadata metadata = new PixelClassifierMetadata.Builder().inputResolution(cal).inputShape(inputWidth, inputHeight).setChannelType(channelType).outputChannels(channels).build();
    currentClassifier.set(PixelClassifiers.createClassifier(model, featureCalculator, metadata, true));
    var overlay = PixelClassificationOverlay.create(qupath.getOverlayOptions(), currentClassifier.get(), getLivePredictionThreads());
    replaceOverlay(overlay);
}
Also used : Arrays(java.util.Arrays) IJTools(qupath.imagej.tools.IJTools) ParameterList(qupath.lib.plugins.parameters.ParameterList) Map(java.util.Map) Point2D(javafx.geometry.Point2D) IJExtension(qupath.imagej.gui.IJExtension) Platform(javafx.application.Platform) PieChart(javafx.scene.chart.PieChart) BooleanProperty(javafx.beans.property.BooleanProperty) Region(javafx.scene.layout.Region) ObservableList(javafx.collections.ObservableList) BorderPane(javafx.scene.layout.BorderPane) StringProperty(javafx.beans.property.StringProperty) CompositeImage(ij.CompositeImage) RectangleROI(qupath.lib.roi.RectangleROI) org.bytedeco.opencv.global.opencv_core(org.bytedeco.opencv.global.opencv_core) FXCollections(javafx.collections.FXCollections) Bindings(javafx.beans.binding.Bindings) IntegerProperty(javafx.beans.property.IntegerProperty) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) KNearest(org.bytedeco.opencv.opencv_ml.KNearest) ImageOps(qupath.opencv.ops.ImageOps) Slider(javafx.scene.control.Slider) Mat(org.bytedeco.opencv.opencv_core.Mat) TextAlignment(javafx.scene.text.TextAlignment) GridPane(javafx.scene.layout.GridPane) PathClassifierTools(qupath.lib.classifiers.PathClassifierTools) GeneralTools(qupath.lib.common.GeneralTools) RegionRequest(qupath.lib.regions.RegionRequest) IOException(java.io.IOException) ChartTools(qupath.lib.gui.charts.ChartTools) PixelClassifier(qupath.lib.classifiers.pixel.PixelClassifier) TreeMap(java.util.TreeMap) FloatIndexer(org.bytedeco.javacpp.indexer.FloatIndexer) SimpleObjectProperty(javafx.beans.property.SimpleObjectProperty) PixelCalibration(qupath.lib.images.servers.PixelCalibration) ObservableValue(javafx.beans.value.ObservableValue) PathObjectHierarchyListener(qupath.lib.objects.hierarchy.events.PathObjectHierarchyListener) ANN_MLP(org.bytedeco.opencv.opencv_ml.ANN_MLP) PathPrefs(qupath.lib.gui.prefs.PathPrefs) ImageServerMetadata(qupath.lib.images.servers.ImageServerMetadata) 
PaneTools(qupath.lib.gui.tools.PaneTools) EventHandler(javafx.event.EventHandler) Button(javafx.scene.control.Button) Pos(javafx.geometry.Pos) ImageServer(qupath.lib.images.servers.ImageServer) ListCell(javafx.scene.control.ListCell) LoggerFactory(org.slf4j.LoggerFactory) OverrunStyle(javafx.scene.control.OverrunStyle) RTreesClassifier(qupath.opencv.ml.OpenCVClassifiers.RTreesClassifier) OpenCVStatModel(qupath.opencv.ml.OpenCVClassifiers.OpenCVStatModel) Side(javafx.geometry.Side) PixelClassificationOverlay(qupath.lib.gui.viewer.overlays.PixelClassificationOverlay) ChannelType(qupath.lib.images.servers.ImageServerMetadata.ChannelType) ComboBox(javafx.scene.control.ComboBox) IntBuffer(java.nio.IntBuffer) PathObjectHierarchyEvent(qupath.lib.objects.hierarchy.events.PathObjectHierarchyEvent) QuPathGUI(qupath.lib.gui.QuPathGUI) BufferedImage(java.awt.image.BufferedImage) PixelClassifiers(qupath.opencv.ml.pixel.PixelClassifiers) Collection(java.util.Collection) Spinner(javafx.scene.control.Spinner) Collectors(java.util.stream.Collectors) FeaturePreprocessor(qupath.opencv.ml.FeaturePreprocessor) QuPathViewer(qupath.lib.gui.viewer.QuPathViewer) Priority(javafx.scene.layout.Priority) List(java.util.List) ToggleButton(javafx.scene.control.ToggleButton) GuiTools(qupath.lib.gui.tools.GuiTools) ColorToolsFX(qupath.lib.gui.tools.ColorToolsFX) IntStream(java.util.stream.IntStream) Scene(javafx.scene.Scene) ListView(javafx.scene.control.ListView) ReadOnlyObjectProperty(javafx.beans.property.ReadOnlyObjectProperty) SimpleStringProperty(javafx.beans.property.SimpleStringProperty) ButtonType(javafx.scene.control.ButtonType) MouseEvent(javafx.scene.input.MouseEvent) ProjectDialogs(qupath.lib.gui.dialogs.ProjectDialogs) Dialogs(qupath.lib.gui.dialogs.Dialogs) Insets(javafx.geometry.Insets) Normalization(qupath.lib.classifiers.Normalization) Callback(javafx.util.Callback) Tooltip(javafx.scene.control.Tooltip) WeakHashMap(java.util.WeakHashMap) ImageData(qupath.lib.images.ImageData) 
ObjectProperty(javafx.beans.property.ObjectProperty) Logger(org.slf4j.Logger) Label(javafx.scene.control.Label) ProjectImageEntry(qupath.lib.projects.ProjectImageEntry) PathClass(qupath.lib.objects.classes.PathClass) ImageOp(qupath.opencv.ops.ImageOp) PixelClassifierMetadata(qupath.lib.classifiers.pixel.PixelClassifierMetadata) OpenCVClassifiers(qupath.opencv.ml.OpenCVClassifiers) RTrees(org.bytedeco.opencv.opencv_ml.RTrees) LogisticRegression(org.bytedeco.opencv.opencv_ml.LogisticRegression) SimpleBooleanProperty(javafx.beans.property.SimpleBooleanProperty) Stage(javafx.stage.Stage) ClassifierTrainingData(qupath.process.gui.commands.ml.PixelClassifierTraining.ClassifierTrainingData) MiniViewers(qupath.lib.gui.commands.MiniViewers) Comparator(java.util.Comparator) ChangeListener(javafx.beans.value.ChangeListener) Collections(java.util.Collections) ContentDisplay(javafx.scene.control.ContentDisplay) Mat(org.bytedeco.opencv.opencv_core.Mat) ClassifierTrainingData(qupath.process.gui.commands.ml.PixelClassifierTraining.ClassifierTrainingData) FloatIndexer(org.bytedeco.javacpp.indexer.FloatIndexer) TreeMap(java.util.TreeMap) IOException(java.io.IOException) FeaturePreprocessor(qupath.opencv.ml.FeaturePreprocessor) RTreesClassifier(qupath.opencv.ml.OpenCVClassifiers.RTreesClassifier) LinkedHashMap(java.util.LinkedHashMap) PathClass(qupath.lib.objects.classes.PathClass) PixelClassifierMetadata(qupath.lib.classifiers.pixel.PixelClassifierMetadata) IntBuffer(java.nio.IntBuffer)

Example 22 with ImageData

use of qupath.lib.images.ImageData in project qupath by qupath.

From the class ObjectClassifierLoadCommand, the method run:

/**
 * Show a dialog listing the object classifiers available in the current project.
 * Supports adding classifiers (via a file chooser or drag &amp; drop), deleting
 * selected classifiers from the project, and applying the selected classifier(s)
 * sequentially to the current image. The name list is refreshed whenever the
 * window regains focus.
 */
@Override
public void run() {
    project = qupath.getProject();
    var listClassifiers = new ListView<String>();
    externalObjectClassifiers = new HashMap<>();
    listClassifiers.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
    var labelPlaceholder = new Label("Object classifiers in the\n" + "current project will appear here");
    labelPlaceholder.setAlignment(Pos.CENTER);
    labelPlaceholder.setTextAlignment(TextAlignment.CENTER);
    listClassifiers.setPlaceholder(labelPlaceholder);
    refreshNames(listClassifiers.getItems());
    // Provide options to add or remove a classifier via a context menu
    var popup = new ContextMenu();
    var miAdd = new MenuItem("Add classifier");
    miAdd.setOnAction(e -> {
        List<File> files = Dialogs.promptForMultipleFiles(title, null, "QuPath classifier file", "json");
        if (files == null || files.isEmpty())
            return;
        try {
            addClassifierFiles(files);
            // Refresh the displayed names so newly-added classifiers appear immediately
            // (previously an unused list was built here from getPixelClassifiers(),
            // which was both dead code and the wrong classifier manager)
            refreshNames(listClassifiers.getItems());
        } catch (IOException ex) {
            Dialogs.showErrorMessage(title, ex);
        }
    });
    var miRemove = new MenuItem("Delete selected");
    popup.getItems().setAll(miAdd, miRemove);
    miRemove.disableProperty().bind(listClassifiers.getSelectionModel().selectedItemProperty().isNull());
    listClassifiers.setContextMenu(popup);
    miRemove.setOnAction(e -> {
        var selectedItems = new ArrayList<>(listClassifiers.getSelectionModel().getSelectedItems());
        if (selectedItems.isEmpty() || project == null)
            return;
        try {
            // Confirm once for the whole selection, then delete each classifier
            String message = selectedItems.size() == 1 ? "'" + selectedItems.get(0) + "'" : selectedItems.size() + " classifiers";
            if (!Dialogs.showConfirmDialog(title, "Are you sure you want to delete " + message + "?"))
                return;
            for (var selected : selectedItems) {
                if (!project.getObjectClassifiers().getNames().contains(selected)) {
                    Dialogs.showErrorMessage(title, "Unable to delete " + selected + " - not found in the current project");
                    return;
                }
                project.getObjectClassifiers().remove(selected);
                listClassifiers.getItems().remove(selected);
            }
        } catch (Exception ex) {
            Dialogs.showErrorMessage("Error deleting classifier", ex);
        }
    });
    // Support drag & drop for classifier files
    listClassifiers.setOnDragOver(e -> {
        e.acceptTransferModes(TransferMode.COPY);
        e.consume();
    });
    listClassifiers.setOnDragDropped(e -> {
        Dragboard dragboard = e.getDragboard();
        if (dragboard.hasFiles()) {
            logger.trace("File(s) dragged onto classifier listView");
            try {
                // Only consider regular, non-hidden files
                var files = dragboard.getFiles().stream().filter(f -> f.isFile() && !f.isHidden()).collect(Collectors.toList());
                addClassifierFiles(files);
            } catch (Exception ex) {
                String plural = dragboard.getFiles().size() == 1 ? "" : "s";
                Dialogs.showErrorMessage("Error adding classifier" + plural, ex.getLocalizedMessage());
            }
        }
        refreshNames(listClassifiers.getItems());
        e.consume();
    });
    var label = new Label("Choose classifier");
    label.setLabelFor(listClassifiers);
    var btnApplyClassifier = new Button("Apply classifier");
    // Pluralize the button text when multiple classifiers are selected
    btnApplyClassifier.textProperty().bind(Bindings.createStringBinding(() -> {
        if (listClassifiers.getSelectionModel().getSelectedItems().size() > 1)
            return "Apply classifiers sequentially";
        return "Apply classifier";
    }, listClassifiers.getSelectionModel().getSelectedItems()));
    btnApplyClassifier.disableProperty().bind(listClassifiers.getSelectionModel().selectedItemProperty().isNull());
    btnApplyClassifier.setOnAction(e -> {
        var imageData = qupath.getImageData();
        if (imageData == null) {
            Dialogs.showErrorMessage(title, "No image open!");
            return;
        }
        runClassifier(imageData, project, externalObjectClassifiers, listClassifiers.getSelectionModel().getSelectedItems(), true);
    });
    // Lay out label / list / button in a single-column grid
    var pane = new GridPane();
    pane.setPadding(new Insets(10.0));
    pane.setHgap(5);
    pane.setVgap(10);
    int row = 0;
    PaneTools.setFillWidth(Boolean.TRUE, label, listClassifiers, btnApplyClassifier);
    PaneTools.setVGrowPriority(Priority.ALWAYS, listClassifiers);
    PaneTools.setHGrowPriority(Priority.ALWAYS, label, listClassifiers, btnApplyClassifier);
    PaneTools.setMaxWidth(Double.MAX_VALUE, label, listClassifiers, btnApplyClassifier);
    PaneTools.addGridRow(pane, row++, 0, "Choose object classification model to apply to the current image", label);
    PaneTools.addGridRow(pane, row++, 0, "Drag and drop a file here to add a new classifier", listClassifiers);
    // NOTE(review): the button handler applies to the current image only; tooltip
    // text updated accordingly (previously claimed "all open images")
    PaneTools.addGridRow(pane, row++, 0, "Apply object classification to the current image", btnApplyClassifier);
    var stage = new Stage();
    stage.setTitle(title);
    stage.setScene(new Scene(pane));
    stage.initOwner(qupath.getStage());
    stage.setWidth(300);
    stage.setHeight(400);
    // Refresh names whenever the window regains focus, in case the project changed
    stage.focusedProperty().addListener((v, o, n) -> {
        if (n)
            refreshNames(listClassifiers.getItems());
    });
    stage.show();
}
Also used : Button(javafx.scene.control.Button) Pos(javafx.geometry.Pos) Scene(javafx.scene.Scene) Arrays(java.util.Arrays) ListView(javafx.scene.control.ListView) GsonTools(qupath.lib.io.GsonTools) LoggerFactory(org.slf4j.LoggerFactory) HashMap(java.util.HashMap) Bindings(javafx.beans.binding.Bindings) TransferMode(javafx.scene.input.TransferMode) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) Dialogs(qupath.lib.gui.dialogs.Dialogs) Dragboard(javafx.scene.input.Dragboard) Insets(javafx.geometry.Insets) ObjectClassifiers(qupath.lib.classifiers.object.ObjectClassifiers) ContextMenu(javafx.scene.control.ContextMenu) Map(java.util.Map) TextAlignment(javafx.scene.text.TextAlignment) UpdateUrisCommand(qupath.lib.gui.commands.UpdateUrisCommand) GridPane(javafx.scene.layout.GridPane) QuPathGUI(qupath.lib.gui.QuPathGUI) ImageData(qupath.lib.images.ImageData) Logger(org.slf4j.Logger) Label(javafx.scene.control.Label) MenuItem(javafx.scene.control.MenuItem) BufferedImage(java.awt.image.BufferedImage) Files(java.nio.file.Files) GeneralTools(qupath.lib.common.GeneralTools) UriUpdater(qupath.lib.io.UriUpdater) IOException(java.io.IOException) WorkflowStep(qupath.lib.plugins.workflow.WorkflowStep) Collectors(java.util.stream.Collectors) File(java.io.File) Priority(javafx.scene.layout.Priority) List(java.util.List) Project(qupath.lib.projects.Project) SelectionMode(javafx.scene.control.SelectionMode) DefaultScriptableWorkflowStep(qupath.lib.plugins.workflow.DefaultScriptableWorkflowStep) Stage(javafx.stage.Stage) ObjectClassifier(qupath.lib.classifiers.object.ObjectClassifier) ObservableList(javafx.collections.ObservableList) UriResource(qupath.lib.io.UriResource) DialogButton(qupath.lib.gui.dialogs.Dialogs.DialogButton) PaneTools(qupath.lib.gui.tools.PaneTools) GridPane(javafx.scene.layout.GridPane) Insets(javafx.geometry.Insets) Label(javafx.scene.control.Label) ArrayList(java.util.ArrayList) ContextMenu(javafx.scene.control.ContextMenu) 
MenuItem(javafx.scene.control.MenuItem) IOException(java.io.IOException) Scene(javafx.scene.Scene) IOException(java.io.IOException) ListView(javafx.scene.control.ListView) Button(javafx.scene.control.Button) DialogButton(qupath.lib.gui.dialogs.Dialogs.DialogButton) Stage(javafx.stage.Stage) File(java.io.File) Dragboard(javafx.scene.input.Dragboard)

Example 23 with ImageData

use of qupath.lib.images.ImageData in project qupath by qupath.

From the class DragDropImportListener, the method handleFileDropImpl:

/**
 * Handle a list of files dropped onto a viewer, dispatching by content type:
 * extension jars, serialized data files, directories/projects, GeoJSON object
 * files, TMA dearraying data, scripts, registered drop handlers, and finally
 * plain images (single file or batch project import).
 *
 * @param viewer the viewer the files were dropped on; may be null
 * @param list the dropped files; must not be null
 * @throws IOException if reading a dropped file fails
 */
private void handleFileDropImpl(final QuPathViewer viewer, final List<File> list) throws IOException {
    // Shouldn't occur... but keeps FindBugs happy to check
    if (list == null) {
        logger.warn("No files given!");
        return;
    }
    // Check if we have only jar files - if so, treat them as extensions to install
    int nJars = 0;
    for (File file : list) {
        if (file.getName().toLowerCase().endsWith(".jar"))
            nJars++;
    }
    if (nJars == list.size()) {
        if (qupath.canInstallExtensions())
            qupath.installExtensions(list);
        else
            Dialogs.showErrorMessage("Install extensions", "Sorry, extensions can only be installed when QuPath is run as a standalone application.");
        return;
    }
    // Try to get a hierarchy for importing ROIs
    ImageData<BufferedImage> imageData = viewer == null ? null : viewer.getImageData();
    PathObjectHierarchy hierarchy = imageData == null ? null : imageData.getHierarchy();
    // Some consumers can only handle one file
    boolean singleFile = list.size() == 1;
    // Gather together the extensions - if this has length one, we know all the files have the same extension
    Set<String> allExtensions = list.stream().map(f -> GeneralTools.getExtension(f).orElse("")).collect(Collectors.toSet());
    // If we have a zipped file, create a set that includes the files within the zip image.
    // This helps us determine whether or not a zip file contains an image or objects, for example.
    Set<String> allUnzippedExtensions = allExtensions;
    if (allExtensions.contains(".zip")) {
        allUnzippedExtensions = list.stream().flatMap(f -> {
            try {
                return PathIO.unzippedExtensions(f.toPath()).stream();
            } catch (IOException e) {
                logger.debug(e.getLocalizedMessage(), e);
                return Arrays.stream(new String[0]);
            }
        }).collect(Collectors.toSet());
    }
    // Extract the first (and possibly only) file
    File file = list.get(0);
    String fileName = file.getName().toLowerCase();
    // Check if this is a hierarchy file
    if (singleFile && (fileName.endsWith(PathPrefs.getSerializationExtension()))) {
        // If we have a different path, open as a new image
        if (viewer == null) {
            Dialogs.showErrorMessage("Load data", "Please drag the file onto a specific viewer to open!");
            return;
        }
        try {
            // Check if we should be importing objects or opening the file
            if (imageData != null) {
                var dialog = new Dialog<ButtonType>();
                var btOpen = new ButtonType("Open image");
                var btImport = new ButtonType("Import objects");
                dialog.getDialogPane().getButtonTypes().setAll(btOpen, btImport, ButtonType.CANCEL);
                dialog.setTitle("Open data");
                dialog.setHeaderText("What do you want to do with the data file?");
                dialog.setContentText("You can\n" + " 1. Open the image in the current viewer\n" + " 2. Import objects and add them to the current image");
                var choice = dialog.showAndWait().orElse(ButtonType.CANCEL);
                if (choice == ButtonType.CANCEL)
                    return;
                if (choice == btImport) {
                    var pathObjects = PathIO.readObjects(file);
                    hierarchy.addPathObjects(pathObjects);
                    return;
                }
            }
            qupath.openSavedData(viewer, file, false, true);
        } catch (Exception e) {
            Dialogs.showErrorMessage("Load data", e);
        }
        return;
    }
    // Check if this is a directory - if so, look for a single project file
    if (singleFile && file.isDirectory()) {
        // Identify all files in the directory, and also all potential project files.
        // listFiles() can return null on an I/O error, even for a directory.
        File[] filesInDirectory = file.listFiles(f -> !f.isHidden());
        if (filesInDirectory == null)
            filesInDirectory = new File[0];
        List<File> projectFiles = Arrays.stream(filesInDirectory).filter(f -> f.isFile() && f.getAbsolutePath().toLowerCase().endsWith(ProjectIO.getProjectExtension())).collect(Collectors.toList());
        if (projectFiles.size() == 1) {
            file = projectFiles.get(0);
            logger.warn("Selecting project file {}", file);
        } else if (projectFiles.size() > 1) {
            // Prompt to select which project file to open
            logger.debug("Multiple project files found in directory {}", file);
            String[] fileNames = projectFiles.stream().map(f -> f.getName()).toArray(n -> new String[n]);
            String selectedName = Dialogs.showChoiceDialog("Select project", "Select project to open", fileNames, fileNames[0]);
            if (selectedName == null)
                return;
            file = new File(file, selectedName);
        } else if (filesInDirectory.length == 0) {
            // If we have an empty directory, offer to set it as a project
            if (Dialogs.showYesNoDialog("Create project", "Create project for empty directory?")) {
                Project<BufferedImage> project = Projects.createProject(file, BufferedImage.class);
                qupath.setProject(project);
                if (!project.isEmpty())
                    project.syncChanges();
                return;
            } else
                // Can't do anything else with an empty folder
                return;
        }
    }
    // Check if this is a project
    if (singleFile && (fileName.endsWith(ProjectIO.getProjectExtension()))) {
        try {
            Project<BufferedImage> project = ProjectIO.loadProject(file, BufferedImage.class);
            qupath.setProject(project);
        } catch (Exception e) {
            logger.error("Could not open as project file: {}, opening in the Script Editor instead", e);
            qupath.getScriptEditor().showScript(file);
        }
        return;
    }
    // Check if it is an object file in GeoJSON format (.geojson)
    if (PathIO.getObjectFileExtensions(false).containsAll(allUnzippedExtensions)) {
        if (imageData == null || hierarchy == null) {
            qupath.getScriptEditor().showScript(file);
            logger.info("Opening the dragged file in the Script Editor as there is no currently opened image in the viewer");
            return;
        }
        List<PathObject> pathObjects = new ArrayList<>();
        List<WorkflowStep> steps = new ArrayList<>();
        for (var tempFile : list) {
            try {
                var tempObjects = PathIO.readObjects(tempFile);
                if (tempObjects.isEmpty()) {
                    logger.warn("No objects found in {}, opening the dragged file in the Script Editor instead", tempFile.getAbsolutePath());
                    qupath.getScriptEditor().showScript(file);
                    return;
                }
                pathObjects.addAll(tempObjects);
                // Add step to workflow.
                // Fix: record the path of *this* file (tempFile), not the first
                // dropped file - previously every step recorded the same path.
                Map<String, String> map = new HashMap<>();
                map.put("path", tempFile.getPath());
                String method = "Import objects";
                String methodString = String.format("%s(%s%s%s)", "importObjectsFromFile", "\"", GeneralTools.escapeFilePath(tempFile.getPath()), "\"");
                steps.add(new DefaultScriptableWorkflowStep(method, map, methodString));
            } catch (IOException | IllegalArgumentException e) {
                Dialogs.showErrorNotification("Object import", e.getLocalizedMessage());
                return;
            }
        }
        // Ask confirmation to user before modifying the hierarchy
        int nObjects = pathObjects.size();
        String message = nObjects == 1 ? "Add object to the hierarchy?" : String.format("Add %d objects to the hierarchy?", nObjects);
        var confirm = Dialogs.showConfirmDialog("Add to hierarchy", message);
        if (!confirm)
            return;
        // Add objects to hierarchy and record the import in the workflow history
        hierarchy.addPathObjects(pathObjects);
        imageData.getHistoryWorkflow().addSteps(steps);
        return;
    }
    // Check if this is TMA dearraying data file
    if (singleFile && (fileName.endsWith(TMADataIO.TMA_DEARRAYING_DATA_EXTENSION))) {
        if (hierarchy == null)
            Dialogs.showErrorMessage("TMA grid import", "Please open an image first before importing a dearrayed TMA grid!");
        else {
            TMAGrid tmaGrid = TMADataIO.importDearrayedTMAData(file);
            if (tmaGrid != null) {
                if (hierarchy.isEmpty() || Dialogs.showYesNoDialog("TMA grid import", "Set TMA grid for existing hierarchy?"))
                    hierarchy.setTMAGrid(tmaGrid);
            } else
                Dialogs.showErrorMessage("TMA grid import", "Could not parse TMA grid from " + file.getName());
        }
        return;
    }
    // Open file with an extension supported by the Script Editor
    ScriptEditor scriptEditor = qupath.getScriptEditor();
    if (scriptEditor instanceof DefaultScriptEditor && ((DefaultScriptEditor) scriptEditor).supportsFile(file)) {
        scriptEditor.showScript(file);
        return;
    }
    // Check registered drop handlers; the first that accepts the drop wins
    for (DropHandler<File> handler : dropHandlers) {
        if (handler.handleDrop(viewer, list))
            return;
    }
    // Assume we have images
    if (singleFile && file.isFile()) {
        // Try to open as an image, if the extension is known
        if (viewer == null) {
            Dialogs.showErrorMessage("Open image", "Please drag the file onto a specific viewer to open!");
            return;
        }
        qupath.openImage(viewer, file.getAbsolutePath(), true, true);
        return;
    } else if (qupath.getProject() != null) {
        // Try importing multiple images to a project
        String[] potentialFiles = list.stream().filter(f -> f.isFile()).map(f -> f.getAbsolutePath()).toArray(String[]::new);
        if (potentialFiles.length > 0) {
            ProjectCommands.promptToImportImages(qupath, potentialFiles);
            return;
        }
    }
    if (qupath.getProject() == null) {
        if (list.size() > 1) {
            Dialogs.showErrorMessage("Drag & drop", "Could not handle multiple file drop - if you want to handle multiple images, you need to create a project first");
            return;
        }
    }
    if (list.size() > 1)
        Dialogs.showErrorMessage("Drag & drop", "Sorry, I couldn't figure out what to do with these files - try opening one at a time");
    else
        Dialogs.showErrorMessage("Drag & drop", "Sorry, I couldn't figure out what to do with " + list.get(0).getName());
}
Also used : EventHandler(javafx.event.EventHandler) Scene(javafx.scene.Scene) Arrays(java.util.Arrays) ButtonType(javafx.scene.control.ButtonType) LoggerFactory(org.slf4j.LoggerFactory) HashMap(java.util.HashMap) PathObjectHierarchy(qupath.lib.objects.hierarchy.PathObjectHierarchy) ScriptEditor(qupath.lib.gui.scripting.ScriptEditor) DragEvent(javafx.scene.input.DragEvent) TransferMode(javafx.scene.input.TransferMode) Projects(qupath.lib.projects.Projects) ArrayList(java.util.ArrayList) Dialogs(qupath.lib.gui.dialogs.Dialogs) TMADataIO(qupath.lib.gui.tma.TMADataIO) Dragboard(javafx.scene.input.Dragboard) Map(java.util.Map) QuPathGUI(qupath.lib.gui.QuPathGUI) ImageData(qupath.lib.images.ImageData) Logger(org.slf4j.Logger) Dialog(javafx.scene.control.Dialog) BufferedImage(java.awt.image.BufferedImage) GeneralTools(qupath.lib.common.GeneralTools) Node(javafx.scene.Node) Set(java.util.Set) IOException(java.io.IOException) WorkflowStep(qupath.lib.plugins.workflow.WorkflowStep) ProjectCommands(qupath.lib.gui.commands.ProjectCommands) Collectors(java.util.stream.Collectors) File(java.io.File) PathObject(qupath.lib.objects.PathObject) List(java.util.List) Project(qupath.lib.projects.Project) DefaultScriptableWorkflowStep(qupath.lib.plugins.workflow.DefaultScriptableWorkflowStep) ProjectIO(qupath.lib.projects.ProjectIO) DefaultScriptEditor(qupath.lib.gui.scripting.DefaultScriptEditor) TMAGrid(qupath.lib.objects.hierarchy.TMAGrid) PathPrefs(qupath.lib.gui.prefs.PathPrefs) PathIO(qupath.lib.io.PathIO) PathObjectHierarchy(qupath.lib.objects.hierarchy.PathObjectHierarchy) WorkflowStep(qupath.lib.plugins.workflow.WorkflowStep) DefaultScriptableWorkflowStep(qupath.lib.plugins.workflow.DefaultScriptableWorkflowStep) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) DefaultScriptEditor(qupath.lib.gui.scripting.DefaultScriptEditor) BufferedImage(java.awt.image.BufferedImage) DefaultScriptableWorkflowStep(qupath.lib.plugins.workflow.DefaultScriptableWorkflowStep) 
Dialog(javafx.scene.control.Dialog) ButtonType(javafx.scene.control.ButtonType) TMAGrid(qupath.lib.objects.hierarchy.TMAGrid) IOException(java.io.IOException) IOException(java.io.IOException) ScriptEditor(qupath.lib.gui.scripting.ScriptEditor) DefaultScriptEditor(qupath.lib.gui.scripting.DefaultScriptEditor) PathObject(qupath.lib.objects.PathObject) File(java.io.File)

Example 24 with ImageData

use of qupath.lib.images.ImageData in project qupath by qupath.

the class BufferedImageOverlay method paintOverlay.

// /**
// * Add another region to the overlay.
// * @param region
// * @param img
// * @return any existing region with the same key
// */
// public BufferedImage put(ImageRegion region, BufferedImage img) {
// var previous = regions.put(region, img);
// if (viewer != null)
// viewer.repaint();
// return previous;
// }
@Override
public void paintOverlay(Graphics2D g2d, ImageRegion imageRegion, double downsampleFactor, ImageData<BufferedImage> imageData, boolean paintCompletely) {
    // Nothing to paint if the overlay is hidden, or pixel classification display is switched off
    if (!isVisible() || !getOverlayOptions().getShowPixelClassification())
        return;
    super.paintOverlay(g2d, imageRegion, downsampleFactor, imageData, paintCompletely);
    // Paint every cached region that overlaps the visible area
    for (var entry : regions.entrySet()) {
        var region = entry.getKey();
        // Skip regions entirely outside the area being painted
        if (!imageRegion.intersects(region))
            continue;
        var imgToDraw = entry.getValue();
        if (colorModel == null || colorModel == imgToDraw.getColorModel()) {
            // No color model override required - just ensure a drawable version is cached
            imgToDraw = cacheRGB.computeIfAbsent(imgToDraw, this::convertToDrawable);
        } else {
            // Re-wrap the raster with the requested color model, then convert to a
            // version of the image we can draw quickly (cached per source image)
            var cached = cacheRGB.get(imgToDraw);
            if (cached == null) {
                var recolored = new BufferedImage(colorModel, imgToDraw.getRaster(), imgToDraw.getColorModel().isAlphaPremultiplied(), null);
                cached = convertToDrawable(recolored);
                cacheRGB.put(imgToDraw, cached);
            }
            imgToDraw = cached;
        }
        g2d.drawImage(imgToDraw, region.getX(), region.getY(), region.getWidth(), region.getHeight(), null);
    }
}
Also used : ImageData(qupath.lib.images.ImageData) ImageServer(qupath.lib.images.servers.ImageServer) Logger(org.slf4j.Logger) BufferedImage(java.awt.image.BufferedImage) LoggerFactory(org.slf4j.LoggerFactory) IOException(java.io.IOException) HashMap(java.util.HashMap) OverlayOptions(qupath.lib.gui.viewer.OverlayOptions) LinkedHashMap(java.util.LinkedHashMap) QuPathViewer(qupath.lib.gui.viewer.QuPathViewer) ColorModel(java.awt.image.ColorModel) ImageRegion(qupath.lib.regions.ImageRegion) Graphics2D(java.awt.Graphics2D) Map(java.util.Map) IndexColorModel(java.awt.image.IndexColorModel) ObservableValue(javafx.beans.value.ObservableValue) ChangeListener(javafx.beans.value.ChangeListener) Collections(java.util.Collections) ImageRegion(qupath.lib.regions.ImageRegion) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) BufferedImage(java.awt.image.BufferedImage)

Example 25 with ImageData

use of qupath.lib.images.ImageData in project qupath by qupath.

the class TileExporter method createRequests.

/**
 * Create region requests, along with information about whether we have a partial tile (which should not be resized/padded) or not.
 * @return
 */
/**
 * Create region requests, along with information about whether we have a partial tile
 * (which should not be resized/padded) or not.
 * <p>
 * If no parent objects are specified, the full image is tiled; otherwise one request is
 * generated per parent object (per z-slice/timepoint for the root object). Requests are
 * then filtered according to the annotation-only settings.
 * @return the region requests to export
 */
private Collection<RegionRequestWrapper> createRequests() {
    List<RegionRequestWrapper> requests = new ArrayList<>();
    // Work out which RegionRequests to use
    // If the downsample hasn't been specified, use the level 0 resolution
    double downsample = this.downsample;
    if (downsample <= 0) {
        downsample = server.getDownsampleForResolution(0);
        // downsample == 0 means 'use the default'; negative values are treated as invalid
        if (this.downsample < 0)
            logger.warn("Invalid downsample {}, I will use the level 0 downsample {}", this.downsample, downsample);
        else
            logger.debug("Using level 0 downsample {}", downsample);
    }
    if (parentObjects == null)
        requests.addAll(getTiledRegionRequests(downsample));
    else {
        for (var parent : parentObjects) {
            // Tile size in full-resolution pixel coordinates
            int w = (int) Math.ceil(tileWidth * downsample);
            int h = (int) Math.ceil(tileHeight * downsample);
            if (parent.isRootObject()) {
                // Root object: one request per z-slice and timepoint, either covering the
                // whole image or a fixed-size tile centered on the image
                for (int t = 0; t < server.nTimepoints(); t++) {
                    for (int z = 0; z < server.nZSlices(); z++) {
                        RegionRequest newRequest;
                        if (useParentRoiBounds) {
                            newRequest = RegionRequest.createInstance(server.getPath(), downsample, 0, 0, server.getWidth(), server.getHeight(), z, t);
                        } else {
                            int x = (int) Math.floor(server.getWidth() / 2.0 - w / 2.0);
                            int y = (int) Math.floor(server.getHeight() / 2.0 - h / 2.0);
                            newRequest = RegionRequest.createInstance(server.getPath(), downsample, x, y, w, h, z, t);
                        }
                        if (includePartialTiles || withinImage(newRequest, server))
                            requests.add(new RegionRequestWrapper(newRequest, false));
                    }
                }
            } else if (parent.hasROI()) {
                // Object with a ROI: either its bounding box, or a fixed-size tile
                // centered on the ROI centroid
                RegionRequest newRequest;
                var roi = PathObjectTools.getROI(parent, preferNucleus);
                if (useParentRoiBounds) {
                    newRequest = RegionRequest.createInstance(server.getPath(), downsample, roi);
                } else {
                    int x = (int) Math.floor(roi.getCentroidX() - w / 2.0);
                    int y = (int) Math.floor(roi.getCentroidY() - h / 2.0);
                    newRequest = RegionRequest.createInstance(server.getPath(), downsample, x, y, w, h, roi.getImagePlane());
                }
                if (includePartialTiles || withinImage(newRequest, server))
                    requests.add(new RegionRequestWrapper(newRequest, false));
            }
        }
    }
    // If we want only annotated tiles, skip regions that lack annotations
    requests.removeIf(wrapper -> shouldSkipForAnnotations(wrapper.request));
    return requests;
}

/**
 * Determine whether a request should be discarded because it fails the annotation filters.
 * <p>
 * With {@code annotatedCentroidTilesOnly}, a tile is kept only if an annotation contains its
 * centroid; with {@code annotatedTilesOnly}, a tile is kept if any annotation intersects it.
 * When a labeled server is available it is consulted in preference to the image hierarchy.
 * @param r the region request to check
 * @return true if the request should be removed, false if it should be exported
 */
private boolean shouldSkipForAnnotations(RegionRequest r) {
    if (annotatedCentroidTilesOnly) {
        double cx = (r.getMinX() + r.getMaxX()) / 2.0;
        double cy = (r.getMinY() + r.getMaxY()) / 2.0;
        // Note: instanceof is null-safe, so no explicit null check is needed
        if (serverLabeled instanceof LabeledImageServer) {
            if (((LabeledImageServer) serverLabeled).getObjectsForRegion(r).stream().noneMatch(p -> p.getROI().contains(cx, cy))) {
                logger.trace("Skipping empty labelled region based on centroid test {}", r);
                return true;
            }
        } else if (imageData != null) {
            if (PathObjectTools.getObjectsForLocation(imageData.getHierarchy(), cx, cy, r.getZ(), r.getT(), 0).isEmpty())
                return true;
        }
    } else if (annotatedTilesOnly) {
        if (serverLabeled != null) {
            if (serverLabeled.isEmptyRegion(r)) {
                logger.trace("Skipping empty labelled region {}", r);
                return true;
            }
        } else if (imageData != null) {
            if (imageData.getHierarchy().getObjectsForRegion(PathAnnotationObject.class, r, null).stream().noneMatch(p -> RoiTools.intersectsRegion(p.getROI(), r)))
                return true;
        }
    }
    return false;
}
Also used : LabeledImageServer(qupath.lib.images.servers.LabeledImageServer) ImageServer(qupath.lib.images.servers.ImageServer) GsonTools(qupath.lib.io.GsonTools) LoggerFactory(org.slf4j.LoggerFactory) NumberFormat(java.text.NumberFormat) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) ChannelType(qupath.lib.images.servers.ImageServerMetadata.ChannelType) ImageRegion(qupath.lib.regions.ImageRegion) Locale(java.util.Locale) BufferedImageTools(qupath.lib.awt.common.BufferedImageTools) LabeledImageServer(qupath.lib.images.servers.LabeledImageServer) LinkedHashSet(java.util.LinkedHashSet) ImageData(qupath.lib.images.ImageData) RoiTools(qupath.lib.roi.RoiTools) Logger(org.slf4j.Logger) BufferedImage(java.awt.image.BufferedImage) Files(java.nio.file.Files) GeneralTools(qupath.lib.common.GeneralTools) RegionRequest(qupath.lib.regions.RegionRequest) Predicate(java.util.function.Predicate) Collection(java.util.Collection) PathClass(qupath.lib.objects.classes.PathClass) Set(java.util.Set) IOException(java.io.IOException) Padding(qupath.lib.regions.Padding) Collectors(java.util.stream.Collectors) File(java.io.File) StandardCharsets(java.nio.charset.StandardCharsets) Executors(java.util.concurrent.Executors) PathObjectTools(qupath.lib.objects.PathObjectTools) PathAnnotationObject(qupath.lib.objects.PathAnnotationObject) PathObject(qupath.lib.objects.PathObject) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Paths(java.nio.file.Paths) ThreadTools(qupath.lib.common.ThreadTools) TransformedServerBuilder(qupath.lib.images.servers.TransformedServerBuilder) Collections(java.util.Collections) PathAnnotationObject(qupath.lib.objects.PathAnnotationObject) ArrayList(java.util.ArrayList) RegionRequest(qupath.lib.regions.RegionRequest)

Aggregations

ImageData (qupath.lib.images.ImageData)32 BufferedImage (java.awt.image.BufferedImage)27 Collectors (java.util.stream.Collectors)26 Logger (org.slf4j.Logger)26 LoggerFactory (org.slf4j.LoggerFactory)26 List (java.util.List)25 ArrayList (java.util.ArrayList)23 IOException (java.io.IOException)21 PathObject (qupath.lib.objects.PathObject)21 File (java.io.File)19 Collection (java.util.Collection)19 Dialogs (qupath.lib.gui.dialogs.Dialogs)19 ImageServer (qupath.lib.images.servers.ImageServer)19 Collections (java.util.Collections)17 Map (java.util.Map)17 GeneralTools (qupath.lib.common.GeneralTools)17 QuPathGUI (qupath.lib.gui.QuPathGUI)17 Arrays (java.util.Arrays)16 PathPrefs (qupath.lib.gui.prefs.PathPrefs)15 Bindings (javafx.beans.binding.Bindings)14