Example usage of qupath.lib.images.ImageData in the QuPath project: the doClassification method of the PixelClassifierPane class.
/**
 * Train the currently-selected pixel classifier from the annotated training images,
 * report a rough accuracy estimate, and install a live prediction overlay.
 * <p>
 * Side effects: updates the pie chart with per-class sample counts, sets
 * {@code preprocessingOp} and {@code currentClassifier}, and replaces the viewer overlay.
 */
private void doClassification() {
    // Training requires at least one open image somewhere; the current viewer's image
    // may legitimately be null as long as another viewer has an image to train from.
    var imageData = qupath.getImageData();
    if (imageData == null) {
        if (qupath.getViewers().stream().noneMatch(v -> v.getImageData() != null)) {
            logger.debug("doClassification() called, but no images are open");
            return;
        }
    }
    var model = selectedClassifier.get();
    if (model == null) {
        Dialogs.showErrorNotification("Pixel classifier", "No classifier selected!");
        return;
    }

    // Gather training data from all requested images
    ClassifierTrainingData trainingData;
    try {
        var trainingImages = getTrainingImageData();
        if (trainingImages.size() > 1)
            logger.info("Creating training data from {} images", trainingImages.size());
        trainingData = helper.createTrainingData(trainingImages);
    } catch (Exception e) {
        logger.error("Error when updating training data", e);
        return;
    }
    if (trainingData == null) {
        resetPieChart();
        return;
    }

    // Ensure we seed the RNG for reproducibility before any shuffling/splitting
    opencv_core.setRNGSeed(rngSeed);

    // Optionally limit the number of training samples we use
    // (e.g. K nearest neighbor is very slow with a huge number of samples)
    var actualMaxSamples = this.maxSamples;
    var trainData = trainingData.getTrainData();
    if (actualMaxSamples > 0 && trainData.getNTrainSamples() > actualMaxSamples)
        trainData.setTrainTestSplit(actualMaxSamples, true);
    else
        trainData.shuffleTrainTest();

    // Apply normalization, if we need to
    FeaturePreprocessor preprocessor = normalization.build(trainData.getTrainSamples(), false);
    if (preprocessor.doesSomething()) {
        preprocessingOp = ImageOps.ML.preprocessor(preprocessor);
    } else
        preprocessingOp = null;

    // Count samples per class for the pie chart.
    // Using getTrainNormCatResponses() causes confusion if classes are not represented,
    // so use the raw training responses instead.
    var labels = trainingData.getLabelMap();
    var targets = trainData.getTrainResponses();
    IntBuffer buffer = targets.createBuffer();
    int n = (int) targets.total();
    var rawCounts = new int[labels.size()];
    for (int i = 0; i < n; i++) {
        rawCounts[buffer.get(i)] += 1;
    }
    Map<PathClass, Integer> counts = new LinkedHashMap<>();
    for (var entry : labels.entrySet()) {
        counts.put(entry.getKey(), rawCounts[entry.getValue()]);
    }
    updatePieChart(counts);

    // Optionally weight samples inversely to class frequency, so that
    // under-represented classes are not ignored during training
    Mat weights = null;
    if (reweightSamples) {
        weights = new Mat(n, 1, opencv_core.CV_32FC1);
        FloatIndexer bufferWeights = weights.createIndexer();
        float[] weightArray = new float[rawCounts.length];
        for (int i = 0; i < weightArray.length; i++) {
            int c = rawCounts[i];
            weightArray[i] = c == 0 ? 1 : (float) n / c;
        }
        for (int i = 0; i < n; i++) {
            int label = buffer.get(i);
            bufferWeights.put(i, weightArray[label]);
        }
        bufferWeights.release();
    }

    // Create TrainData in an appropriate format (e.g. labels or one-hot encoding) and train
    var trainSamples = trainData.getTrainSamples();
    var trainResponses = trainData.getTrainResponses();
    preprocessor.apply(trainSamples, false);
    trainData = model.createTrainData(trainSamples, trainResponses, weights, false);
    logger.info("Training data: {} x {}, Target data: {} x {}", trainSamples.rows(), trainSamples.cols(), trainResponses.rows(), trainResponses.cols());
    model.train(trainData);

    // Calculate accuracy using whatever we can, as a rough guide to progress
    var test = trainData.getTestSamples();
    String testSet = "HELD-OUT TRAINING SET";
    if (test.empty()) {
        test = trainSamples;
        testSet = "TRAINING SET";
    } else {
        preprocessor.apply(test, false);
        buffer = trainData.getTestNormCatResponses().createBuffer();
    }
    var testResults = new Mat();
    model.predict(test, testResults, null);
    IntBuffer bufferResults = testResults.createBuffer();
    int nTest = testResults.rows();
    int nCorrect = 0;
    for (int i = 0; i < nTest; i++) {
        if (bufferResults.get(i) == buffer.get(i))
            nCorrect++;
    }
    // Divide by the number of *tested* samples (nTest), not the total number of training
    // samples (n) - the two differ whenever a held-out split was created above
    logger.info("Current accuracy on the {}: {} %", testSet, GeneralTools.formatNumber(nCorrect * 100.0 / nTest, 1));

    if (model instanceof RTreesClassifier) {
        var trees = (RTreesClassifier) model;
        if (trees.hasFeatureImportance() && imageData != null)
            logVariableImportance(trees, helper.getFeatureOp().getChannels(imageData).stream().map(c -> c.getName()).collect(Collectors.toList()));
    }
    trainData.close();

    // Build the feature calculator used for live prediction, including any preprocessing
    var featureCalculator = helper.getFeatureOp();
    if (preprocessingOp != null)
        featureCalculator = featureCalculator.appendOps(preprocessingOp);

    // TODO: Check if the input size should be derived from the feature calculator
    int inputWidth = 512;
    int inputHeight = 512;
    var cal = helper.getResolution();
    var channelType = ImageServerMetadata.ChannelType.CLASSIFICATION;
    if (model.supportsProbabilities()) {
        channelType = selectedOutputType.get();
    }

    // Channels are needed for probability output (and work for classification as well)
    var labels2 = new TreeMap<Integer, PathClass>();
    for (var entry : labels.entrySet()) {
        var previous = labels2.put(entry.getValue(), entry.getKey());
        if (previous != null)
            logger.warn("Duplicate label found! {} matches with {} and {}, only the latter will be used", entry.getValue(), previous, entry.getKey());
    }
    var channels = PathClassifierTools.classificationLabelsToChannels(labels2, true);
    PixelClassifierMetadata metadata = new PixelClassifierMetadata.Builder()
            .inputResolution(cal)
            .inputShape(inputWidth, inputHeight)
            .setChannelType(channelType)
            .outputChannels(channels)
            .build();

    currentClassifier.set(PixelClassifiers.createClassifier(model, featureCalculator, metadata, true));
    var overlay = PixelClassificationOverlay.create(qupath.getOverlayOptions(), currentClassifier.get(), getLivePredictionThreads());
    replaceOverlay(overlay);
}
Example usage of qupath.lib.images.ImageData in the QuPath project: the run method of the ObjectClassifierLoadCommand class.
/**
 * Show a dialog listing the object classifiers available in the current project,
 * supporting adding classifiers from file (via menu or drag &amp; drop), deleting
 * project classifiers, and applying the selected classifier(s) to the current image.
 */
@Override
public void run() {
    project = qupath.getProject();
    var listClassifiers = new ListView<String>();
    externalObjectClassifiers = new HashMap<>();
    listClassifiers.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
    var labelPlaceholder = new Label("Object classifiers in the\ncurrent project will appear here");
    labelPlaceholder.setAlignment(Pos.CENTER);
    labelPlaceholder.setTextAlignment(TextAlignment.CENTER);
    listClassifiers.setPlaceholder(labelPlaceholder);
    refreshNames(listClassifiers.getItems());

    // Context menu: add classifiers from file, or delete the selected classifiers
    var popup = new ContextMenu();
    var miAdd = new MenuItem("Add classifier");
    miAdd.setOnAction(e -> {
        List<File> files = Dialogs.promptForMultipleFiles(title, null, "QuPath classifier file", "json");
        if (files == null || files.isEmpty())
            return;
        try {
            addClassifierFiles(files);
            // Refresh the list so newly-added classifiers appear immediately
            refreshNames(listClassifiers.getItems());
        } catch (IOException ex) {
            Dialogs.showErrorMessage(title, ex);
        }
    });
    var miRemove = new MenuItem("Delete selected");
    popup.getItems().setAll(miAdd, miRemove);
    miRemove.disableProperty().bind(listClassifiers.getSelectionModel().selectedItemProperty().isNull());
    listClassifiers.setContextMenu(popup);
    miRemove.setOnAction(e -> {
        var selectedItems = new ArrayList<>(listClassifiers.getSelectionModel().getSelectedItems());
        if (selectedItems.isEmpty() || project == null)
            return;
        try {
            String message = selectedItems.size() == 1 ? "'" + selectedItems.get(0) + "'" : selectedItems.size() + " classifiers";
            if (!Dialogs.showConfirmDialog(title, "Are you sure you want to delete " + message + "?"))
                return;
            for (var selected : selectedItems) {
                // Only classifiers stored in the project can be deleted here
                if (!project.getObjectClassifiers().getNames().contains(selected)) {
                    Dialogs.showErrorMessage(title, "Unable to delete " + selected + " - not found in the current project");
                    return;
                }
                project.getObjectClassifiers().remove(selected);
                listClassifiers.getItems().remove(selected);
            }
        } catch (Exception ex) {
            Dialogs.showErrorMessage("Error deleting classifier", ex);
        }
    });

    // Support drag & drop for classifiers
    listClassifiers.setOnDragOver(e -> {
        e.acceptTransferModes(TransferMode.COPY);
        e.consume();
    });
    listClassifiers.setOnDragDropped(e -> {
        Dragboard dragboard = e.getDragboard();
        if (dragboard.hasFiles()) {
            logger.trace("File(s) dragged onto classifier listView");
            try {
                var files = dragboard.getFiles().stream().filter(f -> f.isFile() && !f.isHidden()).collect(Collectors.toList());
                addClassifierFiles(files);
            } catch (Exception ex) {
                String plural = dragboard.getFiles().size() == 1 ? "" : "s";
                Dialogs.showErrorMessage("Error adding classifier" + plural, ex.getLocalizedMessage());
            }
        }
        refreshNames(listClassifiers.getItems());
        e.consume();
    });

    var label = new Label("Choose classifier");
    label.setLabelFor(listClassifiers);

    // Button text reflects whether one or several classifiers will be applied
    var btnApplyClassifier = new Button("Apply classifier");
    btnApplyClassifier.textProperty().bind(Bindings.createStringBinding(() -> {
        if (listClassifiers.getSelectionModel().getSelectedItems().size() > 1)
            return "Apply classifiers sequentially";
        return "Apply classifier";
    }, listClassifiers.getSelectionModel().getSelectedItems()));
    btnApplyClassifier.disableProperty().bind(listClassifiers.getSelectionModel().selectedItemProperty().isNull());
    btnApplyClassifier.setOnAction(e -> {
        var imageData = qupath.getImageData();
        if (imageData == null) {
            Dialogs.showErrorMessage(title, "No image open!");
            return;
        }
        runClassifier(imageData, project, externalObjectClassifiers, listClassifiers.getSelectionModel().getSelectedItems(), true);
    });

    // Lay out the dialog content
    var pane = new GridPane();
    pane.setPadding(new Insets(10.0));
    pane.setHgap(5);
    pane.setVgap(10);
    int row = 0;
    PaneTools.setFillWidth(Boolean.TRUE, label, listClassifiers, btnApplyClassifier);
    PaneTools.setVGrowPriority(Priority.ALWAYS, listClassifiers);
    PaneTools.setHGrowPriority(Priority.ALWAYS, label, listClassifiers, btnApplyClassifier);
    PaneTools.setMaxWidth(Double.MAX_VALUE, label, listClassifiers, btnApplyClassifier);
    PaneTools.addGridRow(pane, row++, 0, "Choose object classification model to apply to the current image", label);
    PaneTools.addGridRow(pane, row++, 0, "Drag and drop a file here to add a new classifier", listClassifiers);
    PaneTools.addGridRow(pane, row++, 0, "Apply object classification to all open images", btnApplyClassifier);

    var stage = new Stage();
    stage.setTitle(title);
    stage.setScene(new Scene(pane));
    stage.initOwner(qupath.getStage());
    stage.setWidth(300);
    stage.setHeight(400);
    // Refresh the classifier names whenever the window regains focus,
    // in case the project contents changed in the meantime
    stage.focusedProperty().addListener((v, o, n) -> {
        if (n)
            refreshNames(listClassifiers.getItems());
    });
    stage.show();
}
Example usage of qupath.lib.images.ImageData in the QuPath project: the handleFileDropImpl method of the DragDropImportListener class.
/**
 * Handle files dropped onto a viewer, dispatching by file type:
 * extensions (.jar), serialized data files, directories, projects, object files
 * (e.g. GeoJSON), TMA dearraying data, scripts, registered drop handlers, and
 * finally images (single image to the viewer, multiple images to the project).
 *
 * @param viewer the viewer the files were dropped on (may be null)
 * @param list the dropped files
 * @throws IOException if an I/O error occurs while handling the drop
 */
private void handleFileDropImpl(final QuPathViewer viewer, final List<File> list) throws IOException {
    // Shouldn't occur... but keeps FindBugs happy to check
    if (list == null) {
        logger.warn("No files given!");
        return;
    }
    // Check if we have only jar files - if so, offer to install them as extensions
    int nJars = 0;
    for (File file : list) {
        if (file.getName().toLowerCase().endsWith(".jar"))
            nJars++;
    }
    if (nJars == list.size()) {
        if (qupath.canInstallExtensions())
            qupath.installExtensions(list);
        else
            Dialogs.showErrorMessage("Install extensions", "Sorry, extensions can only be installed when QuPath is run as a standalone application.");
        return;
    }

    // Try to get a hierarchy for importing ROIs
    ImageData<BufferedImage> imageData = viewer == null ? null : viewer.getImageData();
    PathObjectHierarchy hierarchy = imageData == null ? null : imageData.getHierarchy();

    // Some consumers can only handle one file
    boolean singleFile = list.size() == 1;

    // Gather together the extensions - if this has length one, we know all the files have the same extension
    Set<String> allExtensions = list.stream().map(f -> GeneralTools.getExtension(f).orElse("")).collect(Collectors.toSet());

    // If we have a zipped file, create a set that includes the files within the zip image
    // This helps us determine whether or not a zip file contains an image or objects, for example
    Set<String> allUnzippedExtensions = allExtensions;
    if (allExtensions.contains(".zip")) {
        allUnzippedExtensions = list.stream().flatMap(f -> {
            try {
                return PathIO.unzippedExtensions(f.toPath()).stream();
            } catch (IOException e) {
                logger.debug(e.getLocalizedMessage(), e);
                return Arrays.stream(new String[0]);
            }
        }).collect(Collectors.toSet());
    }

    // Extract the first (and possibly only) file
    File file = list.get(0);
    String fileName = file.getName().toLowerCase();

    // Check if this is a hierarchy file
    if (singleFile && (fileName.endsWith(PathPrefs.getSerializationExtension()))) {
        // If we have a different path, open as a new image
        if (viewer == null) {
            Dialogs.showErrorMessage("Load data", "Please drag the file onto a specific viewer to open!");
            return;
        }
        try {
            // Check if we should be importing objects or opening the file
            if (imageData != null) {
                var dialog = new Dialog<ButtonType>();
                var btOpen = new ButtonType("Open image");
                var btImport = new ButtonType("Import objects");
                dialog.getDialogPane().getButtonTypes().setAll(btOpen, btImport, ButtonType.CANCEL);
                dialog.setTitle("Open data");
                dialog.setHeaderText("What do you want to do with the data file?");
                dialog.setContentText("You can\n" + " 1. Open the image in the current viewer\n" + " 2. Import objects and add them to the current image");
                var choice = dialog.showAndWait().orElse(ButtonType.CANCEL);
                if (choice == ButtonType.CANCEL)
                    return;
                if (choice == btImport) {
                    var pathObjects = PathIO.readObjects(file);
                    hierarchy.addPathObjects(pathObjects);
                    return;
                }
            }
            qupath.openSavedData(viewer, file, false, true);
        } catch (Exception e) {
            Dialogs.showErrorMessage("Load data", e);
        }
        return;
    }

    // Check if this is a directory - if so, look for a single project file
    if (singleFile && file.isDirectory()) {
        // Identify all files in the directory, and also all potential project files.
        // Note: listFiles() returns null if an I/O error occurs - treat that as empty
        File[] filesInDirectory = file.listFiles(f -> !f.isHidden());
        if (filesInDirectory == null)
            filesInDirectory = new File[0];
        List<File> projectFiles = Arrays.stream(filesInDirectory).filter(f -> f.isFile() && f.getAbsolutePath().toLowerCase().endsWith(ProjectIO.getProjectExtension())).collect(Collectors.toList());
        if (projectFiles.size() == 1) {
            file = projectFiles.get(0);
            logger.warn("Selecting project file {}", file);
        } else if (projectFiles.size() > 1) {
            // Prompt to select which project file to open
            logger.debug("Multiple project files found in directory {}", file);
            String[] fileNames = projectFiles.stream().map(f -> f.getName()).toArray(n -> new String[n]);
            String selectedName = Dialogs.showChoiceDialog("Select project", "Select project to open", fileNames, fileNames[0]);
            if (selectedName == null)
                return;
            file = new File(file, selectedName);
        } else if (filesInDirectory.length == 0) {
            // If we have an empty directory, offer to set it as a project
            if (Dialogs.showYesNoDialog("Create project", "Create project for empty directory?")) {
                Project<BufferedImage> project = Projects.createProject(file, BufferedImage.class);
                qupath.setProject(project);
                if (!project.isEmpty())
                    project.syncChanges();
                return;
            } else
                // Can't do anything else with an empty folder
                return;
        }
    }

    // Check if this is a project
    if (singleFile && (fileName.endsWith(ProjectIO.getProjectExtension()))) {
        try {
            Project<BufferedImage> project = ProjectIO.loadProject(file, BufferedImage.class);
            qupath.setProject(project);
        } catch (Exception e) {
            // Note: the Throwable must be the last argument (with no placeholder) for SLF4J
            // to log its stack trace
            logger.error("Could not open as project file, opening in the Script Editor instead", e);
            qupath.getScriptEditor().showScript(file);
        }
        return;
    }

    // Check if it is an object file in GeoJSON format (.geojson)
    if (PathIO.getObjectFileExtensions(false).containsAll(allUnzippedExtensions)) {
        if (imageData == null || hierarchy == null) {
            qupath.getScriptEditor().showScript(file);
            logger.info("Opening the dragged file in the Script Editor as there is no currently opened image in the viewer");
            return;
        }
        List<PathObject> pathObjects = new ArrayList<>();
        List<WorkflowStep> steps = new ArrayList<>();
        for (var tempFile : list) {
            try {
                var tempObjects = PathIO.readObjects(tempFile);
                if (tempObjects.isEmpty()) {
                    logger.warn("No objects found in {}, opening the dragged file in the Script Editor instead", tempFile.getAbsolutePath());
                    qupath.getScriptEditor().showScript(file);
                    return;
                }
                pathObjects.addAll(tempObjects);
                // Add step to workflow so the import is scriptable/reproducible
                Map<String, String> map = new HashMap<>();
                map.put("path", file.getPath());
                String method = "Import objects";
                String methodString = String.format("%s(%s%s%s)", "importObjectsFromFile", "\"", GeneralTools.escapeFilePath(tempFile.getPath()), "\"");
                steps.add(new DefaultScriptableWorkflowStep(method, map, methodString));
            } catch (IOException | IllegalArgumentException e) {
                Dialogs.showErrorNotification("Object import", e.getLocalizedMessage());
                return;
            }
        }
        // Ask confirmation to user
        int nObjects = pathObjects.size();
        String message = nObjects == 1 ? "Add object to the hierarchy?" : String.format("Add %d objects to the hierarchy?", nObjects);
        var confirm = Dialogs.showConfirmDialog("Add to hierarchy", message);
        if (!confirm)
            return;
        // Add objects to hierarchy
        hierarchy.addPathObjects(pathObjects);
        imageData.getHistoryWorkflow().addSteps(steps);
        return;
    }

    // Check if this is TMA dearraying data file
    if (singleFile && (fileName.endsWith(TMADataIO.TMA_DEARRAYING_DATA_EXTENSION))) {
        if (hierarchy == null)
            Dialogs.showErrorMessage("TMA grid import", "Please open an image first before importing a dearrayed TMA grid!");
        else {
            TMAGrid tmaGrid = TMADataIO.importDearrayedTMAData(file);
            if (tmaGrid != null) {
                if (hierarchy.isEmpty() || Dialogs.showYesNoDialog("TMA grid import", "Set TMA grid for existing hierarchy?"))
                    hierarchy.setTMAGrid(tmaGrid);
            } else
                Dialogs.showErrorMessage("TMA grid import", "Could not parse TMA grid from " + file.getName());
        }
        return;
    }

    // Open file with an extension supported by the Script Editor
    ScriptEditor scriptEditor = qupath.getScriptEditor();
    if (scriptEditor instanceof DefaultScriptEditor && ((DefaultScriptEditor) scriptEditor).supportsFile(file)) {
        scriptEditor.showScript(file);
        return;
    }

    // Check registered drop handlers - the first one to accept the drop wins
    for (DropHandler<File> handler : dropHandlers) {
        if (handler.handleDrop(viewer, list))
            return;
    }

    // Assume we have images
    if (singleFile && file.isFile()) {
        // Try to open as an image, if the extension is known
        if (viewer == null) {
            Dialogs.showErrorMessage("Open image", "Please drag the file onto a specific viewer to open!");
            return;
        }
        qupath.openImage(viewer, file.getAbsolutePath(), true, true);
        return;
    } else if (qupath.getProject() != null) {
        // Try importing multiple images to a project
        String[] potentialFiles = list.stream().filter(f -> f.isFile()).map(f -> f.getAbsolutePath()).toArray(String[]::new);
        if (potentialFiles.length > 0) {
            ProjectCommands.promptToImportImages(qupath, potentialFiles);
            return;
        }
    }

    // Nothing handled the drop - report why
    if (qupath.getProject() == null) {
        if (list.size() > 1) {
            Dialogs.showErrorMessage("Drag & drop", "Could not handle multiple file drop - if you want to handle multiple images, you need to create a project first");
            return;
        }
    }
    if (list.size() > 1)
        Dialogs.showErrorMessage("Drag & drop", "Sorry, I couldn't figure out what to do with these files - try opening one at a time");
    else
        Dialogs.showErrorMessage("Drag & drop", "Sorry, I couldn't figure out what to do with " + list.get(0).getName());
}
Example usage of qupath.lib.images.ImageData in the QuPath project: the paintOverlay method of the BufferedImageOverlay class.
// /**
// * Add another region to the overlay.
// * @param region
// * @param img
// * @return any existing region with the same key
// */
// public BufferedImage put(ImageRegion region, BufferedImage img) {
// var previous = regions.put(region, img);
// if (viewer != null)
// viewer.repaint();
// return previous;
// }
/**
 * Paint the stored image regions that intersect the requested region,
 * converting each image to a drawable form (via the color model, if one is set)
 * and caching the conversion for reuse.
 */
@Override
public void paintOverlay(Graphics2D g2d, ImageRegion imageRegion, double downsampleFactor, ImageData<BufferedImage> imageData, boolean paintCompletely) {
    // Don't show if pixel classifications aren't being shown
    boolean showing = isVisible() && getOverlayOptions().getShowPixelClassification();
    if (!showing)
        return;
    super.paintOverlay(g2d, imageRegion, downsampleFactor, imageData, paintCompletely);
    // Paint the regions we have
    for (Map.Entry<ImageRegion, BufferedImage> entry : regions.entrySet()) {
        ImageRegion key = entry.getKey();
        // Skip any region that doesn't overlap the area being painted
        if (!imageRegion.intersects(key))
            continue;
        BufferedImage raw = entry.getValue();
        BufferedImage drawable;
        if (colorModel == null || colorModel == raw.getColorModel()) {
            drawable = cacheRGB.computeIfAbsent(raw, this::convertToDrawable);
        } else {
            // Apply the color model to get a version of the image we can draw quickly
            drawable = cacheRGB.computeIfAbsent(raw, source -> {
                var recolored = new BufferedImage(colorModel, source.getRaster(), source.getColorModel().isAlphaPremultiplied(), null);
                return convertToDrawable(recolored);
            });
        }
        g2d.drawImage(drawable, key.getX(), key.getY(), key.getWidth(), key.getHeight(), null);
    }
}
Example usage of qupath.lib.images.ImageData in the QuPath project: the createRequests method of the TileExporter class.
/**
* Create region requests, along with information about whether we have a partial tile (which should not be resized/padded) or not.
* @return
*/
/**
 * Create region requests, along with information about whether we have a partial tile
 * (which should not be resized/padded) or not.
 * <p>
 * Requests are generated either as a regular tile grid (no parent objects) or from the
 * ROIs of the parent objects; requests may then be filtered to retain only annotated tiles.
 * @return the requests to export
 */
private Collection<RegionRequestWrapper> createRequests() {
    List<RegionRequestWrapper> requests = new ArrayList<>();

    // Work out which RegionRequests to use.
    // If the downsample hasn't been specified, use the level 0 resolution
    double downsample = this.downsample;
    if (downsample <= 0) {
        downsample = server.getDownsampleForResolution(0);
        // 0 means 'unspecified' (debug), while a negative value is invalid (warn)
        if (this.downsample < 0)
            logger.warn("Invalid downsample {}, I will use the level 0 downsample {}", this.downsample, downsample);
        else
            logger.debug("Using level 0 downsample {}", downsample);
    }

    if (parentObjects == null)
        requests.addAll(getTiledRegionRequests(downsample));
    else {
        for (var parent : parentObjects) {
            // Tile size in full-resolution pixel coordinates
            int w = (int) Math.ceil(tileWidth * downsample);
            int h = (int) Math.ceil(tileHeight * downsample);
            if (parent.isRootObject()) {
                // Root object: either the full image, or a tile centered on the image
                for (int t = 0; t < server.nTimepoints(); t++) {
                    for (int z = 0; z < server.nZSlices(); z++) {
                        RegionRequest newRequest;
                        if (useParentRoiBounds) {
                            newRequest = RegionRequest.createInstance(server.getPath(), downsample, 0, 0, server.getWidth(), server.getHeight(), z, t);
                        } else {
                            int x = (int) Math.floor(server.getWidth() / 2.0 - w / 2.0);
                            int y = (int) Math.floor(server.getHeight() / 2.0 - h / 2.0);
                            newRequest = RegionRequest.createInstance(server.getPath(), downsample, x, y, w, h, z, t);
                        }
                        if (includePartialTiles || withinImage(newRequest, server))
                            requests.add(new RegionRequestWrapper(newRequest, false));
                    }
                }
            } else if (parent.hasROI()) {
                // Object with a ROI: either the ROI bounds, or a tile centered on the ROI centroid
                RegionRequest newRequest;
                var roi = PathObjectTools.getROI(parent, preferNucleus);
                if (useParentRoiBounds) {
                    newRequest = RegionRequest.createInstance(server.getPath(), downsample, roi);
                } else {
                    int x = (int) Math.floor(roi.getCentroidX() - w / 2.0);
                    int y = (int) Math.floor(roi.getCentroidY() - h / 2.0);
                    newRequest = RegionRequest.createInstance(server.getPath(), downsample, x, y, w, h, roi.getImagePlane());
                }
                if (includePartialTiles || withinImage(newRequest, server))
                    requests.add(new RegionRequestWrapper(newRequest, false));
            }
        }
    }

    // If we want only annotated tiles, skip regions that lack annotations
    var iterator = requests.iterator();
    while (iterator.hasNext()) {
        var r = iterator.next().request;
        if (annotatedCentroidTilesOnly) {
            // Keep a tile only if some object's ROI contains the tile centroid
            double cx = (r.getMinX() + r.getMaxX()) / 2.0;
            double cy = (r.getMinY() + r.getMaxY()) / 2.0;
            if (serverLabeled != null && (serverLabeled instanceof LabeledImageServer)) {
                if (((LabeledImageServer) serverLabeled).getObjectsForRegion(r).stream().noneMatch(p -> p.getROI().contains(cx, cy))) {
                    logger.trace("Skipping empty labelled region based on centroid test {}", r);
                    iterator.remove();
                    continue;
                }
            } else if (imageData != null) {
                if (PathObjectTools.getObjectsForLocation(imageData.getHierarchy(), cx, cy, r.getZ(), r.getT(), 0).isEmpty()) {
                    iterator.remove();
                    continue;
                }
            }
        } else if (annotatedTilesOnly) {
            // Keep a tile only if it intersects some annotation (or non-empty label region)
            if (serverLabeled != null) {
                if (serverLabeled.isEmptyRegion(r)) {
                    logger.trace("Skipping empty labelled region {}", r);
                    iterator.remove();
                    continue;
                }
            } else if (imageData != null) {
                if (imageData.getHierarchy().getObjectsForRegion(PathAnnotationObject.class, r, null).stream().noneMatch(p -> RoiTools.intersectsRegion(p.getROI(), r))) {
                    iterator.remove();
                    continue;
                }
            }
        }
    }
    return requests;
}
Aggregations