Use of qupath.lib.objects.hierarchy.PathObjectHierarchy in project qupath by qupath.
From the class ImageJMacroRunner, method runPlugin:
@Override
public boolean runPlugin(final PluginRunner<BufferedImage> runner, final String arg) {
if (!parseArgument(runner.getImageData(), arg))
return false;
if (dialog == null) {
dialog = new Stage();
dialog.initOwner(qupath.getStage());
dialog.setTitle("ImageJ macro runner");
BorderPane pane = new BorderPane();
if (arg != null)
macroText = arg;
// Create text area
final TextArea textArea = new TextArea();
textArea.setPrefRowCount(12);
textArea.setPrefSize(400, 400);
textArea.setWrapText(true);
textArea.setFont(Font.font("Courier"));
if (macroText != null)
textArea.setText(macroText);
BorderPane panelMacro = new BorderPane();
// panelMacro.setBorder(BorderFactory.createTitledBorder("Macro"));
panelMacro.setCenter(textArea);
ParameterPanelFX parameterPanel = new ParameterPanelFX(getParameterList(runner.getImageData()));
panelMacro.setBottom(parameterPanel.getPane());
// Create button panel
Button btnRun = new Button("Run");
btnRun.setOnAction(e -> {
macroText = textArea.getText().trim();
if (macroText.length() == 0)
return;
PathObjectHierarchy hierarchy = getHierarchy(runner);
PathObject pathObject = hierarchy.getSelectionModel().singleSelection() ? hierarchy.getSelectionModel().getSelectedObject() : null;
if (pathObject instanceof PathAnnotationObject || pathObject instanceof TMACoreObject) {
SwingUtilities.invokeLater(() -> {
runMacro(params, qupath.getViewer().getImageData(), qupath.getViewer().getImageDisplay(), pathObject, macroText);
});
} else {
// DisplayHelpers.showErrorMessage(getClass().getSimpleName(), "Sorry, ImageJ macros can only be run for single selected images");
// logger.warn("ImageJ macro being run in current thread");
// runPlugin(runner, arg); // TODO: Consider running in a background thread?
// Run in a background thread
Collection<? extends PathObject> parents = getParentObjects(runner);
if (parents.isEmpty()) {
Dialogs.showErrorMessage("ImageJ macro runner", "No annotation or TMA core objects selected!");
return;
}
List<Runnable> tasks = new ArrayList<>();
for (PathObject parent : parents) addRunnableTasks(qupath.getViewer().getImageData(), parent, tasks);
qupath.submitShortTask(() -> runner.runTasks(tasks, true));
// runner.runTasks(tasks);
// Runnable r = new Runnable() {
// public void run() {
// runPlugin(runner, arg);
// }
// };
// new Thread(r).start();
}
});
Button btnClose = new Button("Close");
btnClose.setOnAction(e -> dialog.hide());
GridPane panelButtons = PaneTools.createRowGridControls(btnRun, btnClose);
pane.setCenter(panelMacro);
pane.setBottom(panelButtons);
panelButtons.setPadding(new Insets(5, 0, 0, 0));
pane.setPadding(new Insets(10, 10, 10, 10));
dialog.setScene(new Scene(pane));
}
dialog.show();
return true;
}
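The Run handler above follows a pattern that recurs throughout QuPath plugins: collect the parent objects, wrap the per-object work in Runnable tasks, and hand the whole batch to a background runner so the JavaFX thread stays responsive. Below is a minimal, self-contained sketch of that pattern only; the strings and the println stand in for real parent objects and macro execution, and nothing in it is QuPath API.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Sketch only: the strings stand in for annotation/TMA core parent objects
public class ParentTaskSketch {

    public static void main(String[] args) throws InterruptedException {
        List<String> parents = List.of("Annotation 1", "Annotation 2", "TMA core A-1");

        // One task per parent object, mirroring the addRunnableTasks(...) loop above
        List<Runnable> tasks = new ArrayList<>();
        for (String parent : parents)
            tasks.add(() -> System.out.println("Processing " + parent));

        // Run the whole batch off the calling (UI) thread, mirroring submitShortTask(...)
        ExecutorService pool = Executors.newSingleThreadExecutor();
        tasks.forEach(pool::submit);
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.MINUTES);
    }
}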
Use of qupath.lib.objects.hierarchy.PathObjectHierarchy in project qupath by qupath.
From the class ImageJMacroRunner, method getParentObjects:
@Override
protected Collection<? extends PathObject> getParentObjects(final PluginRunner<BufferedImage> runner) {
// Try to get currently-selected objects
PathObjectHierarchy hierarchy = getHierarchy(runner);
List<PathObject> pathObjects = hierarchy.getSelectionModel().getSelectedObjects().stream().filter(p -> p.isAnnotation() || p.isTMACore()).collect(Collectors.toList());
if (pathObjects.isEmpty()) {
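// Nothing suitable is selected: prompt the user to choose parent objects, then re-read the (now updated) selection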
if (GuiTools.promptForParentObjects(this.getName(), runner.getImageData(), false, getSupportedParentObjectClasses()))
pathObjects = new ArrayList<>(hierarchy.getSelectionModel().getSelectedObjects());
}
return pathObjects;
// // TODO: Give option to analyse annotations, even when TMA grid is present
// ImageData<BufferedImage> imageData = runner.getImageData();
// TMAGrid tmaGrid = imageData.getHierarchy().getTMAGrid();
// if (tmaGrid != null && tmaGrid.nCores() > 0)
// return PathObjectTools.getTMACoreObjects(imageData.getHierarchy(), false);
// else
// return imageData.getHierarchy().getObjects(null, PathAnnotationObject.class);
}
Use of qupath.lib.objects.hierarchy.PathObjectHierarchy in project qupath by qupath.
From the class ExtractRegionCommand, method run:
@Override
public void run() {
QuPathViewer viewer = qupath.getViewer();
ImageServer<BufferedImage> server = null;
if (viewer != null)
server = viewer.getServer();
if (server == null)
return;
List<String> unitOptions = new ArrayList<>();
unitOptions.add(PIXELS_UNIT);
String unit = server.getPixelCalibration().getPixelWidthUnit();
if (unit.equals(server.getPixelCalibration().getPixelHeightUnit()) && !unit.equals(PixelCalibration.PIXEL))
unitOptions.add(unit);
if (!unitOptions.contains(resolutionUnit))
resolutionUnit = PIXELS_UNIT;
ParameterList params = new ParameterList()
        .addDoubleParameter("resolution", "Resolution", resolution, null, "Resolution at which the image will be exported, defined as the 'pixel size' in Resolution units")
        .addChoiceParameter("resolutionUnit", "Resolution unit", resolutionUnit, unitOptions, "Units defining the export resolution; if 'pixels' then the resolution is the same as a downsample value")
        .addBooleanParameter("includeROI", "Include ROI", includeROI, "Include the primary object defining the exported region as an active ROI in ImageJ")
        .addBooleanParameter("includeOverlay", "Include overlay", includeOverlay, "Include any objects overlapping the exported region as ROIs on an ImageJ overlay")
        .addBooleanParameter("doTransforms", "Apply color transforms", doTransforms, "Optionally apply any color transforms when sending the pixels to ImageJ")
        .addBooleanParameter("doZ", "All z-slices", doZ, "Optionally include all slices of a z-stack")
        .addBooleanParameter("doT", "All timepoints", doT, "Optionally include all timepoints of a time series");
// params.setHiddenParameters(unitOptions.size() <= 1, "resolutionUnit");
params.setHiddenParameters(server.nZSlices() == 1, "doZ");
params.setHiddenParameters(server.nTimepoints() == 1, "doT");
if (!Dialogs.showParameterDialog("Send region to ImageJ", params))
return;
// Parse values
resolution = params.getDoubleParameterValue("resolution");
resolutionUnit = (String) params.getChoiceParameterValue("resolutionUnit");
includeROI = params.getBooleanParameterValue("includeROI");
includeOverlay = params.getBooleanParameterValue("includeOverlay");
doTransforms = params.getBooleanParameterValue("doTransforms");
doZ = params.getBooleanParameterValue("doZ");
doT = params.getBooleanParameterValue("doT");
// Calculate downsample
double downsample = resolution;
if (!resolutionUnit.equals(PIXELS_UNIT))
downsample = resolution / (server.getPixelCalibration().getPixelHeight().doubleValue() / 2.0 + server.getPixelCalibration().getPixelWidth().doubleValue() / 2.0);
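// (For a calibrated unit, dividing the requested pixel size by the mean calibrated pixel width/height gives the equivalent downsample factor)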
// Color transforms are (currently) only applied for brightfield images - for fluorescence we always provide everything as unchanged as possible
List<ChannelDisplayInfo> selectedChannels = new ArrayList<>(viewer.getImageDisplay().selectedChannels());
List<ChannelDisplayInfo> channels = doTransforms && !selectedChannels.isEmpty() ? selectedChannels : null;
if (channels != null)
server = ChannelDisplayTransformServer.createColorTransformServer(server, channels);
// Loop through all selected objects
Collection<PathObject> pathObjects = viewer.getHierarchy().getSelectionModel().getSelectedObjects();
if (pathObjects.isEmpty())
pathObjects = Collections.singletonList(viewer.getHierarchy().getRootObject());
List<ImagePlus> imps = new ArrayList<>();
for (PathObject pathObject : pathObjects) {
if (Thread.currentThread().isInterrupted() || IJ.escapePressed())
return;
int width, height;
if (pathObject == null || !pathObject.hasROI()) {
width = server.getWidth();
height = server.getHeight();
} else {
Rectangle bounds = AwtTools.getBounds(pathObject.getROI());
width = bounds.width;
height = bounds.height;
}
RegionRequest region;
ROI roi = pathObject == null ? null : pathObject.getROI();
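// Objects without a ROI, and point selections, are exported as the full image plane at the viewer's current z/t position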
if (roi == null || PathObjectTools.hasPointROI(pathObject)) {
region = RegionRequest.createInstance(server.getPath(), downsample, 0, 0, server.getWidth(), server.getHeight(), viewer.getZPosition(), viewer.getTPosition());
} else
region = RegionRequest.createInstance(server.getPath(), downsample, roi);
// region = RegionRequest.createInstance(server.getPath(), downsample, pathObject.getROI(), viewer.getZPosition(), viewer.getTPosition());
// Minimum size has been removed (v0.2.0-m4); returned regions should be at least 1x1 pixels
// if (region.getWidth() / downsample < 8 || region.getHeight() / downsample < 8) {
// DisplayHelpers.showErrorMessage("Send region to ImageJ", "The width & height of the extracted image must both be >= 8 pixels");
// continue;
// }
// Calculate required z-slices and time-points
int zStart = doZ ? 0 : region.getZ();
int zEnd = doZ ? server.nZSlices() : region.getZ() + 1;
int tStart = doT ? 0 : region.getT();
int tEnd = doT ? server.nTimepoints() : region.getT() + 1;
long nZ = zEnd - zStart;
long nT = tEnd - tStart;
int bytesPerPixel = server.isRGB() ? 4 : server.getPixelType().getBytesPerPixel() * server.nChannels();
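// Rough size estimate for the export: full-resolution pixel count scaled by 1/downsample^2, times z-slices, timepoints and bytes per pixel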
double memory = ((long) width * height * nZ * nT * bytesPerPixel) / (downsample * downsample);
// TODO: Perform calculation based on actual amount of available memory
long availableMemory = GeneralTools.estimateAvailableMemory();
if (memory >= availableMemory * 0.95) {
logger.error("Cannot extract region {} - estimated size is too large (approx. {} MB)", pathObject, GeneralTools.formatNumber(memory / (1024.0 * 1024.0), 2));
Dialogs.showErrorMessage("Send region to ImageJ error", "Selected region is too large to extract - please selected a smaller region or use a higher downsample factor");
continue;
}
if (memory / 1024 / 1024 > 100) {
if (pathObjects.size() == 1 && !Dialogs.showYesNoDialog("Send region to ImageJ", String.format("Attempting to extract this region is likely to require > %.2f MB - are you sure you want to continue?", memory / 1024 / 1024)))
return;
}
// We should switch to the event dispatch thread when interacting with ImageJ
try {
ImagePlus imp;
PathObjectHierarchy hierarchy = viewer.getHierarchy();
OverlayOptions options = viewer.getOverlayOptions();
if (zEnd - zStart > 1 || tEnd - tStart > 1) {
// TODO: Handle overlays
imp = IJTools.extractHyperstack(server, region, zStart, zEnd, tStart, tEnd);
if (includeROI && roi != null) {
Roi roiIJ = IJTools.convertToIJRoi(roi, imp.getCalibration(), region.getDownsample());
imp.setRoi(roiIJ);
}
if (includeOverlay) {
Overlay overlay = new Overlay();
for (int t = tStart; t < tEnd; t++) {
for (int z = zStart; z < zEnd; z++) {
RegionRequest request2 = RegionRequest.createInstance(region.getPath(), region.getDownsample(), region.getX(), region.getY(), region.getWidth(), region.getHeight(), z, t);
var regionPredicate = PathObjectTools.createImageRegionPredicate(request2);
Overlay temp = IJExtension.extractOverlay(hierarchy, request2, options, p -> p != pathObject && regionPredicate.test(p));
if (overlay == null)
overlay = temp;
for (int i = 0; i < temp.size(); i++) {
Roi roiIJ = temp.get(i);
roiIJ.setPosition(-1, z + 1, t + 1);
overlay.add(roiIJ);
}
}
}
if (overlay != null && overlay.size() > 0)
imp.setOverlay(overlay);
}
} else if (includeOverlay)
imp = IJExtension.extractROIWithOverlay(server, pathObject, hierarchy, region, includeROI, options).getImage();
else
imp = IJExtension.extractROIWithOverlay(server, pathObject, null, region, includeROI, options).getImage();
// Set display ranges if we can
if (viewer != null && imp instanceof CompositeImage) {
var availableChannels = viewer.getImageDisplay().availableChannels().stream().filter(c -> c instanceof SingleChannelDisplayInfo).map(c -> (SingleChannelDisplayInfo) c).collect(Collectors.toList());
CompositeImage impComp = (CompositeImage) imp;
if (availableChannels.size() == imp.getNChannels()) {
for (int c = 0; c < availableChannels.size(); c++) {
var channel = availableChannels.get(c);
imp.setPosition(c + 1, 1, 1);
impComp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
}
imp.setPosition(1);
}
} else if (selectedChannels.size() == 1 && imp.getType() != ImagePlus.COLOR_RGB) {
// Setting the display range for non-RGB images can give unexpected results (changing pixel values)
var channel = selectedChannels.get(0);
imp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
}
imps.add(imp);
} catch (IOException e) {
Dialogs.showErrorMessage("Send region to ImageJ", e);
return;
}
}
// Show all the images we've got
if (!imps.isEmpty()) {
SwingUtilities.invokeLater(() -> {
boolean batchMode = Interpreter.batchMode;
// Try to start an ImageJ instance, and return if this fails
try {
ImageJ ij = IJExtension.getImageJInstance();
if (ij == null)
return;
ij.setVisible(true);
// Make sure we aren't in batch mode, so that image will display
Interpreter.batchMode = false;
for (ImagePlus imp : imps) {
imp.show();
}
} finally {
Interpreter.batchMode = batchMode;
}
});
}
}
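The size guard in the middle of run() relies on a simple estimate: (width x height x z-slices x timepoints x bytes per pixel) / downsample^2. The following standalone sketch reproduces that arithmetic with made-up numbers, purely to show the order of magnitude involved; it is not part of the QuPath code above.

public class RegionMemorySketch {

    public static void main(String[] args) {
        long width = 60_000, height = 40_000;  // hypothetical full-resolution ROI bounds
        long nZ = 1, nT = 1;
        int bytesPerPixel = 4;                 // e.g. packed RGB
        double downsample = 8.0;

        double bytes = (double) width * height * nZ * nT * bytesPerPixel / (downsample * downsample);
        System.out.printf("Estimated size: %.1f MB%n", bytes / (1024.0 * 1024.0));
        // 60000 * 40000 * 4 / 8^2 = 150,000,000 bytes, i.e. roughly 143 MB
    }
}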
Use of qupath.lib.objects.hierarchy.PathObjectHierarchy in project qupath by qupath.
From the class QP, method resetSelection:
/**
* Clear the selected objects for the current {@code PathObjectHierarchy}.
*/
public static void resetSelection() {
PathObjectHierarchy hierarchy = getCurrentHierarchy();
if (hierarchy == null)
return;
hierarchy.getSelectionModel().clearSelection();
}
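In a script, this is typically a one-liner before building a new selection programmatically. A minimal sketch for the QuPath script editor follows (Java-style syntax; it assumes QP's static methods are available, which the script editor's default imports normally provide):

// Deselect everything in the current image's hierarchy
resetSelection();

// Equivalent to the following, which resetSelection() wraps in a null check:
// getCurrentHierarchy().getSelectionModel().clearSelection();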
Use of qupath.lib.objects.hierarchy.PathObjectHierarchy in project qupath by qupath.
From the class QP, method getAllObjects:
/**
* Get an array of all objects in the current hierarchy.
*
* @param includeRootObject if true, include the root object of the hierarchy in the returned array
* @return an array of all objects in the current hierarchy, or an empty array if there is no current hierarchy
* @see #getCurrentHierarchy
*/
public static PathObject[] getAllObjects(boolean includeRootObject) {
PathObjectHierarchy hierarchy = getCurrentHierarchy();
if (hierarchy == null)
return new PathObject[0];
var objList = hierarchy.getFlattenedObjectList(null);
if (includeRootObject)
return objList.toArray(new PathObject[0]);
return objList.parallelStream().filter(e -> !e.isRootObject()).toArray(PathObject[]::new);
}
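A short script-editor sketch of typical usage (Java-style syntax; it assumes the default QP imports and that PathObject resolves to qupath.lib.objects.PathObject):

// Every object in the hierarchy except the root
PathObject[] objects = getAllObjects(false);

// Count the annotations among them
long nAnnotations = java.util.Arrays.stream(objects)
        .filter(o -> o.isAnnotation())
        .count();
System.out.println("Annotations: " + nAnnotations + " of " + objects.length + " objects");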