Use of qupath.lib.gui.viewer.OverlayOptions in project qupath (by qupath).
Class QuPathGUI, method setupViewer:
void setupViewer(final QuPathViewerPlus viewer) {
    viewer.getView().setFocusTraversable(true);
    // Update active viewer as required
    viewer.getView().focusedProperty().addListener((e, f, nowFocussed) -> {
        if (nowFocussed) {
            viewerManager.setActiveViewer(viewer);
        }
    });
    viewer.getView().addEventFilter(MouseEvent.MOUSE_PRESSED, e -> viewer.getView().requestFocus());
    viewer.zoomToFitProperty().bind(zoomToFit);
    // Create popup menu
    setViewerPopupMenu(viewer);
    viewer.getView().widthProperty().addListener((e, f, g) -> {
        if (viewer.getZoomToFit())
            updateMagnificationString();
    });
    viewer.getView().heightProperty().addListener((e, f, g) -> {
        if (viewer.getZoomToFit())
            updateMagnificationString();
    });
    // Enable drag and drop
    dragAndDrop.setupTarget(viewer.getView());
    // Listen to the scroll wheel
    viewer.getView().setOnScroll(e -> {
        if (viewer == viewerManager.getActiveViewer() || !viewerManager.getSynchronizeViewers()) {
            double scrollUnits = e.getDeltaY() * PathPrefs.getScaledScrollSpeed();
            // Use the shortcut modifier (Ctrl/Cmd) while scrolling to adjust overlay opacity
            if (e.isShortcutDown()) {
                OverlayOptions options = viewer.getOverlayOptions();
                options.setOpacity((float) (options.getOpacity() + scrollUnits * 0.001));
                return;
            }
            // Avoid zooming at the end of a gesture when using touchscreens
            if (e.isInertia())
                return;
            if (PathPrefs.invertScrollingProperty().get())
                scrollUnits = -scrollUnits;
            double newDownsampleFactor = viewer.getDownsampleFactor() * Math.pow(viewer.getDefaultZoomFactor(), scrollUnits);
            newDownsampleFactor = Math.min(viewer.getMaxDownsample(), Math.max(newDownsampleFactor, viewer.getMinDownsample()));
            viewer.setDownsampleFactor(newDownsampleFactor, e.getX(), e.getY());
        }
    });
    viewer.getView().addEventFilter(RotateEvent.ANY, e -> {
        if (!PathPrefs.useRotateGesturesProperty().get())
            return;
        // logger.debug("Rotating: " + e.getAngle());
        viewer.setRotation(viewer.getRotation() + Math.toRadians(e.getAngle()));
        e.consume();
    });
    viewer.getView().addEventFilter(ZoomEvent.ANY, e -> {
        if (!PathPrefs.useZoomGesturesProperty().get())
            return;
        double zoomFactor = e.getZoomFactor();
        if (Double.isNaN(zoomFactor))
            return;
        logger.debug("Zooming: " + e.getZoomFactor() + " (" + e.getTotalZoomFactor() + ")");
        viewer.setDownsampleFactor(viewer.getDownsampleFactor() / zoomFactor, e.getX(), e.getY());
        e.consume();
    });
    viewer.getView().addEventFilter(ScrollEvent.ANY, new ScrollEventPanningFilter(viewer));
    viewer.getView().addEventHandler(KeyEvent.KEY_PRESSED, e -> {
        PathObject pathObject = viewer.getSelectedObject();
        if (!e.isConsumed() && pathObject != null) {
            if (pathObject.isTMACore()) {
                TMACoreObject core = (TMACoreObject) pathObject;
                if (e.getCode() == KeyCode.ENTER) {
                    defaultActions.TMA_ADD_NOTE.handle(new ActionEvent(e.getSource(), e.getTarget()));
                    e.consume();
                } else if (e.getCode() == KeyCode.BACK_SPACE) {
                    core.setMissing(!core.isMissing());
                    viewer.getHierarchy().fireObjectsChangedEvent(this, Collections.singleton(core));
                    e.consume();
                }
            } else if (pathObject.isAnnotation()) {
                if (e.getCode() == KeyCode.ENTER) {
                    GuiTools.promptToSetActiveAnnotationProperties(viewer.getHierarchy());
                    e.consume();
                }
            }
        }
    });
}
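The scroll handler above converts scroll units into an exponential zoom step and clamps the result to the viewer's supported downsample range. Below is a minimal standalone sketch of that arithmetic; the class and method names are illustrative and not part of the QuPath API.

// Illustrative sketch (not QuPath API): how scroll units map to a new downsample factor.
public class ZoomStepSketch {

    // Each scroll unit multiplies the downsample by the default zoom factor,
    // and the result is clamped to the viewer's supported range.
    static double newDownsample(double currentDownsample, double defaultZoomFactor,
                                double scrollUnits, double minDownsample, double maxDownsample) {
        double proposed = currentDownsample * Math.pow(defaultZoomFactor, scrollUnits);
        return Math.min(maxDownsample, Math.max(proposed, minDownsample));
    }

    public static void main(String[] args) {
        // Scrolling by +2 units with a zoom factor of 1.1 zooms out (larger downsample)...
        System.out.println(newDownsample(4.0, 1.1, 2.0, 1.0, 64.0));  // ~4.84
        // ...and by -2 units zooms in (smaller downsample)
        System.out.println(newDownsample(4.0, 1.1, -2.0, 1.0, 64.0)); // ~3.31
    }
}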
Use of qupath.lib.gui.viewer.OverlayOptions in project qupath (by qupath).
Class TMADataIO, method writeTMAData:
/**
 * Write TMA data in a human-readable (and viewable) way, with JPEGs and TXT/CSV files.
 *
 * @param file the output file for the TMA data; if null, the user is prompted to choose one, and if it is a directory the data file is created inside it
 * @param imageData the image data containing the TMA grid to export
 * @param overlayOptions overlay options controlling how exported core images are rendered; may be null, in which case default options (with filled detections) are used
 * @param downsampleFactor The downsample factor used for the TMA cores. If NaN, an automatic downsample value will be selected (>= 1). If <= 0, no cores are exported.
 */
public static void writeTMAData(File file, final ImageData<BufferedImage> imageData, OverlayOptions overlayOptions, final double downsampleFactor) {
    if (imageData == null || imageData.getHierarchy() == null || imageData.getHierarchy().getTMAGrid() == null) {
        logger.error("No TMA data available to save!");
        return;
    }
    final ImageServer<BufferedImage> server = imageData.getServer();
    String coreExt = imageData.getServer().isRGB() ? ".jpg" : ".tif";
    if (file == null) {
        file = Dialogs.promptToSaveFile("Save TMA data", null, ServerTools.getDisplayableImageName(server), "TMA data", "qptma");
        if (file == null)
            return;
    } else if (file.isDirectory() || (!file.exists() && file.getAbsolutePath().endsWith(File.separator))) {
        // Treat a path ending with the directory separator as a directory
        // Put inside the specified directory
        file = new File(file, ServerTools.getDisplayableImageName(server) + TMA_DEARRAYING_DATA_EXTENSION);
        if (!file.getParentFile().exists())
            file.getParentFile().mkdirs();
    }
    final File dirData = new File(file + ".data");
    if (!dirData.exists())
        dirData.mkdir();
    // Write basic file info
    String delimiter = "\t";
    TMAGrid tmaGrid = imageData.getHierarchy().getTMAGrid();
    try {
        PrintWriter writer = new PrintWriter(file);
        writer.println(server.getPath());
        writer.println(ServerTools.getDisplayableImageName(server));
        writer.println();
        writer.println("TMA grid width: " + tmaGrid.getGridWidth());
        writer.println("TMA grid height: " + tmaGrid.getGridHeight());
        writer.println("Core name" + delimiter + "X" + delimiter + "Y" + delimiter + "Width" + delimiter + "Height" + delimiter + "Present" + delimiter + TMACoreObject.KEY_UNIQUE_ID);
        for (int row = 0; row < tmaGrid.getGridHeight(); row++) {
            for (int col = 0; col < tmaGrid.getGridWidth(); col++) {
                TMACoreObject core = tmaGrid.getTMACore(row, col);
                if (!core.hasROI()) {
                    writer.println(core.getName() + delimiter + delimiter + delimiter + delimiter);
                    continue;
                }
                ROI pathROI = core.getROI();
                int x = (int) pathROI.getBoundsX();
                int y = (int) pathROI.getBoundsY();
                int w = (int) Math.ceil(pathROI.getBoundsWidth());
                int h = (int) Math.ceil(pathROI.getBoundsHeight());
                String id = core.getUniqueID() == null ? "" : core.getUniqueID();
                writer.println(core.getName() + delimiter + x + delimiter + y + delimiter + w + delimiter + h + delimiter + !core.isMissing() + delimiter + id);
            }
        }
        writer.close();
    } catch (Exception e) {
        logger.error("Error writing TMA data: " + e.getLocalizedMessage(), e);
        return;
    }
    // Save the summary results
    ObservableMeasurementTableData tableData = new ObservableMeasurementTableData();
    tableData.setImageData(imageData, tmaGrid.getTMACoreList());
    SummaryMeasurementTableCommand.saveTableModel(tableData, new File(dirData, "TMA results - " + ServerTools.getDisplayableImageName(server) + ".txt"), Collections.emptyList());
    boolean outputCoreImages = Double.isNaN(downsampleFactor) || downsampleFactor > 0;
    if (outputCoreImages) {
        // Create new overlay options, if we don't have some already
        if (overlayOptions == null) {
            overlayOptions = new OverlayOptions();
            overlayOptions.setFillDetections(true);
        }
        final OverlayOptions options = overlayOptions;
        // Write an overall TMA map (for quickly checking if the dearraying is ok)
        File fileTMAMap = new File(dirData, "TMA map - " + ServerTools.getDisplayableImageName(server) + ".jpg");
        double downsampleThumbnail = Math.max(1, (double) Math.max(server.getWidth(), server.getHeight()) / 1024);
        RegionRequest request = RegionRequest.createInstance(server.getPath(), downsampleThumbnail, 0, 0, server.getWidth(), server.getHeight());
        OverlayOptions optionsThumbnail = new OverlayOptions();
        optionsThumbnail.setShowTMAGrid(true);
        optionsThumbnail.setShowGrid(false);
        optionsThumbnail.setShowAnnotations(false);
        optionsThumbnail.setShowDetections(false);
        try {
            // Use the thumbnail-specific overlay options configured above (only the TMA grid is shown)
            var renderedServer = new RenderedImageServer.Builder(imageData)
                    .layers(new TMAGridOverlay(optionsThumbnail))
                    .downsamples(downsampleThumbnail)
                    .build();
            ImageWriterTools.writeImageRegion(renderedServer, request, fileTMAMap.getAbsolutePath());
            // ImageWriters.writeImageRegionWithOverlay(imageData.getServer(), Collections.singletonList(new TMAGridOverlay(overlayOptions, imageData)), request, fileTMAMap.getAbsolutePath());
        } catch (IOException e) {
            logger.warn("Unable to write image overview: " + e.getLocalizedMessage(), e);
        }
        final double downsample = Double.isNaN(downsampleFactor) ? (server.getPixelCalibration().hasPixelSizeMicrons() ? ServerTools.getDownsampleFactor(server, preferredExportPixelSizeMicrons) : 1) : downsampleFactor;
        // Creating a plugin makes it possible to parallelize & show progress easily
        var renderedImageServer = new RenderedImageServer.Builder(imageData)
                .layers(new HierarchyOverlay(null, options, imageData))
                .downsamples(downsample)
                .build();
        ExportCoresPlugin plugin = new ExportCoresPlugin(dirData, renderedImageServer, downsample, coreExt);
        PluginRunner<BufferedImage> runner;
        var qupath = QuPathGUI.getInstance();
        if (qupath == null || qupath.getImageData() != imageData) {
            runner = new CommandLinePluginRunner<>(imageData);
            plugin.runPlugin(runner, null);
        } else {
            try {
                qupath.runPlugin(plugin, null, false);
            } catch (Exception e) {
                logger.error("Error writing TMA data: " + e.getLocalizedMessage(), e);
            }
            // new Thread(() -> qupath.runPlugin(plugin, null, false)).start();
            // runner = new PluginRunnerFX(QuPathGUI.getInstance());
            // new Thread(() -> plugin.runPlugin(runner, null)).start();
        }
    }
}
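A minimal usage sketch for writeTMAData follows, assuming an ImageData<BufferedImage> with a dearrayed TMA grid is already available. Passing null for overlayOptions falls back to the defaults created inside the method (filled detections), and Double.NaN requests an automatic core-export downsample. The TMADataIO import location and the output path are assumptions and may differ between QuPath versions.

// Hypothetical caller; the TMADataIO import below and the output path are assumptions.
import java.awt.image.BufferedImage;
import java.io.File;

import qupath.lib.gui.tma.TMADataIO; // package location may differ between QuPath versions
import qupath.lib.images.ImageData;

public class ExportTmaExample {

    static void export(ImageData<BufferedImage> imageData) {
        // A directory is also accepted; the image name is then appended automatically
        File outputFile = new File("/path/to/export/TMA data.qptma");
        // null overlay options -> defaults with filled detections; NaN -> automatic downsample (>= 1)
        TMADataIO.writeTMAData(outputFile, imageData, null, Double.NaN);
    }
}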
Use of qupath.lib.gui.viewer.OverlayOptions in project qupath (by qupath).
Class HierarchyOverlay, method paintOverlay:
@Override
public void paintOverlay(final Graphics2D g2d, final ImageRegion imageRegion, final double downsampleFactor, final ImageData<BufferedImage> imageData, final boolean paintCompletely) {
    if (this.imageData != imageData) {
        this.imageData = imageData;
        updateOverlayServer();
    }
    // Get the selection model, which can influence colours (TODO: this might not be the best way to do it!)
    PathObjectHierarchy hierarchy = imageData == null ? null : imageData.getHierarchy();
    if (hierarchy == null)
        return;
    if (!isVisible() && hierarchy.getSelectionModel().noSelection())
        return;
    // Default RenderingHints (may be temporarily changed in some places)
    var defaultAntiAlias = RenderingHints.VALUE_ANTIALIAS_ON;
    var defaultStroke = RenderingHints.VALUE_STROKE_PURE;
    // Doesn't seem to help...?
    // boolean fastRendering = true;
    // if (fastRendering) {
    //     defaultAntiAlias = RenderingHints.VALUE_ANTIALIAS_OFF;
    //     defaultStroke = RenderingHints.VALUE_STROKE_DEFAULT;
    // }
    OverlayOptions overlayOptions = getOverlayOptions();
    long timestamp = overlayOptions.lastChangeTimestamp().get();
    int pointRadius = PathPrefs.pointRadiusProperty().get();
    if (overlayOptionsTimestamp != timestamp || pointRadius != lastPointRadius) {
        lastPointRadius = pointRadius;
        overlayOptionsTimestamp = timestamp;
    }
    int t = imageRegion.getT();
    int z = imageRegion.getZ();
    Rectangle serverBounds = AwtTools.getBounds(imageRegion);
    // Ensure antialias is on...?
    g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, defaultAntiAlias);
    // Get the displayed clip bounds for fast checking if ROIs need to be drawn
    Shape shapeRegion = g2d.getClip();
    if (shapeRegion == null)
        shapeRegion = AwtTools.getBounds(imageRegion);
    var boundsDisplayed = shapeRegion.getBounds();
    // Ensure the bounds do not extend beyond what the server actually contains
    boundsDisplayed = boundsDisplayed.intersection(serverBounds);
    if (boundsDisplayed.width <= 0 || boundsDisplayed.height <= 0)
        return;
    // Get the annotations & selected objects (which must be painted directly)
    Collection<PathObject> selectedObjects = new ArrayList<>(hierarchy.getSelectionModel().getSelectedObjects());
    selectedObjects.removeIf(p -> !p.hasROI() || (p.getROI().getZ() != z || p.getROI().getT() != t));
    ImageRegion region = AwtTools.getImageRegion(boundsDisplayed, z, t);
    // Paint detection objects
    long startTime = System.currentTimeMillis();
    if (overlayOptions.getShowDetections() && !hierarchy.isEmpty()) {
        // If we aren't downsampling by much, or we're upsampling, paint directly - making sure to paint the right number of times, and in the right order
        if (overlayServer == null || regionStore == null || downsampleFactor < 1.0) {
            Collection<PathObject> pathObjects;
            try {
                Set<PathObject> pathObjectsToPaint = new TreeSet<>(comparator);
                pathObjects = hierarchy.getObjectsForRegion(PathDetectionObject.class, region, pathObjectsToPaint);
            } catch (IllegalArgumentException e) {
                // This can happen (rarely) in a multithreaded environment if the level of a detection changes.
                // However, protecting against this fully by caching the level with integer boxing/unboxing would be expensive.
                logger.debug("Exception requesting detections to paint: " + e.getLocalizedMessage(), e);
                pathObjects = hierarchy.getObjectsForRegion(PathDetectionObject.class, region, null);
            }
            g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
            PathHierarchyPaintingHelper.paintSpecifiedObjects(g2d, boundsDisplayed, pathObjects, overlayOptions, hierarchy.getSelectionModel(), downsampleFactor);
            if (overlayOptions.getShowConnections()) {
                Object connections = imageData.getProperty(DefaultPathObjectConnectionGroup.KEY_OBJECT_CONNECTIONS);
                if (connections instanceof PathObjectConnections)
                    PathHierarchyPaintingHelper.paintConnections((PathObjectConnections) connections, hierarchy, g2d, imageData.isFluorescence() ? ColorToolsAwt.TRANSLUCENT_WHITE : ColorToolsAwt.TRANSLUCENT_BLACK, downsampleFactor);
            }
        } else {
            // On the other hand, if a large image has been updated then we may be browsing quickly - better to repaint quickly while tiles may still be loading
            if (paintCompletely) {
                regionStore.paintRegionCompletely(overlayServer, g2d, shapeRegion, z, t, downsampleFactor, null, null, 5000);
            } else {
                regionStore.paintRegion(overlayServer, g2d, shapeRegion, z, t, downsampleFactor, null, null, null);
            }
        }
    }
    long endTime = System.currentTimeMillis();
    if (endTime - startTime > 500)
        logger.debug("Painting time: {} seconds", GeneralTools.formatNumber((endTime - startTime) / 1000.0, 4));
    // The setting below stops some weird 'jiggling' effects during zooming in/out, or poor rendering of shape ROIs
    g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, defaultAntiAlias);
    g2d.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, defaultStroke);
    // Prepare to handle labels, if we need to
    Collection<PathObject> objectsWithNames = new ArrayList<>();
    Collection<PathObject> annotations = hierarchy.getObjectsForRegion(PathAnnotationObject.class, region, null);
    for (var iterator = annotations.iterator(); iterator.hasNext(); ) {
        var next = iterator.next();
        if ((next.getName() != null && !next.getName().isBlank()))
            objectsWithNames.add(next);
        if (selectedObjects.contains(next))
            iterator.remove();
    }
    // Paint the annotations
    List<PathObject> pathObjectList = new ArrayList<>(annotations);
    Collections.sort(pathObjectList, Comparator.comparingInt(PathObject::getLevel).reversed()
            .thenComparing(Comparator.comparingDouble((PathObject p) -> -p.getROI().getArea())));
    PathHierarchyPaintingHelper.paintSpecifiedObjects(g2d, boundsDisplayed, pathObjectList, overlayOptions, null, downsampleFactor);
    // Ensure that selected objects are painted last, to make sure they aren't obscured
    if (!selectedObjects.isEmpty()) {
        Composite previousComposite = g2d.getComposite();
        float opacity = overlayOptions.getOpacity();
        if (opacity < 1) {
            g2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER));
            PathHierarchyPaintingHelper.paintSpecifiedObjects(g2d, boundsDisplayed, selectedObjects, overlayOptions, hierarchy.getSelectionModel(), downsampleFactor);
            g2d.setComposite(previousComposite);
        } else {
            PathHierarchyPaintingHelper.paintSpecifiedObjects(g2d, boundsDisplayed, selectedObjects, overlayOptions, hierarchy.getSelectionModel(), downsampleFactor);
        }
    }
    // Paint labels
    if (overlayOptions.getShowNames() && !objectsWithNames.isEmpty()) {
        double requestedFontSize;
        switch (PathPrefs.viewerFontSizeProperty().get()) {
            case HUGE:
                requestedFontSize = 24;
                break;
            case LARGE:
                requestedFontSize = 18;
                break;
            case SMALL:
                requestedFontSize = 10;
                break;
            case TINY:
                requestedFontSize = 8;
                break;
            case MEDIUM:
            default:
                requestedFontSize = 14;
                break;
        }
        float fontSize = (float) (requestedFontSize * downsampleFactor);
        if (!GeneralTools.almostTheSame(font.getSize2D(), fontSize, 0.001))
            font = font.deriveFont(fontSize);
        g2d.setFont(font);
        var metrics = g2d.getFontMetrics(font);
        var rect = new Rectangle2D.Double();
        g2d.setRenderingHint(RenderingHints.KEY_FRACTIONALMETRICS, RenderingHints.VALUE_FRACTIONALMETRICS_ON);
        g2d.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
        for (var annotation : objectsWithNames) {
            var name = annotation.getName();
            var roi = annotation.getROI();
            if (name != null && !name.isBlank() && roi != null && !overlayOptions.isPathClassHidden(annotation.getPathClass())) {
                g2d.setColor(ColorToolsAwt.TRANSLUCENT_BLACK);
                var bounds = metrics.getStringBounds(name, g2d);
                double pad = 5.0 * downsampleFactor;
                double x = roi.getCentroidX() - bounds.getWidth() / 2.0;
                double y = roi.getCentroidY() + bounds.getY() + metrics.getAscent() + pad;
                rect.setFrame(x + bounds.getX() - pad, y + bounds.getY() - pad, bounds.getWidth() + pad * 2, bounds.getHeight() + pad * 2);
                g2d.fill(rect);
                g2d.setColor(Color.WHITE);
                g2d.drawString(name, (float) x, (float) y);
            }
        }
    }
}
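The painting above is driven entirely by the OverlayOptions instance returned by getOverlayOptions(). Here is a short sketch of configuring a standalone options object, using only setters that appear elsewhere on this page; the surrounding class is illustrative.

// Sketch: building an OverlayOptions like those consumed by HierarchyOverlay.
// Only methods that appear elsewhere on this page are used; the class itself is illustrative.
import qupath.lib.gui.viewer.OverlayOptions;

public class OverlayOptionsSketch {

    static OverlayOptions detectionPreviewOptions() {
        OverlayOptions options = new OverlayOptions();
        options.setShowDetections(true);   // paint detection objects
        options.setFillDetections(true);   // fill them rather than drawing outlines only
        options.setShowAnnotations(false); // hide annotations
        options.setShowTMAGrid(false);     // hide the TMA grid
        options.setShowGrid(false);        // hide the counting grid
        options.setOpacity(0.5f);          // overall overlay opacity
        return options;
    }
}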
Use of qupath.lib.gui.viewer.OverlayOptions in project qupath (by qupath).
Class ExtractRegionCommand, method run:
@Override
public void run() {
    QuPathViewer viewer = qupath.getViewer();
    ImageServer<BufferedImage> server = null;
    if (viewer != null)
        server = viewer.getServer();
    if (server == null)
        return;
    List<String> unitOptions = new ArrayList<>();
    unitOptions.add(PIXELS_UNIT);
    String unit = server.getPixelCalibration().getPixelWidthUnit();
    if (unit.equals(server.getPixelCalibration().getPixelHeightUnit()) && !unit.equals(PixelCalibration.PIXEL))
        unitOptions.add(unit);
    if (!unitOptions.contains(resolutionUnit))
        resolutionUnit = PIXELS_UNIT;
    ParameterList params = new ParameterList()
            .addDoubleParameter("resolution", "Resolution", resolution, null, "Resolution at which the image will be exported, defined as the 'pixel size' in Resolution units")
            .addChoiceParameter("resolutionUnit", "Resolution unit", resolutionUnit, unitOptions, "Units defining the export resolution; if 'pixels' then the resolution is the same as a downsample value")
            .addBooleanParameter("includeROI", "Include ROI", includeROI, "Include the primary object defining the exported region as an active ROI in ImageJ")
            .addBooleanParameter("includeOverlay", "Include overlay", includeOverlay, "Include any objects overlapping the exported region as ROIs on an ImageJ overlay")
            .addBooleanParameter("doTransforms", "Apply color transforms", doTransforms, "Optionally apply any color transforms when sending the pixels to ImageJ")
            .addBooleanParameter("doZ", "All z-slices", doZ, "Optionally include all slices of a z-stack")
            .addBooleanParameter("doT", "All timepoints", doT, "Optionally include all timepoints of a time series");
    // params.setHiddenParameters(unitOptions.size() <= 1, "resolutionUnit");
    params.setHiddenParameters(server.nZSlices() == 1, "doZ");
    params.setHiddenParameters(server.nTimepoints() == 1, "doT");
    if (!Dialogs.showParameterDialog("Send region to ImageJ", params))
        return;
    // Parse values
    resolution = params.getDoubleParameterValue("resolution");
    resolutionUnit = (String) params.getChoiceParameterValue("resolutionUnit");
    includeROI = params.getBooleanParameterValue("includeROI");
    includeOverlay = params.getBooleanParameterValue("includeOverlay");
    doTransforms = params.getBooleanParameterValue("doTransforms");
    doZ = params.getBooleanParameterValue("doZ");
    doT = params.getBooleanParameterValue("doT");
    // Calculate downsample
    double downsample = resolution;
    if (!resolutionUnit.equals(PIXELS_UNIT))
        downsample = resolution / (server.getPixelCalibration().getPixelHeight().doubleValue() / 2.0 + server.getPixelCalibration().getPixelWidth().doubleValue() / 2.0);
    // Color transforms are (currently) only applied for brightfield images - for fluorescence we always provide everything as unchanged as possible
    List<ChannelDisplayInfo> selectedChannels = new ArrayList<>(viewer.getImageDisplay().selectedChannels());
    List<ChannelDisplayInfo> channels = doTransforms && !selectedChannels.isEmpty() ? selectedChannels : null;
    if (channels != null)
        server = ChannelDisplayTransformServer.createColorTransformServer(server, channels);
    // Loop through all selected objects
    Collection<PathObject> pathObjects = viewer.getHierarchy().getSelectionModel().getSelectedObjects();
    if (pathObjects.isEmpty())
        pathObjects = Collections.singletonList(viewer.getHierarchy().getRootObject());
    List<ImagePlus> imps = new ArrayList<>();
    for (PathObject pathObject : pathObjects) {
        if (Thread.currentThread().isInterrupted() || IJ.escapePressed())
            return;
        int width, height;
        if (pathObject == null || !pathObject.hasROI()) {
            width = server.getWidth();
            height = server.getHeight();
        } else {
            Rectangle bounds = AwtTools.getBounds(pathObject.getROI());
            width = bounds.width;
            height = bounds.height;
        }
        RegionRequest region;
        ROI roi = pathObject == null ? null : pathObject.getROI();
        if (roi == null || PathObjectTools.hasPointROI(pathObject)) {
            region = RegionRequest.createInstance(server.getPath(), downsample, 0, 0, server.getWidth(), server.getHeight(), viewer.getZPosition(), viewer.getTPosition());
        } else
            region = RegionRequest.createInstance(server.getPath(), downsample, roi);
        // region = RegionRequest.createInstance(server.getPath(), downsample, pathObject.getROI(), viewer.getZPosition(), viewer.getTPosition());
        // Minimum size has been removed (v0.2.0-m4); returned regions should be at least 1x1 pixels
        // if (region.getWidth() / downsample < 8 || region.getHeight() / downsample < 8) {
        //     DisplayHelpers.showErrorMessage("Send region to ImageJ", "The width & height of the extracted image must both be >= 8 pixels");
        //     continue;
        // }
        // Calculate required z-slices and time-points
        int zStart = doZ ? 0 : region.getZ();
        int zEnd = doZ ? server.nZSlices() : region.getZ() + 1;
        int tStart = doT ? 0 : region.getT();
        int tEnd = doT ? server.nTimepoints() : region.getT() + 1;
        long nZ = zEnd - zStart;
        long nT = tEnd - tStart;
        int bytesPerPixel = server.isRGB() ? 4 : server.getPixelType().getBytesPerPixel() * server.nChannels();
        double memory = ((long) width * height * nZ * nT * bytesPerPixel) / (downsample * downsample);
        // TODO: Perform calculation based on actual amount of available memory
        long availableMemory = GeneralTools.estimateAvailableMemory();
        if (memory >= availableMemory * 0.95) {
            logger.error("Cannot extract region {} - estimated size is too large (approx. {} MB)", pathObject, GeneralTools.formatNumber(memory / (1024.0 * 1024.0), 2));
Dialogs.showErrorMessage("Send region to ImageJ error", "Selected region is too large to extract - please selected a smaller region or use a higher downsample factor");
            continue;
        }
        if (memory / 1024 / 1024 > 100) {
            if (pathObjects.size() == 1 && !Dialogs.showYesNoDialog("Send region to ImageJ", String.format("Attempting to extract this region is likely to require > %.2f MB - are you sure you want to continue?", memory / 1024 / 1024)))
                return;
        }
        // We should switch to the event dispatch thread when interacting with ImageJ
        try {
            ImagePlus imp;
            PathObjectHierarchy hierarchy = viewer.getHierarchy();
            OverlayOptions options = viewer.getOverlayOptions();
            if (zEnd - zStart > 1 || tEnd - tStart > 1) {
                // TODO: Handle overlays
                imp = IJTools.extractHyperstack(server, region, zStart, zEnd, tStart, tEnd);
                if (includeROI && roi != null) {
                    Roi roiIJ = IJTools.convertToIJRoi(roi, imp.getCalibration(), region.getDownsample());
                    imp.setRoi(roiIJ);
                }
                if (includeOverlay) {
                    Overlay overlay = new Overlay();
                    for (int t = tStart; t < tEnd; t++) {
                        for (int z = zStart; z < zEnd; z++) {
                            RegionRequest request2 = RegionRequest.createInstance(region.getPath(), region.getDownsample(), region.getX(), region.getY(), region.getWidth(), region.getHeight(), z, t);
                            var regionPredicate = PathObjectTools.createImageRegionPredicate(request2);
                            Overlay temp = IJExtension.extractOverlay(hierarchy, request2, options, p -> p != pathObject && regionPredicate.test(p));
                            if (overlay == null)
                                overlay = temp;
                            for (int i = 0; i < temp.size(); i++) {
                                Roi roiIJ = temp.get(i);
                                roiIJ.setPosition(-1, z + 1, t + 1);
                                overlay.add(roiIJ);
                            }
                        }
                    }
                    if (overlay != null && overlay.size() > 0)
                        imp.setOverlay(overlay);
                }
            } else if (includeOverlay)
                imp = IJExtension.extractROIWithOverlay(server, pathObject, hierarchy, region, includeROI, options).getImage();
            else
                imp = IJExtension.extractROIWithOverlay(server, pathObject, null, region, includeROI, options).getImage();
            // Set display ranges if we can
            if (viewer != null && imp instanceof CompositeImage) {
                var availableChannels = viewer.getImageDisplay().availableChannels().stream()
                        .filter(c -> c instanceof SingleChannelDisplayInfo)
                        .map(c -> (SingleChannelDisplayInfo) c)
                        .collect(Collectors.toList());
                CompositeImage impComp = (CompositeImage) imp;
                if (availableChannels.size() == imp.getNChannels()) {
                    for (int c = 0; c < availableChannels.size(); c++) {
                        var channel = availableChannels.get(c);
                        imp.setPosition(c + 1, 1, 1);
                        impComp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
                    }
                    imp.setPosition(1);
                }
            } else if (selectedChannels.size() == 1 && imp.getType() != ImagePlus.COLOR_RGB) {
                // Setting the display range for non-RGB images can give unexpected results (changing pixel values)
                var channel = selectedChannels.get(0);
                imp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
            }
            imps.add(imp);
        } catch (IOException e) {
            Dialogs.showErrorMessage("Send region to ImageJ", e);
            return;
        }
    }
    // Show all the images we've got
    if (!imps.isEmpty()) {
        SwingUtilities.invokeLater(() -> {
            boolean batchMode = Interpreter.batchMode;
            // Try to start an ImageJ instance, and return if this fails
            try {
                ImageJ ij = IJExtension.getImageJInstance();
                if (ij == null)
                    return;
                ij.setVisible(true);
                // Make sure we aren't in batch mode, so that image will display
                Interpreter.batchMode = false;
                for (ImagePlus imp : imps) {
                    imp.show();
                }
            } finally {
                Interpreter.batchMode = batchMode;
            }
        });
    }
}
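The memory guard in the loop above reduces to a simple estimate: raw pixel bytes for the requested bounds and planes, divided by the square of the downsample. A self-contained sketch of that arithmetic follows (illustrative names, no QuPath dependencies).

// Self-contained sketch of the pre-extraction memory estimate (no QuPath dependencies).
public class RegionMemorySketch {

    // Raw pixel bytes for the full-resolution bounds and requested planes,
    // scaled by 1/downsample^2 - mirroring the estimate in the loop above.
    static double estimateBytes(int width, int height, long nZ, long nT,
                                int bytesPerPixel, double downsample) {
        return ((long) width * height * nZ * nT * bytesPerPixel) / (downsample * downsample);
    }

    public static void main(String[] args) {
        // e.g. a 20000 x 20000 RGB region (4 bytes/pixel), single plane, at downsample 8
        double bytes = estimateBytes(20000, 20000, 1, 1, 4, 8);
        System.out.printf("Approx. %.1f MB%n", bytes / (1024.0 * 1024.0)); // ~23.8 MB
    }
}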
Use of qupath.lib.gui.viewer.OverlayOptions in project qupath (by qupath).
Class PathClassPane, method setSelectedClassesVisibility:
void setSelectedClassesVisibility(boolean visible) {
    OverlayOptions overlayOptions = qupath.getViewer().getOverlayOptions();
    for (var pathClass : getSelectedPathClasses()) {
        // if (pathClass == null || pathClass == PathClassFactory.getPathClassUnclassified())
        //     continue;
        overlayOptions.setPathClassHidden(pathClass, !visible);
    }
    listClasses.refresh();
}
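A small sketch of the same visibility toggle outside the UI, using only the OverlayOptions methods seen above (setPathClassHidden and isPathClassHidden); the surrounding class and the PathClass import path are assumptions.

// Sketch: flip visibility for one classification, mirroring setSelectedClassesVisibility above.
import qupath.lib.gui.viewer.OverlayOptions;
import qupath.lib.objects.classes.PathClass; // assumed import path

public class ClassVisibilitySketch {

    static void toggleVisibility(OverlayOptions overlayOptions, PathClass pathClass) {
        boolean hidden = overlayOptions.isPathClassHidden(pathClass);
        overlayOptions.setPathClassHidden(pathClass, !hidden); // hide if shown, show if hidden
    }
}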