Use of qupath.lib.gui.viewer.QuPathViewer in project qupath by qupath.
The class ZoomCommand, method run:
@Override
public void run() {
    QuPathViewer viewer = viewerValue.getValue();
    if (viewer != null) {
        if (timer != null)
            timer.stop();
        timer = new Timeline(
                new KeyFrame(Duration.ZERO, actionEvent ->
                        viewer.zoomIn((int) (zoomAmount / (timer.getCurrentTime().toMillis() / 10 + 1)))),
                new KeyFrame(Duration.millis(20)));
        timer.setCycleCount(15);
        timer.playFromStart();
    }
}
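The easing pattern here can be reproduced in isolation. The sketch below is illustrative rather than QuPath code (the DecayingZoomDemo class and the IntConsumer standing in for viewer::zoomIn are assumptions): each 20 ms cycle applies a zoom step that shrinks as the animation's elapsed time grows, so the zoom starts fast and eases out over the 15 cycles.

import javafx.animation.KeyFrame;
import javafx.animation.Timeline;
import javafx.util.Duration;
import java.util.function.IntConsumer;

// Hypothetical stand-alone sketch of the decaying-zoom Timeline used above.
public class DecayingZoomDemo {
    public static Timeline createZoomTimeline(IntConsumer zoomIn, int zoomAmount) {
        Timeline timer = new Timeline();
        timer.getKeyFrames().addAll(
                // Apply a zoom step that decays with elapsed time:
                // step = zoomAmount / (t_ms / 10 + 1)
                new KeyFrame(Duration.ZERO, event ->
                        zoomIn.accept((int) (zoomAmount / (timer.getCurrentTime().toMillis() / 10 + 1)))),
                // An empty key frame at 20 ms sets the cycle duration
                new KeyFrame(Duration.millis(20)));
        timer.setCycleCount(15);
        return timer;
    }
}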
Use of qupath.lib.gui.viewer.QuPathViewer in project qupath by qupath.
The class QPEx, method copyToClipboard:
/**
 * Try to copy an object to the clipboard.
 * This will attempt to perform a smart conversion; for example, if a window is provided, a snapshot will be taken
 * and copied as an image.
 * @param o the object to copy
 */
public static void copyToClipboard(Object o) {
    if (!Platform.isFxApplicationThread()) {
        Object o2 = o;
        Platform.runLater(() -> copyToClipboard(o2));
        return;
    }
    ClipboardContent content = new ClipboardContent();
    // Handle things that are (or could become) images
    if (o instanceof BufferedImage)
        o = SwingFXUtils.toFXImage((BufferedImage) o, null);
    if (o instanceof QuPathGUI)
        o = ((QuPathGUI) o).getStage();
    if (o instanceof QuPathViewer)
        o = ((QuPathViewer) o).getView();
    if (o instanceof Window)
        o = ((Window) o).getScene();
    if (o instanceof Scene)
        o = ((Scene) o).snapshot(null);
    if (o instanceof Node)
        o = ((Node) o).snapshot(null, null);
    if (o instanceof Image)
        content.putImage((Image) o);
    // Handle files
    List<File> files = null;
    if (o instanceof File)
        files = Arrays.asList((File) o);
    else if (o instanceof File[])
        files = Arrays.asList((File[]) o);
    else if (o instanceof Collection) {
        files = new ArrayList<>();
        for (var something : (Collection<?>) o) {
            if (something instanceof File)
                files.add((File) something);
        }
    }
    if (files != null && !files.isEmpty())
        content.putFiles(files);
    // Handle URLs
    if (o instanceof URL)
        content.putUrl(((URL) o).toString());
    // Always put a String representation
    content.putString(o.toString());
    Clipboard.getSystemClipboard().setContent(content);
}
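A few illustrative calls, as they might appear in code running inside QuPath (the class name and file names below are made up, and the QPEx import assumes a QuPath v0.2-era layout):

import java.io.File;
import java.util.List;
import qupath.lib.gui.scripting.QPEx;

// Hypothetical usage; requires QuPath's JavaFX application thread to be available.
public class ClipboardDemo {
    public static void main(String[] args) {
        QPEx.copyToClipboard("Hello QuPath");                                 // plain text via putString
        QPEx.copyToClipboard(new File("region.tif"));                         // single file via putFiles
        QPEx.copyToClipboard(List.of(new File("a.tif"), new File("b.tif"))); // file list via putFiles
    }
}

Note how the conversions cascade: a QuPathGUI becomes its Stage, a Window becomes its Scene, and a Scene becomes a snapshot image, so passing the main window copies a screenshot rather than a string.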
Use of qupath.lib.gui.viewer.QuPathViewer in project qupath by qupath.
The class ExtractRegionCommand, method run:
@Override
public void run() {
    QuPathViewer viewer = qupath.getViewer();
    ImageServer<BufferedImage> server = null;
    if (viewer != null)
        server = viewer.getServer();
    if (server == null)
        return;
    List<String> unitOptions = new ArrayList<>();
    unitOptions.add(PIXELS_UNIT);
    String unit = server.getPixelCalibration().getPixelWidthUnit();
    if (unit.equals(server.getPixelCalibration().getPixelHeightUnit()) && !unit.equals(PixelCalibration.PIXEL))
        unitOptions.add(unit);
    if (!unitOptions.contains(resolutionUnit))
        resolutionUnit = PIXELS_UNIT;
    ParameterList params = new ParameterList()
            .addDoubleParameter("resolution", "Resolution", resolution, null,
                    "Resolution at which the image will be exported, defined as the 'pixel size' in Resolution units")
            .addChoiceParameter("resolutionUnit", "Resolution unit", resolutionUnit, unitOptions,
                    "Units defining the export resolution; if 'pixels' then the resolution is the same as a downsample value")
            .addBooleanParameter("includeROI", "Include ROI", includeROI,
                    "Include the primary object defining the exported region as an active ROI in ImageJ")
            .addBooleanParameter("includeOverlay", "Include overlay", includeOverlay,
                    "Include any objects overlapping the exported region as ROIs on an ImageJ overlay")
            .addBooleanParameter("doTransforms", "Apply color transforms", doTransforms,
                    "Optionally apply any color transforms when sending the pixels to ImageJ")
            .addBooleanParameter("doZ", "All z-slices", doZ,
                    "Optionally include all slices of a z-stack")
            .addBooleanParameter("doT", "All timepoints", doT,
                    "Optionally include all timepoints of a time series");
    // params.setHiddenParameters(unitOptions.size() <= 1, "resolutionUnit");
    params.setHiddenParameters(server.nZSlices() == 1, "doZ");
    params.setHiddenParameters(server.nTimepoints() == 1, "doT");
    if (!Dialogs.showParameterDialog("Send region to ImageJ", params))
        return;
    // Parse values
    resolution = params.getDoubleParameterValue("resolution");
    resolutionUnit = (String) params.getChoiceParameterValue("resolutionUnit");
    includeROI = params.getBooleanParameterValue("includeROI");
    includeOverlay = params.getBooleanParameterValue("includeOverlay");
    doTransforms = params.getBooleanParameterValue("doTransforms");
    doZ = params.getBooleanParameterValue("doZ");
    doT = params.getBooleanParameterValue("doT");
    // Calculate downsample
    double downsample = resolution;
    if (!resolutionUnit.equals(PIXELS_UNIT))
        downsample = resolution / (server.getPixelCalibration().getPixelHeight().doubleValue() / 2.0
                + server.getPixelCalibration().getPixelWidth().doubleValue() / 2.0);
    // Color transforms are (currently) only applied for brightfield images - for fluorescence we always provide everything as unchanged as possible
    List<ChannelDisplayInfo> selectedChannels = new ArrayList<>(viewer.getImageDisplay().selectedChannels());
    List<ChannelDisplayInfo> channels = doTransforms && !selectedChannels.isEmpty() ? selectedChannels : null;
    if (channels != null)
        server = ChannelDisplayTransformServer.createColorTransformServer(server, channels);
    // Loop through all selected objects
    Collection<PathObject> pathObjects = viewer.getHierarchy().getSelectionModel().getSelectedObjects();
    if (pathObjects.isEmpty())
        pathObjects = Collections.singletonList(viewer.getHierarchy().getRootObject());
    List<ImagePlus> imps = new ArrayList<>();
    for (PathObject pathObject : pathObjects) {
        if (Thread.currentThread().isInterrupted() || IJ.escapePressed())
            return;
        int width, height;
        if (pathObject == null || !pathObject.hasROI()) {
            width = server.getWidth();
            height = server.getHeight();
        } else {
            Rectangle bounds = AwtTools.getBounds(pathObject.getROI());
            width = bounds.width;
            height = bounds.height;
        }
        RegionRequest region;
        ROI roi = pathObject == null ? null : pathObject.getROI();
        if (roi == null || PathObjectTools.hasPointROI(pathObject)) {
            region = RegionRequest.createInstance(server.getPath(), downsample, 0, 0,
                    server.getWidth(), server.getHeight(), viewer.getZPosition(), viewer.getTPosition());
        } else
            region = RegionRequest.createInstance(server.getPath(), downsample, roi);
        // region = RegionRequest.createInstance(server.getPath(), downsample, pathObject.getROI(), viewer.getZPosition(), viewer.getTPosition());
        // Minimum size has been removed (v0.2.0-m4); returned regions should be at least 1x1 pixels
        // if (region.getWidth() / downsample < 8 || region.getHeight() / downsample < 8) {
        //     DisplayHelpers.showErrorMessage("Send region to ImageJ", "The width & height of the extracted image must both be >= 8 pixels");
        //     continue;
        // }
        // Calculate required z-slices and time-points
        int zStart = doZ ? 0 : region.getZ();
        int zEnd = doZ ? server.nZSlices() : region.getZ() + 1;
        int tStart = doT ? 0 : region.getT();
        int tEnd = doT ? server.nTimepoints() : region.getT() + 1;
        long nZ = zEnd - zStart;
        long nT = tEnd - tStart;
        int bytesPerPixel = server.isRGB() ? 4 : server.getPixelType().getBytesPerPixel() * server.nChannels();
        double memory = ((long) width * height * nZ * nT * bytesPerPixel) / (downsample * downsample);
        // TODO: Perform calculation based on actual amount of available memory
        long availableMemory = GeneralTools.estimateAvailableMemory();
        if (memory >= availableMemory * 0.95) {
            logger.error("Cannot extract region {} - estimated size is too large (approx. {} MB)", pathObject,
                    GeneralTools.formatNumber(memory / (1024.0 * 1024.0), 2));
            Dialogs.showErrorMessage("Send region to ImageJ error",
                    "Selected region is too large to extract - please select a smaller region or use a higher downsample factor");
            continue;
        }
        if (memory / 1024 / 1024 > 100) {
            if (pathObjects.size() == 1 && !Dialogs.showYesNoDialog("Send region to ImageJ",
                    String.format("Attempting to extract this region is likely to require > %.2f MB - are you sure you want to continue?", memory / 1024 / 1024)))
                return;
        }
        // We should switch to the event dispatch thread when interacting with ImageJ
        try {
            ImagePlus imp;
            PathObjectHierarchy hierarchy = viewer.getHierarchy();
            OverlayOptions options = viewer.getOverlayOptions();
            if (zEnd - zStart > 1 || tEnd - tStart > 1) {
                // TODO: Handle overlays
                imp = IJTools.extractHyperstack(server, region, zStart, zEnd, tStart, tEnd);
                if (includeROI && roi != null) {
                    Roi roiIJ = IJTools.convertToIJRoi(roi, imp.getCalibration(), region.getDownsample());
                    imp.setRoi(roiIJ);
                }
                if (includeOverlay) {
                    Overlay overlay = new Overlay();
                    for (int t = tStart; t < tEnd; t++) {
                        for (int z = zStart; z < zEnd; z++) {
                            RegionRequest request2 = RegionRequest.createInstance(region.getPath(), region.getDownsample(),
                                    region.getX(), region.getY(), region.getWidth(), region.getHeight(), z, t);
                            var regionPredicate = PathObjectTools.createImageRegionPredicate(request2);
                            Overlay temp = IJExtension.extractOverlay(hierarchy, request2, options,
                                    p -> p != pathObject && regionPredicate.test(p));
                            if (overlay == null)
                                overlay = temp;
                            for (int i = 0; i < temp.size(); i++) {
                                Roi roiIJ = temp.get(i);
                                roiIJ.setPosition(-1, z + 1, t + 1);
                                overlay.add(roiIJ);
                            }
                        }
                    }
                    if (overlay != null && overlay.size() > 0)
                        imp.setOverlay(overlay);
                }
            } else if (includeOverlay)
                imp = IJExtension.extractROIWithOverlay(server, pathObject, hierarchy, region, includeROI, options).getImage();
            else
                imp = IJExtension.extractROIWithOverlay(server, pathObject, null, region, includeROI, options).getImage();
            // Set display ranges if we can
            if (viewer != null && imp instanceof CompositeImage) {
                var availableChannels = viewer.getImageDisplay().availableChannels().stream()
                        .filter(c -> c instanceof SingleChannelDisplayInfo)
                        .map(c -> (SingleChannelDisplayInfo) c)
                        .collect(Collectors.toList());
                CompositeImage impComp = (CompositeImage) imp;
                if (availableChannels.size() == imp.getNChannels()) {
                    for (int c = 0; c < availableChannels.size(); c++) {
                        var channel = availableChannels.get(c);
                        imp.setPosition(c + 1, 1, 1);
                        impComp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
                    }
                    imp.setPosition(1);
                }
            } else if (selectedChannels.size() == 1 && imp.getType() != ImagePlus.COLOR_RGB) {
                // Setting the display range for non-RGB images can give unexpected results (changing pixel values)
                var channel = selectedChannels.get(0);
                imp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
            }
            imps.add(imp);
        } catch (IOException e) {
            Dialogs.showErrorMessage("Send region to ImageJ", e);
            return;
        }
    }
    // Show all the images we've got
    if (!imps.isEmpty()) {
        SwingUtilities.invokeLater(() -> {
            boolean batchMode = Interpreter.batchMode;
            // Try to start an ImageJ instance, and return if this fails
            try {
                ImageJ ij = IJExtension.getImageJInstance();
                if (ij == null)
                    return;
                ij.setVisible(true);
                // Make sure we aren't in batch mode, so that image will display
                Interpreter.batchMode = false;
                for (ImagePlus imp : imps) {
                    imp.show();
                }
            } finally {
                Interpreter.batchMode = batchMode;
            }
        });
    }
}
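The two calculations that gate the export above can be isolated as follows. This is a minimal sketch, not QuPath API (the class and method names are illustrative): the downsample factor is the requested pixel size divided by the mean calibrated pixel size, and the memory estimate scales the raw pixel count down by the downsample squared.

// Illustrative helper mirroring the downsample and memory math above.
public class RegionSizeMath {

    /** Downsample factor: requested pixel size over the mean calibrated pixel size. */
    static double downsampleFor(double requestedPixelSize, double pixelWidth, double pixelHeight) {
        return requestedPixelSize / ((pixelWidth + pixelHeight) / 2.0);
    }

    /** Approximate memory (bytes) for a width x height region with nZ slices and nT timepoints. */
    static double estimateBytes(long width, long height, long nZ, long nT, int bytesPerPixel, double downsample) {
        return ((double) width * height * nZ * nT * bytesPerPixel) / (downsample * downsample);
    }

    public static void main(String[] args) {
        // e.g. export at 1.0 um/px from a 0.25 um/px scan -> downsample of 4
        double downsample = downsampleFor(1.0, 0.25, 0.25);
        double mb = estimateBytes(40000, 30000, 1, 1, 4, downsample) / (1024.0 * 1024.0);
        System.out.printf("Downsample %.1f, estimated %.0f MB%n", downsample, mb); // ~286 MB
    }
}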
Use of qupath.lib.gui.viewer.QuPathViewer in project qupath by qupath.
The class WandToolCV, method createShape:
@Override
protected Geometry createShape(MouseEvent e, double x, double y, boolean useTiles, Geometry addToShape) {
    GeometryFactory factory = getGeometryFactory();
    if (addToShape != null && pLast != null && pLast.distanceSq(x, y) < 2)
        return null;
    long startTime = System.currentTimeMillis();
    QuPathViewer viewer = getViewer();
    if (viewer == null)
        return null;
    double downsample = Math.max(1, Math.round(viewer.getDownsampleFactor() * 4)) / 4.0;
    var regionStore = viewer.getImageRegionStore();
    // Paint the image as it is currently being viewed
    var type = wandType.get();
    boolean doGray = type == WandType.GRAY;
    BufferedImage imgTemp = doGray ? imgGray : imgBGR;
    int nChannels = doGray ? 1 : 3;
    Graphics2D g2d = imgTemp.createGraphics();
    g2d.setColor(Color.BLACK);
    g2d.setClip(0, 0, w, w);
    g2d.fillRect(0, 0, w, w);
    double xStart = Math.round(x - w * downsample * 0.5);
    double yStart = Math.round(y - w * downsample * 0.5);
    bounds.setFrame(xStart, yStart, w * downsample, w * downsample);
    g2d.scale(1.0 / downsample, 1.0 / downsample);
    g2d.translate(-xStart, -yStart);
    regionStore.paintRegion(viewer.getServer(), g2d, bounds, viewer.getZPosition(), viewer.getTPosition(), downsample, null, null, viewer.getImageDisplay());
    // regionStore.paintRegionCompletely(viewer.getServer(), g2d, bounds, viewer.getZPosition(), viewer.getTPosition(), viewer.getDownsampleFactor(), null, viewer.getImageDisplay(), 250);
    // Optionally include the overlay information when using the wand
    float opacity = viewer.getOverlayOptions().getOpacity();
    if (opacity > 0 && getWandUseOverlays()) {
        ImageRegion region = ImageRegion.createInstance(
                (int) bounds.getX() - 1, (int) bounds.getY() - 1,
                (int) bounds.getWidth() + 2, (int) bounds.getHeight() + 2,
                viewer.getZPosition(), viewer.getTPosition());
        if (opacity < 1)
            g2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, opacity));
        for (PathOverlay overlay : viewer.getOverlayLayers().toArray(PathOverlay[]::new)) {
            if (!(overlay instanceof HierarchyOverlay))
                overlay.paintOverlay(g2d, region, downsample, viewer.getImageData(), true);
        }
    }
    // Ensure we have Mats & the correct channel number
    if (mat != null && (mat.channels() != nChannels || mat.depth() != opencv_core.CV_8U)) {
        mat.close();
        mat = null;
    }
    if (mat == null || mat.isNull() || mat.empty())
        mat = new Mat(w, w, CV_8UC(nChannels));
    // if (matMask == null)
    //     matMask = new Mat(w+2, w+2, CV_8U);
    // if (matSelected == null)
    //     matSelected = new Mat(w+2, w+2, CV_8U);
    // Put pixels into an OpenCV image
    byte[] buffer = ((DataBufferByte) imgTemp.getRaster().getDataBuffer()).getData();
    ByteBuffer matBuffer = mat.createBuffer();
    matBuffer.put(buffer);
    // mat.put(0, 0, buffer);
    // opencv_imgproc.cvtColor(mat, mat, opencv_imgproc.COLOR_BGR2Lab);
    // blurSigma = 4;
    boolean doSimpleSelection = e.isShortcutDown() && !e.isShiftDown();
    if (doSimpleSelection) {
        matMask.put(Scalar.ZERO);
        // opencv_imgproc.circle(matMask, seed, radius, Scalar.ONE);
        opencv_imgproc.floodFill(mat, matMask, seed, Scalar.ONE, null, Scalar.ZERO, Scalar.ZERO,
                4 | (2 << 8) | opencv_imgproc.FLOODFILL_MASK_ONLY | opencv_imgproc.FLOODFILL_FIXED_RANGE);
        subtractPut(matMask, Scalar.ONE);
    } else {
        double blurSigma = Math.max(0.5, getWandSigmaPixels());
        int size = (int) Math.ceil(blurSigma * 2) * 2 + 1;
        blurSize.width(size);
        blurSize.height(size);
        // Smooth a little
        opencv_imgproc.GaussianBlur(mat, mat, blurSize, blurSigma);
        // Choose mat to threshold (may be adjusted)
        Mat matThreshold = mat;
        // Apply color transform if required
        if (type == WandType.LAB_DISTANCE) {
            mat.convertTo(matFloat, opencv_core.CV_32F, 1.0 / 255.0, 0.0);
            opencv_imgproc.cvtColor(matFloat, matFloat, opencv_imgproc.COLOR_BGR2Lab);
            double max = 0;
            double mean = 0;
            try (FloatIndexer idx = matFloat.createIndexer()) {
                int k = w / 2;
                double v1 = idx.get(k, k, 0);
                double v2 = idx.get(k, k, 1);
                double v3 = idx.get(k, k, 2);
                double meanScale = 1.0 / (w * w);
                for (int row = 0; row < w; row++) {
                    for (int col = 0; col < w; col++) {
                        double L = idx.get(row, col, 0) - v1;
                        double A = idx.get(row, col, 1) - v2;
                        double B = idx.get(row, col, 2) - v3;
                        double dist = Math.sqrt(L * L + A * A + B * B);
                        if (dist > max)
                            max = dist;
                        mean += dist * meanScale;
                        idx.put(row, col, 0, (float) dist);
                    }
                }
            }
            if (matThreshold == null)
                matThreshold = new Mat();
            opencv_core.extractChannel(matFloat, matThreshold, 0);
            // There are various ways we might choose a threshold now...
            // Here, we use a multiple of the mean. Since values are 'distances'
            // they are all >= 0
            matThreshold.convertTo(matThreshold, opencv_core.CV_8U, 255.0 / max, 0);
            threshold.put(mean * getWandSensitivity());
            // OpenCVTools.matToImagePlus(matThreshold, "Before").show();
            // Apply local Otsu threshold
            // opencv_imgproc.threshold(matThreshold, matThreshold,
            //     0,
            //     255, opencv_imgproc.THRESH_BINARY + opencv_imgproc.THRESH_OTSU);
            // threshold.put(Scalar.ZERO);
            nChannels = 1;
        } else {
            // Base threshold on local standard deviation
            meanStdDev(matThreshold, mean, stddev);
            DoubleBuffer stddevBuffer = stddev.createBuffer();
            double[] stddev2 = new double[nChannels];
            stddevBuffer.get(stddev2);
            double scale = 1.0 / getWandSensitivity();
            if (scale < 0)
                scale = 0.01;
            for (int i = 0; i < stddev2.length; i++)
                stddev2[i] = stddev2[i] * scale;
            threshold.put(stddev2);
        }
        // Limit maximum radius by pen
        int radius = (int) Math.round(w / 2 * QuPathPenManager.getPenManager().getPressure());
        if (radius == 0)
            return null;
        matMask.put(Scalar.ZERO);
        opencv_imgproc.circle(matMask, seed, radius, Scalar.ONE);
        opencv_imgproc.floodFill(matThreshold, matMask, seed, Scalar.ONE, null, threshold, threshold,
                4 | (2 << 8) | opencv_imgproc.FLOODFILL_MASK_ONLY | opencv_imgproc.FLOODFILL_FIXED_RANGE);
        subtractPut(matMask, Scalar.ONE);
        if (strel == null)
            strel = opencv_imgproc.getStructuringElement(opencv_imgproc.MORPH_ELLIPSE, new Size(5, 5));
        opencv_imgproc.morphologyEx(matMask, matMask, opencv_imgproc.MORPH_CLOSE, strel);
    }
    MatVector contours = new MatVector();
    if (contourHierarchy == null)
        contourHierarchy = new Mat();
    opencv_imgproc.findContours(matMask, contours, contourHierarchy, opencv_imgproc.RETR_EXTERNAL, opencv_imgproc.CHAIN_APPROX_SIMPLE);
    // logger.trace("Contours: " + contours.size());
    List<Coordinate> coords = new ArrayList<>();
    List<Geometry> geometries = new ArrayList<>();
    for (Mat contour : contours.get()) {
        // Discard single pixels / lines
        if (contour.size().height() <= 2)
            continue;
        // Create a polygon geometry
        try (IntIndexer idxrContours = contour.createIndexer()) {
            for (long r = 0; r < idxrContours.size(0); r++) {
                int px = idxrContours.get(r, 0L, 0L);
                int py = idxrContours.get(r, 0L, 1L);
                double xx = (px - w / 2 - 1); // * downsample + x;
                double yy = (py - w / 2 - 1); // * downsample + y;
                coords.add(new Coordinate(xx, yy));
            }
        }
        if (coords.size() > 1) {
            // Ensure closed
            if (!coords.get(coords.size() - 1).equals(coords.get(0)))
                coords.add(coords.get(0));
            // Exclude single pixels
            var polygon = factory.createPolygon(coords.toArray(Coordinate[]::new));
            if (coords.size() > 5 || polygon.getArea() > 1)
                geometries.add(polygon);
        }
    }
    contours.close();
    if (geometries.isEmpty())
        return null;
    // Handle the fact that OpenCV contours are defined using the 'pixel center' by dilating the boundary
    var geometry = geometries.size() == 1 ? geometries.get(0) : GeometryCombiner.combine(geometries);
    geometry = geometry.buffer(0.5);
    // Transform to map to integer pixel locations in the full-resolution image
    var transform = new AffineTransformation().scale(downsample, downsample).translate(x, y);
    geometry = transform.transform(geometry);
    geometry = GeometryTools.roundCoordinates(geometry);
    geometry = GeometryTools.constrainToBounds(geometry, 0, 0, viewer.getServerWidth(), viewer.getServerHeight());
    if (geometry.getArea() <= 1)
        return null;
    long endTime = System.currentTimeMillis();
    logger.trace(getClass().getSimpleName() + " time: " + (endTime - startTime));
    if (pLast == null)
        pLast = new Point2D.Double(x, y);
    else
        pLast.setLocation(x, y);
    return geometry;
}
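The final mapping step above is easy to miss: contour coordinates are computed relative to the centre of the painted patch, so they must be scaled by the downsample and translated to the clicked position to land in full-resolution image space. Below is a minimal JTS sketch of just that step (the helper class and method names are illustrative):

import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.util.AffineTransformation;

// Illustrative helper isolating the patch-to-image coordinate mapping used above.
public class WandTransformDemo {

    /** Map a geometry from centre-relative patch coordinates to full-resolution image space. */
    static Geometry toImageSpace(Geometry patchGeometry, double downsample, double clickX, double clickY) {
        AffineTransformation transform = new AffineTransformation()
                .scale(downsample, downsample) // patch pixels -> image pixels
                .translate(clickX, clickY);    // recentre on the clicked location
        return transform.transform(patchGeometry);
    }
}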
Use of qupath.lib.gui.viewer.QuPathViewer in project qupath by qupath.
The class MeasurementMapPane, method updateMeasurements:
/**
 * Update the measurements according to the current image.
 */
public void updateMeasurements() {
    QuPathViewer viewer = qupath.getViewer();
    PathObjectHierarchy hierarchy = viewer.getHierarchy();
    if (hierarchy == null) {
        baseList.clear();
        return;
    }
    Collection<PathObject> pathObjects = hierarchy.getDetectionObjects();
    Set<String> measurements = PathClassifierTools.getAvailableFeatures(pathObjects);
    for (PathObject pathObject : pathObjects) {
        if (!Double.isNaN(pathObject.getClassProbability())) {
            measurements.add("Class probability");
            break;
        }
    }
    // Apply any changes
    baseList.setAll(measurements);
}
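PathClassifierTools.getAvailableFeatures essentially unions the measurement names across all detections. A minimal sketch of the same idea, assuming the standard PathObject measurement-list accessors (this is not the QuPath source):

import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.Set;
import qupath.lib.objects.PathObject;

// Illustrative re-implementation sketch of the feature-gathering step.
public class FeatureNames {
    static Set<String> availableFeatures(Collection<? extends PathObject> pathObjects) {
        Set<String> names = new LinkedHashSet<>();
        for (PathObject pathObject : pathObjects)
            names.addAll(pathObject.getMeasurementList().getMeasurementNames());
        return names;
    }
}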