Use of qupath.lib.gui.viewer.overlays.HierarchyOverlay in project qupath by qupath.
The class TMADataIO, method writeTMAData:
/**
* Write TMA data in a human-readable (and viewable) way, with JPEGs and TXT/CSV files.
*
* @param file the output file; if null, the user is prompted to choose one, and if it refers to a directory the data file is created inside it
* @param imageData the image data whose TMA grid should be exported
* @param overlayOptions overlay options used when rendering core images; if null, default options with filled detections are created
* @param downsampleFactor the downsample factor used for the TMA cores. If NaN, an automatic downsample value will be selected (>= 1). If <= 0, no cores are exported.
*/
public static void writeTMAData(File file, final ImageData<BufferedImage> imageData, OverlayOptions overlayOptions, final double downsampleFactor) {
if (imageData == null || imageData.getHierarchy() == null || imageData.getHierarchy().getTMAGrid() == null) {
logger.error("No TMA data available to save!");
return;
}
final ImageServer<BufferedImage> server = imageData.getServer();
String coreExt = imageData.getServer().isRGB() ? ".jpg" : ".tif";
if (file == null) {
file = Dialogs.promptToSaveFile("Save TMA data", null, ServerTools.getDisplayableImageName(server), "TMA data", "qptma");
if (file == null)
return;
} else if (file.isDirectory() || (!file.exists() && file.getAbsolutePath().endsWith(File.separator))) {
// Put inside the specified directory
file = new File(file, ServerTools.getDisplayableImageName(server) + TMA_DEARRAYING_DATA_EXTENSION);
if (!file.getParentFile().exists())
file.getParentFile().mkdirs();
}
final File dirData = new File(file + ".data");
if (!dirData.exists())
dirData.mkdir();
// Write basic file info
String delimiter = "\t";
TMAGrid tmaGrid = imageData.getHierarchy().getTMAGrid();
try {
PrintWriter writer = new PrintWriter(file);
writer.println(server.getPath());
writer.println(ServerTools.getDisplayableImageName(server));
writer.println();
writer.println("TMA grid width: " + tmaGrid.getGridWidth());
writer.println("TMA grid height: " + tmaGrid.getGridHeight());
writer.println("Core name" + delimiter + "X" + delimiter + "Y" + delimiter + "Width" + delimiter + "Height" + delimiter + "Present" + delimiter + TMACoreObject.KEY_UNIQUE_ID);
for (int row = 0; row < tmaGrid.getGridHeight(); row++) {
for (int col = 0; col < tmaGrid.getGridWidth(); col++) {
TMACoreObject core = tmaGrid.getTMACore(row, col);
if (!core.hasROI()) {
writer.println(core.getName() + delimiter + delimiter + delimiter + delimiter);
continue;
}
ROI pathROI = core.getROI();
int x = (int) pathROI.getBoundsX();
int y = (int) pathROI.getBoundsY();
int w = (int) Math.ceil(pathROI.getBoundsWidth());
int h = (int) Math.ceil(pathROI.getBoundsHeight());
String id = core.getUniqueID() == null ? "" : core.getUniqueID();
writer.println(core.getName() + delimiter + x + delimiter + y + delimiter + w + delimiter + h + delimiter + !core.isMissing() + delimiter + id);
}
}
writer.close();
} catch (Exception e) {
logger.error("Error writing TMA data: " + e.getLocalizedMessage(), e);
return;
}
// Save the summary results
ObservableMeasurementTableData tableData = new ObservableMeasurementTableData();
tableData.setImageData(imageData, tmaGrid.getTMACoreList());
SummaryMeasurementTableCommand.saveTableModel(tableData, new File(dirData, "TMA results - " + ServerTools.getDisplayableImageName(server) + ".txt"), Collections.emptyList());
boolean outputCoreImages = Double.isNaN(downsampleFactor) || downsampleFactor > 0;
if (outputCoreImages) {
// Create new overlay options, if we don't have some already
if (overlayOptions == null) {
overlayOptions = new OverlayOptions();
overlayOptions.setFillDetections(true);
}
final OverlayOptions options = overlayOptions;
// Write an overall TMA map (for quickly checking if the dearraying is ok)
File fileTMAMap = new File(dirData, "TMA map - " + ServerTools.getDisplayableImageName(server) + ".jpg");
double downsampleThumbnail = Math.max(1, (double) Math.max(server.getWidth(), server.getHeight()) / 1024);
RegionRequest request = RegionRequest.createInstance(server.getPath(), downsampleThumbnail, 0, 0, server.getWidth(), server.getHeight());
OverlayOptions optionsThumbnail = new OverlayOptions();
optionsThumbnail.setShowTMAGrid(true);
optionsThumbnail.setShowGrid(false);
optionsThumbnail.setShowAnnotations(false);
optionsThumbnail.setShowDetections(false);
try {
var renderedServer = new RenderedImageServer.Builder(imageData).layers(new TMAGridOverlay(optionsThumbnail)).downsamples(downsampleThumbnail).build();
ImageWriterTools.writeImageRegion(renderedServer, request, fileTMAMap.getAbsolutePath());
// ImageWriters.writeImageRegionWithOverlay(imageData.getServer(), Collections.singletonList(new TMAGridOverlay(overlayOptions, imageData)), request, fileTMAMap.getAbsolutePath());
} catch (IOException e) {
logger.warn("Unable to write image overview: " + e.getLocalizedMessage(), e);
}
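// Choose the core export downsample: use the explicit value if given, otherwise derive one from the preferred export pixel size (falling back to 1 when the pixel size is unknown)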
final double downsample = Double.isNaN(downsampleFactor) ? (server.getPixelCalibration().hasPixelSizeMicrons() ? ServerTools.getDownsampleFactor(server, preferredExportPixelSizeMicrons) : 1) : downsampleFactor;
// Creating a plugin makes it possible to parallelize & show progress easily
var renderedImageServer = new RenderedImageServer.Builder(imageData).layers(new HierarchyOverlay(null, options, imageData)).downsamples(downsample).build();
ExportCoresPlugin plugin = new ExportCoresPlugin(dirData, renderedImageServer, downsample, coreExt);
PluginRunner<BufferedImage> runner;
var qupath = QuPathGUI.getInstance();
if (qupath == null || qupath.getImageData() != imageData) {
runner = new CommandLinePluginRunner<>(imageData);
plugin.runPlugin(runner, null);
} else {
try {
qupath.runPlugin(plugin, null, false);
} catch (Exception e) {
logger.error("Error writing TMA data: " + e.getLocalizedMessage(), e);
}
// new Thread(() -> qupath.runPlugin(plugin, null, false)).start();
// runner = new PluginRunnerFX(QuPathGUI.getInstance());
// new Thread(() -> plugin.runPlugin(runner, null)).start();
}
}
}
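For context, a minimal usage sketch (not part of the source; it assumes an open ImageData<BufferedImage> named imageData, and the output path and use of Double.NaN are illustrative) showing how the method above might be called:
// Hypothetical call site: export TMA cores, letting the method pick the downsample (NaN) and default overlay options (null)
File exportFile = new File("/path/to/output/image-tma.qptma"); // assumed location
TMADataIO.writeTMAData(exportFile, imageData, null, Double.NaN);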
Use of qupath.lib.gui.viewer.overlays.HierarchyOverlay in project qupath by qupath.
The class WandToolCV, method createShape:
@Override
protected Geometry createShape(MouseEvent e, double x, double y, boolean useTiles, Geometry addToShape) {
GeometryFactory factory = getGeometryFactory();
if (addToShape != null && pLast != null && pLast.distanceSq(x, y) < 2)
return null;
long startTime = System.currentTimeMillis();
QuPathViewer viewer = getViewer();
if (viewer == null)
return null;
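// Quantise the painting downsample to 0.25 steps (minimum 0.25), based on the current viewer downsample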
double downsample = Math.max(1, Math.round(viewer.getDownsampleFactor() * 4)) / 4.0;
var regionStore = viewer.getImageRegionStore();
// Paint the image as it is currently being viewed
var type = wandType.get();
boolean doGray = type == WandType.GRAY;
BufferedImage imgTemp = doGray ? imgGray : imgBGR;
int nChannels = doGray ? 1 : 3;
Graphics2D g2d = imgTemp.createGraphics();
g2d.setColor(Color.BLACK);
g2d.setClip(0, 0, w, w);
g2d.fillRect(0, 0, w, w);
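// Centre a w x w pixel patch (at the chosen downsample) on the clicked location, in full-resolution image coordinates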
double xStart = Math.round(x - w * downsample * 0.5);
double yStart = Math.round(y - w * downsample * 0.5);
bounds.setFrame(xStart, yStart, w * downsample, w * downsample);
g2d.scale(1.0 / downsample, 1.0 / downsample);
g2d.translate(-xStart, -yStart);
regionStore.paintRegion(viewer.getServer(), g2d, bounds, viewer.getZPosition(), viewer.getTPosition(), downsample, null, null, viewer.getImageDisplay());
// regionStore.paintRegionCompletely(viewer.getServer(), g2d, bounds, viewer.getZPosition(), viewer.getTPosition(), viewer.getDownsampleFactor(), null, viewer.getImageDisplay(), 250);
// Optionally include the overlay information when using the wand
float opacity = viewer.getOverlayOptions().getOpacity();
if (opacity > 0 && getWandUseOverlays()) {
ImageRegion region = ImageRegion.createInstance((int) bounds.getX() - 1, (int) bounds.getY() - 1, (int) bounds.getWidth() + 2, (int) bounds.getHeight() + 2, viewer.getZPosition(), viewer.getTPosition());
if (opacity < 1)
g2d.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, opacity));
for (PathOverlay overlay : viewer.getOverlayLayers().toArray(PathOverlay[]::new)) {
if (!(overlay instanceof HierarchyOverlay))
overlay.paintOverlay(g2d, region, downsample, viewer.getImageData(), true);
}
}
// Ensure we have Mats & the correct channel number
if (mat != null && (mat.channels() != nChannels || mat.depth() != opencv_core.CV_8U)) {
mat.close();
mat = null;
}
if (mat == null || mat.isNull() || mat.empty())
mat = new Mat(w, w, CV_8UC(nChannels));
// if (matMask == null)
// matMask = new Mat(w+2, w+2, CV_8U);
// if (matSelected == null)
// matSelected = new Mat(w+2, w+2, CV_8U);
// Put pixels into an OpenCV image
byte[] buffer = ((DataBufferByte) imgTemp.getRaster().getDataBuffer()).getData();
ByteBuffer matBuffer = mat.createBuffer();
matBuffer.put(buffer);
// mat.put(0, 0, buffer);
// opencv_imgproc.cvtColor(mat, mat, opencv_imgproc.COLOR_BGR2Lab);
// blurSigma = 4;
boolean doSimpleSelection = e.isShortcutDown() && !e.isShiftDown();
if (doSimpleSelection) {
matMask.put(Scalar.ZERO);
// opencv_imgproc.circle(matMask, seed, radius, Scalar.ONE);
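// Flood fill from the seed: 4-connectivity, write the value 2 into the mask for filled pixels (2 << 8),
// modify only the mask (FLOODFILL_MASK_ONLY) and compare against the seed pixel (FLOODFILL_FIXED_RANGE);
// the subsequent subtractPut(matMask, Scalar.ONE) reduces the mask to a 0/1 selection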
opencv_imgproc.floodFill(mat, matMask, seed, Scalar.ONE, null, Scalar.ZERO, Scalar.ZERO, 4 | (2 << 8) | opencv_imgproc.FLOODFILL_MASK_ONLY | opencv_imgproc.FLOODFILL_FIXED_RANGE);
subtractPut(matMask, Scalar.ONE);
} else {
double blurSigma = Math.max(0.5, getWandSigmaPixels());
int size = (int) Math.ceil(blurSigma * 2) * 2 + 1;
blurSize.width(size);
blurSize.height(size);
// Smooth a little
opencv_imgproc.GaussianBlur(mat, mat, blurSize, blurSigma);
// Choose mat to threshold (may be adjusted)
Mat matThreshold = mat;
// Apply color transform if required
if (type == WandType.LAB_DISTANCE) {
mat.convertTo(matFloat, opencv_core.CV_32F, 1.0 / 255.0, 0.0);
opencv_imgproc.cvtColor(matFloat, matFloat, opencv_imgproc.COLOR_BGR2Lab);
double max = 0;
double mean = 0;
try (FloatIndexer idx = matFloat.createIndexer()) {
int k = w / 2;
double v1 = idx.get(k, k, 0);
double v2 = idx.get(k, k, 1);
double v3 = idx.get(k, k, 2);
double meanScale = 1.0 / (w * w);
for (int row = 0; row < w; row++) {
for (int col = 0; col < w; col++) {
double L = idx.get(row, col, 0) - v1;
double A = idx.get(row, col, 1) - v2;
double B = idx.get(row, col, 2) - v3;
double dist = Math.sqrt(L * L + A * A + B * B);
if (dist > max)
max = dist;
mean += dist * meanScale;
idx.put(row, col, 0, (float) dist);
}
}
}
if (matThreshold == null)
matThreshold = new Mat();
opencv_core.extractChannel(matFloat, matThreshold, 0);
// There are various ways we might choose a threshold now...
// Here, we use a multiple of the mean. Since values are 'distances'
// they are all >= 0
matThreshold.convertTo(matThreshold, opencv_core.CV_8U, 255.0 / max, 0);
threshold.put(mean * getWandSensitivity());
// // OpenCVTools.matToImagePlus(matThreshold, "Before").show();
// // Apply local Otsu threshold
// opencv_imgproc.threshold(matThreshold, matThreshold,
// 0,
// 255, opencv_imgproc.THRESH_BINARY + opencv_imgproc.THRESH_OTSU);
// threshold.put(Scalar.ZERO);
nChannels = 1;
} else {
// Base threshold on local standard deviation
meanStdDev(matThreshold, mean, stddev);
DoubleBuffer stddevBuffer = stddev.createBuffer();
double[] stddev2 = new double[nChannels];
stddevBuffer.get(stddev2);
double scale = 1.0 / getWandSensitivity();
if (scale < 0)
scale = 0.01;
for (int i = 0; i < stddev2.length; i++) stddev2[i] = stddev2[i] * scale;
threshold.put(stddev2);
}
// Limit maximum radius by pen
int radius = (int) Math.round(w / 2 * QuPathPenManager.getPenManager().getPressure());
if (radius == 0)
return null;
matMask.put(Scalar.ZERO);
opencv_imgproc.circle(matMask, seed, radius, Scalar.ONE);
opencv_imgproc.floodFill(matThreshold, matMask, seed, Scalar.ONE, null, threshold, threshold, 4 | (2 << 8) | opencv_imgproc.FLOODFILL_MASK_ONLY | opencv_imgproc.FLOODFILL_FIXED_RANGE);
subtractPut(matMask, Scalar.ONE);
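// Smooth the wand mask with a small elliptical closing, filling pinholes and gaps before contours are traced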
if (strel == null)
strel = opencv_imgproc.getStructuringElement(opencv_imgproc.MORPH_ELLIPSE, new Size(5, 5));
opencv_imgproc.morphologyEx(matMask, matMask, opencv_imgproc.MORPH_CLOSE, strel);
}
MatVector contours = new MatVector();
if (contourHierarchy == null)
contourHierarchy = new Mat();
opencv_imgproc.findContours(matMask, contours, contourHierarchy, opencv_imgproc.RETR_EXTERNAL, opencv_imgproc.CHAIN_APPROX_SIMPLE);
// logger.trace("Contours: " + contours.size());
List<Coordinate> coords = new ArrayList<>();
List<Geometry> geometries = new ArrayList<>();
for (Mat contour : contours.get()) {
// Discard single pixels / lines
if (contour.size().height() <= 2)
continue;
// Create a polygon geometry
try (IntIndexer idxrContours = contour.createIndexer()) {
// Start a fresh coordinate list for each contour, so polygons don't accumulate points across contours
coords.clear();
for (long r = 0; r < idxrContours.size(0); r++) {
int px = idxrContours.get(r, 0L, 0L);
int py = idxrContours.get(r, 0L, 1L);
double xx = (px - w / 2 - 1); // * downsample + x;
double yy = (py - w / 2 - 1); // * downsample + y;
coords.add(new Coordinate(xx, yy));
}
}
if (coords.size() > 1) {
// Ensure closed
if (!coords.get(coords.size() - 1).equals(coords.get(0)))
coords.add(coords.get(0));
// Exclude single pixels
var polygon = factory.createPolygon(coords.toArray(Coordinate[]::new));
if (coords.size() > 5 || polygon.getArea() > 1)
geometries.add(polygon);
}
}
contours.close();
if (geometries.isEmpty())
return null;
// Handle the fact that OpenCV contours are defined using the 'pixel center' by dilating the boundary
var geometry = geometries.size() == 1 ? geometries.get(0) : GeometryCombiner.combine(geometries);
geometry = geometry.buffer(0.5);
// Transform to map to integer pixel locations in the full-resolution image
var transform = new AffineTransformation().scale(downsample, downsample).translate(x, y);
geometry = transform.transform(geometry);
geometry = GeometryTools.roundCoordinates(geometry);
geometry = GeometryTools.constrainToBounds(geometry, 0, 0, viewer.getServerWidth(), viewer.getServerHeight());
if (geometry.getArea() <= 1)
return null;
long endTime = System.currentTimeMillis();
logger.trace(getClass().getSimpleName() + " time: " + (endTime - startTime));
if (pLast == null)
pLast = new Point2D.Double(x, y);
else
pLast.setLocation(x, y);
return geometry;
}
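For reference, a minimal sketch (not from the source; it assumes an existing ImageData<BufferedImage> imageData and a Graphics2D g2d, and the region and downsample values are illustrative) combining the APIs seen above to paint a HierarchyOverlay directly:
// Build a HierarchyOverlay as in the TMA export code, then paint it as in the wand tool's overlay loop
OverlayOptions options = new OverlayOptions();
options.setFillDetections(true);
PathOverlay overlay = new HierarchyOverlay(null, options, imageData);
ImageRegion region = ImageRegion.createInstance(0, 0, 1024, 1024, 0, 0); // x, y, width, height, z, t
overlay.paintOverlay(g2d, region, 4.0, imageData, true);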