Use of qupath.lib.regions.ImagePlane in project qupath by qupath.
The class DelaunayTools, method createFromGeometryCoordinates.
/**
* Create a {@link Subdivision} using the boundary coordinates of ROIs.
* This is primarily useful for computing Voronoi faces centered on ROIs rather than single points,
* i.e. to identify pixels closest to specific objects.
* <p>
* Notes:
* <ul>
* <li>This is typically <i>much</i> slower than {@link #createFromCentroids(Collection, boolean)}</li>
* <li>For interpretable results, ROIs should be non-overlapping.</li>
* </ul>
*
* @param pathObjects collection of objects from which to construct the {@link Subdivision}
* @param preferNucleusROI if true, prefer the nucleus ROI when extracting geometries from a cell
* @param densifyFactor amount to 'densify' each ROI; this is needed to interpolate coordinates (suggested value = 4.0)
* @return a new {@link Subdivision} computed from the provided objects
*
* @see #createFromCentroids(Collection, boolean)
*/
public static Subdivision createFromGeometryCoordinates(Collection<PathObject> pathObjects, boolean preferNucleusROI, double densifyFactor) {
logger.debug("Creating subdivision from geometry coordinates for {} objects", pathObjects.size());
var coords = new HashMap<Coordinate, PathObject>();
ImagePlane plane = null;
for (var pathObject : pathObjects) {
var roi = PathObjectTools.getROI(pathObject, preferNucleusROI);
if (plane == null)
plane = roi.getImagePlane();
else if (!plane.equals(roi.getImagePlane())) {
logger.warn("Non-matching image planes: {} and {}! Object will be skipped...", plane, roi.getImagePlane());
continue;
}
var geom = roi.getGeometry();
if (densifyFactor > 0)
geom = Densifier.densify(geom, densifyFactor);
var coordsTemp = geom.getCoordinates();
for (var c : coordsTemp) {
var previous = coords.put(c, pathObject);
if (previous != null)
logger.warn("Previous coordinate: " + previous);
}
}
// Because we do our own clipping anyway, we skip that step by requesting the diagram via the subdivision instead
return new Subdivision(createSubdivision(coords.keySet(), 0.001), pathObjects, coords, plane);
}
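A minimal usage sketch for the method above, assuming a QuPath scripting or plugin context: it builds a Subdivision from the boundary coordinates of existing detection objects. The import path for DelaunayTools (qupath.lib.analysis.DelaunayTools) and its nested Subdivision type are assumptions based on recent QuPath versions, not taken from the snippet itself.
import java.util.Collection;
import qupath.lib.analysis.DelaunayTools;                 // package assumed
import qupath.lib.analysis.DelaunayTools.Subdivision;     // nested class, as in the snippet above
import qupath.lib.objects.PathObject;
class BoundarySubdivisionExample {
    // Build a subdivision from whole-object boundary coordinates.
    // 4.0 is the densify factor suggested in the Javadoc above;
    // 'false' requests the full cell ROI rather than the nucleus ROI.
    static Subdivision buildBoundarySubdivision(Collection<PathObject> detections) {
        return DelaunayTools.createFromGeometryCoordinates(detections, false, 4.0);
    }
}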
Use of qupath.lib.regions.ImagePlane in project qupath by qupath.
The class SubcellularDetection, method processObject.
/**
* Initial version of subcellular detection processing.
*
* @param pathObject the parent (typically cell) object within which spots should be detected
* @param params parameters controlling detection, e.g. per-channel thresholds, spot sizes, smoothing and splitting options
* @param imageWrapper wrapper giving access to the image server and channel pixels
* @return true if the object was processed, false if it had no usable ROI or the computed region was empty
* @throws InterruptedException if processing is interrupted
* @throws IOException if the image region cannot be read
*/
static boolean processObject(final PathObject pathObject, final ParameterList params, final ImageWrapper imageWrapper) throws InterruptedException, IOException {
// Get the base classification for the object as it currently stands
PathClass baseClass = PathClassTools.getNonIntensityAncestorClass(pathObject.getPathClass());
// Variable to hold estimated spot count
double estimatedSpots;
// We assume that after this processing, any previous sub-cellular objects should be removed
pathObject.clearPathObjects();
// Ensure we have no existing subcellular detection measurements - if we do, remove them
String[] existingMeasurements = pathObject.getMeasurementList().getMeasurementNames().stream().filter(n -> n.startsWith("Subcellular:")).toArray(n -> new String[n]);
if (existingMeasurements.length > 0) {
pathObject.getMeasurementList().removeMeasurements(existingMeasurements);
pathObject.getMeasurementList().close();
}
// // If we're part of a TMA core, request the whole core...
// if (pathObject.getParent() instanceof TMACoreObject && pathObject.getParent().hasROI()) {
// regionStore.getImage(server, RegionRequest.createInstance(server.getPath(), 1, pathObject.getParent().getROI()), 25, true);
// }
ROI pathROI = pathObject.getROI();
if (pathROI == null || pathROI.isEmpty())
return false;
// double downsample = 0.5;
double downsample = 1;
// Determine spot size
ImageServer<BufferedImage> server = imageWrapper.getServer();
PixelCalibration cal = server.getPixelCalibration();
double minSpotArea, maxSpotArea, singleSpotArea;
double pixelWidth, pixelHeight;
if (cal.hasPixelSizeMicrons()) {
double spotSizeMicrons = params.getDoubleParameterValue("spotSizeMicrons");
double minSpotSizeMicrons = params.getDoubleParameterValue("minSpotSizeMicrons");
double maxSpotSizeMicrons = params.getDoubleParameterValue("maxSpotSizeMicrons");
pixelWidth = cal.getPixelWidthMicrons() * downsample;
pixelHeight = cal.getPixelHeightMicrons() * downsample;
singleSpotArea = spotSizeMicrons / (pixelWidth * pixelHeight);
minSpotArea = minSpotSizeMicrons / (pixelWidth * pixelHeight);
maxSpotArea = maxSpotSizeMicrons / (pixelWidth * pixelHeight);
} else {
singleSpotArea = params.getDoubleParameterValue("spotSizePixels");
minSpotArea = params.getDoubleParameterValue("minSpotSizePixels");
maxSpotArea = params.getDoubleParameterValue("maxSpotSizePixels");
pixelWidth = downsample;
pixelHeight = downsample;
}
boolean includeClusters = Boolean.TRUE.equals(params.getBooleanParameterValue("includeClusters"));
boolean doSmoothing = Boolean.TRUE.equals(params.getBooleanParameterValue("doSmoothing"));
boolean splitByIntensity = Boolean.TRUE.equals(params.getBooleanParameterValue("splitByIntensity"));
boolean splitByShape = Boolean.TRUE.equals(params.getBooleanParameterValue("splitByShape"));
// Get region to request - give a pixel as border
int xStart = (int) Math.max(0, pathROI.getBoundsX() - 1);
int yStart = (int) Math.max(0, pathROI.getBoundsY() - 1);
int width = (int) Math.min(server.getWidth() - 1, pathROI.getBoundsX() + pathROI.getBoundsWidth() + 1.5) - xStart;
int height = (int) Math.min(server.getHeight() - 1, pathROI.getBoundsY() + pathROI.getBoundsHeight() + 1.5) - yStart;
if (width <= 0 || height <= 0) {
logger.error("Negative ROI size for {}", pathROI);
pathObject.setPathClass(baseClass);
return false;
}
int z = pathROI.getZ();
int t = pathROI.getT();
// Don't associate with channel
int c = -1;
RegionRequest region = RegionRequest.createInstance(server.getPath(), 1.0, xStart, yStart, width, height, z, t);
// Mask to indicate pixels within the cell
byte[] cellMask = null;
for (String channelName : imageWrapper.getChannelNames(true, true)) {
double detectionThreshold = params.getDoubleParameterValue("detection[" + channelName + "]");
if (Double.isNaN(detectionThreshold) || detectionThreshold < 0)
continue;
// // TODO: Consider whether to use channel numbers for non-brightfield images
// if (!imageWrapper.imageData.isBrightfield())
// c++;
SimpleImage img = imageWrapper.getRegion(region, channelName);
// Get an ImageJ-friendly calibration for ROI conversion
Calibration calIJ = new Calibration();
calIJ.xOrigin = -xStart / downsample;
calIJ.yOrigin = -yStart / downsample;
// Create a cell mask
if (cellMask == null) {
BufferedImage imgMask = new BufferedImage(img.getWidth(), img.getHeight(), BufferedImage.TYPE_BYTE_GRAY);
Graphics2D g2d = imgMask.createGraphics();
if (downsample != 1)
g2d.scale(1.0 / downsample, 1.0 / downsample);
g2d.translate(-xStart, -yStart);
Shape shape = RoiTools.getShape(pathROI);
g2d.setColor(Color.WHITE);
g2d.fill(shape);
g2d.dispose();
cellMask = (byte[]) ((DataBufferByte) imgMask.getRaster().getDataBuffer()).getData(0);
}
// Get a buffer containing the image pixels
int w = img.getWidth();
int h = img.getHeight();
// Identify (& try to separate) spots
// Mask out non-cell areas as we go
FloatProcessor fpDetection = new FloatProcessor(w, h);
if (doSmoothing) {
for (int i = 0; i < w * h; i++) fpDetection.setf(i, img.getValue(i % w, i / w));
fpDetection.smooth();
for (int i = 0; i < w * h; i++) {
if (cellMask[i] == (byte) 0)
fpDetection.setf(i, 0f);
}
} else {
for (int i = 0; i < w * h; i++) {
if (cellMask[i] == (byte) 0)
fpDetection.setf(i, 0f);
else
fpDetection.setf(i, img.getValue(i % w, i / w));
}
}
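// Either split touching spots by intensity using ImageJ's MaximumFinder in SEGMENTED mode
// (tolerance = detectionThreshold / 10), or apply a simple threshold at detectionThreshold.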
ByteProcessor bpSpots;
if (splitByIntensity)
bpSpots = new MaximumFinder().findMaxima(fpDetection, detectionThreshold / 10.0, detectionThreshold, MaximumFinder.SEGMENTED, false, false);
else
bpSpots = SimpleThresholding.thresholdAboveEquals(fpDetection, (float) detectionThreshold);
if (splitByShape) {
new EDM().toWatershed(bpSpots);
}
// Loop through spot ROIs & make a decision
bpSpots.setThreshold(1, ImageProcessor.NO_THRESHOLD, ImageProcessor.NO_LUT_UPDATE);
List<PolygonRoi> possibleSpotRois = RoiLabeling.getFilledPolygonROIs(bpSpots, Wand.FOUR_CONNECTED);
List<PathObject> spotObjects = new ArrayList<>();
List<PathObject> clusterObjects = new ArrayList<>();
estimatedSpots = 0;
for (PolygonRoi spotRoi : possibleSpotRois) {
fpDetection.setRoi(spotRoi);
ImageStatistics stats = fpDetection.getStatistics();
// In v0.2
// ImagePlane plane = ImagePlane.getPlaneWithChannel(spotRoi.getCPosition(), spotRoi.getZPosition(), spotRoi.getTPosition());
// In v0.3
ImagePlane plane = ImagePlane.getPlaneWithChannel(c, z, t);
PathObject spotOrCluster = null;
if (stats.pixelCount >= minSpotArea && stats.pixelCount <= maxSpotArea) {
ROI roi = IJTools.convertToROI(spotRoi, calIJ, downsample, plane);
// cluster = new SubcellularObject(roi, 1);
spotOrCluster = createSubcellularObject(roi, 1);
estimatedSpots += 1;
} else if (includeClusters && stats.pixelCount >= minSpotArea) {
// Add a cluster
ROI roi = IJTools.convertToROI(spotRoi, calIJ, downsample, plane);
double nSpots = stats.pixelCount / singleSpotArea;
estimatedSpots += nSpots;
// cluster = new SubcellularObject(roi, nSpots);
spotOrCluster = createSubcellularObject(roi, nSpots);
}
if (spotOrCluster != null) {
boolean isCluster = spotOrCluster.getMeasurementList().getMeasurementValue("Num spots") > 1;
int rgb = imageWrapper.getChannelColor(channelName);
rgb = isCluster ? ColorTools.makeScaledRGB(rgb, 0.5) : ColorTools.makeScaledRGB(rgb, 1.5);
PathClass pathClass = PathClassFactory.getDerivedPathClass(spotOrCluster.getPathClass(), channelName + " object", rgb);
spotOrCluster.setPathClass(pathClass);
spotOrCluster.getMeasurementList().putMeasurement("Subcellular cluster: " + channelName + ": Area", stats.pixelCount * pixelWidth * pixelHeight);
spotOrCluster.getMeasurementList().putMeasurement("Subcellular cluster: " + channelName + ": Mean channel intensity", stats.mean);
// cluster.getMeasurementList().putMeasurement("Subcellular cluster: " + channelName + ": Max channel intensity", stats.max);
spotOrCluster.getMeasurementList().close();
if (isCluster)
clusterObjects.add(spotOrCluster);
else
spotObjects.add(spotOrCluster);
}
}
// Add measurements
MeasurementList measurementList = pathObject.getMeasurementList();
measurementList.putMeasurement("Subcellular: " + channelName + ": Num spots estimated", estimatedSpots);
measurementList.putMeasurement("Subcellular: " + channelName + ": Num single spots", spotObjects.size());
measurementList.putMeasurement("Subcellular: " + channelName + ": Num clusters", clusterObjects.size());
// Add spots
pathObject.addPathObjects(spotObjects);
pathObject.addPathObjects(clusterObjects);
}
return true;
}
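A small, self-contained sketch of the spot-size conversion performed near the start of the method above: spot areas supplied in µm² are converted into expected pixel counts by dividing by the (downsampled) pixel area. The calibration and spot-size values below are illustrative only and are not the plugin's defaults.
class SpotAreaConversionExample {
    public static void main(String[] args) {
        double downsample = 1;                              // the method above fixes this to 1
        double pixelWidthMicrons = 0.25 * downsample;       // illustrative pixel calibration
        double pixelHeightMicrons = 0.25 * downsample;
        double pixelArea = pixelWidthMicrons * pixelHeightMicrons;
        double spotSizeMicrons = 1.0;                       // expected area of a single spot, in µm²
        double singleSpotAreaPixels = spotSizeMicrons / pixelArea;
        // At 0.25 µm/pixel this gives 16 pixels per spot; cluster spot counts are then
        // estimated as (cluster pixel count) / singleSpotAreaPixels, as in the method above.
        System.out.println("Pixels per spot: " + singleSpotAreaPixels);
    }
}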
Use of qupath.lib.regions.ImagePlane in project qupath by qupath.
The class PointIO, method readPoints.
/**
* Read a list of point annotations from a stream.
* @param stream input stream providing tab-separated point data, beginning with a header line
* @return list of point annotation PathObjects, one per group of points sharing the same class, name, color and plane
* @throws IOException if the stream cannot be read
*/
public static List<PathObject> readPoints(InputStream stream) throws IOException {
List<PathObject> pathObjects = new ArrayList<>();
Map<String[], List<Point2>> pointsMap = new HashMap<>();
Scanner scanner = null;
String[] cols = null;
try {
scanner = new Scanner(stream);
// Header
cols = scanner.nextLine().split("\t");
while (scanner.hasNextLine()) {
putPointObjectFromString(scanner.nextLine(), cols, pointsMap);
}
} finally {
if (scanner != null)
scanner.close();
}
ImagePlane defaultPlane = ImagePlane.getDefaultPlane();
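// Each map key stores the metadata columns for one group of points; the '- 2' offsets below
// presumably skip the leading x and y coordinate columns of the header.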
for (var entry : pointsMap.entrySet()) {
var temp = Arrays.asList(cols);
String pathClass = temp.indexOf("class") > -1 ? entry.getKey()[temp.indexOf("class") - 2] : "";
String name = temp.indexOf("name") > -1 ? entry.getKey()[temp.indexOf("name") - 2] : "";
Integer color = null;
if (temp.indexOf("color") > -1) {
var colorTemp = entry.getKey()[temp.indexOf("color") - 2];
if (colorTemp != null && !colorTemp.isEmpty())
color = Integer.parseInt(colorTemp);
}
int c = temp.indexOf("c") > defaultPlane.getC() ? Integer.parseInt(entry.getKey()[temp.indexOf("c") - 2]) : defaultPlane.getC();
int z = temp.indexOf("z") > defaultPlane.getZ() ? Integer.parseInt(entry.getKey()[temp.indexOf("z") - 2]) : defaultPlane.getZ();
int t = temp.indexOf("t") > defaultPlane.getT() ? Integer.parseInt(entry.getKey()[temp.indexOf("t") - 2]) : defaultPlane.getT();
ROI points = ROIs.createPointsROI(entry.getValue(), ImagePlane.getPlaneWithChannel(c, z, t));
PathObject pathObject = PathObjects.createAnnotationObject(points);
if (name != null && name.length() > 0 && !"null".equals(name))
pathObject.setName(name);
if (pathClass != null && pathClass.length() > 0 && !"null".equals(pathClass))
pathObject.setPathClass(PathClassFactory.getPathClass(pathClass, color));
pathObject.setColorRGB(color);
if (pathObject != null)
pathObjects.add(pathObject);
}
return pathObjects;
}
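A hedged usage sketch for readPoints: it reads point annotations from a tab-separated file. The file name is a placeholder, and the import path for PointIO (qupath.lib.io.PointIO) is an assumption.
import java.io.FileInputStream;
import java.io.IOException;
import java.util.List;
import qupath.lib.io.PointIO;            // package assumed
import qupath.lib.objects.PathObject;
class ReadPointsExample {
    public static void main(String[] args) throws IOException {
        // "points.tsv" is a hypothetical file in the tab-separated format expected by
        // readPoints (a header line followed by one point per line).
        try (var stream = new FileInputStream("points.tsv")) {
            List<PathObject> pathObjects = PointIO.readPoints(stream);
            System.out.println("Read " + pathObjects.size() + " point annotation(s)");
        }
    }
}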
Use of qupath.lib.regions.ImagePlane in project qupath by qupath.
The class QuPath_Send_Overlay_to_QuPath, method promptToImportRois.
private void promptToImportRois(ImageData<?> imageData, ImagePlus imp, Collection<? extends Roi> rois, ImagePlane currentPlane) {
GenericDialog gd = new GenericDialog("Send overlay to QuPath");
gd.addChoice("Choose_object_type", new String[] { "Annotation", "Detection" }, typeChoice);
if (imp != null)
gd.addCheckbox("Include_measurements", false);
gd.addCheckbox("Select_objects", selectObjects);
gd.showDialog();
if (gd.wasCanceled())
return;
typeChoice = gd.getNextChoice();
boolean asDetection = "Detection".equals(typeChoice);
includeMeasurements = imp == null ? false : gd.getNextBoolean();
selectObjects = gd.getNextBoolean();
var server = imageData.getServer();
double downsample = imp == null ? 1.0 : IJTools.estimateDownsampleFactor(imp, server);
var hierarchy = imageData.getHierarchy();
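// Decide which plane to assign to the imported ROIs: if the ImagePlus stack spans the full
// z/t range of the server, pass a null plane (presumably so each ROI keeps its own z/t
// position); otherwise pin everything to the current viewer plane.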
ImagePlane plane = currentPlane;
if (imp == null)
plane = null;
else if (imp != null && server.nZSlices() * server.nTimepoints() > 1) {
if (imp.getNSlices() == server.nZSlices() && imp.getNFrames() == server.nTimepoints())
plane = null;
}
List<PathObject> pathObjects = createObjectsFromROIs(imp, rois, downsample, asDetection, includeMeasurements, plane);
if (!pathObjects.isEmpty()) {
Platform.runLater(() -> {
hierarchy.addPathObjects(pathObjects);
// Select the objects, e.g. so they can be classified or otherwise updated easily
if (selectObjects)
hierarchy.getSelectionModel().selectObjects(pathObjects);
});
}
}
Use of qupath.lib.regions.ImagePlane in project qupath by qupath.
The class DnnTools, method detect.
/**
* Apply a detection model to generate rectangles surrounding distinct structures.
* @param model the detection model
* @param mat the image
* @param request the region corresponding to the Mat; if provided, this is used to scale and translate detected regions
* @param classifier function to convert the classification label into a {@link PathClass}
* @param creator function to create an object (e.g. detection, annotation) from a ROI
* @return a list of created objects
*/
@SuppressWarnings("unchecked")
public static List<PathObject> detect(DetectionModel model, Mat mat, RegionRequest request, IntFunction<PathClass> classifier, Function<ROI, PathObject> creator) {
try (var scope = new PointerScope()) {
var ids = new IntPointer();
var preds = new FloatPointer();
var rects = new RectVector();
synchronized (model) {
model.detect(mat, ids, preds, rects);
}
double downsample = request == null ? 1.0 : request.getDownsample();
ImagePlane plane = request == null ? ImagePlane.getDefaultPlane() : request.getPlane();
double xOrigin = request == null ? 0 : request.getX();
double yOrigin = request == null ? 0 : request.getY();
long n = rects.size();
List<PathObject> pathObjects = new ArrayList<>();
for (long i = 0; i < n; i++) {
var rect = rects.get(i);
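// Scale the detected rectangle back to full-resolution image coordinates and
// offset it by the origin of the requested region.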
var roi = ROIs.createRectangleROI(xOrigin + rect.x() * downsample, yOrigin + rect.y() * downsample, rect.width() * downsample, rect.height() * downsample, plane);
var pathClass = classifier == null ? null : classifier.apply(ids.get(i));
double pred = preds.get(i);
var pathObject = creator.apply(roi);
pathObject.setPathClass(pathClass);
try (var ml = pathObject.getMeasurementList()) {
ml.putMeasurement("Probability", pred);
}
pathObjects.add(pathObject);
}
return pathObjects;
}
}
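A hypothetical wrapper showing how detect(...) might be called: each detected rectangle becomes a detection object with a fixed class. The package names (qupath.opencv.dnn.DnnTools, org.bytedeco.opencv.opencv_core.Mat, org.bytedeco.opencv.opencv_dnn.DetectionModel) and the class name "Positive" are assumptions, not taken from the snippet.
import java.util.List;
import org.bytedeco.opencv.opencv_core.Mat;               // package assumed
import org.bytedeco.opencv.opencv_dnn.DetectionModel;     // package assumed
import qupath.lib.objects.PathObject;
import qupath.lib.objects.PathObjects;
import qupath.lib.objects.classes.PathClassFactory;
import qupath.lib.regions.RegionRequest;
import qupath.opencv.dnn.DnnTools;                        // package assumed
class DetectExample {
    // Run the detector and turn every detected rectangle into a detection object,
    // ignoring the numeric class label and assigning the class "Positive" throughout.
    static List<PathObject> runDetector(DetectionModel model, Mat mat, RegionRequest request) {
        return DnnTools.detect(model, mat, request,
                label -> PathClassFactory.getPathClass("Positive"),
                roi -> PathObjects.createDetectionObject(roi));
    }
}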