Use of qupath.lib.analysis.images.ContourTracing.ChannelThreshold in project qupath by qupath.
From the class PixelClassifierTools, the method createObjectsFromPixelClassifier:
/**
* Create objects based upon an {@link ImageServer} that provides classification or probability output.
*
* @param server image server providing pixels from which objects should be created
* @param labels classification labels; if null, these will be taken from ImageServer#getMetadata() and all non-ignored classifications will be used.
* Providing a map makes it possible to explicitly exclude some classifications.
* @param roi region of interest in which objects should be created (optional; if null, the entire image is used)
* @param creator function to create an object from a ROI (e.g. annotation or detection)
* @param minArea minimum area for an object fragment to retain, in calibrated units based on the pixel calibration
* @param minHoleArea minimum area for a hole to fill, in calibrated units based on the pixel calibration
* @param doSplit if true, split connected regions into separate objects
* @return the objects created within the ROI
* @throws IOException if the pixel data cannot be read from the server
*/
public static Collection<PathObject> createObjectsFromPixelClassifier(ImageServer<BufferedImage> server, Map<Integer, PathClass> labels, ROI roi, Function<ROI, ? extends PathObject> creator, double minArea, double minHoleArea, boolean doSplit) throws IOException {
    // We need classification labels to do anything
    if (labels == null)
        labels = parseClassificationLabels(server.getMetadata().getClassificationLabels(), false);
    if (labels == null || labels.isEmpty())
        throw new IllegalArgumentException("Cannot create objects for server - no classification labels are available!");

    ChannelThreshold[] thresholds = labels.entrySet().stream().map(e -> ChannelThreshold.create(e.getKey())).toArray(ChannelThreshold[]::new);

    if (roi != null && !roi.isArea()) {
        logger.warn("Cannot create objects for non-area ROIs");
        return Collections.emptyList();
    }
    Geometry clipArea = roi == null ? null : roi.getGeometry();

    // Identify regions for selected ROI or entire image
    // This is a list because it might need to handle multiple z-slices or timepoints
    List<RegionRequest> regionRequests;
    if (roi != null) {
        var request = RegionRequest.createInstance(server.getPath(), server.getDownsampleForResolution(0), roi);
        regionRequests = Collections.singletonList(request);
    } else {
        regionRequests = RegionRequest.createAllRequests(server, server.getDownsampleForResolution(0));
    }

    double pixelArea = server.getPixelCalibration().getPixelWidth().doubleValue() * server.getPixelCalibration().getPixelHeight().doubleValue();
    double minAreaPixels = minArea / pixelArea;
    double minHoleAreaPixels = minHoleArea / pixelArea;

    // Create output array
    var pathObjects = new ArrayList<PathObject>();

    // Loop through region requests (usually 1, unless we have a z-stack or time series)
    for (RegionRequest regionRequest : regionRequests) {
        Map<Integer, Geometry> geometryMap = ContourTracing.traceGeometries(server, regionRequest, clipArea, thresholds);
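        // 'labels' may have been reassigned above, so copy it into an effectively final
        // local variable that the lambda below can capture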
        var labelMap = labels;
        pathObjects.addAll(geometryMap.entrySet().parallelStream()
                .flatMap(e -> geometryToObjects(e.getValue(), creator, labelMap.get(e.getKey()), minAreaPixels, minHoleAreaPixels, doSplit, regionRequest.getPlane()).stream())
                .collect(Collectors.toList()));
    }
    pathObjects.sort(DefaultPathObjectComparator.getInstance());
    return pathObjects;
}
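For context, the following is a minimal usage sketch rather than part of the QuPath source above. It assumes a variable named server holding an ImageServer<BufferedImage> that produces classification output (imports omitted); the creator PathObjects::createAnnotationObject and the area values are illustrative choices, not requirements.

// Illustrative call (hypothetical variable names and thresholds)
Collection<PathObject> annotations = PixelClassifierTools.createObjectsFromPixelClassifier(
        server,                               // classification or probability output
        null,                                 // labels: null -> use all non-ignored labels from the server metadata
        null,                                 // roi: null -> process the entire image
        PathObjects::createAnnotationObject,  // create annotations (PathObjects::createDetectionObject for detections)
        25.0,                                 // minArea, in calibrated units (illustrative value)
        10.0,                                 // minHoleArea, in calibrated units (illustrative value)
        true);                                // doSplit: split connected regions into separate objects

Passing an explicit labels map instead of null makes it possible to exclude particular classifications, as described in the Javadoc above.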