use of qupath.lib.images.servers.TileRequest in project qupath by qupath.
the class ContourTracing method traceGeometriesImpl.
/**
 * Trace geometries for a single tile.
 * <p>
 * For {@code PROBABILITY} or {@code CLASSIFICATION} channel types the image is first reduced to a
 * single-band label image (argmax over channels for probabilities, the raw band for classifications),
 * and each requested channel value is traced as a label. For all other channel types, the supplied
 * min/max thresholds are applied directly to the requested band of the raster.
 *
 * @param server     server used to read the pixels for the tile
 * @param tile       tile defining the region to read and the pixel-to-image transform for tracing
 * @param clipArea   optional clip geometry intersected with each traced geometry (may be null)
 * @param thresholds channel thresholds defining what should be traced
 * @return a list of traced geometries with their channel labels; empty if no thresholds are given
 * @throws IOException if the tile pixels cannot be read
 */
private static List<GeometryWrapper> traceGeometriesImpl(ImageServer<BufferedImage> server, TileRequest tile, Geometry clipArea, ChannelThreshold... thresholds) throws IOException {
    if (thresholds.length == 0)
        return Collections.emptyList();
    var request = tile.getRegionRequest();
    var list = new ArrayList<GeometryWrapper>();
    var img = server.readBufferedImage(request);
    // Get an image to threshold
    var channelType = server.getMetadata().getChannelType();
    int h = img.getHeight();
    int w = img.getWidth();
    // If we have probabilities, then the 'true' classification is the one with the highest values.
    // If we have classifications, then the 'true' classification is the value of the pixel (which is expected to have a single band).
    boolean doClassification = channelType == ImageServerMetadata.ChannelType.PROBABILITY || channelType == ImageServerMetadata.ChannelType.CLASSIFICATION;
    if (doClassification) {
        SimpleImage image;
        if (channelType == ImageServerMetadata.ChannelType.PROBABILITY) {
            // Convert probabilities to classifications via a per-pixel argmax over the channels
            var raster = img.getRaster();
            var nChannels = server.nChannels();
            float[] output = new float[w * h];
            for (int y = 0; y < h; y++) {
                for (int x = 0; x < w; x++) {
                    int maxInd = 0;
                    float maxVal = raster.getSampleFloat(x, y, 0);
                    for (int c = 1; c < nChannels; c++) {
                        float val = raster.getSampleFloat(x, y, c);
                        if (val > maxVal) {
                            maxInd = c;
                            maxVal = val;
                        }
                    }
                    // Write once per pixel, after the winning channel is known
                    // (previously this assignment sat inside the channel loop and was repeated per channel)
                    output[y * w + x] = maxInd;
                }
            }
            image = SimpleImages.createFloatImage(output, w, h);
        } else {
            // Handle classifications - the single band already contains the label values
            var raster = img.getRaster();
            var pixels = raster.getSamples(0, 0, w, h, 0, (float[]) null);
            image = SimpleImages.createFloatImage(pixels, w, h);
        }
        for (var threshold : thresholds) {
            int c = threshold.getChannel();
            // Trace the label image with min == max == channel, i.e. exactly this label
            Geometry geometry = ContourTracing.createTracedGeometry(image, c, c, tile);
            if (geometry != null && !geometry.isEmpty()) {
                if (clipArea != null) {
                    geometry = GeometryTools.attemptOperation(geometry, g -> g.intersection(clipArea));
                    geometry = GeometryTools.homogenizeGeometryCollection(geometry);
                }
                if (!geometry.isEmpty() && geometry.getArea() > 0) {
                    // Exclude lines/points that can sometimes arise
                    list.add(new GeometryWrapper(geometry, c));
                }
            }
        }
    } else {
        // Apply the provided threshold to all channels
        var raster = img.getRaster();
        for (var threshold : thresholds) {
            Geometry geometry = ContourTracing.createTracedGeometry(raster, threshold.getMinThreshold(), threshold.getMaxThreshold(), threshold.getChannel(), tile);
            if (geometry != null) {
                if (clipArea != null) {
                    geometry = GeometryTools.attemptOperation(geometry, g -> g.intersection(clipArea));
                    geometry = GeometryTools.homogenizeGeometryCollection(geometry);
                }
                if (!geometry.isEmpty() && geometry.getArea() > 0) {
                    // Exclude lines/points that can sometimes arise
                    list.add(new GeometryWrapper(geometry, threshold.getChannel()));
                }
            }
        }
    }
    return list;
}
use of qupath.lib.images.servers.TileRequest in project qupath by qupath.
the class ContourTracing method traceGeometriesImpl.
/**
 * Trace geometries across a collection of tiles in parallel, merging per-tile results by label.
 * <p>
 * Tiles are traced concurrently, grouped by classification label, and then merged; because union
 * operations can be very slow, merging is restricted to geometries that actually touch the
 * inter-tile boundaries computed from the tile grid.
 *
 * @param server     server used to read the pixels for each tile
 * @param tiles      tiles covering the region to trace
 * @param clipArea   optional clip geometry intersected with the created geometries (may be null)
 * @param thresholds channel thresholds defining what should be traced
 * @return a map of channel label to merged geometry; empty if no thresholds are given
 * @throws IOException if tracing fails or the calling thread is interrupted
 */
@SuppressWarnings("unchecked")
private static Map<Integer, Geometry> traceGeometriesImpl(ImageServer<BufferedImage> server, Collection<TileRequest> tiles, Geometry clipArea, ChannelThreshold... thresholds) throws IOException {
    if (thresholds.length == 0)
        return Collections.emptyMap();
    Map<Integer, Geometry> output = new LinkedHashMap<>();
    var pool = Executors.newFixedThreadPool(ThreadTools.getParallelism());
    try {
        List<List<GeometryWrapper>> wrappers = invokeAll(pool, tiles, t -> traceGeometries(server, t, clipArea, thresholds));
        var geometryMap = wrappers.stream().flatMap(p -> p.stream()).collect(Collectors.groupingBy(g -> g.label));
        // Determine 'inter-tile boundaries' - union operations can be very slow, so we want to restrict them
        // only to geometries that really require them.
        var xBoundsSet = new TreeSet<Integer>();
        var yBoundsSet = new TreeSet<Integer>();
        for (var t : tiles) {
            xBoundsSet.add(t.getImageX());
            xBoundsSet.add(t.getImageX() + t.getImageWidth());
            yBoundsSet.add(t.getImageY());
            yBoundsSet.add(t.getImageY() + t.getImageHeight());
        }
        int[] xBounds = xBoundsSet.stream().mapToInt(x -> x).toArray();
        int[] yBounds = yBoundsSet.stream().mapToInt(y -> y).toArray();
        var futures = new LinkedHashMap<Integer, Future<Geometry>>();
        // Merge objects with the same classification
        for (var entry : geometryMap.entrySet()) {
            var list = entry.getValue();
            if (list.isEmpty())
                continue;
            futures.put(entry.getKey(), pool.submit(() -> mergeGeometryWrappers(list, xBounds, yBounds)));
        }
        for (var entry : futures.entrySet())
            output.put(entry.getKey(), entry.getValue().get());
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers further up the stack can see it
        Thread.currentThread().interrupt();
        throw new IOException("Interrupted while tracing geometries", e);
    } catch (Exception e) {
        throw new IOException(e);
    } finally {
        pool.shutdown();
    }
    return output;
}
use of qupath.lib.images.servers.TileRequest in project qupath by qupath.
the class ContourTracing method traceGeometries.
/**
 * Trace one or more geometries in an image.
 * @param server the server whose pixels should be traced
 * @param regionRequest optional region defining the area within which geometries should be traced (may be null, in which case the full image or clip area bounds are used)
 * @param clipArea optional clip region, intersected with the created geometries (may be null)
 * @param thresholds min/max thresholds (inclusive) to apply to each channel to generate objects
 * @return a map of channel label to traced geometry; empty if no thresholds are given or no tiles intersect the region
 * @throws IOException if the image pixels cannot be read
 */
public static Map<Integer, Geometry> traceGeometries(ImageServer<BufferedImage> server, RegionRequest regionRequest, Geometry clipArea, ChannelThreshold... thresholds) throws IOException {
    RegionRequest region = regionRequest;
    if (region == null) {
        if (clipArea == null) {
            // No constraints at all - trace the full image at its base resolution
            region = RegionRequest.createInstance(server, server.getDownsampleForResolution(0));
        } else {
            // Use the clip area's bounding box to restrict the traced region
            var env = clipArea.getEnvelopeInternal();
            region = RegionRequest.createInstance(server.getPath(), server.getDownsampleForResolution(0), GeometryTools.envelopToRegion(env, 0, 0));
        }
    } else if (clipArea != null) {
        // Ensure we don't compute more than we need to
        var env = clipArea.getEnvelopeInternal();
        region = region.intersect2D(GeometryTools.envelopToRegion(env, region.getZ(), region.getT()));
    }
    Collection<TileRequest> tiles = server.getTileRequestManager().getTileRequests(region);
    if (thresholds.length == 0 || tiles.isEmpty())
        return Collections.emptyMap();
    // If the region downsample doesn't match the tile requests, the scaling may be off
    // One way to resolve that (without requiring the region to be read in one go) is to generate new tile requests for a pyramidalized server at the correct resolution
    double downsample = region.getDownsample();
    if (Math.abs(tiles.iterator().next().getDownsample() - downsample) > 1e-3) {
        server = ImageServers.pyramidalize(server, downsample);
        tiles = server.getTileRequestManager().getTileRequests(region);
    }
    // TODO: Consider restricting parallelization (e.g. based on thresholds.length, tiles.size() and available processors)
    return traceGeometriesImpl(server, tiles, clipArea, thresholds);
}
use of qupath.lib.images.servers.TileRequest in project qupath by qupath.
the class PathHierarchyImageServer method readTile.
/**
 * Render a single tile by painting the hierarchy objects (and, optionally, object connections)
 * that intersect the requested region into an RGB image.
 * Returns null when there is nothing at all to paint for this tile.
 */
@Override
protected BufferedImage readTile(TileRequest tileRequest) throws IOException {
    RegionRequest request = tileRequest.getRegionRequest();
    // Get connections, if they should be shown
    Object o = options.getShowConnections() ? imageData.getProperty(DefaultPathObjectConnectionGroup.KEY_OBJECT_CONNECTIONS) : null;
    PathObjectConnections connections = (o instanceof PathObjectConnections) ? (PathObjectConnections) o : null;
    List<PathObject> pathObjects = new ArrayList<>(getObjectsToPaint(request));
    // We can only return null if no connections - otherwise we might still need to draw something
    // (note: pathObjects cannot be null here, it is always a freshly-created list)
    if (pathObjects.isEmpty() && connections == null) {
        return null;
    }
    // Because levels *can* change, we need to extract them first to avoid breaking the contract for comparable
    // in a multithreaded environment
    var levels = pathObjects.stream().collect(Collectors.toMap(p -> p, p -> p.getLevel()));
    var comparator = DefaultPathObjectComparator.getInstance().thenComparingInt(p -> levels.get(p));
    Collections.sort(pathObjects, comparator);
    double downsampleFactor = request.getDownsample();
    int width = tileRequest.getTileWidth();
    int height = tileRequest.getTileHeight();
    BufferedImage img = createDefaultRGBImage(width, height);
    Graphics2D g2d = img.createGraphics();
    g2d.setClip(0, 0, width, height);
    // Map full-resolution image coordinates into tile pixel coordinates
    double scale = 1.0 / downsampleFactor;
    g2d.scale(scale, scale);
    g2d.translate(-request.getX(), -request.getY());
    // Note we don't want to pass a selection model, as selections shouldn't be included
    if (!pathObjects.isEmpty())
        PathHierarchyPaintingHelper.paintSpecifiedObjects(g2d, AwtTools.getBounds(request), pathObjects, options, null, downsampleFactor);
    // See if we have any connections to draw
    if (connections != null) {
        PathHierarchyPaintingHelper.paintConnections(connections, hierarchy, g2d, imageData.isFluorescence() ? ColorToolsAwt.TRANSLUCENT_WHITE : ColorToolsAwt.TRANSLUCENT_BLACK, downsampleFactor);
    }
    g2d.dispose();
    return img;
}
use of qupath.lib.images.servers.TileRequest in project qupath by qupath.
the class PixelClassificationOverlay method paintOverlay.
// Paints the pixel classification overlay: cached classified tiles are drawn immediately,
// while missing tiles are (optionally) submitted for live prediction. The ordering of the
// cache/renderer/composite setup and the tile sorting is significant, so the logic is kept
// as-is and only documented here.
@Override
public void paintOverlay(Graphics2D g2d, ImageRegion imageRegion, double downsampleFactor, ImageData<BufferedImage> imageData, boolean paintCompletely) {
// For now, bind the display to the display of detections
if (!showOverlay.get())
return;
if (imageData == null)
return;
// Nothing to paint without a classification server for this image
var server = getPixelClassificationServer(imageData);
if (server == null)
return;
// Show classified tiles. Without this, opacity can make it hard to see which regions have been processed.
// Note that if the alpha value is too large, tile boundaries can appear at some viewing magnifications (previous default was 32)
var colorComplete = imageData.getImageType() == ImageData.ImageType.FLUORESCENCE ? ColorToolsAwt.getCachedColor(255, 255, 255, 1) : ColorToolsAwt.getCachedColor(0, 0, 0, 1);
// Get the displayed clip bounds for fast checking if ROIs need to be drawn
RegionRequest fullRequest;
Shape shapeRegion = g2d.getClip();
if (shapeRegion == null)
fullRequest = RegionRequest.createInstance(server.getPath(), downsampleFactor, imageRegion);
else
fullRequest = RegionRequest.createInstance(server.getPath(), downsampleFactor, AwtTools.getImageRegion(shapeRegion, imageRegion.getZ(), imageRegion.getT()));
// If we have a filter, we might not need to do anything
var filter = getOverlayOptions().getPixelClassificationRegionFilter();
// Avoid this check; it causes confusion when zoomed in
// if (!filter.test(imageData, fullRequest))
// return;
// If the renderer has changed since the cache was populated, the cached RGB tiles are stale
var renderer = this.renderer.get();
if (renderer != null && rendererLastTimestamp != renderer.getLastChangeTimestamp()) {
clearCache();
rendererLastTimestamp = renderer.getLastChangeTimestamp();
}
// double requestedDownsample = classifier.getMetadata().getInputPixelSizeMicrons() / server.getAveragedPixelSizeMicrons();
double requestedDownsample = ServerTools.getPreferredDownsampleFactor(server, downsampleFactor);
// Work on a copy of the graphics context so rendering hints/composite changes don't leak to the caller
var gCopy = (Graphics2D) g2d.create();
if (requestedDownsample > server.getDownsampleForResolution(0))
gCopy.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
else
// Only use specified interpolation when upsampling
setInterpolation(gCopy);
// Combine the overlay's alpha with any alpha already set on the graphics context
var comp = getAlphaComposite();
var previousComposite = gCopy.getComposite();
if (comp != null) {
if (previousComposite instanceof AlphaComposite)
gCopy.setComposite(comp.derive(((AlphaComposite) previousComposite).getAlpha() * comp.getAlpha()));
else
gCopy.setComposite(comp);
}
Collection<TileRequest> tiles = server.getTileRequestManager().getTileRequests(fullRequest);
// Sort tiles by distance from the center of the visible region, so the most relevant tiles are handled first
if (fullRequest != null) {
double x = (Math.max(0, fullRequest.getMinX()) + Math.min(server.getWidth(), fullRequest.getMaxX())) / 2.0;
double y = (Math.max(0, fullRequest.getMinY()) + Math.min(server.getHeight(), fullRequest.getMaxY())) / 2.0;
var p = new Point2(x, y);
tiles = new ArrayList<>(tiles);
((List<TileRequest>) tiles).sort(Comparator.comparingDouble((TileRequest t) -> p.distanceSq(t.getImageX() + t.getImageWidth() / 2.0, t.getImageY() + t.getImageHeight() / 2.0)));
}
// Clear pending requests, since we'll insert new ones (perhaps in a different order)
this.pendingRequests.clear();
// Loop through & paint classified tiles if we have them, or request tiles if we don't
for (TileRequest tile : tiles) {
var request = tile.getRegionRequest();
// Skip tiles excluded by the region filter
if (filter != null && !filter.test(imageData, request))
continue;
// Try to get an RGB image, supplying a server that can be queried for a corresponding non-RGB cached tile if needed
BufferedImage imgRGB = getCachedTileRGB(tile, server);
if (imgRGB != null) {
// Fill with the faint 'complete' colour first, then draw the cached tile on top
gCopy.setColor(colorComplete);
gCopy.fillRect(request.getX(), request.getY(), request.getWidth(), request.getHeight());
// Get the cached RGB painted version (since painting can be a fairly expensive operation)
gCopy.drawImage(imgRGB, request.getX(), request.getY(), request.getWidth(), request.getHeight(), null);
continue;
}
// Request a tile - its painting will happen on a later repaint once the prediction completes
if (livePrediction) {
requestTile(tile, imageData, server);
}
}
gCopy.dispose();
}
Aggregations