Use of qupath.lib.regions.RegionRequest in project qupath by qupath.
The class PyramidGeneratingImageServer, method readTile.
@Override
protected BufferedImage readTile(TileRequest tileRequest) throws IOException {
// Request directly from the original server if that's most efficient
RegionRequest request = tileRequest.getRegionRequest();
double downsample = request.getDownsample();
int level = ServerTools.getPreferredResolutionLevel(this, downsample);
double closestOriginalDownsample = ServerTools.getPreferredDownsampleFactor(server, downsample);
if (level == 0 || closestOriginalDownsample >= getDownsampleForResolution(level - 1))
return server.readBufferedImage(request.updatePath(server.getPath()));
// Read image from the 'previous' resolution
RegionRequest request2 = request.updateDownsample(getDownsampleForResolution(level - 1));
// If we have an empty tile, we should also return an empty tile
BufferedImage img = readBufferedImage(request2);
// return null;
if (img == null || isEmptyTile(img))
return getEmptyTile(tileRequest.getTileWidth(), tileRequest.getTileHeight());
// Resize to the required size
return BufferedImageTools.resize(img, tileRequest.getTileWidth(), tileRequest.getTileHeight(), allowSmoothInterpolation());
}
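The tile logic above relies on three RegionRequest calls: getDownsample() to read the requested resolution, updatePath() to re-target the request at the wrapped server, and updateDownsample() to ask for the same region at a different pyramid level. A minimal, self-contained sketch of those calls follows; the image paths, tile bounds and class name are hypothetical:

import qupath.lib.regions.RegionRequest;

public class RegionRequestRewriteExample {

    public static void main(String[] args) {
        // Hypothetical image path and full-resolution tile bounds (downsample 1.0)
        RegionRequest request = RegionRequest.createInstance("file:/path/to/image.svs", 1.0, 0, 0, 512, 512);
        // Re-target the same region at another server's path, as readTile does before delegating
        RegionRequest forWrapped = request.updatePath("file:/path/to/wrapped-image.svs");
        // Request the same region at a coarser pyramid level (downsample 4.0 here)
        RegionRequest coarser = request.updateDownsample(4.0);
        System.out.println(forWrapped.getDownsample() + ", " + coarser.getDownsample()); // 1.0, 4.0
    }
}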
Use of qupath.lib.regions.RegionRequest in project qupath by qupath.
The class QuPathViewer, method getImageLocationString.
/**
* Get a string representing the image coordinates for a particular x & y location.
* @param xx x-coordinate in the image space (not the component/viewer space)
* @param yy y-coordinate in the image space (not the component/viewer space)
* @param useCalibratedUnits if true, report the location in calibrated units (e.g. microns) where a pixel calibration is available; otherwise use pixels
* @return a String describing the image location (and, where available, the pixel value), or an empty String if the location falls outside the image
*/
private String getImageLocationString(double xx, double yy, boolean useCalibratedUnits) {
ImageServer<BufferedImage> server = getServer();
if (server == null)
return "";
String units;
if (xx < 0 || yy < 0 || xx > server.getWidth() - 1 || yy > server.getHeight() - 1)
return "";
double xDisplay = xx;
double yDisplay = yy;
PixelCalibration cal = server.getPixelCalibration();
if (useCalibratedUnits && cal.hasPixelSizeMicrons()) {
units = GeneralTools.micrometerSymbol();
xDisplay *= cal.getPixelWidthMicrons();
yDisplay *= cal.getPixelHeightMicrons();
} else {
units = "px";
}
// See if we're on top of a TMA core
String prefix = "";
TMAGrid tmaGrid = getHierarchy().getTMAGrid();
if (tmaGrid != null) {
TMACoreObject core = PathObjectTools.getTMACoreForPixel(tmaGrid, xx, yy);
if (core != null && core.getName() != null)
prefix = "Core: " + core.getName() + "\n";
}
String s = null;
RegionRequest request = ImageRegionStoreHelpers.getTileRequest(server, xx, yy, downsampleFactor.get(), getZPosition(), getTPosition());
if (request != null) {
BufferedImage img = regionStore.getCachedTile(server, request);
int xi = 0, yi = 0;
if (img == null) {
// Try getting a value from the thumbnail for the whole image
BufferedImage imgThumbnail = regionStore.getCachedThumbnail(server, getZPosition(), getTPosition());
if (imgThumbnail != null) {
img = imgThumbnail;
double downsample = (double) server.getWidth() / imgThumbnail.getWidth();
xi = (int) (xx / downsample + .5);
yi = (int) (yy / downsample + .5);
}
} else {
xi = (int) ((xx - request.getX()) / request.getDownsample());
yi = (int) ((yy - request.getY()) / request.getDownsample());
}
if (img != null) {
// Make sure we are within range
xi = Math.min(xi, img.getWidth() - 1);
yi = Math.min(yi, img.getHeight() - 1);
// Get the value, having applied any required color transforms
if (imageDisplay != null)
s = imageDisplay.getTransformedValueAsString(img, xi, yi);
}
}
// Append z, t position if required
String zString = null;
if (server.nZSlices() > 1) {
double zSpacing = server.getPixelCalibration().getZSpacingMicrons();
if (!useCalibratedUnits || Double.isNaN(zSpacing))
zString = "z = " + getZPosition();
else
zString = String.format("z = %.2f %s", getZPosition() * zSpacing, GeneralTools.micrometerSymbol());
}
String tString = null;
if (server.nTimepoints() > 1) {
// TODO: Consider use of TimeUnit
// TimeUnit timeUnit = server.getTimeUnit();
// if (!useMicrons || timeUnit == null)
tString = "t = " + getTPosition();
// else
// tString = String.format("z = %.2f %s", getTPosition(), timeUnit.toString());
}
String dimensionString;
if (tString == null && zString == null)
dimensionString = "";
else {
dimensionString = "\n";
if (zString != null) {
dimensionString += zString;
if (tString != null)
dimensionString += ", " + tString;
} else
dimensionString += tString;
}
if (s != null)
return String.format("%s%.2f, %.2f %s\n%s%s", prefix, xDisplay, yDisplay, units, s, dimensionString);
else
return String.format("%s%.2f, %.2f %s%s", prefix, xDisplay, yDisplay, units, dimensionString);
// if (s != null)
// return String.format("<html><center>%.2f, %.2f %s<br>%s", xDisplay, yDisplay, units, s);
// else
// return String.format("<html><center>%.2f, %.2f %s", xDisplay, yDisplay, units);
}
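The block above converts a full-resolution image coordinate into a pixel index inside a cached (downsampled) tile by subtracting the request origin and dividing by its downsample. A minimal sketch of that mapping; the helper name, path and example numbers are hypothetical:

import qupath.lib.regions.RegionRequest;

public class TileCoordinateExample {

    // Map a full-resolution image coordinate to a pixel index within a tile read for the given request
    static int[] toTilePixel(RegionRequest request, double xImage, double yImage) {
        int xi = (int) ((xImage - request.getX()) / request.getDownsample());
        int yi = (int) ((yImage - request.getY()) / request.getDownsample());
        return new int[] { xi, yi };
    }

    public static void main(String[] args) {
        // Hypothetical tile: origin (1024, 2048), size 1024x1024, downsample 4.0
        RegionRequest request = RegionRequest.createInstance("image", 4.0, 1024, 2048, 1024, 1024);
        int[] xy = toTilePixel(request, 1536, 2560);
        System.out.println(xy[0] + ", " + xy[1]); // 128, 128
    }
}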
Use of qupath.lib.regions.RegionRequest in project qupath by qupath.
The class ExtractRegionCommand, method run.
@Override
public void run() {
QuPathViewer viewer = qupath.getViewer();
ImageServer<BufferedImage> server = null;
if (viewer != null)
server = viewer.getServer();
if (server == null)
return;
List<String> unitOptions = new ArrayList<>();
unitOptions.add(PIXELS_UNIT);
String unit = server.getPixelCalibration().getPixelWidthUnit();
if (unit.equals(server.getPixelCalibration().getPixelHeightUnit()) && !unit.equals(PixelCalibration.PIXEL))
unitOptions.add(unit);
if (!unitOptions.contains(resolutionUnit))
resolutionUnit = PIXELS_UNIT;
ParameterList params = new ParameterList()
    .addDoubleParameter("resolution", "Resolution", resolution, null, "Resolution at which the image will be exported, defined as the 'pixel size' in Resolution units")
    .addChoiceParameter("resolutionUnit", "Resolution unit", resolutionUnit, unitOptions, "Units defining the export resolution; if 'pixels' then the resolution is the same as a downsample value")
    .addBooleanParameter("includeROI", "Include ROI", includeROI, "Include the primary object defining the exported region as an active ROI in ImageJ")
    .addBooleanParameter("includeOverlay", "Include overlay", includeOverlay, "Include any objects overlapping the exported region as ROIs on an ImageJ overlay")
    .addBooleanParameter("doTransforms", "Apply color transforms", doTransforms, "Optionally apply any color transforms when sending the pixels to ImageJ")
    .addBooleanParameter("doZ", "All z-slices", doZ, "Optionally include all slices of a z-stack")
    .addBooleanParameter("doT", "All timepoints", doT, "Optionally include all timepoints of a time series");
// params.setHiddenParameters(unitOptions.size() <= 1, "resolutionUnit");
params.setHiddenParameters(server.nZSlices() == 1, "doZ");
params.setHiddenParameters(server.nTimepoints() == 1, "doT");
if (!Dialogs.showParameterDialog("Send region to ImageJ", params))
return;
// Parse values
resolution = params.getDoubleParameterValue("resolution");
resolutionUnit = (String) params.getChoiceParameterValue("resolutionUnit");
includeROI = params.getBooleanParameterValue("includeROI");
includeOverlay = params.getBooleanParameterValue("includeOverlay");
doTransforms = params.getBooleanParameterValue("doTransforms");
doZ = params.getBooleanParameterValue("doZ");
doT = params.getBooleanParameterValue("doT");
// Calculate downsample
double downsample = resolution;
if (!resolutionUnit.equals(PIXELS_UNIT))
downsample = resolution / (server.getPixelCalibration().getPixelHeight().doubleValue() / 2.0 + server.getPixelCalibration().getPixelWidth().doubleValue() / 2.0);
// Color transforms are (currently) only applied for brightfield images - for fluorescence we always provide everything as unchanged as possible
List<ChannelDisplayInfo> selectedChannels = new ArrayList<>(viewer.getImageDisplay().selectedChannels());
List<ChannelDisplayInfo> channels = doTransforms && !selectedChannels.isEmpty() ? selectedChannels : null;
if (channels != null)
server = ChannelDisplayTransformServer.createColorTransformServer(server, channels);
// Loop through all selected objects
Collection<PathObject> pathObjects = viewer.getHierarchy().getSelectionModel().getSelectedObjects();
if (pathObjects.isEmpty())
pathObjects = Collections.singletonList(viewer.getHierarchy().getRootObject());
List<ImagePlus> imps = new ArrayList<>();
for (PathObject pathObject : pathObjects) {
if (Thread.currentThread().isInterrupted() || IJ.escapePressed())
return;
int width, height;
if (pathObject == null || !pathObject.hasROI()) {
width = server.getWidth();
height = server.getHeight();
} else {
Rectangle bounds = AwtTools.getBounds(pathObject.getROI());
width = bounds.width;
height = bounds.height;
}
RegionRequest region;
ROI roi = pathObject == null ? null : pathObject.getROI();
if (roi == null || PathObjectTools.hasPointROI(pathObject)) {
region = RegionRequest.createInstance(server.getPath(), downsample, 0, 0, server.getWidth(), server.getHeight(), viewer.getZPosition(), viewer.getTPosition());
} else
region = RegionRequest.createInstance(server.getPath(), downsample, roi);
// region = RegionRequest.createInstance(server.getPath(), downsample, pathObject.getROI(), viewer.getZPosition(), viewer.getTPosition());
// Minimum size has been removed (v0.2.0-m4); returned regions should be at least 1x1 pixels
// if (region.getWidth() / downsample < 8 || region.getHeight() / downsample < 8) {
// DisplayHelpers.showErrorMessage("Send region to ImageJ", "The width & height of the extracted image must both be >= 8 pixels");
// continue;
// }
// Calculate required z-slices and time-points
int zStart = doZ ? 0 : region.getZ();
int zEnd = doZ ? server.nZSlices() : region.getZ() + 1;
int tStart = doT ? 0 : region.getT();
int tEnd = doT ? server.nTimepoints() : region.getT() + 1;
long nZ = zEnd - zStart;
long nT = tEnd - tStart;
int bytesPerPixel = server.isRGB() ? 4 : server.getPixelType().getBytesPerPixel() * server.nChannels();
double memory = ((long) width * height * nZ * nT * bytesPerPixel) / (downsample * downsample);
// TODO: Perform calculation based on actual amount of available memory
long availableMemory = GeneralTools.estimateAvailableMemory();
if (memory >= availableMemory * 0.95) {
logger.error("Cannot extract region {} - estimated size is too large (approx. {} MB)", pathObject, GeneralTools.formatNumber(memory / (1024.0 * 1024.0), 2));
Dialogs.showErrorMessage("Send region to ImageJ error", "Selected region is too large to extract - please select a smaller region or use a higher downsample factor");
continue;
}
if (memory / 1024 / 1024 > 100) {
if (pathObjects.size() == 1 && !Dialogs.showYesNoDialog("Send region to ImageJ", String.format("Attempting to extract this region is likely to require > %.2f MB - are you sure you want to continue?", memory / 1024 / 1024)))
return;
}
// We should switch to the event dispatch thread when interacting with ImageJ
try {
ImagePlus imp;
PathObjectHierarchy hierarchy = viewer.getHierarchy();
OverlayOptions options = viewer.getOverlayOptions();
if (zEnd - zStart > 1 || tEnd - tStart > 1) {
// TODO: Handle overlays
imp = IJTools.extractHyperstack(server, region, zStart, zEnd, tStart, tEnd);
if (includeROI && roi != null) {
Roi roiIJ = IJTools.convertToIJRoi(roi, imp.getCalibration(), region.getDownsample());
imp.setRoi(roiIJ);
}
if (includeOverlay) {
Overlay overlay = new Overlay();
for (int t = tStart; t < tEnd; t++) {
for (int z = zStart; z < zEnd; z++) {
RegionRequest request2 = RegionRequest.createInstance(region.getPath(), region.getDownsample(), region.getX(), region.getY(), region.getWidth(), region.getHeight(), z, t);
var regionPredicate = PathObjectTools.createImageRegionPredicate(request2);
Overlay temp = IJExtension.extractOverlay(hierarchy, request2, options, p -> p != pathObject && regionPredicate.test(p));
if (overlay == null)
overlay = temp;
for (int i = 0; i < temp.size(); i++) {
Roi roiIJ = temp.get(i);
roiIJ.setPosition(-1, z + 1, t + 1);
overlay.add(roiIJ);
}
}
}
if (overlay != null && overlay.size() > 0)
imp.setOverlay(overlay);
}
} else if (includeOverlay)
imp = IJExtension.extractROIWithOverlay(server, pathObject, hierarchy, region, includeROI, options).getImage();
else
imp = IJExtension.extractROIWithOverlay(server, pathObject, null, region, includeROI, options).getImage();
// Set display ranges if we can
if (viewer != null && imp instanceof CompositeImage) {
var availableChannels = viewer.getImageDisplay().availableChannels().stream().filter(c -> c instanceof SingleChannelDisplayInfo).map(c -> (SingleChannelDisplayInfo) c).collect(Collectors.toList());
CompositeImage impComp = (CompositeImage) imp;
if (availableChannels.size() == imp.getNChannels()) {
for (int c = 0; c < availableChannels.size(); c++) {
var channel = availableChannels.get(c);
imp.setPosition(c + 1, 1, 1);
impComp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
}
imp.setPosition(1);
}
} else if (selectedChannels.size() == 1 && imp.getType() != ImagePlus.COLOR_RGB) {
// Setting the display range for non-RGB images can give unexpected results (changing pixel values)
var channel = selectedChannels.get(0);
imp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
}
imps.add(imp);
} catch (IOException e) {
Dialogs.showErrorMessage("Send region to ImageJ", e);
return;
}
}
// Show all the images we've got
if (!imps.isEmpty()) {
SwingUtilities.invokeLater(() -> {
boolean batchMode = Interpreter.batchMode;
// Try to start an ImageJ instance, and return if this fails
try {
ImageJ ij = IJExtension.getImageJInstance();
if (ij == null)
return;
ij.setVisible(true);
// Make sure we aren't in batch mode, so that image will display
Interpreter.batchMode = false;
for (ImagePlus imp : imps) {
imp.show();
}
} finally {
Interpreter.batchMode = batchMode;
}
});
}
}
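The run() method above builds its RegionRequest in two ways: one covering the whole image at a fixed z/t position, and one bounded by the selected object's ROI. A minimal sketch of both constructions, using a hypothetical path, image size and rectangle ROI in place of the live viewer/server state:

import qupath.lib.regions.ImagePlane;
import qupath.lib.regions.RegionRequest;
import qupath.lib.roi.ROIs;
import qupath.lib.roi.interfaces.ROI;

public class ExtractRegionRequestExample {

    public static void main(String[] args) {
        String path = "file:/path/to/image.svs"; // hypothetical
        double downsample = 4.0;
        int serverWidth = 40000, serverHeight = 30000; // hypothetical image dimensions
        // Whole-image request at a fixed plane (z = 0, t = 0 here)
        RegionRequest fullImage = RegionRequest.createInstance(path, downsample, 0, 0, serverWidth, serverHeight, 0, 0);
        // Request bounded by a ROI (a rectangle on the default plane, standing in for the selected object's ROI)
        ROI roi = ROIs.createRectangleROI(1000, 2000, 512, 512, ImagePlane.getDefaultPlane());
        RegionRequest roiRegion = RegionRequest.createInstance(path, downsample, roi);
        System.out.println(fullImage + "\n" + roiRegion);
    }
}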
Use of qupath.lib.regions.RegionRequest in project qupath by qupath.
The class IntensityFeaturesPlugin, method processObject.
static boolean processObject(final PathObject pathObject, final ParameterList params, final ImageData<BufferedImage> imageData) throws IOException {
// Determine amount to downsample
var server = imageData.getServer();
var stains = imageData.getColorDeconvolutionStains();
PixelCalibration cal = server.getPixelCalibration();
double downsample = calculateDownsample(cal, params);
if (downsample <= 0) {
logger.warn("Effective downsample must be > 0 (requested value {})", downsample);
}
// Determine region shape
RegionType regionType = (RegionType) params.getChoiceParameterValue("region");
// Try to get ROI
boolean useROI = regionType == RegionType.ROI || regionType == RegionType.NUCLEUS;
ROI roi = null;
if (regionType == RegionType.NUCLEUS) {
if (pathObject instanceof PathCellObject)
roi = ((PathCellObject) pathObject).getNucleusROI();
} else
roi = pathObject.getROI();
// pathROI = ((PathCellObject)pathObject).getNucleusROI();
if (roi == null)
return false;
// Create a map - this is useful for occasions when tiling is needed
Map<FeatureColorTransform, List<FeatureComputer>> map = new LinkedHashMap<>();
if (server.isRGB()) {
for (FeatureColorTransform transform : FeatureColorTransformEnum.values()) {
List<FeatureComputer> list = new ArrayList<>();
map.put(transform, list);
for (FeatureComputerBuilder builder : builders) {
list.add(builder.build());
}
}
} else {
for (FeatureColorTransform transform : getBasicChannelTransforms(server.nChannels())) {
List<FeatureComputer> list = new ArrayList<>();
map.put(transform, list);
for (FeatureComputerBuilder builder : builders) {
list.add(builder.build());
}
}
}
String prefix = getDiameterString(server, params);
// Create tiled ROIs, if required
ImmutableDimension sizePreferred = ImmutableDimension.getInstance((int) (2000 * downsample), (int) (2000 * downsample));
// ImmutableDimension sizePreferred = new ImmutableDimension((int)(200*downsample), (int)(200*downsample));
Collection<? extends ROI> rois = RoiTools.computeTiledROIs(roi, sizePreferred, sizePreferred, false, 0);
if (rois.size() > 1)
logger.info("Splitting {} into {} tiles for intensity measurements", roi, rois.size());
for (ROI pathROI : rois) {
if (Thread.currentThread().isInterrupted()) {
logger.warn("Measurement skipped - thread interrupted!");
return false;
}
// Get bounds
RegionRequest region;
if (useROI) {
region = RegionRequest.createInstance(server.getPath(), downsample, pathROI);
} else {
ImmutableDimension size = getPreferredTileSizePixels(server, params);
// RegionRequest region = RegionRequest.createInstance(server.getPath(), downsample, (int)(pathROI.getCentroidX() + .5) - size.width/2, (int)(pathROI.getCentroidY() + .5) - size.height/2, size.width, size.height, pathROI.getT(), pathROI.getZ());
// Try to align with pixel boundaries according to the downsample being used - otherwise, interpolation can cause some strange, pattern artefacts
int xStart = (int) (Math.round(pathROI.getCentroidX() / downsample) * downsample) - size.width / 2;
int yStart = (int) (Math.round(pathROI.getCentroidY() / downsample) * downsample) - size.height / 2;
int width = Math.min(server.getWidth(), xStart + size.width) - xStart;
int height = Math.min(server.getHeight(), yStart + size.height) - yStart;
region = RegionRequest.createInstance(server.getPath(), downsample, xStart, yStart, width, height, pathROI.getT(), pathROI.getZ());
}
// // Check image large enough to do *anything* of value
// if (region.getWidth() / downsample < 1 || region.getHeight() / downsample < 1) {
// logger.trace("Requested region is too small! {}", region);
// return false;
// }
// System.out.println(bounds);
// System.out.println("Size: " + size);
BufferedImage img = server.readBufferedImage(region);
if (img == null) {
logger.error("Could not read image - unable to compute intensity features for {}", pathObject);
return false;
}
// Create mask ROI if necessary
// If we just have 1 pixel, we want to use it so that the mean/min/max measurements are valid (even if nothing else is)
byte[] maskBytes = null;
if (useROI && img.getWidth() * img.getHeight() > 1) {
BufferedImage imgMask = BufferedImageTools.createROIMask(img.getWidth(), img.getHeight(), pathROI, region);
maskBytes = ((DataBufferByte) imgMask.getRaster().getDataBuffer()).getData();
}
boolean isRGB = server.isRGB();
List<FeatureColorTransform> transforms;
if (isRGB)
transforms = Arrays.asList(FeatureColorTransformEnum.values());
else
transforms = getBasicChannelTransforms(server.nChannels());
int w = img.getWidth();
int h = img.getHeight();
int[] rgbBuffer = isRGB ? img.getRGB(0, 0, w, h, null, 0, w) : null;
float[] pixels = null;
for (FeatureColorTransform transform : transforms) {
// Check if the color transform is requested
if (params.containsKey(transform.getKey()) && Boolean.TRUE.equals(params.getBooleanParameterValue(transform.getKey()))) {
// Transform the pixels
pixels = transform.getTransformedPixels(img, rgbBuffer, stains, pixels);
// Create the simple image
SimpleModifiableImage pixelImage = SimpleImages.createFloatImage(pixels, w, h);
// Apply any arbitrary mask
if (maskBytes != null) {
for (int i = 0; i < pixels.length; i++) {
if (maskBytes[i] == (byte) 0)
pixelImage.setValue(i % w, i / w, Float.NaN);
}
} else if (regionType == RegionType.CIRCLE) {
// Apply circular tile mask
double cx = (w - 1) / 2;
double cy = (h - 1) / 2;
double radius = Math.max(w, h) * .5;
double distThreshold = radius * radius;
for (int y = 0; y < h; y++) {
for (int x = 0; x < w; x++) {
if ((cx - x) * (cx - x) + (cy - y) * (cy - y) > distThreshold)
pixelImage.setValue(x, y, Float.NaN);
}
}
}
// Do the computations
for (FeatureComputer computer : map.get(transform)) {
computer.updateFeatures(pixelImage, transform, params);
}
}
}
}
// Add measurements to the parent object
for (Entry<FeatureColorTransform, List<FeatureComputer>> entry : map.entrySet()) {
String name = prefix + ": " + entry.getKey().getName(imageData, false) + ":";
for (FeatureComputer computer : entry.getValue()) computer.addMeasurements(pathObject, name, params);
}
pathObject.getMeasurementList().close();
// Lock any measurements that require it
if (pathObject instanceof PathAnnotationObject)
((PathAnnotationObject) pathObject).setLocked(true);
else if (pathObject instanceof TMACoreObject)
((TMACoreObject) pathObject).setLocked(true);
return true;
}
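For the non-ROI case, processObject snaps the tile origin to a multiple of the downsample before creating the RegionRequest, so that the resized pixels align with the downsample grid and interpolation artefacts are avoided. A minimal sketch of that alignment; the helper name and numbers are hypothetical, and clamping to the image bounds is omitted for brevity:

import qupath.lib.regions.RegionRequest;

public class AlignedRegionExample {

    // Build a request of the given size centred on (cx, cy), with the origin snapped to the downsample grid
    static RegionRequest createCentredRequest(String path, double downsample, double cx, double cy, int width, int height, int z, int t) {
        int xStart = (int) (Math.round(cx / downsample) * downsample) - width / 2;
        int yStart = (int) (Math.round(cy / downsample) * downsample) - height / 2;
        return RegionRequest.createInstance(path, downsample, xStart, yStart, width, height, z, t);
    }

    public static void main(String[] args) {
        RegionRequest request = createCentredRequest("image", 4.0, 1023, 2047, 200, 200, 0, 0);
        System.out.println(request); // origin snapped to (1024 - 100, 2048 - 100)
    }
}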
Use of qupath.lib.regions.RegionRequest in project qupath by qupath.
The class PathHierarchyImageServer, method readTile.
@Override
protected BufferedImage readTile(TileRequest tileRequest) throws IOException {
RegionRequest request = tileRequest.getRegionRequest();
// long startTime = System.currentTimeMillis();
// Get connections
Object o = options.getShowConnections() ? imageData.getProperty(DefaultPathObjectConnectionGroup.KEY_OBJECT_CONNECTIONS) : null;
PathObjectConnections connections = (o instanceof PathObjectConnections) ? (PathObjectConnections) o : null;
List<PathObject> pathObjects = new ArrayList<>(getObjectsToPaint(request));
if (pathObjects == null || pathObjects.isEmpty()) {
// We can only return null if no connections - otherwise we might still need to draw something
if (connections == null) {
return null;
}
}
// Because levels *can* change, we need to extract them first to avoid breaking the contract for comparable
// in a multithreaded environment
var levels = pathObjects.stream().collect(Collectors.toMap(p -> p, p -> p.getLevel()));
var comparator = DefaultPathObjectComparator.getInstance().thenComparingInt(p -> levels.get(p));
Collections.sort(pathObjects, comparator);
// Collections.sort(pathObjects, new HierarchyOverlay.DetectionComparator());
double downsampleFactor = request.getDownsample();
int width = tileRequest.getTileWidth();
int height = tileRequest.getTileHeight();
BufferedImage img = createDefaultRGBImage(width, height);
Graphics2D g2d = img.createGraphics();
g2d.setClip(0, 0, width, height);
// g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
double scale = 1.0 / downsampleFactor;
g2d.scale(scale, scale);
g2d.translate(-request.getX(), -request.getY());
// Note we don't want to pass a selection model, as selections shouldn't be included
if (pathObjects != null && !pathObjects.isEmpty())
PathHierarchyPaintingHelper.paintSpecifiedObjects(g2d, AwtTools.getBounds(request), pathObjects, options, null, downsampleFactor);
// See if we have any connections to draw
if (connections != null) {
PathHierarchyPaintingHelper.paintConnections(connections, hierarchy, g2d, imageData.isFluorescence() ? ColorToolsAwt.TRANSLUCENT_WHITE : ColorToolsAwt.TRANSLUCENT_BLACK, downsampleFactor);
}
g2d.dispose();
// System.out.println("Single tile image creation time: " + (endTime - startTime)/1000.);
return img;
}
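The renderer above maps full-resolution coordinates into tile pixels by scaling the Graphics2D by 1/downsample and then translating by the request origin, so objects can be painted using their original image coordinates. A minimal sketch of that transform on a hypothetical 512x512 tile covering a 2048x2048 region at downsample 4.0:

import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import qupath.lib.regions.RegionRequest;

public class TileTransformExample {

    public static void main(String[] args) {
        // Hypothetical tile request: region (4096, 4096, 2048, 2048) rendered at downsample 4.0
        RegionRequest request = RegionRequest.createInstance("image", 4.0, 4096, 4096, 2048, 2048);
        BufferedImage img = new BufferedImage(512, 512, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g2d = img.createGraphics();
        g2d.setClip(0, 0, img.getWidth(), img.getHeight());
        // Same order as readTile above: scale first, then translate by the region origin
        double scale = 1.0 / request.getDownsample();
        g2d.scale(scale, scale);
        g2d.translate(-request.getX(), -request.getY());
        // Drawing in full-resolution coordinates now lands in the correct tile pixels
        g2d.fillRect(request.getX(), request.getY(), request.getWidth(), request.getHeight()); // fills the whole 512x512 tile
        g2d.dispose();
    }
}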