Use of qupath.lib.objects.PathObject in the project qupath (by qupath).
From the class ImageJMacroRunner, method getParentObjects.
@Override
protected Collection<? extends PathObject> getParentObjects(final PluginRunner<BufferedImage> runner) {
// Try to get currently-selected objects
PathObjectHierarchy hierarchy = getHierarchy(runner);
List<PathObject> pathObjects = hierarchy.getSelectionModel().getSelectedObjects().stream().filter(p -> p.isAnnotation() || p.isTMACore()).collect(Collectors.toList());
if (pathObjects.isEmpty()) {
if (GuiTools.promptForParentObjects(this.getName(), runner.getImageData(), false, getSupportedParentObjectClasses()))
pathObjects = new ArrayList<>(hierarchy.getSelectionModel().getSelectedObjects());
}
return pathObjects;
// // TODO: Give option to analyse annotations, even when TMA grid is present
// ImageData<BufferedImage> imageData = runner.getImageData();
// TMAGrid tmaGrid = imageData.getHierarchy().getTMAGrid();
// if (tmaGrid != null && tmaGrid.nCores() > 0)
// return PathObjectTools.getTMACoreObjects(imageData.getHierarchy(), false);
// else
// return imageData.getHierarchy().getObjects(null, PathAnnotationObject.class);
}
Use of qupath.lib.objects.PathObject in the project qupath (by qupath).
From the class ExtractRegionCommand, method run.
@Override
public void run() {
QuPathViewer viewer = qupath.getViewer();
ImageServer<BufferedImage> server = null;
if (viewer != null)
server = viewer.getServer();
if (server == null)
return;
List<String> unitOptions = new ArrayList<>();
unitOptions.add(PIXELS_UNIT);
String unit = server.getPixelCalibration().getPixelWidthUnit();
if (unit.equals(server.getPixelCalibration().getPixelHeightUnit()) && !unit.equals(PixelCalibration.PIXEL))
unitOptions.add(unit);
if (!unitOptions.contains(resolutionUnit))
resolutionUnit = PIXELS_UNIT;
ParameterList params = new ParameterList().addDoubleParameter("resolution", "Resolution", resolution, null, "Resolution at which the image will be exported, defined as the 'pixel size' in Resolution units").addChoiceParameter("resolutionUnit", "Resolution unit", resolutionUnit, unitOptions, "Units defining the export resolution; if 'pixels' then the resolution is the same as a downsample value").addBooleanParameter("includeROI", "Include ROI", includeROI, "Include the primary object defining the exported region as an active ROI in ImageJ").addBooleanParameter("includeOverlay", "Include overlay", includeOverlay, "Include any objects overlapping the exported region as ROIs on an ImageJ overlay").addBooleanParameter("doTransforms", "Apply color transforms", doTransforms, "Optionally apply any color transforms when sending the pixels to ImageJ").addBooleanParameter("doZ", "All z-slices", doZ, "Optionally include all slices of a z-stack").addBooleanParameter("doT", "All timepoints", doT, "Optionally include all timepoints of a time series");
// params.setHiddenParameters(unitOptions.size() <= 1, "resolutionUnit");
params.setHiddenParameters(server.nZSlices() == 1, "doZ");
params.setHiddenParameters(server.nTimepoints() == 1, "doT");
if (!Dialogs.showParameterDialog("Send region to ImageJ", params))
return;
// Parse values
resolution = params.getDoubleParameterValue("resolution");
resolutionUnit = (String) params.getChoiceParameterValue("resolutionUnit");
includeROI = params.getBooleanParameterValue("includeROI");
includeOverlay = params.getBooleanParameterValue("includeOverlay");
doTransforms = params.getBooleanParameterValue("doTransforms");
doZ = params.getBooleanParameterValue("doZ");
doT = params.getBooleanParameterValue("doT");
// Calculate downsample
double downsample = resolution;
if (!resolutionUnit.equals(PIXELS_UNIT))
downsample = resolution / (server.getPixelCalibration().getPixelHeight().doubleValue() / 2.0 + server.getPixelCalibration().getPixelWidth().doubleValue() / 2.0);
// Color transforms are (currently) only applied for brightfield images - for fluorescence we always provide everything as unchanged as possible
List<ChannelDisplayInfo> selectedChannels = new ArrayList<>(viewer.getImageDisplay().selectedChannels());
List<ChannelDisplayInfo> channels = doTransforms && !selectedChannels.isEmpty() ? selectedChannels : null;
if (channels != null)
server = ChannelDisplayTransformServer.createColorTransformServer(server, channels);
// Loop through all selected objects
Collection<PathObject> pathObjects = viewer.getHierarchy().getSelectionModel().getSelectedObjects();
if (pathObjects.isEmpty())
pathObjects = Collections.singletonList(viewer.getHierarchy().getRootObject());
List<ImagePlus> imps = new ArrayList<>();
for (PathObject pathObject : pathObjects) {
if (Thread.currentThread().isInterrupted() || IJ.escapePressed())
return;
int width, height;
if (pathObject == null || !pathObject.hasROI()) {
width = server.getWidth();
height = server.getHeight();
} else {
Rectangle bounds = AwtTools.getBounds(pathObject.getROI());
width = bounds.width;
height = bounds.height;
}
RegionRequest region;
ROI roi = pathObject == null ? null : pathObject.getROI();
if (roi == null || PathObjectTools.hasPointROI(pathObject)) {
region = RegionRequest.createInstance(server.getPath(), downsample, 0, 0, server.getWidth(), server.getHeight(), viewer.getZPosition(), viewer.getTPosition());
} else
region = RegionRequest.createInstance(server.getPath(), downsample, roi);
// region = RegionRequest.createInstance(server.getPath(), downsample, pathObject.getROI(), viewer.getZPosition(), viewer.getTPosition());
// Minimum size has been removed (v0.2.0-m4); returned regions should be at least 1x1 pixels
// if (region.getWidth() / downsample < 8 || region.getHeight() / downsample < 8) {
// DisplayHelpers.showErrorMessage("Send region to ImageJ", "The width & height of the extracted image must both be >= 8 pixels");
// continue;
// }
// Calculate required z-slices and time-points
int zStart = doZ ? 0 : region.getZ();
int zEnd = doZ ? server.nZSlices() : region.getZ() + 1;
int tStart = doT ? 0 : region.getT();
int tEnd = doT ? server.nTimepoints() : region.getT() + 1;
long nZ = zEnd - zStart;
long nT = tEnd - tStart;
int bytesPerPixel = server.isRGB() ? 4 : server.getPixelType().getBytesPerPixel() * server.nChannels();
double memory = ((long) width * height * nZ * nT * bytesPerPixel) / (downsample * downsample);
// TODO: Perform calculation based on actual amount of available memory
long availableMemory = GeneralTools.estimateAvailableMemory();
if (memory >= availableMemory * 0.95) {
logger.error("Cannot extract region {} - estimated size is too large (approx. {} MB)", pathObject, GeneralTools.formatNumber(memory / (1024.0 * 1024.0), 2));
Dialogs.showErrorMessage("Send region to ImageJ error", "Selected region is too large to extract - please selected a smaller region or use a higher downsample factor");
continue;
}
if (memory / 1024 / 1024 > 100) {
if (pathObjects.size() == 1 && !Dialogs.showYesNoDialog("Send region to ImageJ", String.format("Attempting to extract this region is likely to require > %.2f MB - are you sure you want to continue?", memory / 1024 / 1024)))
return;
}
// We should switch to the event dispatch thread when interacting with ImageJ
try {
ImagePlus imp;
PathObjectHierarchy hierarchy = viewer.getHierarchy();
OverlayOptions options = viewer.getOverlayOptions();
if (zEnd - zStart > 1 || tEnd - tStart > 1) {
// TODO: Handle overlays
imp = IJTools.extractHyperstack(server, region, zStart, zEnd, tStart, tEnd);
if (includeROI && roi != null) {
Roi roiIJ = IJTools.convertToIJRoi(roi, imp.getCalibration(), region.getDownsample());
imp.setRoi(roiIJ);
}
if (includeOverlay) {
Overlay overlay = new Overlay();
for (int t = tStart; t < tEnd; t++) {
for (int z = zStart; z < zEnd; z++) {
RegionRequest request2 = RegionRequest.createInstance(region.getPath(), region.getDownsample(), region.getX(), region.getY(), region.getWidth(), region.getHeight(), z, t);
var regionPredicate = PathObjectTools.createImageRegionPredicate(request2);
Overlay temp = IJExtension.extractOverlay(hierarchy, request2, options, p -> p != pathObject && regionPredicate.test(p));
if (overlay == null)
overlay = temp;
for (int i = 0; i < temp.size(); i++) {
Roi roiIJ = temp.get(i);
roiIJ.setPosition(-1, z + 1, t + 1);
overlay.add(roiIJ);
}
}
}
if (overlay != null && overlay.size() > 0)
imp.setOverlay(overlay);
}
} else if (includeOverlay)
imp = IJExtension.extractROIWithOverlay(server, pathObject, hierarchy, region, includeROI, options).getImage();
else
imp = IJExtension.extractROIWithOverlay(server, pathObject, null, region, includeROI, options).getImage();
// Set display ranges if we can
if (viewer != null && imp instanceof CompositeImage) {
var availableChannels = viewer.getImageDisplay().availableChannels().stream().filter(c -> c instanceof SingleChannelDisplayInfo).map(c -> (SingleChannelDisplayInfo) c).collect(Collectors.toList());
CompositeImage impComp = (CompositeImage) imp;
if (availableChannels.size() == imp.getNChannels()) {
for (int c = 0; c < availableChannels.size(); c++) {
var channel = availableChannels.get(c);
imp.setPosition(c + 1, 1, 1);
impComp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
}
imp.setPosition(1);
}
} else if (selectedChannels.size() == 1 && imp.getType() != ImagePlus.COLOR_RGB) {
// Setting the display range for non-RGB images can give unexpected results (changing pixel values)
var channel = selectedChannels.get(0);
imp.setDisplayRange(channel.getMinDisplay(), channel.getMaxDisplay());
}
imps.add(imp);
} catch (IOException e) {
Dialogs.showErrorMessage("Send region to ImageJ", e);
return;
}
}
// Show all the images we've got
if (!imps.isEmpty()) {
SwingUtilities.invokeLater(() -> {
boolean batchMode = Interpreter.batchMode;
// Try to start an ImageJ instance, and return if this fails
try {
ImageJ ij = IJExtension.getImageJInstance();
if (ij == null)
return;
ij.setVisible(true);
// Make sure we aren't in batch mode, so that image will display
Interpreter.batchMode = false;
for (ImagePlus imp : imps) {
imp.show();
}
} finally {
Interpreter.batchMode = batchMode;
}
});
}
}
Use of qupath.lib.objects.PathObject in the project qupath (by qupath).
From the class PixelClassifierTraining, method updateTrainingData.
private synchronized ClassifierTrainingData updateTrainingData(Map<PathClass, Integer> labelMap, Collection<ImageData<BufferedImage>> imageDataCollection) throws IOException {
if (imageDataCollection.isEmpty()) {
resetTrainingData();
return null;
}
Map<PathClass, Integer> labels = new LinkedHashMap<>();
boolean hasLockedAnnotations = false;
if (labelMap == null) {
Set<PathClass> pathClasses = new TreeSet<>((p1, p2) -> p1.toString().compareTo(p2.toString()));
for (var imageData : imageDataCollection) {
// Get labels for all annotations
Collection<PathObject> annotations = imageData.getHierarchy().getAnnotationObjects();
for (var annotation : annotations) {
if (isTrainableAnnotation(annotation, true)) {
var pathClass = annotation.getPathClass();
pathClasses.add(pathClass);
// We only use boundary classes for areas
if (annotation.getROI().isArea()) {
var boundaryClass = boundaryStrategy.getBoundaryClass(pathClass);
if (boundaryClass != null)
pathClasses.add(boundaryClass);
}
} else if (isTrainableAnnotation(annotation, false))
hasLockedAnnotations = true;
}
}
int lab = 0;
for (PathClass pathClass : pathClasses) {
Integer temp = Integer.valueOf(lab);
labels.put(pathClass, temp);
lab++;
}
} else {
labels.putAll(labelMap);
}
List<Mat> allFeatures = new ArrayList<>();
List<Mat> allTargets = new ArrayList<>();
for (var imageData : imageDataCollection) {
// Get features & targets for all the tiles that we need
var featureServer = getFeatureServer(imageData);
if (featureServer != null) {
var tiles = featureServer.getTileRequestManager().getAllTileRequests();
for (var tile : tiles) {
var tileFeatures = getTileFeatures(tile.getRegionRequest(), featureServer, boundaryStrategy, labels);
if (tileFeatures != null) {
allFeatures.add(tileFeatures.getFeatures());
allTargets.add(tileFeatures.getTargets());
}
}
} else {
logger.warn("Unable to generate features for {}", imageData);
}
}
// We need at least two classes for anything very meaningful to happen
int nTargets = labels.size();
if (nTargets <= 1) {
logger.warn("Unlocked annotations for at least two classes are required to train a classifier!");
if (hasLockedAnnotations)
logger.warn("Image contains annotations that *could* be used for training, except they are currently locked. Please unlock them if they should be used.");
resetTrainingData();
return null;
}
if (matTraining == null)
matTraining = new Mat();
if (matTargets == null)
matTargets = new Mat();
opencv_core.vconcat(new MatVector(allFeatures.toArray(Mat[]::new)), matTraining);
opencv_core.vconcat(new MatVector(allTargets.toArray(Mat[]::new)), matTargets);
logger.debug("Training data: {} x {}, Target data: {} x {}", matTraining.rows(), matTraining.cols(), matTargets.rows(), matTargets.cols());
if (matTraining.rows() == 0) {
logger.warn("No training data found - if you have training annotations, check the features are compatible with the current image.");
return null;
}
return new ClassifierTrainingData(labels, matTraining, matTargets);
}
Use of qupath.lib.objects.PathObject in the project qupath (by qupath).
From the class QP, method resetClassifications.
/**
* Reset the PathClass for all objects of the specified type in the specified hierarchy.
*
* @param hierarchy
* @param cls
*/
public static void resetClassifications(final PathObjectHierarchy hierarchy, final Class<? extends PathObject> cls) {
if (hierarchy == null)
return;
Collection<PathObject> objects = hierarchy.getObjects(null, cls);
if (objects.isEmpty()) {
logger.warn("No objects to reset classifications!");
return;
}
for (PathObject pathObject : objects) {
if (pathObject.getPathClass() != null)
pathObject.setPathClass(null);
}
hierarchy.fireObjectClassificationsChangedEvent(QP.class, objects);
}
Use of qupath.lib.objects.PathObject in the project qupath (by qupath).
From the class QP, method parsePredicate.
// TODO: Update parsePredicate to something more modern... a proper DSL
@Deprecated
private static Predicate<PathObject> parsePredicate(final String command) throws NoSuchElementException {
String s = command.trim();
if (s.length() == 0)
throw new NoSuchElementException("No command provided!");
Scanner scanner = new Scanner(s);
try {
Map<String, Predicate<Integer>> mapComparison = new HashMap<>();
mapComparison.put(">=", v -> v >= 0);
mapComparison.put("<=", v -> v <= 0);
mapComparison.put(">", v -> v > 0);
mapComparison.put("<", v -> v < 0);
mapComparison.put("=", v -> v == 0);
mapComparison.put("==", v -> v == 0);
mapComparison.put("!=", v -> v != 0);
mapComparison.put("~=", v -> v != 0);
Predicate<PathObject> predicate = null;
Pattern comparePattern = Pattern.compile(">=|<=|==|!=|~=|=|>|<");
Pattern combinePattern = Pattern.compile("and|AND|or|OR");
Pattern notPattern = Pattern.compile("not|NOT");
while (scanner.hasNext()) {
String combine = null;
scanner.reset();
if (predicate != null) {
if (scanner.hasNext(combinePattern))
combine = scanner.next(combinePattern).trim().toUpperCase();
else
throw new NoSuchElementException("Missing combiner (AND, OR) between comparisons!");
}
boolean negate = false;
if (scanner.hasNext(notPattern)) {
negate = true;
scanner.next(notPattern);
}
scanner.useDelimiter(comparePattern);
String measurement = scanner.next().trim();
scanner.reset();
if (!scanner.hasNext(comparePattern))
throw new NoSuchElementException("Missing comparison operator (<, >, <=, >=, ==) for measurement \"" + measurement + "\"");
String comparison = scanner.next(comparePattern).trim();
if (!scanner.hasNextDouble())
throw new NoSuchElementException("Missing comparison value after \"" + measurement + " " + comparison + "\"");
double value = scanner.nextDouble();
Predicate<PathObject> predicateNew = p -> {
double v = p.getMeasurementList().getMeasurementValue(measurement);
return !Double.isNaN(v) && mapComparison.get(comparison).test(Double.compare(p.getMeasurementList().getMeasurementValue(measurement), value));
};
if (negate)
predicateNew = predicateNew.negate();
if (predicate == null) {
predicate = predicateNew;
} else {
if ("AND".equals(combine))
predicate = predicate.and(predicateNew);
else if ("OR".equals(combine))
predicate = predicate.or(predicateNew);
else
throw new NoSuchElementException("Unrecognised combination of predicates: " + combine);
}
}
return predicate;
} finally {
scanner.close();
}
}
Aggregations