Use of qupath.lib.color.ColorDeconvolutionStains in project qupath by qupath.
Class EstimateStainVectorsCommand, method showStainEditor.
@SuppressWarnings("unchecked")
public static ColorDeconvolutionStains showStainEditor(final BufferedImage img, final ColorDeconvolutionStains stains) {
//
int[] buf = img.getRGB(0, 0, img.getWidth(), img.getHeight(), null, 0, img.getWidth());
// int[] rgb = buf;
int[] rgb = EstimateStainVectors.subsample(buf, 10000);
float[] red = ColorDeconvolutionHelper.getRedOpticalDensities(rgb, stains.getMaxRed(), null);
float[] green = ColorDeconvolutionHelper.getGreenOpticalDensities(rgb, stains.getMaxGreen(), null);
float[] blue = ColorDeconvolutionHelper.getBlueOpticalDensities(rgb, stains.getMaxBlue(), null);
// panelPlots.setBorder(BorderFactory.createTitledBorder(null, "Stain vector scatterplots", TitledBorder.CENTER, TitledBorder.TOP));
StainsWrapper stainsWrapper = new StainsWrapper(stains);
Node panelRedGreen = createScatterPanel(new ScatterPlot(red, green, null, rgb), stainsWrapper, AxisColor.RED, AxisColor.GREEN);
Node panelRedBlue = createScatterPanel(new ScatterPlot(red, blue, null, rgb), stainsWrapper, AxisColor.RED, AxisColor.BLUE);
Node panelGreenBlue = createScatterPanel(new ScatterPlot(green, blue, null, rgb), stainsWrapper, AxisColor.GREEN, AxisColor.BLUE);
// GridPane panelPlots = PanelToolsFX.createColumnGrid(panelRedGreen, panelRedBlue, panelGreenBlue);
GridPane panelPlots = new GridPane();
panelPlots.setHgap(10);
panelPlots.add(panelRedGreen, 0, 0);
panelPlots.add(panelRedBlue, 1, 0);
panelPlots.add(panelGreenBlue, 2, 0);
// panelPlots.getChildren().addAll(panelRedGreen, panelRedBlue, panelGreenBlue);
panelPlots.setPadding(new Insets(0, 0, 10, 0));
BorderPane panelSouth = new BorderPane();
TableView<Integer> table = new TableView<>();
table.getItems().setAll(1, 2, 3);
stainsWrapper.addStainListener(new StainChangeListener() {
@Override
public void stainChanged(StainsWrapper stainsWrapper) {
table.refresh();
}
});
TableColumn<Integer, String> colName = new TableColumn<>("Name");
colName.setCellValueFactory(v -> new SimpleStringProperty(stainsWrapper.getStains().getStain(v.getValue()).getName()));
TableColumn<Integer, String> colOrig = new TableColumn<>("Original");
colOrig.setCellValueFactory(v -> new SimpleStringProperty(stainArrayAsString(Locale.getDefault(Category.FORMAT), stainsWrapper.getOriginalStains().getStain(v.getValue()), " | ", 3)));
TableColumn<Integer, String> colCurrent = new TableColumn<>("Current");
colCurrent.setCellValueFactory(v -> new SimpleStringProperty(stainArrayAsString(Locale.getDefault(Category.FORMAT), stainsWrapper.getStains().getStain(v.getValue()), " | ", 3)));
TableColumn<Integer, String> colAngle = new TableColumn<>("Angle");
colAngle.setCellValueFactory(v -> {
return new SimpleStringProperty(GeneralTools.formatNumber(StainVector.computeAngle(stainsWrapper.getOriginalStains().getStain(v.getValue()), stainsWrapper.getStains().getStain(v.getValue())), 2));
});
// new SimpleStringProperty(stainsWrapper.getStains().getStain(v.getValue()).arrayAsString(", ", 3)));
table.getColumns().addAll(colName, colOrig, colCurrent, colAngle);
table.setColumnResizePolicy(TableView.UNCONSTRAINED_RESIZE_POLICY);
table.setPrefHeight(120);
// // Fix first & preferred column sizes
// int widthName = 0, widthStain = 0;
// for (int row = 0; row < table.getRowCount(); row++) {
// TableCellRenderer renderer = table.getCellRenderer(row, 0);
// Component comp = table.prepareRenderer(renderer, row, 0);
// widthName = Math.max(comp.getPreferredSize().width, widthName);
//
// renderer = table.getCellRenderer(row, 1);
// comp = table.prepareRenderer(renderer, row, 1);
// widthStain = Math.max(comp.getPreferredSize().width, widthStain);
// renderer = table.getCellRenderer(row, 2);
// comp = table.prepareRenderer(renderer, row, 2);
// widthStain = Math.max(comp.getPreferredSize().width, widthStain);
// }
// table.getColumnModel().getColumn(0).setMaxWidth(widthName + 10);
// table.getColumnModel().getColumn(0).setPreferredWidth(widthName + 10);
// table.getColumnModel().getColumn(1).setPreferredWidth(widthStain + 20);
// table.getColumnModel().getColumn(2).setPreferredWidth(widthStain + 20);
// Create auto detection parameters
ParameterList params = new ParameterList()
.addDoubleParameter("minStainOD", "Min channel OD", 0.05, "", "Minimum staining OD - pixels with a lower OD in any channel (RGB) are ignored (default = 0.05)")
.addDoubleParameter("maxStainOD", "Max total OD", 1., "", "Maximum staining OD - more densely stained pixels are ignored (default = 1)")
.addDoubleParameter("ignorePercentage", "Ignore extrema", 1., "%", "Percentage of extreme pixels to ignore, to improve robustness in the presence of noise/other artefacts (default = 1)")
.addBooleanParameter("checkColors", "Exclude unrecognised colors (H&E only)", false, "Exclude unexpected colors (e.g. green) that are likely to be caused by artefacts and not true staining");
// .addDoubleParameter("ignorePercentage", "Ignore extrema", 1., "%", 0, 20, "Percentage of extreme pixels to ignore, to improve robustness in the presence of noise/other artefacts");
Button btnAuto = new Button("Auto");
btnAuto.setOnAction(e -> {
double minOD = params.getDoubleParameterValue("minStainOD");
double maxOD = params.getDoubleParameterValue("maxStainOD");
double ignore = params.getDoubleParameterValue("ignorePercentage");
// Only accept if H&E
boolean checkColors = params.getBooleanParameterValue("checkColors") && stainsWrapper.getOriginalStains().isH_E();
ignore = Math.max(0, Math.min(ignore, 100));
// ColorDeconvolutionStains stains = estimateStains(imgFinal, stainsWrapper.getStains(), minOD, maxOD, ignore);
try {
ColorDeconvolutionStains stainsNew = EstimateStainVectors.estimateStains(img, stainsWrapper.getStains(), minOD, maxOD, ignore, checkColors);
stainsWrapper.setStains(stainsNew);
} catch (Exception e2) {
Dialogs.showErrorMessage("Estimate stain vectors", e2);
}
});
ParameterPanelFX panelParams = new ParameterPanelFX(params);
// panelParams.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
BorderPane panelAuto = new BorderPane();
// panelAuto.setBorder(BorderFactory.createTitledBorder("Auto detect"));
panelAuto.setCenter(panelParams.getPane());
panelAuto.setBottom(btnAuto);
// JScrollPane scrollPane = new JScrollPane(table);
// JPanel panelTable = new JPanel(new BorderLayout());
// panelTable.add(scrollPane, BorderLayout.CENTER);
// // JTextArea textInstructions = new JTextArea();
// // textInstructions.setWrapStyleWord(true);
// // textInstructions.setLineWrap(true);
// // textInstructions.setText(
// // "Viewer for manually and automatically adjusting stain vectors used for stain separation.\n\n" +
// // "Each stain vector is 3 values describing the red, green and blue components that define the colour of each " +
// // "stain (e.g. hematoxylin, DAB, eosin). The scatterplots show how these relate to pixel colours for each " +
// // "combination of red, green and blue.\n\n" +
// // "'Good' stain vectors should point along the edges of the scattered points, ignoring any artefacts resulting from " +
// // "pixels that don't belong to normal staining patterns."
// // );
// // panelTable.add(new JScrollPane(textInstructions), BorderLayout.SOUTH);
// panelTable.setBorder(BorderFactory.createTitledBorder("Stain vectors"));
panelSouth.setCenter(new TitledPane("Stain vectors", table));
panelSouth.setBottom(new TitledPane("Auto detect", panelAuto));
BorderPane panelMain = new BorderPane();
panelMain.setCenter(panelPlots);
panelMain.setBottom(panelSouth);
if (Dialogs.showConfirmDialog("Visual Stain Editor", panelMain)) {
return stainsWrapper.getStains();
} else {
stainsWrapper.resetStains();
return stainsWrapper.getStains();
}
}
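For orientation, a minimal usage sketch (not taken from the QuPath source): it assumes an RGB BufferedImage img has already been read from the server and an ImageData imageData is in scope, and simply applies whichever stains the user accepts.
// Illustrative sketch only; 'img' and 'imageData' are assumed to exist in the calling code.
ColorDeconvolutionStains defaults = ColorDeconvolutionStains.makeDefaultColorDeconvolutionStains(DefaultColorDeconvolutionStains.H_E);
ColorDeconvolutionStains edited = EstimateStainVectorsCommand.showStainEditor(img, defaults);
// The dialog returns either the edited stains, or the (reset) originals if the user cancelled
if (!defaults.equals(edited))
imageData.setColorDeconvolutionStains(edited);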
Use of qupath.lib.color.ColorDeconvolutionStains in project qupath by qupath.
Class EstimateStainVectorsCommand, method promptToEstimateStainVectors.
public static void promptToEstimateStainVectors(ImageData<BufferedImage> imageData) {
if (imageData == null) {
Dialogs.showNoImageError(TITLE);
return;
}
if (imageData == null || !imageData.isBrightfield() || imageData.getServer() == null || !imageData.getServer().isRGB()) {
Dialogs.showErrorMessage(TITLE, "No brightfield, RGB image selected!");
return;
}
ColorDeconvolutionStains stains = imageData.getColorDeconvolutionStains();
if (stains == null || !stains.getStain(3).isResidual()) {
Dialogs.showErrorMessage(TITLE, "Sorry, stain editing is only possible for brightfield, RGB images with 2 stains");
return;
}
PathObject pathObject = imageData.getHierarchy().getSelectionModel().getSelectedObject();
ROI roi = pathObject == null ? null : pathObject.getROI();
if (roi == null)
roi = ROIs.createRectangleROI(0, 0, imageData.getServer().getWidth(), imageData.getServer().getHeight(), ImagePlane.getDefaultPlane());
double downsample = Math.max(1, Math.sqrt((roi.getBoundsWidth() * roi.getBoundsHeight()) / MAX_PIXELS));
RegionRequest request = RegionRequest.createInstance(imageData.getServerPath(), downsample, roi);
BufferedImage img = null;
try {
img = imageData.getServer().readBufferedImage(request);
} catch (IOException e) {
Dialogs.showErrorMessage("Estimate stain vectors", e);
logger.error("Unable to obtain pixels for " + request.toString(), e);
}
// Apply small amount of smoothing to reduce compression artefacts
img = EstimateStainVectors.smoothImage(img);
// Check modes for background
int[] rgb = img.getRGB(0, 0, img.getWidth(), img.getHeight(), null, 0, img.getWidth());
int[] rgbMode = EstimateStainVectors.getModeRGB(rgb);
int rMax = rgbMode[0];
int gMax = rgbMode[1];
int bMax = rgbMode[2];
// Check if the background values may need to be changed
if (rMax != stains.getMaxRed() || gMax != stains.getMaxGreen() || bMax != stains.getMaxBlue()) {
DialogButton response = Dialogs.showYesNoCancelDialog(TITLE, String.format("Modal RGB values %d, %d, %d do not match current background values - do you want to use the modal values?", rMax, gMax, bMax));
if (response == DialogButton.CANCEL)
return;
else if (response == DialogButton.YES) {
stains = stains.changeMaxValues(rMax, gMax, bMax);
imageData.setColorDeconvolutionStains(stains);
}
}
ColorDeconvolutionStains stainsUpdated = null;
logger.info("Requesting region for stain vector editing: ", request);
try {
stainsUpdated = showStainEditor(img, stains);
} catch (Exception e) {
Dialogs.showErrorMessage(TITLE, "Error with stain estimation: " + e.getLocalizedMessage());
logger.error("{}", e.getLocalizedMessage(), e);
// JOptionPane.showMessageDialog(qupath.getFrame(), "Error with stain estimation: " + e.getLocalizedMessage(), "Estimate stain vectors", JOptionPane.ERROR_MESSAGE, null);
return;
}
if (!stains.equals(stainsUpdated)) {
String suggestedName;
String collectiveNameBefore = stainsUpdated.getName();
if (collectiveNameBefore.endsWith("default"))
suggestedName = collectiveNameBefore.substring(0, collectiveNameBefore.lastIndexOf("default")) + "estimated";
else
suggestedName = collectiveNameBefore;
String newName = Dialogs.showInputDialog(TITLE, "Set name for stain vectors", suggestedName);
if (newName == null)
return;
if (!newName.isBlank())
stainsUpdated = stainsUpdated.changeName(newName);
imageData.setColorDeconvolutionStains(stainsUpdated);
}
}
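As a quick illustration of the downsample heuristic above, Math.max(1, Math.sqrt(area / MAX_PIXELS)), a standalone sketch; the maxPixels value and the ROI bounds used here are placeholders for the example, not the constants from the class.
// Worked example of the downsample calculation; maxPixels and the bounds are assumed values.
long maxPixels = 4_000_000L;
double boundsWidth = 40_000, boundsHeight = 30_000; // e.g. a whole-slide ROI
double downsample = Math.max(1, Math.sqrt((boundsWidth * boundsHeight) / maxPixels));
// sqrt(1.2e9 / 4e6) = sqrt(300) ~ 17.3, so pixels are requested at roughly 1/17 of full resolution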
Use of qupath.lib.color.ColorDeconvolutionStains in project qupath by qupath.
Class PathIO, method readImageDataSerialized.
@SuppressWarnings("unchecked")
private static <T> ImageData<T> readImageDataSerialized(final InputStream stream, ImageData<T> imageData, ImageServer<T> server, Class<T> cls) throws IOException {
long startTime = System.currentTimeMillis();
Locale locale = Locale.getDefault(Category.FORMAT);
boolean localeChanged = false;
try (ObjectInputStream inStream = new ObjectInputStream(new BufferedInputStream(stream))) {
ServerBuilder<T> serverBuilder = null;
PathObjectHierarchy hierarchy = null;
ImageData.ImageType imageType = null;
ColorDeconvolutionStains stains = null;
Workflow workflow = null;
Map<String, Object> propertyMap = null;
String firstLine = inStream.readUTF();
// int versionNumber = -1;
if (!firstLine.startsWith("Data file version")) {
logger.error("Input stream does not contain valid QuPath data!");
}
// else {
// // Could try to parse version number... although frankly, at this time, we don't really care...
// try {
// versionNumber = NumberFormat.getInstance(Locale.US).parse(firstLine.substring("Data file version".length()).trim()).intValue();
// } catch (Exception e) {
// logger.warn("Unable to parse version number from {}", firstLine);
// }
// }
String serverString = (String) inStream.readObject();
// Don't log warnings if we are provided with a server
serverBuilder = extractServerBuilder(serverString, server == null);
while (true) {
// logger.debug("Starting read: " + inStream.available());
try {
// Try to read a relevant object from the stream
Object input = inStream.readObject();
logger.debug("Read: {}", input);
// If we have a Locale, then set it
if (input instanceof Locale) {
if (input != locale) {
Locale.setDefault(Category.FORMAT, (Locale) input);
localeChanged = true;
}
} else if (input instanceof PathObjectHierarchy)
hierarchy = (PathObjectHierarchy) input;
else if (input instanceof ImageData.ImageType)
imageType = (ImageData.ImageType) input;
else if (input instanceof String && "EOF".equals(input)) {
// else if ("EOF".equals(input)) {
break;
// }
} else if (input instanceof ColorDeconvolutionStains)
stains = (ColorDeconvolutionStains) input;
else if (input instanceof Workflow)
workflow = (Workflow) input;
else if (input instanceof Map)
propertyMap = (Map<String, Object>) input;
else if (input == null) {
logger.debug("Null object will be skipped");
} else
logger.warn("Unsupported object of class {} will be skipped: {}", input.getClass().getName(), input);
} catch (ClassNotFoundException e) {
logger.error("Unable to find class: " + e.getLocalizedMessage(), e);
} catch (EOFException e) {
// Try to recover from EOFExceptions - we may already have enough info
logger.error("Reached end of file...");
if (hierarchy == null)
logger.error(e.getLocalizedMessage(), e);
break;
}
}
// Create an entirely new ImageData if necessary
var existingBuilder = imageData == null || imageData.getServer() == null ? null : imageData.getServer().getBuilder();
if (imageData == null || !Objects.equals(serverBuilder, existingBuilder)) {
// Create a new server if we need to
if (server == null) {
try {
server = serverBuilder.build();
} catch (Exception e) {
logger.error(e.getLocalizedMessage());
}
if (server == null) {
logger.error("Warning: Unable to build server with " + serverBuilder);
// throw new RuntimeException("Warning: Unable to create server for path " + serverPath);
}
}
// TODO: Make this less clumsy... but for now we need to ensure we have a fully-initialized hierarchy (which deserialization alone doesn't achieve)
PathObjectHierarchy hierarchy2 = new PathObjectHierarchy();
hierarchy2.setHierarchy(hierarchy);
hierarchy = hierarchy2;
imageData = new ImageData<>(server, hierarchy, imageType);
} else {
if (imageType != null)
imageData.setImageType(imageType);
// Set the new hierarchy
if (hierarchy != null)
imageData.getHierarchy().setHierarchy(hierarchy);
}
// Set the other properties we have just read
if (workflow != null) {
imageData.getHistoryWorkflow().clear();
imageData.getHistoryWorkflow().addSteps(workflow.getSteps());
}
if (stains != null) {
imageData.setColorDeconvolutionStains(stains);
}
if (propertyMap != null) {
for (Entry<String, Object> entry : propertyMap.entrySet()) imageData.setProperty(entry.getKey(), entry.getValue());
}
long endTime = System.currentTimeMillis();
// if (hierarchy == null) {
// logger.error(String.format("%s does not contain a valid QUPath object hierarchy!", file.getAbsolutePath()));
// return null;
// }
logger.debug(String.format("Hierarchy with %d object(s) read in %.2f seconds", hierarchy.nObjects(), (endTime - startTime) / 1000.));
} catch (ClassNotFoundException e1) {
logger.warn("Class not found reading image data", e1);
} finally {
if (localeChanged)
Locale.setDefault(Category.FORMAT, locale);
}
return imageData;
}
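The loop above reads serialized objects in arbitrary order, dispatches on their runtime type, and stops at the "EOF" marker string. Below is a hedged sketch of what a matching writer could look like, inferred purely from this reader; the real PathIO serialization code may differ in ordering and details, and outputStream, serverBuilderJson and imageData are assumed to be in scope.
// Inferred counterpart of the reader above (illustrative only - not the actual PathIO writer).
try (ObjectOutputStream out = new ObjectOutputStream(new BufferedOutputStream(outputStream))) {
out.writeUTF("Data file version 3"); // version string is a placeholder
out.writeObject(serverBuilderJson); // server description String, read back via extractServerBuilder
out.writeObject(Locale.getDefault(Category.FORMAT));
out.writeObject(imageData.getImageType());
out.writeObject(imageData.getColorDeconvolutionStains());
out.writeObject(imageData.getHierarchy());
// workflow steps and the property map would be written here in the same way
out.writeObject("EOF"); // marker the reader stops at
}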
Use of qupath.lib.color.ColorDeconvolutionStains in project qupath by qupath.
Class GuiTools, method estimateImageType.
/**
* Make a semi-educated guess at the image type of a PathImageServer.
*
* @param server the image server whose type should be estimated
* @param imgThumbnail thumbnail for the image; this is now a required parameter (in versions <= 0.1.2 it was optional)
*
* @return the estimated image type
*/
public static ImageData.ImageType estimateImageType(final ImageServer<BufferedImage> server, final BufferedImage imgThumbnail) {
if (!server.isRGB())
return ImageData.ImageType.FLUORESCENCE;
BufferedImage img = imgThumbnail;
// BufferedImage img;
// if (imgThumbnail == null)
// img = server.getBufferedThumbnail(220, 220, 0);
// else {
// img = imgThumbnail;
// // Rescale if necessary
// if (img.getWidth() * img.getHeight() > 400*400) {
// imgThumbnail.getS
// }
// }
int w = img.getWidth();
int h = img.getHeight();
int[] rgb = img.getRGB(0, 0, w, h, null, 0, w);
long rSum = 0;
long gSum = 0;
long bSum = 0;
int nDark = 0;
int nLight = 0;
int n = 0;
int darkThreshold = 25;
int lightThreshold = 220;
for (int v : rgb) {
int r = ColorTools.red(v);
int g = ColorTools.green(v);
int b = ColorTools.blue(v);
if (r < darkThreshold && g < darkThreshold && b < darkThreshold)
nDark++;
else if (r > lightThreshold && g > lightThreshold && b > lightThreshold)
nLight++;
else {
n++;
rSum += r;
gSum += g;
bSum += b;
}
}
if (nDark == 0 && nLight == 0)
return ImageData.ImageType.OTHER;
// If we have more dark than light pixels, assume fluorescence
if (nDark >= nLight)
return ImageData.ImageType.FLUORESCENCE;
if (n == 0) {
logger.warn("Unable to estimate brightfield stains (no stained pixels found)");
return ImageData.ImageType.BRIGHTFIELD_OTHER;
}
// Color color = new Color(
// (int)(rSum/n + .5),
// (int)(gSum/n + .5),
// (int)(bSum/n + .5));
// logger.debug("Color: " + color.toString());
// Compare optical density vector angles with the defaults for hematoxylin, eosin & DAB
ColorDeconvolutionStains stainsH_E = ColorDeconvolutionStains.makeDefaultColorDeconvolutionStains(DefaultColorDeconvolutionStains.H_E);
double rOD = ColorDeconvolutionHelper.makeOD(rSum / n, stainsH_E.getMaxRed());
double gOD = ColorDeconvolutionHelper.makeOD(gSum / n, stainsH_E.getMaxGreen());
double bOD = ColorDeconvolutionHelper.makeOD(bSum / n, stainsH_E.getMaxBlue());
StainVector stainMean = StainVector.createStainVector("Mean Stain", rOD, gOD, bOD);
double angleH = StainVector.computeAngle(stainMean, stainsH_E.getStain(1));
double angleE = StainVector.computeAngle(stainMean, stainsH_E.getStain(2));
ColorDeconvolutionStains stainsH_DAB = ColorDeconvolutionStains.makeDefaultColorDeconvolutionStains(DefaultColorDeconvolutionStains.H_DAB);
double angleDAB = StainVector.computeAngle(stainMean, stainsH_DAB.getStain(2));
// For H&E staining, eosin is expected to predominate... if it doesn't, assume H-DAB
logger.debug("Angle hematoxylin: " + angleH);
logger.debug("Angle eosin: " + angleE);
logger.debug("Angle DAB: " + angleDAB);
if (angleDAB < angleE || angleH < angleE) {
logger.info("Estimating H-DAB staining");
return ImageData.ImageType.BRIGHTFIELD_H_DAB;
} else {
logger.info("Estimating H & E staining");
return ImageData.ImageType.BRIGHTFIELD_H_E;
}
}
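The decision above compares optical densities and stain vector angles rather than raw RGB values. As a quick numeric check, assuming the usual definition OD = log10(max / value), a channel value of 100 against a background maximum of 255 corresponds to an OD of roughly 0.41:
// Worked example of the optical density conversion (assumes OD = log10(max / value)).
double value = 100, max = 255;
double od = Math.log10(max / value); // log10(2.55) ~ 0.407
// ColorDeconvolutionHelper.makeOD(100, 255) should give approximately the same result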
Use of qupath.lib.color.ColorDeconvolutionStains in project qupath by qupath.
Class TestGeneralTools, method test_parseArgStringValues.
@Test
public void test_parseArgStringValues() {
// Generate some Strings to parse
ColorDeconvolutionStains stains = ColorDeconvolutionStains.makeDefaultColorDeconvolutionStains(DefaultColorDeconvolutionStains.H_E);
String argsStains = ColorDeconvolutionStains.getColorDeconvolutionStainsAsString(stains, 3);
String argsStains2 = "{\"Name\" : \"H-DAB default\", \"Stain 1\" : \"Hematoxylin\", \"Values 1\" : \"0.65111 0.70119 0.29049 \", \"Stain 2\" : \"DAB\", \"Values 2\" : \"0.26917 0.56824 0.77759 \", \"Background\" : \" 255 255 255 \"}";
String argsDetection = "{\"detectionImageBrightfield\": \"Hematoxylin OD\", \"requestedPixelSizeMicrons\": 0.5, \"backgroundRadiusMicrons\": 8.0, \"medianRadiusMicrons\": 0.0, \"sigmaMicrons\": 1.5, \"minAreaMicrons\": 10.0, \"maxAreaMicrons\": 400.0, \"threshold\": 0.1, \"maxBackground\": 2.0, \"watershedPostProcess\": true, \"excludeDAB\": false, \"cellExpansionMicrons\": 5.0, \"includeNuclei\": true, \"smoothBoundaries\": true, \"makeMeasurements\": true}";
// Compare the parsed maps
Map<String, String> mapLegacy = parseArgStringValuesLegacy(argsStains);
Map<String, String> mapCurrent = GeneralTools.parseArgStringValues(argsStains);
assertEquals(mapLegacy, mapCurrent);
assertFalse(mapCurrent.isEmpty());
mapLegacy = parseArgStringValuesLegacy(argsStains2);
mapCurrent = GeneralTools.parseArgStringValues(argsStains2);
assertEquals(mapLegacy, mapCurrent);
assertFalse(mapCurrent.isEmpty());
mapLegacy = parseArgStringValuesLegacy(argsDetection);
mapCurrent = GeneralTools.parseArgStringValues(argsDetection);
assertEquals(mapLegacy, mapCurrent);
assertFalse(mapCurrent.isEmpty());
assertTrue(GeneralTools.parseArgStringValues(null).isEmpty());
// Check that we can handle newlines (before <= 0.1.2 we couldn't...)
String argsNewlines = argsDetection.replace(",", "," + System.lineSeparator());
Map<String, String> mapCurrentNewlines = GeneralTools.parseArgStringValues(argsNewlines);
assertEquals(mapCurrentNewlines, mapCurrent);
}
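For reference, a small illustrative assertion in the same style (not part of the original test); the exact quote- and whitespace-handling of parseArgStringValues is assumed here rather than taken from its documentation.
// Illustrative only - assumes parseArgStringValues strips the surrounding quotes from keys and values.
Map<String, String> map = GeneralTools.parseArgStringValues("{\"Stain 1\" : \"Hematoxylin\"}");
assertTrue(map.containsKey("Stain 1"));
assertEquals("Hematoxylin", map.get("Stain 1"));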