Use of qupath.lib.analysis.stats.RunningStatistics in the project qupath (by qupath):
class HaralickFeatures, method computeFeatures.
private void computeFeatures() {
if (matrix == null)
return;
// IJ.log(matrix.toString());
double[] px, py, px_and_y, px_y;
matrix.finalizeMatrix();
int n = matrix.getN();
// Normalize to sum while computing required vectors
px = new double[n];
py = new double[n];
px_and_y = new double[2 * n + 1];
px_y = new double[n];
double mx = 0;
double my = 0;
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
double val = matrix.get(i, j);
px[i] += val;
py[j] += val;
px_and_y[i + j] += val;
px_y[Math.abs(i - j)] += val;
mx += (i + 1) * val;
my += (j + 1) * val;
}
}
// Standard deviations for marginal-probability matrices
double sx = 0;
double sy = 0;
for (int i = 1; i <= n; i++) {
for (int j = 1; j <= n; j++) {
double val = matrix.get(i - 1, j - 1);
sx += (i - mx) * (i - mx) * val;
sy += (j - my) * (j - my) * val;
}
}
sx = Math.sqrt(sx);
sy = Math.sqrt(sy);
// Compute textural features
// Angular second moment (f1)
// Correlation (f3)
// Inverse difference moment (f5)
// Entropy (f9)
double f1 = 0;
double f3 = 0;
double f5 = 0;
double f9 = 0;
// Hxy1 & Hxy2 for (more) entropies
double Hxy1 = 0;
double Hxy2 = 0;
for (int i = 1; i <= n; i++) {
for (int j = 1; j <= n; j++) {
double val = matrix.get(i - 1, j - 1);
double logVal = matrix.getLog(i - 1, j - 1) / LOG2;
f1 += val * val;
f3 += i * j * val;
f5 += val / (1 + (i - j) * (i - j));
f9 -= val * logVal;
double temp = px[i - 1] * py[j - 1];
if (temp != 0) {
double logTemp = Math.log(temp) / LOG2;
Hxy1 -= val * logTemp;
Hxy2 -= temp * logTemp;
}
}
}
double Hxy = f9;
f3 -= mx * my;
f3 /= sx * sy;
// Sum of squares (f4)
double f4 = sx * sx;
// Contrast (f2)
double f2 = 0;
for (int nn = 0; nn < n; nn++) {
f2 += nn * nn * px_y[nn];
}
// Sum average (f6)
// Sum entropy (f8)
double f6 = 0;
double f8 = 0;
for (int i = 2; i <= 2 * n; i++) {
double val = px_and_y[i];
if (val != 0) {
// J double logVal = Math.log(val) / LOG2;
f6 += i * val;
// J
f8 -= val * (Math.log(val) / LOG2);
}
}
// Sum variance (f7)
double f7 = 0;
for (int i = 2; i <= 2 * n; i++) {
// J double val = px_and_y[i];
// f6 rather than f8 in Haralick's original paper... see
// https://github.com/CellProfiler/CellProfiler/blob/master/cellprofiler/cpmath/haralick.py and
// http://xy-27.pythonxy.googlecode.com/hg-history/fec21bbbbd9f0a71cca43858991f4c468c6ce211/src/python/mahotas/PLATLIB/mahotas/features/texture.py
// J
f7 += (i - f6) * (i - f6) * px_and_y[i];
// f7 += (i - f8)*(i - f8)*val;
}
// Difference entropy (f11)
double f11 = 0;
for (int i = 0; i < n; i++) {
double val = px_y[i];
if (val != 0)
// J
f11 -= val * (Math.log(val) / LOG2);
}
// Difference variance (f10)
RunningStatistics px_yStats = getStatistics(px_y);
double f10 = px_yStats.getVariance();
// Hx & Hy for entropies
double Hx = 0;
double Hy = 0;
for (int i = 0; i < n; i++) {
double val = px[i];
if (val != 0)
Hx -= val * Math.log(val) / LOG2;
val = py[i];
if (val != 0)
Hy -= val * Math.log(val) / LOG2;
}
// IJ.log(String.format("%.3f, %.3f, %.3f, %.3f, %.3f, ", Hx, Hy, Hxy, Hxy1, Hxy2));
// Information measures of correlation
double f12 = (Hxy - Hxy1) / Math.max(Hx, Hy);
double f13 = Math.sqrt(1 - Math.exp(-2 * (Hxy2 - Hxy)));
// Agrees with Mahotas
f[0] = f1;
// Agrees
f[1] = f2;
f[2] = f3;
f[3] = f4;
// Agrees
f[4] = f5;
// Agrees (almost?)
f[5] = f6;
// Agrees
f[6] = f7;
// Agrees
f[7] = f8;
// Agrees
f[8] = f9;
// Agrees except for variance definition (I think)
f[9] = f10;
// Agrees
f[10] = f11;
// Agrees (apart from eps)
f[11] = f12;
// Agrees (apart from eps)
f[12] = f13;
}
Use of qupath.lib.analysis.stats.RunningStatistics in the project qupath (by qupath):
class HaralickFeatures, method computeFeaturesJ.
// J alternative function with different mean and SD calculations - to compare with above for performance
private void computeFeaturesJ() {
if (matrix == null)
return;
// IJ.log(matrix.toString());
double[] px, py, px_and_y, px_y;
matrix.finalizeMatrix();
int n = matrix.getN();
// Normalize to sum while computing required vectors
px = new double[n];
py = new double[n];
px_and_y = new double[2 * n + 1];
px_y = new double[n];
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
double val = matrix.get(i, j);
px[i] += val;
py[j] += val;
px_and_y[i + j] += val;
px_y[Math.abs(i - j)] += val;
// J mx += (i + 1) * val;
// J my += (j + 1) * val;
}
}
// Mean values for marginal-probability matrices
// J
double mx = 0;
// J
double my = 0;
for (int i = 0; i < n; i++) {
// J
// J
mx += px[i];
// J
my += py[i];
}
// J
// J there is an implicit cast here - keep an eye on this
mx /= n;
// J
my /= n;
// Standard deviations for marginal-probability matrices
double sx = 0;
double sy = 0;
// J sy = Math.sqrt(sy);
for (int i = 0; i < n; i++) {
// J
// J
sx += (px[i] - mx) * (px[i] - mx);
// J
sy += (py[i] - my) * (py[i] - my);
}
// J
// J
sx = Math.sqrt(sx / (n - 1));
// J
sy = Math.sqrt(sy / (n - 1));
// Compute textural features
// Angular second moment (f1)
// Correlation (f3)
// Inverse difference moment (f5)
// Entropy (f9)
double f1 = 0;
double f3 = 0;
double f5 = 0;
double f9 = 0;
// Hxy1 & Hxy2 for (more) entropies
double Hxy1 = 0;
double Hxy2 = 0;
for (int i = 1; i <= n; i++) {
for (int j = 1; j <= n; j++) {
double val = matrix.get(i - 1, j - 1);
double logVal = matrix.getLog(i - 1, j - 1) / LOG2;
f1 += val * val;
f3 += i * j * val;
f5 += val / (1 + (i - j) * (i - j));
f9 -= val * logVal;
double temp = px[i - 1] * py[j - 1];
if (temp != 0) {
double logTemp = Math.log(temp) / LOG2;
Hxy1 -= val * logTemp;
Hxy2 -= temp * logTemp;
}
}
}
double Hxy = f9;
f3 -= mx * my;
f3 /= sx * sy;
// Sum of squares (f4)
double f4 = sx * sx;
// Contrast (f2)
double f2 = 0;
for (int nn = 0; nn < n; nn++) {
f2 += nn * nn * px_y[nn];
}
// Sum average (f6)
// Sum entropy (f8)
double f6 = 0;
double f8 = 0;
for (int i = 2; i <= 2 * n; i++) {
double val = px_and_y[i];
if (val != 0) {
// J double logVal = Math.log(val) / LOG2;
f6 += i * val;
// J
f8 -= val * (Math.log(val) / LOG2);
}
}
// Sum variance (f7)
double f7 = 0;
for (int i = 2; i <= 2 * n; i++) {
// J double val = px_and_y[i];
// f6 rather than f8 in Haralick's original paper... see
// https://github.com/CellProfiler/CellProfiler/blob/master/cellprofiler/cpmath/haralick.py and
// http://xy-27.pythonxy.googlecode.com/hg-history/fec21bbbbd9f0a71cca43858991f4c468c6ce211/src/python/mahotas/PLATLIB/mahotas/features/texture.py
// J
f7 += (i - f6) * (i - f6) * px_and_y[i];
// f7 += (i - f8)*(i - f8)*val;
}
// Difference entropy (f11)
double f11 = 0;
for (int i = 0; i < n; i++) {
double val = px_y[i];
if (val != 0)
// J
f11 -= val * (Math.log(val) / LOG2);
}
// Difference variance (f10)
RunningStatistics px_yStats = getStatistics(px_y);
double f10 = px_yStats.getVariance();
// Hx & Hy for entropies
double Hx = 0;
double Hy = 0;
for (int i = 0; i < n; i++) {
double val = px[i];
if (val != 0)
Hx -= val * Math.log(val) / LOG2;
val = py[i];
if (val != 0)
Hy -= val * Math.log(val) / LOG2;
}
// IJ.log(String.format("%.3f, %.3f, %.3f, %.3f, %.3f, ", Hx, Hy, Hxy, Hxy1, Hxy2));
// Information measures of correlation
double f12 = (Hxy - Hxy1) / Math.max(Hx, Hy);
double f13 = Math.sqrt(1 - Math.exp(-2 * (Hxy2 - Hxy)));
// Agrees with Mahotas
f[0] = f1;
// Agrees
f[1] = f2;
f[2] = f3;
f[3] = f4;
// Agrees
f[4] = f5;
// Agrees (almost?)
f[5] = f6;
// Agrees
f[6] = f7;
// Agrees
f[7] = f8;
// Agrees
f[8] = f9;
// Agrees except for variance definition (I think)
f[9] = f10;
// Agrees
f[10] = f11;
// Agrees (apart from eps)
f[11] = f12;
// Agrees (apart from eps)
f[12] = f13;
}
Use of qupath.lib.analysis.stats.RunningStatistics in the project qupath (by qupath):
class TMACommands, method createAugmentedTMAGrid.
/**
* Add a new row or column to a TMA grid.
*
* @param hierarchy
* @param selectedCore
* @param type
* @return
*/
private static TMAGrid createAugmentedTMAGrid(final PathObjectHierarchy hierarchy, final TMACoreObject selectedCore, final TMAAddType type) {
TMAGrid grid = hierarchy.getTMAGrid();
// Convert to easier form
boolean addAfter = type == TMAAddType.COLUMN_AFTER || type == TMAAddType.ROW_AFTER;
boolean addColumn = type == TMAAddType.COLUMN_AFTER || type == TMAAddType.COLUMN_BEFORE;
// Try to identify the row/column that we want
int row = -1;
int col = -1;
if (selectedCore != null) {
for (int y = 0; y < grid.getGridHeight(); y++) {
for (int x = 0; x < grid.getGridWidth(); x++) {
if (grid.getTMACore(y, x) == selectedCore) {
if (addAfter) {
row = y + 1;
col = x + 1;
} else {
row = y;
col = x;
}
break;
}
}
}
}
// If we don't have a row or column, choose based on the add type
if (row < 0) {
switch(type) {
case COLUMN_AFTER:
col = grid.getGridWidth();
break;
case COLUMN_BEFORE:
col = 0;
break;
case ROW_AFTER:
row = grid.getGridHeight();
break;
case ROW_BEFORE:
row = 0;
break;
default:
break;
}
}
// Compute the width of the new grid
int newWidth = addColumn ? grid.getGridWidth() + 1 : grid.getGridWidth();
int newHeight = addColumn ? grid.getGridHeight() : grid.getGridHeight() + 1;
// Loop through cores, getting mean widths, heights & displacements
RunningStatistics statsWidth = new RunningStatistics();
RunningStatistics statsHeight = new RunningStatistics();
RunningStatistics statsDx = new RunningStatistics();
RunningStatistics statsDy = new RunningStatistics();
for (int r = 0; r < grid.getGridHeight(); r++) {
TMACoreObject coreColBefore = null;
for (int c = 0; c < grid.getGridWidth(); c++) {
TMACoreObject core = grid.getTMACore(r, c);
if (!core.hasROI())
continue;
statsWidth.addValue(core.getROI().getBoundsWidth());
statsHeight.addValue(core.getROI().getBoundsHeight());
if (coreColBefore != null && coreColBefore.hasROI()) {
statsDx.addValue(core.getROI().getCentroidX() - coreColBefore.getROI().getCentroidX());
}
if (r > 0) {
TMACoreObject coreRowBefore = grid.getTMACore(r - 1, c);
if (coreRowBefore != null && coreRowBefore.hasROI()) {
statsDy.addValue(core.getROI().getCentroidY() - coreRowBefore.getROI().getCentroidY());
}
}
coreColBefore = core;
}
}
double meanWidth = statsWidth.getMean();
double meanHeight = statsHeight.getMean();
double meanDx = statsDx.getMean();
double meanDy = statsDy.getMean();
double diameter = (meanWidth + meanHeight) / 2;
// Create a new list of cores, adding where necessary
List<TMACoreObject> coresNew = new ArrayList<>();
for (int r = 0; r < newHeight; r++) {
for (int c = 0; c < newWidth; c++) {
// Copy existing rows & columns, or create new cores if required
if (addColumn) {
if (c < col) {
coresNew.add(grid.getTMACore(r, c));
} else if (c > col) {
coresNew.add(grid.getTMACore(r, c - 1));
} else if (c == col) {
// Try to get average x & y coordinates between surrounding columns
double x1, x2, y;
if (col == 0) {
x2 = grid.getTMACore(r, c).getROI().getCentroidX();
x1 = x2 - meanDx * 2;
y = grid.getTMACore(r, c).getROI().getCentroidY();
} else if (col == grid.getGridWidth()) {
x1 = grid.getTMACore(r, c - 1).getROI().getCentroidX();
x2 = x1 + meanDx * 2;
y = grid.getTMACore(r, c - 1).getROI().getCentroidY();
} else {
x1 = grid.getTMACore(r, c - 1).getROI().getCentroidX();
x2 = grid.getTMACore(r, c).getROI().getCentroidX();
y = (grid.getTMACore(r, c - 1).getROI().getCentroidY() + grid.getTMACore(r, c).getROI().getCentroidY()) / 2;
}
TMACoreObject coreNew = PathObjects.createTMACoreObject((x1 + x2) / 2, y, diameter, true);
coresNew.add(coreNew);
}
} else {
if (r < row) {
coresNew.add(grid.getTMACore(r, c));
} else if (r > row) {
coresNew.add(grid.getTMACore(r - 1, c));
} else if (r == row) {
// Try to get average x & y coordinates between surrounding columns
double x, y1, y2;
if (row == 0) {
y2 = grid.getTMACore(r, c).getROI().getCentroidY();
y1 = y2 - meanDy * 2;
x = grid.getTMACore(r, c).getROI().getCentroidX();
} else if (row == grid.getGridHeight()) {
y1 = grid.getTMACore(r - 1, c).getROI().getCentroidY();
y2 = y1 + meanDy * 2;
x = grid.getTMACore(r - 1, c).getROI().getCentroidX();
} else {
y1 = grid.getTMACore(r - 1, c).getROI().getCentroidY();
y2 = grid.getTMACore(r, c).getROI().getCentroidY();
x = (grid.getTMACore(r - 1, c).getROI().getCentroidX() + grid.getTMACore(r, c).getROI().getCentroidX()) / 2;
}
TMACoreObject coreNew = PathObjects.createTMACoreObject(x, (y1 + y2) / 2, diameter, true);
coresNew.add(coreNew);
}
}
}
}
// Update with a new TMAGrid
return DefaultTMAGrid.create(coresNew, newWidth);
}
Use of qupath.lib.analysis.stats.RunningStatistics in the project qupath (by qupath):
class TMAExplorer, method createAndShowStage.
private void createAndShowStage() {
Project<BufferedImage> project = qupath.getProject();
entries.clear();
if (project != null) {
// Create an output directory for the images
File dirBaseImageOutput = Projects.getBaseDirectory(project);
dirBaseImageOutput = new File(dirBaseImageOutput, "TMA");
dirBaseImageOutput = new File(dirBaseImageOutput, "images");
if (!dirBaseImageOutput.exists())
dirBaseImageOutput.mkdirs();
Map<String, RunningStatistics> statsMap = new HashMap<>();
for (ProjectImageEntry<BufferedImage> imageEntry : project.getImageList()) {
// Look for data file
if (!imageEntry.hasImageData())
continue;
File dirImageOutput = new File(dirBaseImageOutput, imageEntry.getImageName());
if (!dirImageOutput.exists())
dirImageOutput.mkdirs();
// Read data
ImageData<BufferedImage> imageData;
try {
imageData = imageEntry.readImageData();
} catch (IOException e) {
logger.error("Error reading ImageData for " + imageEntry.getImageName(), e);
continue;
}
TMAGrid tmaGrid = imageData.getHierarchy().getTMAGrid();
if (tmaGrid == null) {
logger.warn("No TMA data for {}", imageEntry.getImageName());
continue;
}
// Figure out downsample value
ImageServer<BufferedImage> server = imageData.getServer();
double downsample = Math.round(5 / server.getPixelCalibration().getAveragedPixelSizeMicrons());
// Read the TMA entries
int counter = 0;
for (TMACoreObject core : tmaGrid.getTMACoreList()) {
counter++;
String name = core.getName();
if (name == null)
name = Integer.toString(counter);
File fileOutput = new File(dirImageOutput, name + ".jpg");
if (!fileOutput.exists()) {
try {
RegionRequest request = RegionRequest.createInstance(server.getPath(), downsample, core.getROI());
BufferedImage img = server.readBufferedImage(request);
ImageIO.write(img, "jpg", fileOutput);
} catch (Exception e) {
logger.error("Unable to write {}", fileOutput.getAbsolutePath());
}
}
var entry = TMAEntries.createDefaultTMAEntry(imageEntry.getImageName(), fileOutput.getAbsolutePath(), null, core.getName(), core.isMissing());
MeasurementList ml = core.getMeasurementList();
for (int i = 0; i < ml.size(); i++) {
String measurement = ml.getMeasurementName(i);
double val = ml.getMeasurementValue(i);
entry.putMeasurement(measurement, val);
if (!Double.isNaN(val)) {
RunningStatistics stats = statsMap.get(measurement);
if (stats == null) {
stats = new RunningStatistics();
statsMap.put(measurement, stats);
}
stats.addValue(val);
}
}
entries.add(entry);
}
try {
server.close();
} catch (Exception e) {
logger.warn("Problem closing server", e);
}
}
// Loop through all entries and perform outlier count
double k = 3;
for (TMAEntry entry : entries) {
int outlierCount = 0;
for (Entry<String, RunningStatistics> statsEntry : statsMap.entrySet()) {
RunningStatistics stats = statsEntry.getValue();
double val = entry.getMeasurementAsDouble(statsEntry.getKey());
if (!(val >= stats.getMean() - stats.getStdDev() * k && val <= stats.getMean() + stats.getStdDev() * k))
outlierCount++;
}
entry.putMeasurement("Outlier count", outlierCount);
}
}
Stage stage = new Stage();
stage.initOwner(qupath.getStage());
TMASummaryViewer summaryViewer = new TMASummaryViewer(stage);
summaryViewer.setTMAEntries(entries);
summaryViewer.getStage().show();
}
Aggregations