Example usage of org.apache.commons.math3.stat.descriptive.DescriptiveStatistics in the project nabl (by metaborg): class StatixGenerate, method run().
/**
 * Generates random terms from the Statix specification in {@code file},
 * prints each generated term, and logs hit/miss search statistics.
 *
 * @param file path of the Statix specification resource to generate from
 * @throws MetaborgException    if resolving the specification resource fails
 * @throws InterruptedException if term generation is interrupted
 */
public void run(String file) throws MetaborgException, InterruptedException {
    final FileObject resource = STX.S.resolve(file);
    // Default formatter; replaced below by the language's pretty-printer when available.
    TermFormatter tf = ITerm::toString;
    try {
        final ILanguageImpl lang = STX.cli.loadLanguage(STX.project.location());
        final IContext context = STX.S.contextService.get(resource, STX.project, lang);
        tf = StatixGenerator.pretty(STX.S, context, "pp-generated");
    } catch (MetaborgException e) {
        // ignore: pretty-printing is best-effort; the ITerm::toString fallback above is used
    }
    // Effectively-final copy so the formatter can be captured by the lambdas below.
    final TermFormatter _tf = tf;
    final Function1<SearchState, String> pretty = (s) -> _tf.format(project(VAR, s));
    // Term-size statistics for successful (hit) and failed (miss) search nodes.
    final DescriptiveStatistics hitStats = new DescriptiveStatistics();
    final DescriptiveStatistics missStats = new DescriptiveStatistics();
    // Progress printer on stderr; after each full line it prints the hit ratio and a summary.
    final StreamProgressPrinter progress = new StreamProgressPrinter(System.err, 80, out -> {
        long hits = hitStats.getN();
        long all = hits + missStats.getN();
        out.println(" " + hits + "/" + all + " " + summary(hitStats));
    });
    final SearchLogger<SearchState, SearchState> searchLog = new SearchLogger<SearchState, SearchState>() {

        @Override
        public void init(long seed, SearchStrategy<SearchState, SearchState> strategy, Iterable<IConstraint> constraints) {
            log.info("seed {}", seed);
            log.info("strategy {}", strategy);
            log.info("constraints {}", constraints);
        }

        @Override
        public void success(SearchNode<SearchState> n) {
            progress.step('+');
            addSize(n.output(), hitStats);
            logSuccess(log, Level.Debug, n, pretty);
        }

        @Override
        public void failure(SearchNodes<?> nodes) {
            progress.step('.');
            // A failed node carries no state of its own; measure the parent's state if present.
            SearchNode<?> parentNode = nodes.parent();
            if (parentNode != null && parentNode.output() instanceof SearchState) {
                addSize((SearchState) parentNode.output(), missStats);
            }
            logFailure(log, Level.Debug, nodes, pretty);
        }

        // Records the unifier size of the projected focus variable, when that size is finite.
        private void addSize(SearchState s, DescriptiveStatistics stats) {
            s.state().unifier().size(project(VAR, s)).ifFinite(size -> {
                stats.addValue(size.doubleValue());
            });
        }
    };
    final StatixGenerator statixGen = new StatixGenerator(STX.S, STX.context, resource);
    // Paret.addFragments(statixGen.spec());
    final Spec spec = statixGen.spec();
    final RandomTermGenerator rtg = new RandomTermGenerator(spec, statixGen.constraint(), new Paret(spec).search(), searchLog);
    // NOTE(review): success(...) is also invoked here for every node in the result
    // stream, in addition to any callbacks the generator itself makes — confirm
    // nodes are not double-counted in hitStats.
    final Stream<SearchState> resultStream = rtg.apply().nodes().map(sn -> {
        searchLog.success(sn);
        return sn.output();
    });
    log.info("Generating random terms.");
    // Force the lazy stream, keeping at most COUNT results.
    final List<SearchState> results = Lists.newArrayList(resultStream.limit(COUNT).iterator());
    progress.done();
    results.forEach(s -> {
        System.out.println(pretty.apply(s));
    });
    log.info("Generated {} random terms.", results.size());
    logStatsInfo("hits", hitStats);
    logStatsInfo("misses", missStats);
}
Example usage of org.apache.commons.math3.stat.descriptive.DescriptiveStatistics in the project MPW (by shineangelic): class BlocksActivity, method doApacheMath().
/**
 * Computes summary statistics over the time intervals (in milliseconds)
 * between consecutive maturity events, plus the still-open interval from the
 * most recent event to "now".
 *
 * @param maturi maturity events, newest first (reversed internally to
 *               chronological order)
 * @return a {@link SummaryStatistics} over the intervals; empty when
 *         {@code maturi} is empty
 */
private SummaryStatistics doApacheMath(List<Matured> maturi) {
    SummaryStatistics stats = new SummaryStatistics();
    // Guard: an empty list would otherwise throw on revElements.get(0)
    // and on the intervals[maturi.size() - 1] write below.
    if (maturi.isEmpty()) {
        return stats;
    }
    ArrayList<Matured> revElements = new ArrayList<>(maturi);
    Collections.reverse(revElements);
    long[] intervals = new long[maturi.size()];
    Date prevDate = revElements.get(0).getTimestamp();
    int i = 0;
    // Start at 1: the first interval is between elements 0 and 1.
    for (int t = 1; t < revElements.size(); t++) {
        Date curDate = revElements.get(t).getTimestamp();
        intervals[i++] = curDate.getTime() - prevDate.getTime();
        prevDate = curDate;
    }
    // The last slot holds the open interval from the newest event up to now.
    intervals[maturi.size() - 1] = (new Date().getTime() - revElements.get(maturi.size() - 1).getTimestamp().getTime());
    // Feed every interval into the SummaryStatistics accumulator.
    for (long interval : intervals) {
        stats.addValue(interval);
    }
    return stats;
}
Example usage of org.apache.commons.math3.stat.descriptive.DescriptiveStatistics in the project qupath (by qupath): class TestOpenCVTools, method testPercentiles().
@Test
public void testPercentiles() {
    // Mix of negative, zero and positive lower bounds against assorted upper bounds.
    int[] lowerBounds = { -2, 0, 1 };
    int[] upperBounds = { 1, 10, 101 };
    opencv_core.setRNGSeed(100);
    for (int lower : lowerBounds) {
        for (int upper : upperBounds) {
            // Consecutive integers lower..upper as doubles; the median is the midpoint.
            var expected = IntStream.rangeClosed(lower, upper).asDoubleStream().toArray();
            var reference = new DescriptiveStatistics(expected);
            var mat = new Mat(expected);
            opencv_core.randShuffle(mat);
            assertEquals(reference.getPercentile(50), OpenCVTools.median(mat));
            assertEquals((lower + upper) / 2.0, OpenCVTools.median(mat));
            assertEquals(upper, OpenCVTools.maximum(mat));
            assertEquals(lower, OpenCVTools.minimum(mat));
            assertArrayEquals(new double[] { lower, reference.getPercentile(50), upper }, OpenCVTools.percentiles(mat, 1e-9, 50, 100));
            // Pad with NaNs: the statistics are expected to ignore them entirely.
            var padded = new double[expected.length + 30];
            Arrays.fill(padded, Double.NaN);
            System.arraycopy(expected, 0, padded, 0, expected.length);
            mat.close();
            mat = new Mat(padded);
            opencv_core.randShuffle(mat);
            assertEquals(reference.getPercentile(50), OpenCVTools.median(mat));
            assertEquals((lower + upper) / 2.0, OpenCVTools.median(mat));
            assertEquals(upper, OpenCVTools.maximum(mat));
            assertEquals(lower, OpenCVTools.minimum(mat));
            assertArrayEquals(new double[] { lower, reference.getPercentile(50), upper }, OpenCVTools.percentiles(mat, 1e-9, 50, 100));
            mat.close();
        }
    }
}
Example usage of org.apache.commons.math3.stat.descriptive.DescriptiveStatistics in the project qupath (by qupath): class TestImageOps, method testNormalize().
@Test
public void testNormalize() {
    try (var scope = new PointerScope()) {
        opencv_core.setRNGSeed(100);
        int type = opencv_core.CV_32FC1;
        int rows = 25;
        int cols = 30;
        // Four constant channels with distinct means, merged into one multichannel image.
        var channels = Arrays.asList(
                new Mat(rows, cols, type, Scalar.all(1)),
                new Mat(rows, cols, type, Scalar.all(5)),
                new Mat(rows, cols, type, Scalar.all(2)),
                new Mat(rows, cols, type, Scalar.all(10)));
        var merged = OpenCVTools.mergeChannels(channels, null);
        // Perturb the constant channels so variances are nonzero.
        OpenCVTools.addNoise(merged, 10, 5);
        double tolerance = 1e-3;
        // Global normalization: the image as a whole should have mean 0, stddev 1...
        var globalNorm = ImageOps.Normalize.zeroMeanUnitVariance(false).apply(merged.clone());
        var pixels = OpenCVTools.extractDoubles(globalNorm);
        var summary = new DescriptiveStatistics(pixels);
        assertEquals(summary.getMean(), 0.0, tolerance);
        assertEquals(summary.getStandardDeviation(), 1.0, tolerance);
        // ...but each individual channel should NOT, since normalization was joint.
        for (var channel : OpenCVTools.splitChannels(globalNorm)) {
            pixels = OpenCVTools.extractDoubles(channel);
            summary = new DescriptiveStatistics(pixels);
            assertNotEquals(summary.getMean(), 0.0, tolerance);
            assertNotEquals(summary.getStandardDeviation(), 1.0, tolerance);
        }
        // Per-channel normalization: each channel AND the whole image are standardized.
        var perChannelNorm = ImageOps.Normalize.zeroMeanUnitVariance(true).apply(merged.clone());
        pixels = OpenCVTools.extractDoubles(perChannelNorm);
        summary = new DescriptiveStatistics(pixels);
        assertEquals(summary.getMean(), 0.0, tolerance);
        assertEquals(summary.getStandardDeviation(), 1.0, tolerance);
        for (var channel : OpenCVTools.splitChannels(perChannelNorm)) {
            pixels = OpenCVTools.extractDoubles(channel);
            summary = new DescriptiveStatistics(pixels);
            assertEquals(summary.getMean(), 0.0, tolerance);
            assertEquals(summary.getStandardDeviation(), 1.0, tolerance);
        }
    }
}
Example usage of org.apache.commons.math3.stat.descriptive.DescriptiveStatistics in the project qupath (by qupath): class CellIntensityClassificationCommand, method run().
/**
 * Shows an interactive dialog for classifying cells (or, when no cells exist,
 * all detections) by thresholding a selected intensity measurement, using
 * either a single threshold (Negative/Positive) or three thresholds
 * (Negative/1+/2+/3+). On Apply the classification is logged to the workflow;
 * on Cancel the previous classifications are restored.
 */
@Override
public void run() {
    var imageData = qupath.getImageData();
    if (imageData == null) {
        Dialogs.showNoImageError(title);
        return;
    }
    var hierarchy = imageData.getHierarchy();
    // Try to operate on cells, but accept operating on all detections if necessary
    var cells = imageData.getHierarchy().getCellObjects();
    boolean allDetections = cells.isEmpty();
    if (allDetections)
        logger.debug("No cells found - will try using all detections");
    var detections = allDetections ? imageData.getHierarchy().getDetectionObjects() : cells;
    if (detections.isEmpty()) {
        Dialogs.showErrorMessage(title, "No cells found in the current image!");
        return;
    }
    var measurements = PathClassifierTools.getAvailableFeatures(detections);
    if (measurements.isEmpty()) {
        Dialogs.showErrorMessage(title, "No cell measurements found in the current image!");
        return;
    }
    // Snapshot the current classifications so they can be restored on cancel.
    var currentClassifications = PathClassifierTools.createClassificationMap(detections);
    var comboMeasurements = new ComboBox<String>();
    comboMeasurements.getItems().setAll(measurements);
    PaneTools.setToExpandGridPaneWidth(comboMeasurements);
    var selectedMeasurement = comboMeasurements.getSelectionModel().selectedItemProperty();
    var cbSingleThreshold = new CheckBox("Single threshold");
    cbSingleThreshold.setSelected(true);
    var singleThreshold = cbSingleThreshold.selectedProperty();
    // Three threshold sliders (1+/2+/3+), each paired with a text field.
    var sliders = new ArrayList<Slider>();
    var textFields = new ArrayList<TextField>();
    for (int i = 0; i < 3; i++) {
        var slider = new Slider();
        var tf = new TextField();
        tf.setPrefColumnCount(6);
        textFields.add(tf);
        GuiTools.bindSliderAndTextField(slider, tf, true);
        GuiTools.installRangePrompt(slider);
        // Reclassify live whenever a threshold slider moves.
        slider.valueProperty().addListener((v, o, n) -> {
            updateClassifications(hierarchy, allDetections, selectedMeasurement.get(), parseValues(sliders, singleThreshold.get()));
        });
        PaneTools.setToExpandGridPaneWidth(slider);
        sliders.add(slider);
    }
    // Remember per-measurement threshold values when switching measurements.
    var map = new HashMap<String, double[]>();
    var histogramPanel = new HistogramPanelFX();
    var chartWrapper = new ThresholdedChartWrapper(histogramPanel.getChart());
    chartWrapper.setIsInteractive(true);
    // Show one or three threshold lines depending on the checkbox state.
    singleThreshold.addListener((v, o, n) -> {
        chartWrapper.clearThresholds();
        Color color = Color.rgb(0, 0, 0, 0.2);
        if (!n) {
            for (int i = 0; i < sliders.size(); i++) {
                chartWrapper.addThreshold(sliders.get(i).valueProperty(), color);
            }
        } else
            chartWrapper.addThreshold(sliders.get(0).valueProperty(), color);
    });
    // When the selected measurement changes, rebuild the histogram and reset
    // slider ranges/values (restoring remembered values when available).
    selectedMeasurement.addListener((v, o, n) -> {
        if (o != null)
            map.put(o, parseValues(sliders));
        double[] measurementValues = detections.stream().mapToDouble(p -> p.getMeasurementList().getMeasurementValue(n)).filter(d -> Double.isFinite(d)).toArray();
        var stats = new DescriptiveStatistics(measurementValues);
        var histogram = new Histogram(measurementValues, 100, stats.getMin(), stats.getMax());
        histogramPanel.getHistogramData().setAll(HistogramPanelFX.createHistogramData(histogram, false, ColorTools.packARGB(100, 200, 20, 20)));
        double[] values = map.get(n);
        for (int i = 0; i < sliders.size(); i++) {
            var slider = sliders.get(i);
            slider.setMin(stats.getMin());
            slider.setMax(stats.getMax());
            // Default the i-th threshold to mean + i * stddev.
            double val = values == null ? stats.getMean() + stats.getStandardDeviation() * i : values[i];
            slider.setValue(val);
            // Add first threshold to histogram
            if (i == 0) {
                Color color = Color.rgb(0, 0, 0, 0.2);
                chartWrapper.addThreshold(sliders.get(i).valueProperty(), color);
            }
        }
    });
    // Reclassify whenever the measurement or threshold mode changes.
    selectedMeasurement.addListener((v, o, n) -> updateClassifications(hierarchy, allDetections, n, parseValues(sliders, singleThreshold.get())));
    singleThreshold.addListener((v, o, n) -> updateClassifications(hierarchy, allDetections, selectedMeasurement.get(), parseValues(sliders, singleThreshold.get())));
    var pane = new GridPane();
    int row = 0;
    var labelMeasurements = new Label("Measurement");
    PaneTools.addGridRow(pane, row++, 0, "Select measurement to threshold", labelMeasurements, comboMeasurements, comboMeasurements);
    for (int i = 0; i < sliders.size(); i++) {
        var labelThreshold = new Label("Threshold " + (i + 1) + "+");
        var slider = sliders.get(i);
        var tf = textFields.get(i);
        // Thresholds 2 and 3 only apply in three-threshold mode.
        if (i > 0) {
            slider.disableProperty().bind(singleThreshold);
            tf.disableProperty().bind(singleThreshold);
        }
        PaneTools.addGridRow(pane, row++, 0, "Select threshold value", labelThreshold, slider, tf);
    }
    PaneTools.addGridRow(pane, row++, 0, "Toggle between using a single threshold (Negative/Positive) or three threshold Negative/1+/2+/3+)", cbSingleThreshold, cbSingleThreshold, cbSingleThreshold);
    pane.setHgap(5.0);
    pane.setVgap(5.0);
    PaneTools.setToExpandGridPaneHeight(chartWrapper.getPane());
    PaneTools.setToExpandGridPaneWidth(chartWrapper.getPane());
    histogramPanel.getChart().getYAxis().setTickLabelsVisible(false);
    histogramPanel.getChart().setAnimated(false);
    chartWrapper.getPane().setPrefSize(200, 80);
    pane.add(chartWrapper.getPane(), pane.getColumnCount(), 0, 1, pane.getRowCount());
    var dialog = new Dialog<ButtonType>();
    dialog.initOwner(qupath.getStage());
    dialog.setTitle(title);
    dialog.getDialogPane().setContent(pane);
    dialog.getDialogPane().getButtonTypes().setAll(ButtonType.APPLY, ButtonType.CANCEL);
    var response = dialog.showAndWait().orElse(ButtonType.CANCEL);
    // Wait for any in-flight background classification to finish.
    if (pool != null) {
        pool.shutdown();
        try {
            // NOTE(review): 5000 SECONDS (~83 minutes) looks like it was meant to
            // be milliseconds - confirm the intended timeout unit.
            pool.awaitTermination(5000L, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can still observe it.
            Thread.currentThread().interrupt();
            logger.debug("Exception waiting for classification to complete: " + e.getLocalizedMessage(), e);
        }
    }
    // Check if we did anything, if not return
    if (nextRequest == null)
        return;
    if (ButtonType.APPLY.equals(response)) {
        // Make sure we ran the last command, then log it in the workflow
        if (!nextRequest.isComplete())
            nextRequest.doClassification();
        imageData.getHistoryWorkflow().addStep(nextRequest.toWorkflowStep());
    } else {
        // Restore classifications if the user cancelled
        var changed = PathClassifierTools.restoreClassificationsFromMap(currentClassifications);
        if (!changed.isEmpty())
            hierarchy.fireObjectClassificationsChangedEvent(this, changed);
    }
}
Aggregations