Use of gdsc.smlm.results.MemoryPeakResults in project GDSC-SMLM by aherbert.
The class Filter, method fractionScore2.
/**
* Filter the results and return the performance score. Allows benchmarking the filter by marking the results as
* true or false.
* <p>
* Input PeakResults must be allocated a score for true positive, false positive, true negative and false negative
* (accessed via the object property get methods). The filter is run and results that pass accumulate scores for
* true positive and false positive; otherwise the scores are accumulated for true negative and false negative. The
* simplest scoring scheme is to mark valid results as tp=fn=1 and fp=tn=0 and invalid results the opposite.
* <p>
* The number of consecutive rejections is counted per frame. When the configured number of failures is reached, all
* remaining results for the frame are rejected. This assumes the results are ordered by frame.
* <p>
* Note that this method is intended to score a set of results that may have been extracted from a larger set,
* since the number of consecutive failures before each peak is expected to be stored in the origY property. Set
* this to zero and the results should be identical to {@link #fractionScore(List, int)}.
*
* @param resultsList
* a list of results to analyse
* @param failures
* the number of failures to allow per frame before all peaks are rejected
* @return the score
*/
public FractionClassificationResult fractionScore2(List<MemoryPeakResults> resultsList, int failures) {
    int p = 0, n = 0;
    double fp = 0, fn = 0;
    double tp = 0, tn = 0;
    for (MemoryPeakResults peakResults : resultsList) {
        setup(peakResults);
        int frame = -1;
        int failCount = 0;
        for (PeakResult peak : peakResults.getResults()) {
            // Reset fail count for new frames
            if (frame != peak.getFrame()) {
                frame = peak.getFrame();
                failCount = 0;
            }
            failCount += peak.origY;
            // Reject all peaks if we have exceeded the fail count
            final boolean isPositive;
            if (failCount > failures) {
                isPositive = false;
            } else {
                // Otherwise assess the peak
                isPositive = accept(peak);
            }
            if (isPositive) {
                failCount = 0;
            } else {
                failCount++;
            }
            if (isPositive) {
                p++;
                tp += peak.getTruePositiveScore();
                fp += peak.getFalsePositiveScore();
            } else {
                fn += peak.getFalseNegativeScore();
                tn += peak.getTrueNegativeScore();
            }
        }
        n += peakResults.size();
        end();
    }
    n -= p;
    return new FractionClassificationResult(tp, fp, tn, fn, p, n);
}
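For orientation, here is a minimal sketch of how fractionScore2 might be driven from a benchmark. The concrete SNRFilter subclass, its constructor, and the FractionClassificationResult accessor names (getTP and friends) are assumptions for illustration, not taken from the snippet above; loadScoredResults() is a hypothetical helper standing in for whatever produces the scored results.

import java.util.Arrays;
import java.util.List;

import gdsc.core.match.FractionClassificationResult; // package assumed
import gdsc.smlm.results.MemoryPeakResults;
import gdsc.smlm.results.filter.Filter;
import gdsc.smlm.results.filter.SNRFilter; // example concrete filter; constructor assumed

public class FractionScore2Sketch {
    public static void main(String[] args) {
        // Results must already carry tp/fp/tn/fn scores and a per-peak fail count in origY
        MemoryPeakResults scoredResults = loadScoredResults(); // hypothetical helper
        List<MemoryPeakResults> resultsList = Arrays.asList(scoredResults);

        Filter filter = new SNRFilter(10);
        // Allow up to 3 consecutive failures per frame before rejecting the rest of the frame
        FractionClassificationResult score = filter.fractionScore2(resultsList, 3);

        // Accessor names assumed; the result wraps tp, fp, tn, fn plus the positive/negative counts
        double precision = score.getTP() / (score.getTP() + score.getFP());
        double recall = score.getTP() / (score.getTP() + score.getFN());
        System.out.printf("Precision = %f, Recall = %f%n", precision, recall);
    }

    private static MemoryPeakResults loadScoredResults() {
        // Placeholder: a real benchmark assigns the classification scores to each PeakResult
        return new MemoryPeakResults();
    }
}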
Use of gdsc.smlm.results.MemoryPeakResults in project GDSC-SMLM by aherbert.
The class Filter, method filter2.
/**
* Filter the results.
* <p>
* The number of consecutive rejections is counted per frame. When the configured number of failures is reached, all
* remaining results for the frame are rejected. This assumes the results are ordered by frame.
* <p>
* Note that this method is intended to filter a set of results that may have been extracted from a larger set,
* since the number of consecutive failures before each peak is expected to be stored in the origY property. Set
* this to zero and the results should be identical to {@link #filter(MemoryPeakResults, int)}.
*
* @param results
* the results to filter
* @param failures
* the number of failures to allow per frame before all peaks are rejected
* @return the filtered results
*/
public MemoryPeakResults filter2(MemoryPeakResults results, int failures) {
    MemoryPeakResults newResults = new MemoryPeakResults();
    newResults.copySettings(results);
    setup(results);
    int frame = -1;
    int failCount = 0;
    for (PeakResult peak : results.getResults()) {
        if (frame != peak.getFrame()) {
            frame = peak.getFrame();
            failCount = 0;
        }
        failCount += peak.origY;
        // Reject all peaks if we have exceeded the fail count
        final boolean isPositive;
        if (failCount > failures) {
            isPositive = false;
        } else {
            // Otherwise assess the peak
            isPositive = accept(peak);
        }
        if (isPositive) {
            failCount = 0;
            newResults.add(peak);
        } else {
            failCount++;
        }
    }
    end();
    return newResults;
}
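As a quick, hedged usage note: when every peak's origY fail count is zero, filter2 reduces to the plain filter(MemoryPeakResults, int) behaviour described above. A minimal sketch, again assuming SNRFilter as the concrete filter:

import gdsc.smlm.results.MemoryPeakResults;
import gdsc.smlm.results.filter.Filter;
import gdsc.smlm.results.filter.SNRFilter; // example concrete filter; constructor assumed

public class Filter2Sketch {
    public static MemoryPeakResults applyFilter(MemoryPeakResults results) {
        Filter filter = new SNRFilter(10);
        // Allow 3 consecutive failures per frame; any counts stored in origY are added first
        MemoryPeakResults filtered = filter.filter2(results, 3);
        System.out.printf("Kept %d of %d results%n", filtered.size(), results.size());
        return filtered;
    }
}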
Use of gdsc.smlm.results.MemoryPeakResults in project GDSC-SMLM by aherbert.
The class Filter, method filterSubset.
/**
* Filter the results.
* <p>
* Input PeakResults must be allocated a score for true positive, false positive, true negative and false negative
* (accessed via the object property get methods). The filter is run and results that pass accumulate scores for
* true positive and false positive; otherwise the scores are accumulated for true negative and false negative. The
* simplest scoring scheme is to mark valid results as tp=fn=1 and fp=tn=0 and invalid results the opposite.
* <p>
* The number of consecutive rejections is counted per frame; no results are rejected based on this count. This
* assumes the results are ordered by frame.
* <p>
* The number of failures before each peak is stored in the origX property of the PeakResult.
*
* @param results
* the results to filter
* @param score
* if not null, will be populated with the fraction score [ tp, fp, tn, fn, p, n ]
* @return the filtered results
*/
public MemoryPeakResults filterSubset(MemoryPeakResults results, double[] score) {
    MemoryPeakResults newResults = new MemoryPeakResults();
    newResults.copySettings(results);
    setup(results);
    int frame = -1;
    int failCount = 0;
    double fp = 0, fn = 0;
    double tp = 0, tn = 0;
    int p = 0;
    for (PeakResult peak : results.getResults()) {
        if (frame != peak.getFrame()) {
            frame = peak.getFrame();
            failCount = 0;
        }
        // Assess the peak; the consecutive failure count is recorded (in origX) rather than used for rejection
        final boolean isPositive = accept(peak);
        if (isPositive) {
            peak.origX = failCount;
            failCount = 0;
            newResults.add(peak);
        } else {
            failCount++;
        }
        if (isPositive) {
            p++;
            tp += peak.getTruePositiveScore();
            fp += peak.getFalsePositiveScore();
        } else {
            fn += peak.getFalseNegativeScore();
            tn += peak.getTrueNegativeScore();
        }
    }
    end();
    if (score != null && score.length > 5) {
        score[0] = tp;
        score[1] = fp;
        score[2] = tn;
        score[3] = fn;
        score[4] = p;
        score[5] = results.size() - p;
    }
    return newResults;
}
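The documented score layout ([tp, fp, tn, fn, p, n]) makes it straightforward to derive precision and recall directly from the array. A minimal sketch, assuming SNRFilter as the concrete filter:

import gdsc.smlm.results.MemoryPeakResults;
import gdsc.smlm.results.filter.Filter;
import gdsc.smlm.results.filter.SNRFilter; // example concrete filter; constructor assumed

public class FilterSubsetSketch {
    public static MemoryPeakResults filterAndScore(MemoryPeakResults scoredResults) {
        Filter filter = new SNRFilter(10);
        double[] score = new double[6]; // [tp, fp, tn, fn, p, n]
        MemoryPeakResults subset = filter.filterSubset(scoredResults, score);

        double tp = score[0], fp = score[1], fn = score[3];
        // Guard the ratios so an empty class does not produce NaN
        double precision = (tp + fp > 0) ? tp / (tp + fp) : 0;
        double recall = (tp + fn > 0) ? tp / (tp + fn) : 0;
        System.out.printf("Subset = %d, Precision = %f, Recall = %f%n", subset.size(), precision, recall);
        return subset;
    }
}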
Use of gdsc.smlm.results.MemoryPeakResults in project GDSC-SMLM by aherbert.
The class BlinkEstimatorTest, method estimateBlinking.
private TIntHashSet estimateBlinking(double nBlinks, double tOn, double tOff, int particles, double fixedFraction, boolean timeAtLowerBound, boolean doAssert) {
    SpatialIllumination activationIllumination = new UniformIllumination(100);
    int totalSteps = 100;
    double eAct = totalSteps * 0.3 * activationIllumination.getAveragePhotons();
    ImageModel imageModel = new ActivationEnergyImageModel(eAct, activationIllumination, tOn, 0, tOff, 0, nBlinks);
    imageModel.setRandomGenerator(rand);
    double[] max = new double[] { 256, 256, 32 };
    double[] min = new double[3];
    SpatialDistribution distribution = new UniformDistribution(min, max, rand.nextInt());
    List<CompoundMoleculeModel> compounds = new ArrayList<CompoundMoleculeModel>(1);
    CompoundMoleculeModel c = new CompoundMoleculeModel(1, 0, 0, 0, Arrays.asList(new MoleculeModel(0, 0, 0, 0)));
    c.setDiffusionRate(diffusionRate);
    c.setDiffusionType(DiffusionType.RANDOM_WALK);
    compounds.add(c);
    List<CompoundMoleculeModel> molecules = imageModel.createMolecules(compounds, particles, distribution, false);
    // Activate fluorophores
    List<? extends FluorophoreSequenceModel> fluorophores = imageModel.createFluorophores(molecules, totalSteps);
    totalSteps = checkTotalSteps(totalSteps, fluorophores);
    List<LocalisationModel> localisations = imageModel.createImage(molecules, fixedFraction, totalSteps, photons, 0.5, false);
    // // Remove localisations to simulate missed counts.
    // List<LocalisationModel> newLocalisations = new ArrayList<LocalisationModel>(localisations.size());
    // boolean[] id = new boolean[fluorophores.size() + 1];
    // Statistics photonStats = new Statistics();
    // for (LocalisationModel l : localisations)
    // {
    // photonStats.add(l.getIntensity());
    // // Remove by intensity threshold and optionally at random.
    // if (l.getIntensity() < minPhotons || rand.nextDouble() < pDelete)
    // continue;
    // newLocalisations.add(l);
    // id[l.getId()] = true;
    // }
    // localisations = newLocalisations;
    // System.out.printf("Photons = %f\n", photonStats.getMean());
    //
    // List<FluorophoreSequenceModel> newFluorophores = new ArrayList<FluorophoreSequenceModel>(fluorophores.size());
    // for (FluorophoreSequenceModel f : fluorophores)
    // {
    // if (id[f.getId()])
    // newFluorophores.add(f);
    // }
    // fluorophores = newFluorophores;
    MemoryPeakResults results = new MemoryPeakResults();
    results.setCalibration(new Calibration(pixelPitch, 1, msPerFrame));
    for (LocalisationModel l : localisations) {
        // Remove by intensity threshold and optionally at random.
        if (l.getIntensity() < minPhotons || rand.nextDouble() < pDelete)
            continue;
        float[] params = new float[7];
        params[Gaussian2DFunction.X_POSITION] = (float) l.getX();
        params[Gaussian2DFunction.Y_POSITION] = (float) l.getY();
        params[Gaussian2DFunction.X_SD] = params[Gaussian2DFunction.Y_SD] = psfWidth;
        params[Gaussian2DFunction.SIGNAL] = (float) (l.getIntensity());
        results.addf(l.getTime(), 0, 0, 0, 0, 0, params, null);
    }
    // Add random localisations
    for (int i = (int) (localisations.size() * pAdd); i-- > 0; ) {
        float[] params = new float[7];
        params[Gaussian2DFunction.X_POSITION] = (float) (rand.nextDouble() * max[0]);
        params[Gaussian2DFunction.Y_POSITION] = (float) (rand.nextDouble() * max[1]);
        params[Gaussian2DFunction.X_SD] = params[Gaussian2DFunction.Y_SD] = psfWidth;
        // Intensity doesn't matter at the moment for tracing
        params[Gaussian2DFunction.SIGNAL] = (float) (photons);
        results.addf(1 + rand.nextInt(totalSteps), 0, 0, 0, 0, 0, params, null);
    }
    // Get actual simulated stats ...
    Statistics statsNBlinks = new Statistics();
    Statistics statsTOn = new Statistics();
    Statistics statsTOff = new Statistics();
    Statistics statsSampledNBlinks = new Statistics();
    Statistics statsSampledTOn = new Statistics();
    StoredDataStatistics statsSampledTOff = new StoredDataStatistics();
    for (FluorophoreSequenceModel f : fluorophores) {
        statsNBlinks.add(f.getNumberOfBlinks());
        statsTOn.add(f.getOnTimes());
        statsTOff.add(f.getOffTimes());
        int[] on = f.getSampledOnTimes();
        statsSampledNBlinks.add(on.length);
        statsSampledTOn.add(on);
        statsSampledTOff.add(f.getSampledOffTimes());
    }
    System.out.printf("N = %d (%d), N-blinks = %f, tOn = %f, tOff = %f, Fixed = %f\n", fluorophores.size(), localisations.size(), nBlinks, tOn, tOff, fixedFraction);
    System.out.printf("Actual N-blinks = %f (%f), tOn = %f (%f), tOff = %f (%f), 95%% = %f, max = %f\n", statsNBlinks.getMean(), statsSampledNBlinks.getMean(), statsTOn.getMean(), statsSampledTOn.getMean(), statsTOff.getMean(), statsSampledTOff.getMean(), statsSampledTOff.getStatistics().getPercentile(95), statsSampledTOff.getStatistics().getMax());
    System.out.printf("-=-=--=-\n");
    BlinkEstimator be = new BlinkEstimator();
    be.maxDarkTime = (int) (tOff * 10);
    be.msPerFrame = msPerFrame;
    be.relativeDistance = false;
    double d = ImageModel.getRandomMoveDistance(diffusionRate);
    be.searchDistance = (fixedFraction < 1) ? Math.sqrt(2 * d * d) * 3 : 0;
    be.timeAtLowerBound = timeAtLowerBound;
    be.showPlots = false;
    //Assert.assertTrue("Max dark time must exceed the dark time of the data (otherwise no plateau)",
    // be.maxDarkTime > statsSampledTOff.getStatistics().getMax());
    int nMolecules = fluorophores.size();
    if (usePopulationStatistics) {
        nBlinks = statsNBlinks.getMean();
        tOff = statsTOff.getMean();
    } else {
        nBlinks = statsSampledNBlinks.getMean();
        tOff = statsSampledTOff.getMean();
    }
    // See if any fitting regime gets a correct answer
    TIntHashSet ok = new TIntHashSet();
    for (int nFittedPoints = MIN_FITTED_POINTS; nFittedPoints <= MAX_FITTED_POINTS; nFittedPoints++) {
        be.nFittedPoints = nFittedPoints;
        be.computeBlinkingRate(results, true);
        double moleculesError = DoubleEquality.relativeError(nMolecules, be.getNMolecules());
        double blinksError = DoubleEquality.relativeError(nBlinks, be.getNBlinks());
        double offError = DoubleEquality.relativeError(tOff * msPerFrame, be.getTOff());
        System.out.printf("Error %d: N = %f, blinks = %f, tOff = %f : %f\n", nFittedPoints, moleculesError, blinksError, offError, (moleculesError + blinksError + offError) / 3);
        if (moleculesError < relativeError && blinksError < relativeError && offError < relativeError) {
            ok.add(nFittedPoints);
            System.out.printf("-=-=--=-\n");
            System.out.printf("*** Correct at %d fitted points ***\n", nFittedPoints);
            if (doAssert)
                break;
        }
        //if (!be.isIncreaseNFittedPoints())
        // break;
    }
    System.out.printf("-=-=--=-\n");
    if (doAssert)
        Assert.assertFalse(ok.isEmpty());
    //Assert.assertEquals("Invalid t-off", tOff * msPerFrame, be.getTOff(), tOff * msPerFrame * relativeError);
    return ok;
}
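Stripped of the simulation scaffolding, the MemoryPeakResults usage in this test reduces to setting a Calibration and adding Gaussian parameter arrays via addf. A minimal sketch with illustrative values (the pixel pitch, exposure time and widths are placeholders, not taken from the test, and the import paths are assumed):

import gdsc.smlm.function.gaussian.Gaussian2DFunction;
import gdsc.smlm.results.Calibration;
import gdsc.smlm.results.MemoryPeakResults;

public class MemoryPeakResultsSketch {
    public static MemoryPeakResults createResults() {
        MemoryPeakResults results = new MemoryPeakResults();
        // Calibration(pixelPitch, gain, msPerFrame) mirrors the constructor call in the test above
        results.setCalibration(new Calibration(107, 1, 50));

        // One localisation in frame 1, described by a 7-element Gaussian parameter array
        float[] params = new float[7];
        params[Gaussian2DFunction.X_POSITION] = 10.5f;
        params[Gaussian2DFunction.Y_POSITION] = 20.5f;
        params[Gaussian2DFunction.X_SD] = params[Gaussian2DFunction.Y_SD] = 1.2f;
        params[Gaussian2DFunction.SIGNAL] = 1000f;
        // Argument order mirrors the addf call in the test; parameter names are assumed
        results.addf(1, 0, 0, 0, 0, 0, params, null);
        return results;
    }
}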
Use of gdsc.smlm.results.MemoryPeakResults in project GDSC-SMLM by aherbert.
The class ResultsManagerTest, method checkEqual.
private void checkEqual(Spot[] spots, int channel, int slice, int position, int type, MemoryPeakResults actualResults) throws ArrayComparisonFailure {
    Assert.assertNotNull("Input results are null", actualResults);
    MemoryPeakResults expectedResults = extract(spots, channel, slice, position, type);
    Assert.assertEquals("Size differ", expectedResults.size(), actualResults.size());
    final float delta = 0;
    List<PeakResult> expected = expectedResults.getResults();
    List<PeakResult> actual = actualResults.getResults();
    for (int i = 0; i < actualResults.size(); i++) {
        PeakResult p1 = expected.get(i);
        PeakResult p2 = actual.get(i);
        Assert.assertEquals("Peak mismatch @ " + i, p1.getFrame(), p2.getFrame());
        Assert.assertEquals("Orig X mismatch @ " + i, p1.origX, p2.origX);
        Assert.assertEquals("Orig Y mismatch @ " + i, p1.origY, p2.origY);
        Assert.assertEquals("Orig value mismatch @ " + i, p1.origValue, p2.origValue, delta);
        Assert.assertEquals("Error mismatch @ " + i, p1.error, p2.error, 1e-6);
        Assert.assertEquals("Noise mismatch @ " + i, p1.noise, p2.noise, delta);
        Assert.assertNotNull("Params is null @ " + i, p2.params);
        Assert.assertEquals("Background mismatch @ " + i, p1.getBackground(), p2.getBackground(), delta);
        Assert.assertEquals("Signal mismatch @ " + i, p1.getSignal(), p2.getSignal(), delta);
        Assert.assertEquals("XPosition mismatch @ " + i, p1.getXPosition(), p2.getXPosition(), delta);
        Assert.assertEquals("YPosition mismatch @ " + i, p1.getYPosition(), p2.getYPosition(), delta);
        Assert.assertEquals("XSD mismatch @ " + i, p1.getXSD(), p2.getXSD(), 1e-6);
        Assert.assertEquals("YSD mismatch @ " + i, p1.getYSD(), p2.getYSD(), 1e-6);
        Assert.assertEquals("ID mismatch @ " + i, p1.getId(), p2.getId());
    }
}