
Example 26 with TIntHashSet

Use of gnu.trove.set.hash.TIntHashSet in project GDSC-SMLM by aherbert.

From the class CreateData, method removeFilteredFluorophores.

/**
	 * Remove all fluorophores that were not drawn.
	 * 
	 * @param fluorophores
	 *            the fluorophores from the simulation
	 * @param localisations
	 *            the localisations that were drawn
	 * @return the fluorophores whose ID matches a drawn localisation (or null if the input is null)
	 */
private List<? extends FluorophoreSequenceModel> removeFilteredFluorophores(List<? extends FluorophoreSequenceModel> fluorophores, List<LocalisationModel> localisations) {
    if (fluorophores == null)
        return null;
    // Size the ID set to hold all the unique IDs, using the capacity of the movingMolecules set as an estimate when it is available
    TIntHashSet idSet = new TIntHashSet((movingMolecules != null) ? movingMolecules.capacity() : 0);
    for (LocalisationModel l : localisations) idSet.add(l.getId());
    List<FluorophoreSequenceModel> newFluorophores = new ArrayList<FluorophoreSequenceModel>(idSet.size());
    for (FluorophoreSequenceModel f : fluorophores) {
        if (idSet.contains(f.getId()))
            newFluorophores.add(f);
    }
    return newFluorophores;
}
Also used : LocalisationModel(gdsc.smlm.model.LocalisationModel) FluorophoreSequenceModel(gdsc.smlm.model.FluorophoreSequenceModel) ArrayList(java.util.ArrayList) TIntHashSet(gnu.trove.set.hash.TIntHashSet)
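
The pattern above (collect primitive int IDs into a TIntHashSet, then filter a second list with contains()) can be shown in isolation. Below is a minimal, self-contained sketch assuming only the Trove library on the classpath; the Item class and retainByIds method are illustrative stand-ins rather than GDSC-SMLM types.

import gnu.trove.set.hash.TIntHashSet;

import java.util.ArrayList;
import java.util.List;

public class IdFilterSketch {

    /** Illustrative item type; stands in for FluorophoreSequenceModel / LocalisationModel. */
    static class Item {
        final int id;

        Item(int id) {
            this.id = id;
        }

        int getId() {
            return id;
        }
    }

    /** Keep only the candidates whose ID appears in the kept list. */
    static List<Item> retainByIds(List<Item> candidates, List<Item> kept) {
        // Build a primitive ID set without boxing each int into an Integer
        TIntHashSet idSet = new TIntHashSet(kept.size());
        for (Item k : kept)
            idSet.add(k.getId());
        List<Item> result = new ArrayList<>(idSet.size());
        for (Item c : candidates)
            if (idSet.contains(c.getId()))
                result.add(c);
        return result;
    }
}

Using a primitive set avoids the boxing overhead of a HashSet<Integer>, which is the usual reason Trove collections appear in these examples.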

Example 27 with TIntHashSet

Use of gnu.trove.set.hash.TIntHashSet in project GDSC-SMLM by aherbert.

From the class ResultsMatchCalculator, method compareCoordinates.

private void compareCoordinates(MemoryPeakResults results1, MemoryPeakResults results2, double dThreshold, int increments, double delta) {
    boolean requirePairs = showPairs || saveClassifications;
    FilePeakResults fileResults = createFilePeakResults(results2);
    List<PointPair> allMatches = new LinkedList<PointPair>();
    List<PointPair> pairs = (requirePairs) ? new LinkedList<PointPair>() : null;
    List<PeakResult> actualPoints = results1.getResults();
    List<PeakResult> predictedPoints = results2.getResults();
    double maxDistance = dThreshold + increments * delta;
    // Old implementation
    //// Process each time point
    //for (Integer t : getTimepoints(actualPoints, predictedPoints))
    //{
    //	Coordinate[] actual = getCoordinates(actualPoints, t);
    //	Coordinate[] predicted = getCoordinates(predictedPoints, t);
    // Divide the results into time points
    TIntObjectHashMap<ArrayList<Coordinate>> actualCoordinates = getCoordinates(actualPoints);
    TIntObjectHashMap<ArrayList<Coordinate>> predictedCoordinates = getCoordinates(predictedPoints);
    int n1 = 0;
    int n2 = 0;
    // Process each time point
    for (Integer t : getTimepoints(actualCoordinates, predictedCoordinates)) {
        Coordinate[] actual = getCoordinates(actualCoordinates, t);
        Coordinate[] predicted = getCoordinates(predictedCoordinates, t);
        List<Coordinate> TP = null;
        List<Coordinate> FP = null;
        List<Coordinate> FN = null;
        List<PointPair> matches = new LinkedList<PointPair>();
        if (requirePairs) {
            FP = new LinkedList<Coordinate>();
            FN = new LinkedList<Coordinate>();
        }
        MatchCalculator.analyseResults2D(actual, predicted, maxDistance, TP, FP, FN, matches);
        // Aggregate
        n1 += actual.length;
        n2 += predicted.length;
        allMatches.addAll(matches);
        if (showPairs) {
            pairs.addAll(matches);
            for (Coordinate c : FN) pairs.add(new PointPair(c, null));
            for (Coordinate c : FP) pairs.add(new PointPair(null, c));
        }
        if (fileResults != null) {
            // Matches are marked in the original value with 1 for true, 0 for false 
            for (PointPair pair : matches) {
                PeakResult p = ((PeakResultPoint) pair.getPoint2()).peakResult;
                fileResults.add(p.getFrame(), p.origX, p.origY, 1, p.error, p.noise, p.params, null);
            }
            for (Coordinate c : FP) {
                PeakResult p = ((PeakResultPoint) c).peakResult;
                fileResults.add(p.getFrame(), p.origX, p.origY, 0, p.error, p.noise, p.params, null);
            }
        }
    }
    if (fileResults != null)
        fileResults.end();
    // XXX : DEBUGGING : Output for signal correlation and fitting analysis
    /*
		 * try
		 * {
		 * OutputStreamWriter o = new OutputStreamWriter(new FileOutputStream("/tmp/ResultsMatchCalculator.txt"));
		 * FilePeakResults r1 = new FilePeakResults("/tmp/" + results1.getName() + ".txt", false);
		 * FilePeakResults r2 = new FilePeakResults("/tmp/" + results2.getName() + ".txt", false);
		 * r1.begin();
		 * r2.begin();
		 * //OutputStreamWriter o2 = new OutputStreamWriter(new FileOutputStream("/tmp/"+results1.getName()+".txt"));
		 * //OutputStreamWriter o3 = new OutputStreamWriter(new FileOutputStream("/tmp/"+results2.getName()+".txt"));
		 * for (PointPair pair : allMatches)
		 * {
		 * PeakResult p1 = ((PeakResultPoint) pair.getPoint1()).peakResult;
		 * PeakResult p2 = ((PeakResultPoint) pair.getPoint2()).peakResult;
		 * r1.add(p1);
		 * r2.add(p2);
		 * o.write(Float.toString(p1.getSignal()));
		 * o.write('\t');
		 * o.write(Float.toString(p2.getSignal()));
		 * o.write('\n');
		 * }
		 * o.close();
		 * r1.end();
		 * r2.end();
		 * }
		 * catch (Exception e)
		 * {
		 * e.printStackTrace();
		 * }
		 */
    boolean doIdAnalysis1 = (idAnalysis) ? haveIds(results1) : false;
    boolean doIdAnalysis2 = (idAnalysis) ? haveIds(results2) : false;
    boolean doIdAnalysis = doIdAnalysis1 || doIdAnalysis2;
    // Create output
    if (!java.awt.GraphicsEnvironment.isHeadless()) {
        String header = createResultsHeader(doIdAnalysis);
        Utils.refreshHeadings(resultsWindow, header, true);
        if (showTable && (resultsWindow == null || !resultsWindow.isShowing())) {
            resultsWindow = new TextWindow(TITLE + " Results", header, "", 900, 300);
        }
        if (showPairs) {
            if (pairsWindow == null || !pairsWindow.isShowing()) {
                pairsWindow = new TextWindow(TITLE + " Pairs", createPairsHeader(pairs), "", 900, 300);
                if (resultsWindow != null) {
                    Point p = resultsWindow.getLocation();
                    p.y += resultsWindow.getHeight();
                    pairsWindow.setLocation(p);
                }
                pairPainter = new ImageROIPainter(pairsWindow.getTextPanel(), "", this);
            }
            pairsWindow.getTextPanel().clear();
            String title = "Results 1";
            if (results1.getSource() != null && results1.getSource().getOriginal().getName().length() > 0)
                title = results1.getSource().getOriginal().getName();
            pairPainter.setTitle(title);
            IJ.showStatus("Writing pairs table");
            IJ.showProgress(0);
            int c = 0;
            final int total = pairs.size();
            final int step = Utils.getProgressInterval(total);
            final ArrayList<String> list = new ArrayList<String>(total);
            boolean flush = true;
            for (PointPair pair : pairs) {
                if (++c % step == 0)
                    IJ.showProgress(c, total);
                list.add(addPairResult(pair));
                // Flush the first few rows immediately so the pairs table becomes visible while the rest is built
                if (flush && c == 9) {
                    pairsWindow.getTextPanel().append(list);
                    list.clear();
                    flush = false;
                }
            }
            pairsWindow.getTextPanel().append(list);
            IJ.showProgress(1);
        }
    } else {
        if (writeHeader && showTable) {
            writeHeader = false;
            IJ.log(createResultsHeader(idAnalysis));
        }
    }
    if (!showTable)
        return;
    // We have the results for the largest distance.
    // Now reduce the distance threshold and recalculate the results
    double[] distanceThresholds = getDistances(dThreshold, increments, delta);
    double[] pairDistances = getPairDistances(allMatches);
    // Re-use storage for the ID analysis
    TIntHashSet id1 = null, id2 = null, matchId1 = null, matchId2 = null;
    if (doIdAnalysis) {
        if (doIdAnalysis1) {
            id1 = getIds(results1);
            matchId1 = new TIntHashSet(id1.size());
        }
        if (doIdAnalysis2) {
            id2 = getIds(results2);
            matchId2 = new TIntHashSet(id2.size());
        }
    }
    for (double distanceThreshold : distanceThresholds) {
        double rms = 0;
        int tp2 = 0;
        final double d2 = distanceThreshold * distanceThreshold;
        for (double d : pairDistances) {
            if (d <= d2) {
                rms += d;
                tp2++;
            }
        }
        // All non-true positives must be added to the false totals.
        int fp2 = n2 - tp2;
        int fn2 = n1 - tp2;
        MatchResult result = new MatchResult(tp2, fp2, fn2, (tp2 > 0) ? Math.sqrt(rms / tp2) : 0);
        MatchResult idResult1 = null, idResult2 = null;
        if (doIdAnalysis) {
            if (doIdAnalysis1)
                matchId1.clear();
            if (doIdAnalysis2)
                matchId2.clear();
            int i = 0;
            for (PointPair pair : allMatches) {
                if (pairDistances[i++] <= d2) {
                    if (doIdAnalysis1)
                        matchId1.add(((PeakResultPoint) pair.getPoint1()).peakResult.getId());
                    if (doIdAnalysis2)
                        matchId2.add(((PeakResultPoint) pair.getPoint2()).peakResult.getId());
                }
            }
            // => Only the recall will be valid: tp / (tp + fn)
            if (doIdAnalysis1)
                idResult1 = new MatchResult(matchId1.size(), 0, id1.size() - matchId1.size(), 0);
            if (doIdAnalysis2)
                idResult2 = new MatchResult(matchId2.size(), 0, id2.size() - matchId2.size(), 0);
        }
        addResult(inputOption1, inputOption2, distanceThreshold, result, idResult1, idResult2);
    }
}
Also used : ArrayList(java.util.ArrayList) MatchResult(gdsc.core.match.MatchResult) PeakResult(gdsc.smlm.results.PeakResult) TIntHashSet(gnu.trove.set.hash.TIntHashSet) PointPair(gdsc.core.match.PointPair) ImageROIPainter(gdsc.smlm.ij.utils.ImageROIPainter) Point(java.awt.Point) BasePoint(gdsc.core.match.BasePoint) LinkedList(java.util.LinkedList) Point(java.awt.Point) BasePoint(gdsc.core.match.BasePoint) FilePeakResults(gdsc.smlm.results.FilePeakResults) TextWindow(ij.text.TextWindow) Coordinate(gdsc.core.match.Coordinate)
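
A detail worth noting in the ID analysis above: the two match-ID sets are allocated once and reset with clear() for each distance threshold, and because a set collapses duplicates, the count of unique matched IDs falls out of size(). The sketch below reproduces that reuse pattern on its own; the pairIds and pairDistancesSquared arrays, and the method name, are hypothetical and only mirror the squared-distance comparison used in compareCoordinates.

import gnu.trove.set.hash.TIntHashSet;

public class IdRecallSketch {

    /**
     * For each distance threshold, count how many unique IDs have at least one
     * pair within range. One TIntHashSet is reused via clear() between thresholds.
     */
    static int[] uniqueMatchesPerThreshold(int[] pairIds, double[] pairDistancesSquared,
            double[] thresholds) {
        int[] counts = new int[thresholds.length];
        TIntHashSet matched = new TIntHashSet(pairIds.length);
        for (int t = 0; t < thresholds.length; t++) {
            // Re-use the storage rather than allocating a new set per threshold
            matched.clear();
            final double d2 = thresholds[t] * thresholds[t];
            for (int i = 0; i < pairIds.length; i++)
                if (pairDistancesSquared[i] <= d2)
                    matched.add(pairIds[i]); // duplicate IDs collapse automatically
            counts[t] = matched.size();
        }
        return counts;
    }
}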

Example 28 with TIntHashSet

Use of gnu.trove.set.hash.TIntHashSet in project GDSC-SMLM by aherbert.

From the class BlinkEstimatorTest, method estimateBlinking.

private TIntHashSet estimateBlinking(double nBlinks, double tOn, double tOff, int particles, double fixedFraction, boolean timeAtLowerBound, boolean doAssert) {
    SpatialIllumination activationIllumination = new UniformIllumination(100);
    int totalSteps = 100;
    double eAct = totalSteps * 0.3 * activationIllumination.getAveragePhotons();
    ImageModel imageModel = new ActivationEnergyImageModel(eAct, activationIllumination, tOn, 0, tOff, 0, nBlinks);
    imageModel.setRandomGenerator(rand);
    double[] max = new double[] { 256, 256, 32 };
    double[] min = new double[3];
    SpatialDistribution distribution = new UniformDistribution(min, max, rand.nextInt());
    List<CompoundMoleculeModel> compounds = new ArrayList<CompoundMoleculeModel>(1);
    CompoundMoleculeModel c = new CompoundMoleculeModel(1, 0, 0, 0, Arrays.asList(new MoleculeModel(0, 0, 0, 0)));
    c.setDiffusionRate(diffusionRate);
    c.setDiffusionType(DiffusionType.RANDOM_WALK);
    compounds.add(c);
    List<CompoundMoleculeModel> molecules = imageModel.createMolecules(compounds, particles, distribution, false);
    // Activate fluorophores
    List<? extends FluorophoreSequenceModel> fluorophores = imageModel.createFluorophores(molecules, totalSteps);
    totalSteps = checkTotalSteps(totalSteps, fluorophores);
    List<LocalisationModel> localisations = imageModel.createImage(molecules, fixedFraction, totalSteps, photons, 0.5, false);
    //		// Remove localisations to simulate missed counts. 
    //		List<LocalisationModel> newLocalisations = new ArrayList<LocalisationModel>(localisations.size());
    //		boolean[] id = new boolean[fluorophores.size() + 1];
    //		Statistics photonStats = new Statistics();
    //		for (LocalisationModel l : localisations)
    //		{
    //			photonStats.add(l.getIntensity());
    //			// Remove by intensity threshold and optionally at random.
    //			if (l.getIntensity() < minPhotons || rand.nextDouble() < pDelete)
    //				continue;
    //			newLocalisations.add(l);
    //			id[l.getId()] = true;
    //		}
    //		localisations = newLocalisations;
    //		System.out.printf("Photons = %f\n", photonStats.getMean());
    //
    //		List<FluorophoreSequenceModel> newFluorophores = new ArrayList<FluorophoreSequenceModel>(fluorophores.size());
    //		for (FluorophoreSequenceModel f : fluorophores)
    //		{
    //			if (id[f.getId()])
    //				newFluorophores.add(f);
    //		}
    //		fluorophores = newFluorophores;
    MemoryPeakResults results = new MemoryPeakResults();
    results.setCalibration(new Calibration(pixelPitch, 1, msPerFrame));
    for (LocalisationModel l : localisations) {
        // Remove by intensity threshold and optionally at random.
        if (l.getIntensity() < minPhotons || rand.nextDouble() < pDelete)
            continue;
        float[] params = new float[7];
        params[Gaussian2DFunction.X_POSITION] = (float) l.getX();
        params[Gaussian2DFunction.Y_POSITION] = (float) l.getY();
        params[Gaussian2DFunction.X_SD] = params[Gaussian2DFunction.Y_SD] = psfWidth;
        params[Gaussian2DFunction.SIGNAL] = (float) (l.getIntensity());
        results.addf(l.getTime(), 0, 0, 0, 0, 0, params, null);
    }
    // Add random localisations
    for (int i = (int) (localisations.size() * pAdd); i-- > 0; ) {
        float[] params = new float[7];
        params[Gaussian2DFunction.X_POSITION] = (float) (rand.nextDouble() * max[0]);
        params[Gaussian2DFunction.Y_POSITION] = (float) (rand.nextDouble() * max[1]);
        params[Gaussian2DFunction.X_SD] = params[Gaussian2DFunction.Y_SD] = psfWidth;
        // Intensity doesn't matter at the moment for tracing
        params[Gaussian2DFunction.SIGNAL] = (float) (photons);
        results.addf(1 + rand.nextInt(totalSteps), 0, 0, 0, 0, 0, params, null);
    }
    // Get actual simulated stats ...
    Statistics statsNBlinks = new Statistics();
    Statistics statsTOn = new Statistics();
    Statistics statsTOff = new Statistics();
    Statistics statsSampledNBlinks = new Statistics();
    Statistics statsSampledTOn = new Statistics();
    StoredDataStatistics statsSampledTOff = new StoredDataStatistics();
    for (FluorophoreSequenceModel f : fluorophores) {
        statsNBlinks.add(f.getNumberOfBlinks());
        statsTOn.add(f.getOnTimes());
        statsTOff.add(f.getOffTimes());
        int[] on = f.getSampledOnTimes();
        statsSampledNBlinks.add(on.length);
        statsSampledTOn.add(on);
        statsSampledTOff.add(f.getSampledOffTimes());
    }
    System.out.printf("N = %d (%d), N-blinks = %f, tOn = %f, tOff = %f, Fixed = %f\n", fluorophores.size(), localisations.size(), nBlinks, tOn, tOff, fixedFraction);
    System.out.printf("Actual N-blinks = %f (%f), tOn = %f (%f), tOff = %f (%f), 95%% = %f, max = %f\n", statsNBlinks.getMean(), statsSampledNBlinks.getMean(), statsTOn.getMean(), statsSampledTOn.getMean(), statsTOff.getMean(), statsSampledTOff.getMean(), statsSampledTOff.getStatistics().getPercentile(95), statsSampledTOff.getStatistics().getMax());
    System.out.printf("-=-=--=-\n");
    BlinkEstimator be = new BlinkEstimator();
    be.maxDarkTime = (int) (tOff * 10);
    be.msPerFrame = msPerFrame;
    be.relativeDistance = false;
    double d = ImageModel.getRandomMoveDistance(diffusionRate);
    be.searchDistance = (fixedFraction < 1) ? Math.sqrt(2 * d * d) * 3 : 0;
    be.timeAtLowerBound = timeAtLowerBound;
    be.showPlots = false;
    //Assert.assertTrue("Max dark time must exceed the dark time of the data (otherwise no plateau)",
    //		be.maxDarkTime > statsSampledTOff.getStatistics().getMax());
    int nMolecules = fluorophores.size();
    if (usePopulationStatistics) {
        nBlinks = statsNBlinks.getMean();
        tOff = statsTOff.getMean();
    } else {
        nBlinks = statsSampledNBlinks.getMean();
        tOff = statsSampledTOff.getMean();
    }
    // See if any fitting regime gets a correct answer
    TIntHashSet ok = new TIntHashSet();
    for (int nFittedPoints = MIN_FITTED_POINTS; nFittedPoints <= MAX_FITTED_POINTS; nFittedPoints++) {
        be.nFittedPoints = nFittedPoints;
        be.computeBlinkingRate(results, true);
        double moleculesError = DoubleEquality.relativeError(nMolecules, be.getNMolecules());
        double blinksError = DoubleEquality.relativeError(nBlinks, be.getNBlinks());
        double offError = DoubleEquality.relativeError(tOff * msPerFrame, be.getTOff());
        System.out.printf("Error %d: N = %f, blinks = %f, tOff = %f : %f\n", nFittedPoints, moleculesError, blinksError, offError, (moleculesError + blinksError + offError) / 3);
        if (moleculesError < relativeError && blinksError < relativeError && offError < relativeError) {
            ok.add(nFittedPoints);
            System.out.printf("-=-=--=-\n");
            System.out.printf("*** Correct at %d fitted points ***\n", nFittedPoints);
            if (doAssert)
                break;
        }
    //if (!be.isIncreaseNFittedPoints())
    //	break;
    }
    System.out.printf("-=-=--=-\n");
    if (doAssert)
        Assert.assertFalse(ok.isEmpty());
    //Assert.assertEquals("Invalid t-off", tOff * msPerFrame, be.getTOff(), tOff * msPerFrame * relativeError);
    return ok;
}
Also used : ActivationEnergyImageModel(gdsc.smlm.model.ActivationEnergyImageModel) CompoundMoleculeModel(gdsc.smlm.model.CompoundMoleculeModel) ArrayList(java.util.ArrayList) TIntHashSet(gnu.trove.set.hash.TIntHashSet) CompoundMoleculeModel(gdsc.smlm.model.CompoundMoleculeModel) MoleculeModel(gdsc.smlm.model.MoleculeModel) SpatialIllumination(gdsc.smlm.model.SpatialIllumination) MemoryPeakResults(gdsc.smlm.results.MemoryPeakResults) SpatialDistribution(gdsc.smlm.model.SpatialDistribution) UniformDistribution(gdsc.smlm.model.UniformDistribution) StoredDataStatistics(gdsc.core.utils.StoredDataStatistics) Calibration(gdsc.smlm.results.Calibration) StoredDataStatistics(gdsc.core.utils.StoredDataStatistics) Statistics(gdsc.core.utils.Statistics) UniformIllumination(gdsc.smlm.model.UniformIllumination) LocalisationModel(gdsc.smlm.model.LocalisationModel) FluorophoreSequenceModel(gdsc.smlm.model.FluorophoreSequenceModel) ImageModel(gdsc.smlm.model.ImageModel) ActivationEnergyImageModel(gdsc.smlm.model.ActivationEnergyImageModel)
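
Here the TIntHashSet is simply a collector of the fitted-point settings that met the error tolerance, and the test asserts the set is non-empty. A stripped-down sketch of that "any configuration passes" pattern is given below; the Evaluator interface is a hypothetical stand-in for BlinkEstimator and its error calculation.

import gnu.trove.set.hash.TIntHashSet;

public class PassingConfigsSketch {

    /** Hypothetical stand-in for BlinkEstimator: reports a relative error for a setting. */
    interface Evaluator {
        double relativeError(int nFittedPoints);
    }

    /** Collect the settings whose error falls below the tolerance. */
    static TIntHashSet passingSettings(Evaluator evaluator, int min, int max, double tolerance) {
        TIntHashSet ok = new TIntHashSet();
        for (int n = min; n <= max; n++)
            if (evaluator.relativeError(n) < tolerance)
                ok.add(n);
        // The caller can assert !ok.isEmpty() to require at least one passing setting
        return ok;
    }
}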

Example 29 with TIntHashSet

Use of gnu.trove.set.hash.TIntHashSet in project GDSC-SMLM by aherbert.

From the class BenchmarkSpotFit, method summariseResults.

private void summariseResults(TIntObjectHashMap<FilterCandidates> filterCandidates, long runTime, final PreprocessedPeakResult[] preprocessedPeakResults, int nUniqueIDs) {
    createTable();
    // Summarise the fitting results. N fits, N failures. 
    // Optimal match statistics if filtering is perfect (since fitting is not perfect).
    StoredDataStatistics distanceStats = new StoredDataStatistics();
    StoredDataStatistics depthStats = new StoredDataStatistics();
    // Get stats for all fitted results and those that match 
    // Signal, SNR, Width, xShift, yShift, Precision
    createFilterCriteria();
    StoredDataStatistics[][] stats = new StoredDataStatistics[3][filterCriteria.length];
    for (int i = 0; i < stats.length; i++) for (int j = 0; j < stats[i].length; j++) stats[i][j] = new StoredDataStatistics();
    final double nmPerPixel = simulationParameters.a;
    double tp = 0, fp = 0;
    int failcTP = 0, failcFP = 0;
    int cTP = 0, cFP = 0;
    int[] singleStatus = null, multiStatus = null, doubletStatus = null, multiDoubletStatus = null;
    singleStatus = new int[FitStatus.values().length];
    multiStatus = new int[singleStatus.length];
    doubletStatus = new int[singleStatus.length];
    multiDoubletStatus = new int[singleStatus.length];
    // Easier to materialise the values since we have a lot of non final variables to manipulate
    final int[] frames = new int[filterCandidates.size()];
    final FilterCandidates[] candidates = new FilterCandidates[filterCandidates.size()];
    final int[] counter = new int[1];
    filterCandidates.forEachEntry(new TIntObjectProcedure<FilterCandidates>() {

        public boolean execute(int a, FilterCandidates b) {
            frames[counter[0]] = a;
            candidates[counter[0]] = b;
            counter[0]++;
            return true;
        }
    });
    for (FilterCandidates result : candidates) {
        // Count the number of fit results that matched (tp) and did not match (fp)
        tp += result.tp;
        fp += result.fp;
        for (int i = 0; i < result.fitResult.length; i++) {
            if (result.spots[i].match)
                cTP++;
            else
                cFP++;
            final MultiPathFitResult fitResult = result.fitResult[i];
            if (singleStatus != null && result.spots[i].match) {
                // Debugging reasons for fit failure
                addStatus(singleStatus, fitResult.getSingleFitResult());
                addStatus(multiStatus, fitResult.getMultiFitResult());
                addStatus(doubletStatus, fitResult.getDoubletFitResult());
                addStatus(multiDoubletStatus, fitResult.getMultiDoubletFitResult());
            }
            if (noMatch(fitResult)) {
                if (result.spots[i].match)
                    failcTP++;
                else
                    failcFP++;
            }
            // We have multi-path results.
            // We want statistics for:
            // [0] all fitted spots
            // [1] fitted spots that match a result
            // [2] fitted spots that do not match a result
            addToStats(fitResult.getSingleFitResult(), stats);
            addToStats(fitResult.getMultiFitResult(), stats);
            addToStats(fitResult.getDoubletFitResult(), stats);
            addToStats(fitResult.getMultiDoubletFitResult(), stats);
        }
        // Statistics on spots that fit an actual result
        for (int i = 0; i < result.match.length; i++) {
            if (!result.match[i].isFitResult())
                // For now just ignore the candidates that matched
                continue;
            FitMatch fitMatch = (FitMatch) result.match[i];
            distanceStats.add(fitMatch.d * nmPerPixel);
            depthStats.add(fitMatch.z * nmPerPixel);
        }
    }
    // Store data for computing correlation
    double[] i1 = new double[depthStats.getN()];
    double[] i2 = new double[i1.length];
    double[] is = new double[i1.length];
    int ci = 0;
    for (FilterCandidates result : candidates) {
        for (int i = 0; i < result.match.length; i++) {
            if (!result.match[i].isFitResult())
                // For now just ignore the candidates that matched
                continue;
            FitMatch fitMatch = (FitMatch) result.match[i];
            ScoredSpot spot = result.spots[fitMatch.i];
            i1[ci] = fitMatch.predictedSignal;
            i2[ci] = fitMatch.actualSignal;
            is[ci] = spot.spot.intensity;
            ci++;
        }
    }
    // We want to compute the Jaccard against the spot metric
    // Filter the results using the multi-path filter
    ArrayList<MultiPathFitResults> multiPathResults = new ArrayList<MultiPathFitResults>(filterCandidates.size());
    for (int i = 0; i < frames.length; i++) {
        int frame = frames[i];
        MultiPathFitResult[] multiPathFitResults = candidates[i].fitResult;
        int totalCandidates = candidates[i].spots.length;
        int nActual = actualCoordinates.get(frame).size();
        multiPathResults.add(new MultiPathFitResults(frame, multiPathFitResults, totalCandidates, nActual));
    }
    // Score the results and count the number returned
    List<FractionalAssignment[]> assignments = new ArrayList<FractionalAssignment[]>();
    final TIntHashSet set = new TIntHashSet(nUniqueIDs);
    FractionScoreStore scoreStore = new FractionScoreStore() {

        public void add(int uniqueId) {
            set.add(uniqueId);
        }
    };
    MultiPathFitResults[] multiResults = multiPathResults.toArray(new MultiPathFitResults[multiPathResults.size()]);
    // Filter with no filter
    MultiPathFilter mpf = new MultiPathFilter(new SignalFilter(0), null, multiFilter.residualsThreshold);
    FractionClassificationResult fractionResult = mpf.fractionScoreSubset(multiResults, Integer.MAX_VALUE, this.results.size(), assignments, scoreStore, CoordinateStoreFactory.create(imp.getWidth(), imp.getHeight(), fitConfig.getDuplicateDistance()));
    double nPredicted = fractionResult.getTP() + fractionResult.getFP();
    final double[][] matchScores = new double[set.size()][];
    int count = 0;
    for (int i = 0; i < assignments.size(); i++) {
        FractionalAssignment[] a = assignments.get(i);
        if (a == null)
            continue;
        for (int j = 0; j < a.length; j++) {
            final PreprocessedPeakResult r = ((PeakFractionalAssignment) a[j]).peakResult;
            set.remove(r.getUniqueId());
            final double precision = Math.sqrt(r.getLocationVariance());
            final double signal = r.getSignal();
            final double snr = r.getSNR();
            final double width = r.getXSDFactor();
            final double xShift = r.getXRelativeShift2();
            final double yShift = r.getYRelativeShift2();
            // Since these two are combined for filtering and the max is what matters.
            final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
            final double eshift = Math.sqrt(xShift + yShift);
            final double[] score = new double[8];
            score[FILTER_SIGNAL] = signal;
            score[FILTER_SNR] = snr;
            score[FILTER_MIN_WIDTH] = width;
            score[FILTER_MAX_WIDTH] = width;
            score[FILTER_SHIFT] = shift;
            score[FILTER_ESHIFT] = eshift;
            score[FILTER_PRECISION] = precision;
            score[FILTER_PRECISION + 1] = a[j].getScore();
            matchScores[count++] = score;
        }
    }
    // Add the rest
    set.forEach(new CustomTIntProcedure(count) {

        public boolean execute(int uniqueId) {
            // This should not be null or something has gone wrong
            PreprocessedPeakResult r = preprocessedPeakResults[uniqueId];
            if (r == null)
                throw new RuntimeException("Missing result: " + uniqueId);
            final double precision = Math.sqrt(r.getLocationVariance());
            final double signal = r.getSignal();
            final double snr = r.getSNR();
            final double width = r.getXSDFactor();
            final double xShift = r.getXRelativeShift2();
            final double yShift = r.getYRelativeShift2();
            // Since these two are combined for filtering and the max is what matters.
            final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
            final double eshift = Math.sqrt(xShift + yShift);
            final double[] score = new double[8];
            score[FILTER_SIGNAL] = signal;
            score[FILTER_SNR] = snr;
            score[FILTER_MIN_WIDTH] = width;
            score[FILTER_MAX_WIDTH] = width;
            score[FILTER_SHIFT] = shift;
            score[FILTER_ESHIFT] = eshift;
            score[FILTER_PRECISION] = precision;
            matchScores[c++] = score;
            return true;
        }
    });
    // Debug the reasons the fit failed
    if (singleStatus != null) {
        String name = PeakFit.getSolverName(fitConfig);
        if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera())
            name += " Camera";
        System.out.println("Failure counts: " + name);
        printFailures("Single", singleStatus);
        printFailures("Multi", multiStatus);
        printFailures("Doublet", doubletStatus);
        printFailures("Multi doublet", multiDoubletStatus);
    }
    StringBuilder sb = new StringBuilder(300);
    // Add information about the simulation
    //(simulationParameters.minSignal + simulationParameters.maxSignal) * 0.5;
    final double signal = simulationParameters.signalPerFrame;
    final int n = results.size();
    sb.append(imp.getStackSize()).append("\t");
    final int w = imp.getWidth();
    final int h = imp.getHeight();
    sb.append(w).append("\t");
    sb.append(h).append("\t");
    sb.append(n).append("\t");
    double density = ((double) n / imp.getStackSize()) / (w * h) / (simulationParameters.a * simulationParameters.a / 1e6);
    sb.append(Utils.rounded(density)).append("\t");
    sb.append(Utils.rounded(signal)).append("\t");
    sb.append(Utils.rounded(simulationParameters.s)).append("\t");
    sb.append(Utils.rounded(simulationParameters.a)).append("\t");
    sb.append(Utils.rounded(simulationParameters.depth)).append("\t");
    sb.append(simulationParameters.fixedDepth).append("\t");
    sb.append(Utils.rounded(simulationParameters.gain)).append("\t");
    sb.append(Utils.rounded(simulationParameters.readNoise)).append("\t");
    sb.append(Utils.rounded(simulationParameters.b)).append("\t");
    sb.append(Utils.rounded(simulationParameters.b2)).append("\t");
    // Compute the noise
    double noise = simulationParameters.b2;
    if (simulationParameters.emCCD) {
        // The b2 parameter was computed without application of the EM-CCD noise factor of 2.
        //final double b2 = backgroundVariance + readVariance
        //                = simulationParameters.b + readVariance
        // This should be applied only to the background variance.
        final double readVariance = noise - simulationParameters.b;
        noise = simulationParameters.b * 2 + readVariance;
    }
    if (simulationParameters.fullSimulation) {
    // The total signal is spread over frames
    }
    sb.append(Utils.rounded(signal / Math.sqrt(noise))).append("\t");
    sb.append(Utils.rounded(simulationParameters.s / simulationParameters.a)).append("\t");
    sb.append(spotFilter.getDescription());
    // nP and nN are the fractional scores of the spot candidates
    addCount(sb, nP + nN);
    addCount(sb, nP);
    addCount(sb, nN);
    addCount(sb, fP);
    addCount(sb, fN);
    String name = PeakFit.getSolverName(fitConfig);
    if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera())
        name += " Camera";
    add(sb, name);
    add(sb, config.getFitting());
    resultPrefix = sb.toString();
    // Q. Should I add other fit configuration here?
    // The fraction of positive and negative candidates that were included
    add(sb, (100.0 * cTP) / nP);
    add(sb, (100.0 * cFP) / nN);
    // Score the fitting results compared to the original simulation.
    // Score the candidate selection:
    add(sb, cTP + cFP);
    add(sb, cTP);
    add(sb, cFP);
    // TP are all candidates that can be matched to a spot
    // FP are all candidates that cannot be matched to a spot
    // FN = The number of missed spots
    FractionClassificationResult m = new FractionClassificationResult(cTP, cFP, 0, simulationParameters.molecules - cTP);
    add(sb, m.getRecall());
    add(sb, m.getPrecision());
    add(sb, m.getF1Score());
    add(sb, m.getJaccard());
    // Score the fitting results:
    add(sb, failcTP);
    add(sb, failcFP);
    // TP are all fit results that can be matched to a spot
    // FP are all fit results that cannot be matched to a spot
    // FN = The number of missed spots
    add(sb, tp);
    add(sb, fp);
    m = new FractionClassificationResult(tp, fp, 0, simulationParameters.molecules - tp);
    add(sb, m.getRecall());
    add(sb, m.getPrecision());
    add(sb, m.getF1Score());
    add(sb, m.getJaccard());
    // Do it again but pretend we can perfectly filter all the false positives
    //add(sb, tp);
    m = new FractionClassificationResult(tp, 0, 0, simulationParameters.molecules - tp);
    // Recall is unchanged
    // Precision will be 100%
    add(sb, m.getF1Score());
    add(sb, m.getJaccard());
    // The mean may be subject to extreme outliers so use the median
    double median = distanceStats.getMedian();
    add(sb, median);
    WindowOrganiser wo = new WindowOrganiser();
    String label = String.format("Recall = %s. n = %d. Median = %s nm. SD = %s nm", Utils.rounded(m.getRecall()), distanceStats.getN(), Utils.rounded(median), Utils.rounded(distanceStats.getStandardDeviation()));
    int id = Utils.showHistogram(TITLE, distanceStats, "Match Distance (nm)", 0, 0, 0, label);
    if (Utils.isNewWindow())
        wo.add(id);
    median = depthStats.getMedian();
    add(sb, median);
    // Sort by spot intensity and produce correlation
    int[] indices = Utils.newArray(i1.length, 0, 1);
    if (showCorrelation)
        Sort.sort(indices, is, rankByIntensity);
    double[] r = (showCorrelation) ? new double[i1.length] : null;
    double[] sr = (showCorrelation) ? new double[i1.length] : null;
    double[] rank = (showCorrelation) ? new double[i1.length] : null;
    ci = 0;
    FastCorrelator fastCorrelator = new FastCorrelator();
    ArrayList<Ranking> pc1 = new ArrayList<Ranking>();
    ArrayList<Ranking> pc2 = new ArrayList<Ranking>();
    for (int ci2 : indices) {
        fastCorrelator.add((long) Math.round(i1[ci2]), (long) Math.round(i2[ci2]));
        pc1.add(new Ranking(i1[ci2], ci));
        pc2.add(new Ranking(i2[ci2], ci));
        if (showCorrelation) {
            r[ci] = fastCorrelator.getCorrelation();
            sr[ci] = Correlator.correlation(rank(pc1), rank(pc2));
            if (rankByIntensity)
                rank[ci] = is[0] - is[ci];
            else
                rank[ci] = ci;
        }
        ci++;
    }
    final double pearsonCorr = fastCorrelator.getCorrelation();
    final double rankedCorr = Correlator.correlation(rank(pc1), rank(pc2));
    // Get the regression
    SimpleRegression regression = new SimpleRegression(false);
    for (int i = 0; i < pc1.size(); i++) regression.addData(pc1.get(i).value, pc2.get(i).value);
    //final double intercept = regression.getIntercept();
    final double slope = regression.getSlope();
    if (showCorrelation) {
        String title = TITLE + " Intensity";
        Plot plot = new Plot(title, "Candidate", "Spot");
        double[] limits1 = Maths.limits(i1);
        double[] limits2 = Maths.limits(i2);
        plot.setLimits(limits1[0], limits1[1], limits2[0], limits2[1]);
        label = String.format("Correlation=%s; Ranked=%s; Slope=%s", Utils.rounded(pearsonCorr), Utils.rounded(rankedCorr), Utils.rounded(slope));
        plot.addLabel(0, 0, label);
        plot.setColor(Color.red);
        plot.addPoints(i1, i2, Plot.DOT);
        if (slope > 1)
            plot.drawLine(limits1[0], limits1[0] * slope, limits1[1], limits1[1] * slope);
        else
            plot.drawLine(limits2[0] / slope, limits2[0], limits2[1] / slope, limits2[1]);
        PlotWindow pw = Utils.display(title, plot);
        if (Utils.isNewWindow())
            wo.add(pw);
        title = TITLE + " Correlation";
        plot = new Plot(title, "Spot Rank", "Correlation");
        double[] xlimits = Maths.limits(rank);
        double[] ylimits = Maths.limits(r);
        ylimits = Maths.limits(ylimits, sr);
        plot.setLimits(xlimits[0], xlimits[1], ylimits[0], ylimits[1]);
        plot.setColor(Color.red);
        plot.addPoints(rank, r, Plot.LINE);
        plot.setColor(Color.blue);
        plot.addPoints(rank, sr, Plot.LINE);
        plot.setColor(Color.black);
        plot.addLabel(0, 0, label);
        pw = Utils.display(title, plot);
        if (Utils.isNewWindow())
            wo.add(pw);
    }
    add(sb, pearsonCorr);
    add(sb, rankedCorr);
    add(sb, slope);
    label = String.format("n = %d. Median = %s nm", depthStats.getN(), Utils.rounded(median));
    id = Utils.showHistogram(TITLE, depthStats, "Match Depth (nm)", 0, 1, 0, label);
    if (Utils.isNewWindow())
        wo.add(id);
    // Plot histograms of the stats on the same window
    double[] lower = new double[filterCriteria.length];
    double[] upper = new double[lower.length];
    min = new double[lower.length];
    max = new double[lower.length];
    for (int i = 0; i < stats[0].length; i++) {
        double[] limits = showDoubleHistogram(stats, i, wo, matchScores, nPredicted);
        lower[i] = limits[0];
        upper[i] = limits[1];
        min[i] = limits[2];
        max[i] = limits[3];
    }
    // Reconfigure some of the range limits
    // Make this a bit bigger
    upper[FILTER_SIGNAL] *= 2;
    // Make this a bit bigger
    upper[FILTER_SNR] *= 2;
    double factor = 0.25;
    if (lower[FILTER_MIN_WIDTH] != 0)
        // (assuming lower is less than 1)
        upper[FILTER_MIN_WIDTH] = 1 - Math.max(0, factor * (1 - lower[FILTER_MIN_WIDTH]));
    if (upper[FILTER_MIN_WIDTH] != 0)
        // (assuming upper is more than 1)
        lower[FILTER_MAX_WIDTH] = 1 + Math.max(0, factor * (upper[FILTER_MAX_WIDTH] - 1));
    // Round the ranges
    final double[] interval = new double[stats[0].length];
    interval[FILTER_SIGNAL] = SignalFilter.DEFAULT_INCREMENT;
    interval[FILTER_SNR] = SNRFilter.DEFAULT_INCREMENT;
    interval[FILTER_MIN_WIDTH] = WidthFilter2.DEFAULT_MIN_INCREMENT;
    interval[FILTER_MAX_WIDTH] = WidthFilter.DEFAULT_INCREMENT;
    interval[FILTER_SHIFT] = ShiftFilter.DEFAULT_INCREMENT;
    interval[FILTER_ESHIFT] = EShiftFilter.DEFAULT_INCREMENT;
    interval[FILTER_PRECISION] = PrecisionFilter.DEFAULT_INCREMENT;
    interval[FILTER_ITERATIONS] = 0.1;
    interval[FILTER_EVALUATIONS] = 0.1;
    // Create a range increment
    double[] increment = new double[lower.length];
    for (int i = 0; i < increment.length; i++) {
        lower[i] = Maths.floor(lower[i], interval[i]);
        upper[i] = Maths.ceil(upper[i], interval[i]);
        double range = upper[i] - lower[i];
        // Allow clipping if the range is small compared to the min increment
        double multiples = range / interval[i];
        // Use 8 multiples for the equivalent of +/- 4 steps around the centre
        if (multiples < 8) {
            multiples = Math.ceil(multiples);
        } else
            multiples = 8;
        increment[i] = Maths.ceil(range / multiples, interval[i]);
        if (i == FILTER_MIN_WIDTH)
            // Requires clipping based on the upper limit
            lower[i] = upper[i] - increment[i] * multiples;
        else
            upper[i] = lower[i] + increment[i] * multiples;
    }
    for (int i = 0; i < stats[0].length; i++) {
        lower[i] = Maths.round(lower[i]);
        upper[i] = Maths.round(upper[i]);
        min[i] = Maths.round(min[i]);
        max[i] = Maths.round(max[i]);
        increment[i] = Maths.round(increment[i]);
        sb.append("\t").append(min[i]).append(':').append(lower[i]).append('-').append(upper[i]).append(':').append(max[i]);
    }
    // Disable some filters
    increment[FILTER_SIGNAL] = Double.POSITIVE_INFINITY;
    //increment[FILTER_SHIFT] = Double.POSITIVE_INFINITY;
    increment[FILTER_ESHIFT] = Double.POSITIVE_INFINITY;
    wo.tile();
    sb.append("\t").append(Utils.timeToString(runTime / 1000000.0));
    summaryTable.append(sb.toString());
    if (saveFilterRange) {
        GlobalSettings gs = SettingsManager.loadSettings();
        FilterSettings filterSettings = gs.getFilterSettings();
        String filename = (silent) ? filterSettings.filterSetFilename : Utils.getFilename("Filter_range_file", filterSettings.filterSetFilename);
        if (filename == null)
            return;
        // Remove extension to store the filename
        filename = Utils.replaceExtension(filename, ".xml");
        filterSettings.filterSetFilename = filename;
        // Create a filter set using the ranges
        ArrayList<Filter> filters = new ArrayList<Filter>(3);
        filters.add(new MultiFilter2(lower[0], (float) lower[1], lower[2], lower[3], lower[4], lower[5], lower[6]));
        filters.add(new MultiFilter2(upper[0], (float) upper[1], upper[2], upper[3], upper[4], upper[5], upper[6]));
        filters.add(new MultiFilter2(increment[0], (float) increment[1], increment[2], increment[3], increment[4], increment[5], increment[6]));
        if (saveFilters(filename, filters))
            SettingsManager.saveSettings(gs);
        // Create a filter set using the min/max and the initial bounds.
        // Set sensible limits
        min[FILTER_SIGNAL] = Math.max(min[FILTER_SIGNAL], 30);
        max[FILTER_PRECISION] = Math.min(max[FILTER_PRECISION], 100);
        // Commented this out so that the 4-set filters are the same as the 3-set filters.
        // The difference leads to differences when optimising.
        //			// Use half the initial bounds (hoping this is a good starting guess for the optimum)
        //			final boolean[] limitToLower = new boolean[min.length];
        //			limitToLower[FILTER_SIGNAL] = true;
        //			limitToLower[FILTER_SNR] = true;
        //			limitToLower[FILTER_MIN_WIDTH] = true;
        //			limitToLower[FILTER_MAX_WIDTH] = false;
        //			limitToLower[FILTER_SHIFT] = false;
        //			limitToLower[FILTER_ESHIFT] = false;
        //			limitToLower[FILTER_PRECISION] = true;
        //			for (int i = 0; i < limitToLower.length; i++)
        //			{
        //				final double range = (upper[i] - lower[i]) / 2;
        //				if (limitToLower[i])
        //					upper[i] = lower[i] + range;
        //				else
        //					lower[i] = upper[i] - range;
        //			}
        filters = new ArrayList<Filter>(4);
        filters.add(new MultiFilter2(min[0], (float) min[1], min[2], min[3], min[4], min[5], min[6]));
        filters.add(new MultiFilter2(lower[0], (float) lower[1], lower[2], lower[3], lower[4], lower[5], lower[6]));
        filters.add(new MultiFilter2(upper[0], (float) upper[1], upper[2], upper[3], upper[4], upper[5], upper[6]));
        filters.add(new MultiFilter2(max[0], (float) max[1], max[2], max[3], max[4], max[5], max[6]));
        saveFilters(Utils.replaceExtension(filename, ".4.xml"), filters);
    }
}
Also used : ArrayList(java.util.ArrayList) TIntHashSet(gnu.trove.set.hash.TIntHashSet) MultiPathFitResult(gdsc.smlm.results.filter.MultiPathFitResult) FractionalAssignment(gdsc.core.match.FractionalAssignment) PeakFractionalAssignment(gdsc.smlm.results.filter.PeakFractionalAssignment) ImmutableFractionalAssignment(gdsc.core.match.ImmutableFractionalAssignment) FractionClassificationResult(gdsc.core.match.FractionClassificationResult) BasePreprocessedPeakResult(gdsc.smlm.results.filter.BasePreprocessedPeakResult) PreprocessedPeakResult(gdsc.smlm.results.filter.PreprocessedPeakResult) SignalFilter(gdsc.smlm.results.filter.SignalFilter) FilterSettings(gdsc.smlm.ij.settings.FilterSettings) ScoredSpot(gdsc.smlm.ij.plugins.BenchmarkSpotFilter.ScoredSpot) FastCorrelator(gdsc.core.utils.FastCorrelator) Plot(ij.gui.Plot) StoredDataStatistics(gdsc.core.utils.StoredDataStatistics) PlotWindow(ij.gui.PlotWindow) GlobalSettings(gdsc.smlm.ij.settings.GlobalSettings) WindowOrganiser(ij.plugin.WindowOrganiser) PeakResultPoint(gdsc.smlm.ij.plugins.ResultsMatchCalculator.PeakResultPoint) BasePoint(gdsc.core.match.BasePoint) PeakFractionalAssignment(gdsc.smlm.results.filter.PeakFractionalAssignment) FractionScoreStore(gdsc.smlm.results.filter.MultiPathFilter.FractionScoreStore) SimpleRegression(org.apache.commons.math3.stat.regression.SimpleRegression) SignalFilter(gdsc.smlm.results.filter.SignalFilter) DirectFilter(gdsc.smlm.results.filter.DirectFilter) ShiftFilter(gdsc.smlm.results.filter.ShiftFilter) PrecisionFilter(gdsc.smlm.results.filter.PrecisionFilter) Filter(gdsc.smlm.results.filter.Filter) EShiftFilter(gdsc.smlm.results.filter.EShiftFilter) WidthFilter(gdsc.smlm.results.filter.WidthFilter) SNRFilter(gdsc.smlm.results.filter.SNRFilter) MultiPathFilter(gdsc.smlm.results.filter.MultiPathFilter) MaximaSpotFilter(gdsc.smlm.filters.MaximaSpotFilter) MultiFilter2(gdsc.smlm.results.filter.MultiFilter2) MultiPathFitResults(gdsc.smlm.results.filter.MultiPathFitResults) MultiPathFilter(gdsc.smlm.results.filter.MultiPathFilter)
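
In summariseResults the set is first filled with every unique result ID via the FractionScoreStore callback, each ID consumed by a fractional assignment is then removed, and the leftovers are visited with forEach to score the unmatched results. A self-contained sketch of that add/remove/forEach bookkeeping follows, assuming plain int arrays of IDs in place of the GDSC-SMLM result types.

import gnu.trove.procedure.TIntProcedure;
import gnu.trove.set.hash.TIntHashSet;

import java.util.ArrayList;
import java.util.List;

public class RemainingIdsSketch {

    /** Return the IDs that were scored but never consumed by an assignment. */
    static List<Integer> unassignedIds(int[] scoredIds, int[] assignedIds) {
        final TIntHashSet set = new TIntHashSet(scoredIds.length);
        for (int id : scoredIds)
            set.add(id);
        for (int id : assignedIds)
            set.remove(id); // drop every ID that was matched
        // Visit whatever is left; returning true from execute() continues the iteration
        final List<Integer> remaining = new ArrayList<>(set.size());
        set.forEach(new TIntProcedure() {
            public boolean execute(int id) {
                remaining.add(id);
                return true;
            }
        });
        return remaining;
    }
}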

Example 30 with TIntHashSet

Use of gnu.trove.set.hash.TIntHashSet in project GDSC-SMLM by aherbert.

From the class BenchmarkFilterAnalysis, method showOverlay.

/**
	 * Show overlay.
	 *
	 * @param allAssignments
	 *            The assignments generated from running the filter (or null)
	 * @param filter
	 *            the filter
	 * @return The results from running the filter (or null)
	 */
private PreprocessedPeakResult[] showOverlay(ArrayList<FractionalAssignment[]> allAssignments, DirectFilter filter) {
    ImagePlus imp = CreateData.getImage();
    if (imp == null)
        return null;
    // Run the filter manually to get the results that pass.
    if (allAssignments == null)
        allAssignments = getAssignments(filter);
    final Overlay o = new Overlay();
    // Do TP
    final TIntHashSet actual = new TIntHashSet();
    final TIntHashSet predicted = new TIntHashSet();
    //int tp = 0, fp = 0, fn = 0;
    for (FractionalAssignment[] assignments : allAssignments) {
        if (assignments == null || assignments.length == 0)
            continue;
        float[] tx = null, ty = null;
        int t = 0;
        //tp += assignments.length;
        if (showTP) {
            tx = new float[assignments.length];
            ty = new float[assignments.length];
        }
        int frame = 0;
        for (int i = 0; i < assignments.length; i++) {
            CustomFractionalAssignment c = (CustomFractionalAssignment) assignments[i];
            IdPeakResult peak = (IdPeakResult) c.peak;
            BasePreprocessedPeakResult spot = (BasePreprocessedPeakResult) c.peakResult;
            actual.add(peak.uniqueId);
            predicted.add(spot.getUniqueId());
            frame = spot.getFrame();
            if (showTP) {
                tx[t] = spot.getX();
                ty[t++] = spot.getY();
            }
        }
        if (showTP)
            SpotFinderPreview.addRoi(frame, o, tx, ty, t, Color.green);
    }
    float[] x = new float[10];
    float[] y = new float[x.length];
    float[] x2 = new float[10];
    float[] y2 = new float[x2.length];
    // Do FP (all remaining results that are not a TP)
    PreprocessedPeakResult[] filterResults = null;
    if (showFP) {
        final MultiPathFilter multiPathFilter = createMPF(filter, minimalFilter);
        //multiPathFilter.setDebugFile("/tmp/filter.txt");
        filterResults = filterResults(multiPathFilter);
        int frame = 0;
        int c = 0;
        int c2 = 0;
        for (int i = 0; i < filterResults.length; i++) {
            if (frame != filterResults[i].getFrame()) {
                if (c != 0)
                    SpotFinderPreview.addRoi(frame, o, x, y, c, Color.red);
                if (c2 != 0)
                    SpotFinderPreview.addRoi(frame, o, x2, y2, c2, Color.magenta);
                c = c2 = 0;
            }
            frame = filterResults[i].getFrame();
            if (predicted.contains(filterResults[i].getUniqueId()))
                continue;
            if (filterResults[i].ignore()) {
                if (x2.length == c2) {
                    x2 = Arrays.copyOf(x2, c2 * 2);
                    y2 = Arrays.copyOf(y2, c2 * 2);
                }
                x2[c2] = filterResults[i].getX();
                y2[c2++] = filterResults[i].getY();
            } else {
                if (x.length == c) {
                    x = Arrays.copyOf(x, c * 2);
                    y = Arrays.copyOf(y, c * 2);
                }
                x[c] = filterResults[i].getX();
                y[c++] = filterResults[i].getY();
            }
        }
        //fp += c;
        if (c != 0)
            SpotFinderPreview.addRoi(frame, o, x, y, c, Color.red);
        if (c2 != 0)
            SpotFinderPreview.addRoi(frame, o, x2, y2, c2, Color.magenta);
    }
    // Do FN (all remaining peaks that have not been matched)
    if (showFN) {
        final boolean checkBorder = (BenchmarkSpotFilter.lastAnalysisBorder != null && BenchmarkSpotFilter.lastAnalysisBorder.x != 0);
        final float border, xlimit, ylimit;
        if (checkBorder) {
            final Rectangle lastAnalysisBorder = BenchmarkSpotFilter.lastAnalysisBorder;
            border = lastAnalysisBorder.x;
            xlimit = lastAnalysisBorder.x + lastAnalysisBorder.width;
            ylimit = lastAnalysisBorder.y + lastAnalysisBorder.height;
        } else
            border = xlimit = ylimit = 0;
        // Add the results to the lists
        actualCoordinates.forEachEntry(new CustomTIntObjectProcedure(x, y, x2, y2) {

            public boolean execute(int frame, IdPeakResult[] results) {
                int c = 0, c2 = 0;
                if (x.length <= results.length) {
                    x = new float[results.length];
                    y = new float[results.length];
                }
                if (x2.length <= results.length) {
                    x2 = new float[results.length];
                    y2 = new float[results.length];
                }
                for (int i = 0; i < results.length; i++) {
                    // Ignore those that were matched by TP
                    if (actual.contains(results[i].uniqueId))
                        continue;
                    if (checkBorder && outsideBorder(results[i], border, xlimit, ylimit)) {
                        x2[c2] = results[i].getXPosition();
                        y2[c2++] = results[i].getYPosition();
                    } else {
                        x[c] = results[i].getXPosition();
                        y[c++] = results[i].getYPosition();
                    }
                }
                //fn += c;
                if (c != 0)
                    SpotFinderPreview.addRoi(frame, o, x, y, c, Color.yellow);
                if (c2 != 0)
                    SpotFinderPreview.addRoi(frame, o, x2, y2, c2, Color.orange);
                return true;
            }
        });
    }
    //System.out.printf("TP=%d, FP=%d, FN=%d, N=%d (%d)\n", tp, fp, fn, tp + fn, results.size());
    imp.setOverlay(o);
    return filterResults;
}
Also used : BasePreprocessedPeakResult(gdsc.smlm.results.filter.BasePreprocessedPeakResult) Rectangle(java.awt.Rectangle) ImagePlus(ij.ImagePlus) TIntHashSet(gnu.trove.set.hash.TIntHashSet) FractionalAssignment(gdsc.core.match.FractionalAssignment) PeakFractionalAssignment(gdsc.smlm.results.filter.PeakFractionalAssignment) BasePreprocessedPeakResult(gdsc.smlm.results.filter.BasePreprocessedPeakResult) PreprocessedPeakResult(gdsc.smlm.results.filter.PreprocessedPeakResult) MultiPathFilter(gdsc.smlm.results.filter.MultiPathFilter) Overlay(ij.gui.Overlay)
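
showOverlay keeps two sets of unique IDs: predicted (filter results that matched a true peak, so any other accepted result is a false positive) and actual (ground-truth peaks that were matched, so any other peak is a false negative). A minimal sketch of that classification, using plain int arrays instead of the GDSC-SMLM result types, could look like this:

import gnu.trove.set.hash.TIntHashSet;

public class OverlayClassificationSketch {

    /** Count FP among filter results and FN among ground truth, given the matched ID sets. */
    static int[] countFpFn(int[] matchedPredictedIds, int[] matchedActualIds,
            int[] allFilterResultIds, int[] allActualIds) {
        TIntHashSet predicted = new TIntHashSet(matchedPredictedIds.length);
        for (int id : matchedPredictedIds)
            predicted.add(id);
        TIntHashSet actual = new TIntHashSet(matchedActualIds.length);
        for (int id : matchedActualIds)
            actual.add(id);
        int fp = 0, fn = 0;
        for (int id : allFilterResultIds)
            if (!predicted.contains(id))
                fp++; // passed the filter but never matched a true peak
        for (int id : allActualIds)
            if (!actual.contains(id))
                fn++; // true peak that no accepted result matched
        return new int[] { fp, fn };
    }
}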

Aggregations

TIntHashSet (gnu.trove.set.hash.TIntHashSet): 49 uses
ArrayList (java.util.ArrayList): 16 uses
TIntSet (gnu.trove.set.TIntSet): 8 uses
TIntArrayList (gnu.trove.list.array.TIntArrayList): 7 uses
TIntProcedure (gnu.trove.procedure.TIntProcedure): 7 uses
IJ (ij.IJ): 6 uses
PointPair (uk.ac.sussex.gdsc.core.match.PointPair): 6 uses
MemoryPeakResults (uk.ac.sussex.gdsc.smlm.results.MemoryPeakResults): 6 uses
PeakResultPoint (uk.ac.sussex.gdsc.smlm.results.PeakResultPoint): 6 uses
PlugIn (ij.plugin.PlugIn): 5 uses
TextWindow (ij.text.TextWindow): 5 uses
List (java.util.List): 5 uses
AtomicReference (java.util.concurrent.atomic.AtomicReference): 5 uses
ImageJUtils (uk.ac.sussex.gdsc.core.ij.ImageJUtils): 5 uses
ExtendedGenericDialog (uk.ac.sussex.gdsc.core.ij.gui.ExtendedGenericDialog): 5 uses
TextUtils (uk.ac.sussex.gdsc.core.utils.TextUtils): 5 uses
TIntIterator (gnu.trove.iterator.TIntIterator): 4 uses
TIntObjectHashMap (gnu.trove.map.hash.TIntObjectHashMap): 4 uses
Prefs (ij.Prefs): 4 uses
Point (java.awt.Point): 4 uses