Search in sources:

Example 1 with PreprocessedPeakResult

Use of gdsc.smlm.results.filter.PreprocessedPeakResult in project GDSC-SMLM by aherbert.

In the class FitConfiguration, the method validatePeak:

/**
	 * Check peak to see if the fit was sensible
	 * 
	 * @param n
	 *            The peak number
	 * @param initialParams
	 *            The initial peak parameters
	 * @param params
	 *            The fitted peak parameters
	 * @return The fit status: FitStatus.OK if the peak passes the criteria, otherwise the reason for failure
	 */
public FitStatus validatePeak(int n, double[] initialParams, double[] params) {
    if (isDirectFilter()) {
        // Always specify a new result; we have no local background or offset
        PreprocessedPeakResult peak = createPreprocessedPeakResult(0, n, initialParams, params, 0, ResultType.NEW, 0, 0, false);
        if (directFilter.accept(peak))
            return setValidationResult(FitStatus.OK, null);
        if (log != null) {
            log.info("Bad peak %d: %s", peak.getId(), DirectFilter.getStatusMessage(peak, directFilter.getResult()));
        }
        if (DirectFilter.anySet(directFilter.getResult(), V_X_SD_FACTOR | V_Y_SD_FACTOR)) {
            return setValidationResult(FitStatus.WIDTH_DIVERGED, null);
        }
        // At the moment we do not get any other validation data
        return setValidationResult(FitStatus.FAILED_SMART_FILTER, null);
    }
    // Additional peaks will be neighbours. In the future we may want to control this better.
    if (isRegionValidation()) {
        final int offset = n * 6;
        final double x = params[Gaussian2DFunction.X_POSITION + offset] + coordinateOffset;
        final double y = params[Gaussian2DFunction.Y_POSITION + offset] + coordinateOffset;
        if (x <= 0 || x >= fitRegionWidth || y <= 0 || y >= fitRegionHeight) {
            if (log != null) {
                log.info("Bad peak %d: Coordinates outside fit region (x=%g,y=%g) <> %d,%d", n, x, y, fitRegionWidth, fitRegionHeight);
            }
            return setValidationResult(FitStatus.OUTSIDE_FIT_REGION, new double[] { x, y, fitRegionWidth, fitRegionHeight });
        }
    }
    if (isDisableSimpleFilter())
        return setValidationResult(FitStatus.OK, null);
    final int offset = n * 6;
    // Check spot movement
    final double xShift = params[Gaussian2DFunction.X_POSITION + offset] - initialParams[Gaussian2DFunction.X_POSITION + offset];
    final double yShift = params[Gaussian2DFunction.Y_POSITION + offset] - initialParams[Gaussian2DFunction.Y_POSITION + offset];
    final double maxShift = coordinateShift;
    if (Math.abs(xShift) > maxShift || Math.abs(yShift) > maxShift) {
        if (log != null) {
            log.info("Bad peak %d: Fitted coordinates moved (x=%g,y=%g) > %g", n, xShift, yShift, maxShift);
        }
        return setValidationResult(FitStatus.COORDINATES_MOVED, new double[] { xShift, yShift });
    }
    // Check signal threshold
    final double signal = params[Gaussian2DFunction.SIGNAL + offset];
    // Compare the signal to the desired signal strength
    if (signal < signalThreshold) {
        if (log != null) {
            log.info("Bad peak %d: Insufficient signal %g (SNR=%g)\n", n, signal / ((gain > 0) ? gain : 1), signal / noise);
        }
        //	System.out.printf("Bad peak %d: Insufficient signal (%gx)\n", n, signal / noise);
        return setValidationResult(FitStatus.INSUFFICIENT_SIGNAL, signal);
    }
    // Check widths
    if (isWidth0Fitting()) {
        boolean badWidth = false;
        double xFactor = 0, yFactor = 0;
        xFactor = params[Gaussian2DFunction.X_SD + offset] / initialParams[Gaussian2DFunction.X_SD + offset];
        badWidth = (xFactor > widthFactor || xFactor < minWidthFactor);
        // Always do this (even if badWidth=true) since we need the factor for the return value
        if (isWidth1Fitting()) {
            yFactor = params[Gaussian2DFunction.Y_SD + offset] / initialParams[Gaussian2DFunction.Y_SD + offset];
            if (!badWidth)
                badWidth = (yFactor > widthFactor || yFactor < minWidthFactor);
        } else {
            yFactor = xFactor;
        }
        if (badWidth) {
            if (log != null) {
                log.info("Bad peak %d: Fitted width diverged (x=%gx,y=%gx)\n", n, xFactor, yFactor);
            }
            return setValidationResult(FitStatus.WIDTH_DIVERGED, new double[] { xFactor, yFactor });
        }
    }
    // Check precision
    if (precisionThreshold > 0 && nmPerPixel > 0 && gain > 0) {
        final double sd = (params[Gaussian2DFunction.X_SD + offset] + params[Gaussian2DFunction.Y_SD + offset]) * 0.5;
        final double variance = getVariance(params[Gaussian2DFunction.BACKGROUND], signal, sd, this.precisionUsingBackground);
        if (variance > precisionThreshold) {
            final double precision = Math.sqrt(variance);
            if (log != null) {
                log.info("Bad peak %d: Insufficient precision (%gx)\n", n, precision);
            }
            return setValidationResult(FitStatus.INSUFFICIENT_PRECISION, precision);
        }
    }
    return setValidationResult(FitStatus.OK, null);
}
Also used : PreprocessedPeakResult(gdsc.smlm.results.filter.PreprocessedPeakResult) BasePreprocessedPeakResult(gdsc.smlm.results.filter.BasePreprocessedPeakResult)
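
For orientation, the simple-filter checks above reduce to three threshold comparisons on the fitted parameters. The sketch below re-states that logic in isolation; it is a minimal, hypothetical helper (class and field names are illustrative, thresholds are assumed defaults), not part of the GDSC-SMLM API, and it omits the region, precision and smart-filter branches.

// Simplified re-statement of the shift, signal and width-factor checks from validatePeak.
// All names here are illustrative; the thresholds are assumed example values.
public class SimplePeakCheck {

    double maxShift = 1.0;         // maximum allowed movement of the centre (pixels)
    double signalThreshold = 30;   // minimum fitted signal
    double widthFactor = 2.0;      // maximum allowed width expansion factor
    double minWidthFactor = 0.5;   // minimum allowed width shrinkage factor

    /** Returns null if the peak passes, otherwise a short failure reason. */
    public String check(double initialX, double initialY, double initialSd,
                        double fittedX, double fittedY, double fittedSd, double signal) {
        // Check spot movement
        if (Math.abs(fittedX - initialX) > maxShift || Math.abs(fittedY - initialY) > maxShift)
            return "coordinates moved";
        // Check signal threshold
        if (signal < signalThreshold)
            return "insufficient signal";
        // Check width divergence relative to the initial estimate
        final double factor = fittedSd / initialSd;
        if (factor > widthFactor || factor < minWidthFactor)
            return "width diverged";
        return null;
    }
}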

Example 2 with PreprocessedPeakResult

Use of gdsc.smlm.results.filter.PreprocessedPeakResult in project GDSC-SMLM by aherbert.

In the class BenchmarkFilterAnalysis, the method createResults:

/**
	 * Create peak results.
	 *
	 * @param filterResults
	 *            The results from running the filter (or null)
	 * @param filter
	 *            The filter
	 * @param withBorder
	 *            Set to true to respect the border of the spot filter (as used by the FitWorker)
	 * @return The peak results
	 */
private MemoryPeakResults createResults(PreprocessedPeakResult[] filterResults, DirectFilter filter, boolean withBorder) {
    if (filterResults == null) {
        final MultiPathFilter multiPathFilter = createMPF(filter, minimalFilter);
        //multiPathFilter.setDebugFile("/tmp/filter.txt");
        filterResults = filterResults(multiPathFilter);
    }
    MemoryPeakResults results = new MemoryPeakResults();
    results.copySettings(this.results);
    results.setName(TITLE);
    if (withBorder) {
        // To produce the same results as the PeakFit plugin we must implement the border
        // functionality used in the FitWorker. This respects the border of the spot filter.
        FitEngineConfiguration config = new FitEngineConfiguration(new FitConfiguration());
        updateAllConfiguration(config);
        MaximaSpotFilter spotFilter = config.createSpotFilter(true);
        final int border = spotFilter.getBorder();
        int[] bounds = getBounds();
        final int borderLimitX = bounds[0] - border;
        final int borderLimitY = bounds[1] - border;
        for (PreprocessedPeakResult spot : filterResults) {
            if (spot.getX() > border && spot.getX() < borderLimitX && spot.getY() > border && spot.getY() < borderLimitY) {
                double[] p = spot.toGaussian2DParameters();
                float[] params = new float[p.length];
                for (int j = 0; j < p.length; j++) params[j] = (float) p[j];
                int frame = spot.getFrame();
                int origX = (int) p[Gaussian2DFunction.X_POSITION];
                int origY = (int) p[Gaussian2DFunction.Y_POSITION];
                results.addf(frame, origX, origY, 0, 0, spot.getNoise(), params, null);
            }
        }
    } else {
        for (PreprocessedPeakResult spot : filterResults) {
            double[] p = spot.toGaussian2DParameters();
            float[] params = new float[p.length];
            for (int j = 0; j < p.length; j++) params[j] = (float) p[j];
            int frame = spot.getFrame();
            int origX = (int) p[Gaussian2DFunction.X_POSITION];
            int origY = (int) p[Gaussian2DFunction.Y_POSITION];
            results.addf(frame, origX, origY, 0, 0, spot.getNoise(), params, null);
        }
    }
    return results;
}
Also used : FitEngineConfiguration(gdsc.smlm.engine.FitEngineConfiguration) FitConfiguration(gdsc.smlm.fitting.FitConfiguration) MaximaSpotFilter(gdsc.smlm.filters.MaximaSpotFilter) MultiPathFilter(gdsc.smlm.results.filter.MultiPathFilter) BasePreprocessedPeakResult(gdsc.smlm.results.filter.BasePreprocessedPeakResult) PreprocessedPeakResult(gdsc.smlm.results.filter.PreprocessedPeakResult) MemoryPeakResults(gdsc.smlm.results.MemoryPeakResults)
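
The border handling above keeps only spots whose centre lies strictly inside the analysis bounds shrunk by the spot-filter border. A minimal, self-contained version of that single test is sketched below (illustrative helper, not the GDSC-SMLM API):

// Keep a spot only if it lies strictly inside the bounds shrunk by the border,
// mirroring the border/borderLimitX/borderLimitY test used above. Illustrative code.
public class BorderCheck {
    public static boolean insideBorder(float x, float y, int width, int height, int border) {
        final int limitX = width - border;
        final int limitY = height - border;
        return x > border && x < limitX && y > border && y < limitY;
    }

    public static void main(String[] args) {
        // For a 64x64 region with a 4 pixel border: (3,10) is rejected, (10,10) is kept.
        System.out.println(insideBorder(3f, 10f, 64, 64, 4));   // false
        System.out.println(insideBorder(10f, 10f, 64, 64, 4));  // true
    }
}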

Example 3 with PreprocessedPeakResult

Use of gdsc.smlm.results.filter.PreprocessedPeakResult in project GDSC-SMLM by aherbert.

In the class FitWorker, the method add:

/*
	 * (non-Javadoc)
	 * 
	 * @see gdsc.smlm.results.filter.MultiPathFilter.SelectedResultStore#add(gdsc.smlm.results.filter.MultiPathFilter.
	 * SelectedResult)
	 */
public void add(SelectedResult selectedResult) {
    // TODO - Print the current state of the dynamicMultiPathFitResult to file.
    // This will allow debugging what is different between the benchmark fit and the PeakFit.
    // Output:
    // slice
    // candidate Id
    // Initial and final params for each fit result.
    // Details of the selected result.
    // Then try to figure out why the benchmark fit deviates from PeakFit.
    // Add to the slice results.
    final PreprocessedPeakResult[] results = selectedResult.results;
    if (results == null)
        return;
    final int currentSize = sliceResults.size();
    final int candidateId = dynamicMultiPathFitResult.candidateId;
    final FitResult fitResult = (FitResult) selectedResult.fitResult.data;
    // The background for each result was the local background. We want the fitted global background
    final float background = (float) fitResult.getParameters()[0];
    final double[] dev = fitResult.getParameterStdDev();
    if (queueSize != 0)
        throw new RuntimeException("There are results queued already!");
    for (int i = 0; i < results.length; i++) {
        if (results[i].isExistingResult())
            continue;
        if (results[i].isNewResult()) {
            final double[] p = results[i].toGaussian2DParameters();
            // Store slice results relative to the data frame (not the global bounds)
            // Convert back so that 0,0 is the top left of the data bounds
            p[Gaussian2DFunction.X_POSITION] -= cc.dataBounds.x;
            p[Gaussian2DFunction.Y_POSITION] -= cc.dataBounds.y;
            final float[] params = new float[7];
            params[Gaussian2DFunction.BACKGROUND] = background;
            for (int j = 1; j < 7; j++) params[j] = (float) p[j];
            final float[] paramsDev;
            if (dev == null) {
                paramsDev = null;
            } else {
                paramsDev = new float[7];
                paramsDev[Gaussian2DFunction.BACKGROUND] = (float) dev[Gaussian2DFunction.BACKGROUND];
                final int offset = results[i].getId() * 6;
                for (int j = 1; j < 7; j++) paramsDev[j] = (float) dev[offset + j];
            }
            addSingleResult(results[i].getCandidateId(), params, paramsDev, fitResult.getError(), results[i].getNoise());
            if (logger != null) {
                // Show the shift, signal and width spread
                PreprocessedPeakResult peak = results[i];
                logger.info("Fit OK %d (%.1f,%.1f) [%d]: Shift = %.3f,%.3f : SNR = %.2f : Width = %.2f,%.2f", peak.getCandidateId(), peak.getX(), peak.getY(), peak.getId(), Math.sqrt(peak.getXRelativeShift2()), Math.sqrt(peak.getYRelativeShift2()), peak.getSNR(), peak.getXSDFactor(), peak.getYSDFactor());
            }
        } else {
        // This is a candidate that passed validation. Store the estimate as passing the primary filter.
        // We now do this in the pass() method.
        //storeEstimate(results[i].getCandidateId(), results[i], FILTER_RANK_PRIMARY);
        }
    }
    job.setFitResult(candidateId, fitResult);
    // Reporting
    if (this.counter != null) {
        FitType fitType = dynamicMultiPathFitResult.fitType;
        if (selectedResult.fitResult.getStatus() == 0) {
            fitType.setOK(true);
            if (dynamicMultiPathFitResult.getSuperMultiFitResult() == selectedResult.fitResult)
                fitType.setMultiOK(true);
            else if (dynamicMultiPathFitResult.getSuperMultiDoubletFitResult() == selectedResult.fitResult)
                fitType.setMultiDoubletOK(true);
            else if (dynamicMultiPathFitResult.getSuperDoubletFitResult() == selectedResult.fitResult)
                fitType.setDoubletOK(true);
        }
        add(fitType);
    }
    if (logger != null) {
        switch(fitResult.getStatus()) {
            case OK:
                // We log good results in the loop above. 
                break;
            case BAD_PARAMETERS:
            case FAILED_TO_ESTIMATE_WIDTH:
                logger.info("Bad parameters: %s", Arrays.toString(fitResult.getInitialParameters()));
                break;
            default:
                logger.info(fitResult.getStatus().toString());
                break;
        }
    }
    // Debugging
    if (logger2 != null) {
        double[] peakParams = fitResult.getParameters();
        if (peakParams != null) {
            // Parameters are the raw values from fitting the region. Convert for logging.
            peakParams = Arrays.copyOf(peakParams, peakParams.length);
            int npeaks = peakParams.length / 6;
            for (int i = 0; i < npeaks; i++) {
                peakParams[i * 6 + Gaussian2DFunction.X_POSITION] += cc.fromFitRegionToGlobalX();
                peakParams[i * 6 + Gaussian2DFunction.Y_POSITION] += cc.fromFitRegionToGlobalY();
                peakParams[i * 6 + Gaussian2DFunction.SHAPE] *= 180.0 / Math.PI;
            }
        }
        final int x = candidates.get(candidateId).x;
        final int y = candidates.get(candidateId).y;
        logger2.debug("%d:%d [%d,%d] %s (%s) = %s\n", slice, candidateId, cc.fromDataToGlobalX(x), cc.fromDataToGlobalY(y), fitResult.getStatus(), fitResult.getStatusData(), Arrays.toString(peakParams));
    }
    // Check if there were any new results
    int npeaks = sliceResults.size() - currentSize;
    if (npeaks != 0) {
        success++;
        // Support for post-processing filter 
        if (resultFilter != null) {
            // Check all result peaks for the distance to the filter positions
            PeakResult[] peakResults = new PeakResult[npeaks];
            for (int i = sliceResults.size(); npeaks-- > 0; ) {
                peakResults[npeaks] = sliceResults.get(--i);
            }
            resultFilter.filter(fitResult, candidateId, peakResults);
        }
    }
}
Also used : PreprocessedPeakResult(gdsc.smlm.results.filter.PreprocessedPeakResult) FitResult(gdsc.smlm.fitting.FitResult) MultiPathFitResult(gdsc.smlm.results.filter.MultiPathFitResult) PreprocessedPeakResult(gdsc.smlm.results.filter.PreprocessedPeakResult) PeakResult(gdsc.smlm.results.PeakResult) IdPeakResult(gdsc.smlm.results.IdPeakResult) ExtendedPeakResult(gdsc.smlm.results.ExtendedPeakResult)
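
The deviation copy above relies on the multi-peak parameter layout: one shared background followed by six parameters per peak, so peak id starts at offset id * 6. The hypothetical helper below shows that unpacking on its own (illustrative, assuming the same 1 + 6n layout; not a GDSC-SMLM method):

// Extract the background plus the six per-peak values for peak 'id' from an array
// laid out as [background, 6 values for peak 0, 6 values for peak 1, ...].
// Illustrative helper only.
public class PeakParamExtractor {
    public static float[] extract(double[] values, int id) {
        final float[] out = new float[7];
        out[0] = (float) values[0];              // shared background term
        final int offset = id * 6;
        for (int j = 1; j < 7; j++)
            out[j] = (float) values[offset + j]; // six values for this peak
        return out;
    }
}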

Example 4 with PreprocessedPeakResult

Use of gdsc.smlm.results.filter.PreprocessedPeakResult in project GDSC-SMLM by aherbert.

In the class BenchmarkSpotFit, the method summariseResults:

private void summariseResults(TIntObjectHashMap<FilterCandidates> filterCandidates, long runTime, final PreprocessedPeakResult[] preprocessedPeakResults, int nUniqueIDs) {
    createTable();
    // Summarise the fitting results. N fits, N failures. 
    // Optimal match statistics if filtering is perfect (since fitting is not perfect).
    StoredDataStatistics distanceStats = new StoredDataStatistics();
    StoredDataStatistics depthStats = new StoredDataStatistics();
    // Get stats for all fitted results and those that match 
    // Signal, SNR, Width, xShift, yShift, Precision
    createFilterCriteria();
    StoredDataStatistics[][] stats = new StoredDataStatistics[3][filterCriteria.length];
    for (int i = 0; i < stats.length; i++) for (int j = 0; j < stats[i].length; j++) stats[i][j] = new StoredDataStatistics();
    final double nmPerPixel = simulationParameters.a;
    double tp = 0, fp = 0;
    int failcTP = 0, failcFP = 0;
    int cTP = 0, cFP = 0;
    int[] singleStatus = null, multiStatus = null, doubletStatus = null, multiDoubletStatus = null;
    singleStatus = new int[FitStatus.values().length];
    multiStatus = new int[singleStatus.length];
    doubletStatus = new int[singleStatus.length];
    multiDoubletStatus = new int[singleStatus.length];
    // Easier to materialise the values since we have a lot of non final variables to manipulate
    final int[] frames = new int[filterCandidates.size()];
    final FilterCandidates[] candidates = new FilterCandidates[filterCandidates.size()];
    final int[] counter = new int[1];
    filterCandidates.forEachEntry(new TIntObjectProcedure<FilterCandidates>() {

        public boolean execute(int a, FilterCandidates b) {
            frames[counter[0]] = a;
            candidates[counter[0]] = b;
            counter[0]++;
            return true;
        }
    });
    for (FilterCandidates result : candidates) {
        // Count the number of fit results that matched (tp) and did not match (fp)
        tp += result.tp;
        fp += result.fp;
        for (int i = 0; i < result.fitResult.length; i++) {
            if (result.spots[i].match)
                cTP++;
            else
                cFP++;
            final MultiPathFitResult fitResult = result.fitResult[i];
            if (singleStatus != null && result.spots[i].match) {
                // Debugging reasons for fit failure
                addStatus(singleStatus, fitResult.getSingleFitResult());
                addStatus(multiStatus, fitResult.getMultiFitResult());
                addStatus(doubletStatus, fitResult.getDoubletFitResult());
                addStatus(multiDoubletStatus, fitResult.getMultiDoubletFitResult());
            }
            if (noMatch(fitResult)) {
                if (result.spots[i].match)
                    failcTP++;
                else
                    failcFP++;
            }
            // We have multi-path results.
            // We want statistics for:
            // [0] all fitted spots
            // [1] fitted spots that match a result
            // [2] fitted spots that do not match a result
            addToStats(fitResult.getSingleFitResult(), stats);
            addToStats(fitResult.getMultiFitResult(), stats);
            addToStats(fitResult.getDoubletFitResult(), stats);
            addToStats(fitResult.getMultiDoubletFitResult(), stats);
        }
        // Statistics on spots that fit an actual result
        for (int i = 0; i < result.match.length; i++) {
            if (!result.match[i].isFitResult())
                // For now just ignore the candidates that matched
                continue;
            FitMatch fitMatch = (FitMatch) result.match[i];
            distanceStats.add(fitMatch.d * nmPerPixel);
            depthStats.add(fitMatch.z * nmPerPixel);
        }
    }
    // Store data for computing correlation
    double[] i1 = new double[depthStats.getN()];
    double[] i2 = new double[i1.length];
    double[] is = new double[i1.length];
    int ci = 0;
    for (FilterCandidates result : candidates) {
        for (int i = 0; i < result.match.length; i++) {
            if (!result.match[i].isFitResult())
                // For now just ignore the candidates that matched
                continue;
            FitMatch fitMatch = (FitMatch) result.match[i];
            ScoredSpot spot = result.spots[fitMatch.i];
            i1[ci] = fitMatch.predictedSignal;
            i2[ci] = fitMatch.actualSignal;
            is[ci] = spot.spot.intensity;
            ci++;
        }
    }
    // We want to compute the Jaccard against the spot metric
    // Filter the results using the multi-path filter
    ArrayList<MultiPathFitResults> multiPathResults = new ArrayList<MultiPathFitResults>(filterCandidates.size());
    for (int i = 0; i < frames.length; i++) {
        int frame = frames[i];
        MultiPathFitResult[] multiPathFitResults = candidates[i].fitResult;
        int totalCandidates = candidates[i].spots.length;
        int nActual = actualCoordinates.get(frame).size();
        multiPathResults.add(new MultiPathFitResults(frame, multiPathFitResults, totalCandidates, nActual));
    }
    // Score the results and count the number returned
    List<FractionalAssignment[]> assignments = new ArrayList<FractionalAssignment[]>();
    final TIntHashSet set = new TIntHashSet(nUniqueIDs);
    FractionScoreStore scoreStore = new FractionScoreStore() {

        public void add(int uniqueId) {
            set.add(uniqueId);
        }
    };
    MultiPathFitResults[] multiResults = multiPathResults.toArray(new MultiPathFitResults[multiPathResults.size()]);
    // Filter with no filter
    MultiPathFilter mpf = new MultiPathFilter(new SignalFilter(0), null, multiFilter.residualsThreshold);
    FractionClassificationResult fractionResult = mpf.fractionScoreSubset(multiResults, Integer.MAX_VALUE, this.results.size(), assignments, scoreStore, CoordinateStoreFactory.create(imp.getWidth(), imp.getHeight(), fitConfig.getDuplicateDistance()));
    double nPredicted = fractionResult.getTP() + fractionResult.getFP();
    final double[][] matchScores = new double[set.size()][];
    int count = 0;
    for (int i = 0; i < assignments.size(); i++) {
        FractionalAssignment[] a = assignments.get(i);
        if (a == null)
            continue;
        for (int j = 0; j < a.length; j++) {
            final PreprocessedPeakResult r = ((PeakFractionalAssignment) a[j]).peakResult;
            set.remove(r.getUniqueId());
            final double precision = Math.sqrt(r.getLocationVariance());
            final double signal = r.getSignal();
            final double snr = r.getSNR();
            final double width = r.getXSDFactor();
            final double xShift = r.getXRelativeShift2();
            final double yShift = r.getYRelativeShift2();
            // These two are combined for filtering and the max is what matters.
            final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
            final double eshift = Math.sqrt(xShift + yShift);
            final double[] score = new double[8];
            score[FILTER_SIGNAL] = signal;
            score[FILTER_SNR] = snr;
            score[FILTER_MIN_WIDTH] = width;
            score[FILTER_MAX_WIDTH] = width;
            score[FILTER_SHIFT] = shift;
            score[FILTER_ESHIFT] = eshift;
            score[FILTER_PRECISION] = precision;
            score[FILTER_PRECISION + 1] = a[j].getScore();
            matchScores[count++] = score;
        }
    }
    // Add the rest
    set.forEach(new CustomTIntProcedure(count) {

        public boolean execute(int uniqueId) {
            // This should not be null or something has gone wrong
            PreprocessedPeakResult r = preprocessedPeakResults[uniqueId];
            if (r == null)
                throw new RuntimeException("Missing result: " + uniqueId);
            final double precision = Math.sqrt(r.getLocationVariance());
            final double signal = r.getSignal();
            final double snr = r.getSNR();
            final double width = r.getXSDFactor();
            final double xShift = r.getXRelativeShift2();
            final double yShift = r.getYRelativeShift2();
            // These two are combined for filtering and the max is what matters.
            final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
            final double eshift = Math.sqrt(xShift + yShift);
            final double[] score = new double[8];
            score[FILTER_SIGNAL] = signal;
            score[FILTER_SNR] = snr;
            score[FILTER_MIN_WIDTH] = width;
            score[FILTER_MAX_WIDTH] = width;
            score[FILTER_SHIFT] = shift;
            score[FILTER_ESHIFT] = eshift;
            score[FILTER_PRECISION] = precision;
            matchScores[c++] = score;
            return true;
        }
    });
    // Debug the reasons the fit failed
    if (singleStatus != null) {
        String name = PeakFit.getSolverName(fitConfig);
        if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera())
            name += " Camera";
        System.out.println("Failure counts: " + name);
        printFailures("Single", singleStatus);
        printFailures("Multi", multiStatus);
        printFailures("Doublet", doubletStatus);
        printFailures("Multi doublet", multiDoubletStatus);
    }
    StringBuilder sb = new StringBuilder(300);
    // Add information about the simulation
    //(simulationParameters.minSignal + simulationParameters.maxSignal) * 0.5;
    final double signal = simulationParameters.signalPerFrame;
    final int n = results.size();
    sb.append(imp.getStackSize()).append("\t");
    final int w = imp.getWidth();
    final int h = imp.getHeight();
    sb.append(w).append("\t");
    sb.append(h).append("\t");
    sb.append(n).append("\t");
    double density = ((double) n / imp.getStackSize()) / (w * h) / (simulationParameters.a * simulationParameters.a / 1e6);
    sb.append(Utils.rounded(density)).append("\t");
    sb.append(Utils.rounded(signal)).append("\t");
    sb.append(Utils.rounded(simulationParameters.s)).append("\t");
    sb.append(Utils.rounded(simulationParameters.a)).append("\t");
    sb.append(Utils.rounded(simulationParameters.depth)).append("\t");
    sb.append(simulationParameters.fixedDepth).append("\t");
    sb.append(Utils.rounded(simulationParameters.gain)).append("\t");
    sb.append(Utils.rounded(simulationParameters.readNoise)).append("\t");
    sb.append(Utils.rounded(simulationParameters.b)).append("\t");
    sb.append(Utils.rounded(simulationParameters.b2)).append("\t");
    // Compute the noise
    double noise = simulationParameters.b2;
    if (simulationParameters.emCCD) {
        // The b2 parameter was computed without application of the EM-CCD noise factor of 2.
        //final double b2 = backgroundVariance + readVariance
        //                = simulationParameters.b + readVariance
        // This should be applied only to the background variance.
        final double readVariance = noise - simulationParameters.b;
        noise = simulationParameters.b * 2 + readVariance;
    }
    if (simulationParameters.fullSimulation) {
    // The total signal is spread over frames
    }
    sb.append(Utils.rounded(signal / Math.sqrt(noise))).append("\t");
    sb.append(Utils.rounded(simulationParameters.s / simulationParameters.a)).append("\t");
    sb.append(spotFilter.getDescription());
    // nP and nN are the fractional scores of the spot candidates 
    addCount(sb, nP + nN);
    addCount(sb, nP);
    addCount(sb, nN);
    addCount(sb, fP);
    addCount(sb, fN);
    String name = PeakFit.getSolverName(fitConfig);
    if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera())
        name += " Camera";
    add(sb, name);
    add(sb, config.getFitting());
    resultPrefix = sb.toString();
    // Q. Should I add other fit configuration here?
    // The fraction of positive and negative candidates that were included
    add(sb, (100.0 * cTP) / nP);
    add(sb, (100.0 * cFP) / nN);
    // Score the fitting results compared to the original simulation.
    // Score the candidate selection:
    add(sb, cTP + cFP);
    add(sb, cTP);
    add(sb, cFP);
    // TP are all candidates that can be matched to a spot
    // FP are all candidates that cannot be matched to a spot
    // FN = The number of missed spots
    FractionClassificationResult m = new FractionClassificationResult(cTP, cFP, 0, simulationParameters.molecules - cTP);
    add(sb, m.getRecall());
    add(sb, m.getPrecision());
    add(sb, m.getF1Score());
    add(sb, m.getJaccard());
    // Score the fitting results:
    add(sb, failcTP);
    add(sb, failcFP);
    // TP are all fit results that can be matched to a spot
    // FP are all fit results that cannot be matched to a spot
    // FN = The number of missed spots
    add(sb, tp);
    add(sb, fp);
    m = new FractionClassificationResult(tp, fp, 0, simulationParameters.molecules - tp);
    add(sb, m.getRecall());
    add(sb, m.getPrecision());
    add(sb, m.getF1Score());
    add(sb, m.getJaccard());
    // Do it again but pretend we can perfectly filter all the false positives
    //add(sb, tp);
    m = new FractionClassificationResult(tp, 0, 0, simulationParameters.molecules - tp);
    // Recall is unchanged
    // Precision will be 100%
    add(sb, m.getF1Score());
    add(sb, m.getJaccard());
    // The mean may be subject to extreme outliers so use the median
    double median = distanceStats.getMedian();
    add(sb, median);
    WindowOrganiser wo = new WindowOrganiser();
    String label = String.format("Recall = %s. n = %d. Median = %s nm. SD = %s nm", Utils.rounded(m.getRecall()), distanceStats.getN(), Utils.rounded(median), Utils.rounded(distanceStats.getStandardDeviation()));
    int id = Utils.showHistogram(TITLE, distanceStats, "Match Distance (nm)", 0, 0, 0, label);
    if (Utils.isNewWindow())
        wo.add(id);
    median = depthStats.getMedian();
    add(sb, median);
    // Sort by spot intensity and produce correlation
    int[] indices = Utils.newArray(i1.length, 0, 1);
    if (showCorrelation)
        Sort.sort(indices, is, rankByIntensity);
    double[] r = (showCorrelation) ? new double[i1.length] : null;
    double[] sr = (showCorrelation) ? new double[i1.length] : null;
    double[] rank = (showCorrelation) ? new double[i1.length] : null;
    ci = 0;
    FastCorrelator fastCorrelator = new FastCorrelator();
    ArrayList<Ranking> pc1 = new ArrayList<Ranking>();
    ArrayList<Ranking> pc2 = new ArrayList<Ranking>();
    for (int ci2 : indices) {
        fastCorrelator.add((long) Math.round(i1[ci2]), (long) Math.round(i2[ci2]));
        pc1.add(new Ranking(i1[ci2], ci));
        pc2.add(new Ranking(i2[ci2], ci));
        if (showCorrelation) {
            r[ci] = fastCorrelator.getCorrelation();
            sr[ci] = Correlator.correlation(rank(pc1), rank(pc2));
            if (rankByIntensity)
                rank[ci] = is[0] - is[ci];
            else
                rank[ci] = ci;
        }
        ci++;
    }
    final double pearsonCorr = fastCorrelator.getCorrelation();
    final double rankedCorr = Correlator.correlation(rank(pc1), rank(pc2));
    // Get the regression
    SimpleRegression regression = new SimpleRegression(false);
    for (int i = 0; i < pc1.size(); i++) regression.addData(pc1.get(i).value, pc2.get(i).value);
    //final double intercept = regression.getIntercept();
    final double slope = regression.getSlope();
    if (showCorrelation) {
        String title = TITLE + " Intensity";
        Plot plot = new Plot(title, "Candidate", "Spot");
        double[] limits1 = Maths.limits(i1);
        double[] limits2 = Maths.limits(i2);
        plot.setLimits(limits1[0], limits1[1], limits2[0], limits2[1]);
        label = String.format("Correlation=%s; Ranked=%s; Slope=%s", Utils.rounded(pearsonCorr), Utils.rounded(rankedCorr), Utils.rounded(slope));
        plot.addLabel(0, 0, label);
        plot.setColor(Color.red);
        plot.addPoints(i1, i2, Plot.DOT);
        if (slope > 1)
            plot.drawLine(limits1[0], limits1[0] * slope, limits1[1], limits1[1] * slope);
        else
            plot.drawLine(limits2[0] / slope, limits2[0], limits2[1] / slope, limits2[1]);
        PlotWindow pw = Utils.display(title, plot);
        if (Utils.isNewWindow())
            wo.add(pw);
        title = TITLE + " Correlation";
        plot = new Plot(title, "Spot Rank", "Correlation");
        double[] xlimits = Maths.limits(rank);
        double[] ylimits = Maths.limits(r);
        ylimits = Maths.limits(ylimits, sr);
        plot.setLimits(xlimits[0], xlimits[1], ylimits[0], ylimits[1]);
        plot.setColor(Color.red);
        plot.addPoints(rank, r, Plot.LINE);
        plot.setColor(Color.blue);
        plot.addPoints(rank, sr, Plot.LINE);
        plot.setColor(Color.black);
        plot.addLabel(0, 0, label);
        pw = Utils.display(title, plot);
        if (Utils.isNewWindow())
            wo.add(pw);
    }
    add(sb, pearsonCorr);
    add(sb, rankedCorr);
    add(sb, slope);
    label = String.format("n = %d. Median = %s nm", depthStats.getN(), Utils.rounded(median));
    id = Utils.showHistogram(TITLE, depthStats, "Match Depth (nm)", 0, 1, 0, label);
    if (Utils.isNewWindow())
        wo.add(id);
    // Plot histograms of the stats on the same window
    double[] lower = new double[filterCriteria.length];
    double[] upper = new double[lower.length];
    min = new double[lower.length];
    max = new double[lower.length];
    for (int i = 0; i < stats[0].length; i++) {
        double[] limits = showDoubleHistogram(stats, i, wo, matchScores, nPredicted);
        lower[i] = limits[0];
        upper[i] = limits[1];
        min[i] = limits[2];
        max[i] = limits[3];
    }
    // Reconfigure some of the range limits
    // Make this a bit bigger
    upper[FILTER_SIGNAL] *= 2;
    // Make this a bit bigger
    upper[FILTER_SNR] *= 2;
    double factor = 0.25;
    if (lower[FILTER_MIN_WIDTH] != 0)
        // (assuming lower is less than 1)
        upper[FILTER_MIN_WIDTH] = 1 - Math.max(0, factor * (1 - lower[FILTER_MIN_WIDTH]));
    if (upper[FILTER_MAX_WIDTH] != 0)
        // (assuming upper is more than 1)
        lower[FILTER_MAX_WIDTH] = 1 + Math.max(0, factor * (upper[FILTER_MAX_WIDTH] - 1));
    // Round the ranges
    final double[] interval = new double[stats[0].length];
    interval[FILTER_SIGNAL] = SignalFilter.DEFAULT_INCREMENT;
    interval[FILTER_SNR] = SNRFilter.DEFAULT_INCREMENT;
    interval[FILTER_MIN_WIDTH] = WidthFilter2.DEFAULT_MIN_INCREMENT;
    interval[FILTER_MAX_WIDTH] = WidthFilter.DEFAULT_INCREMENT;
    interval[FILTER_SHIFT] = ShiftFilter.DEFAULT_INCREMENT;
    interval[FILTER_ESHIFT] = EShiftFilter.DEFAULT_INCREMENT;
    interval[FILTER_PRECISION] = PrecisionFilter.DEFAULT_INCREMENT;
    interval[FILTER_ITERATIONS] = 0.1;
    interval[FILTER_EVALUATIONS] = 0.1;
    // Create a range increment
    double[] increment = new double[lower.length];
    for (int i = 0; i < increment.length; i++) {
        lower[i] = Maths.floor(lower[i], interval[i]);
        upper[i] = Maths.ceil(upper[i], interval[i]);
        double range = upper[i] - lower[i];
        // Allow clipping if the range is small compared to the min increment
        double multiples = range / interval[i];
        // Use 8 multiples for the equivalent of +/- 4 steps around the centre
        if (multiples < 8) {
            multiples = Math.ceil(multiples);
        } else
            multiples = 8;
        increment[i] = Maths.ceil(range / multiples, interval[i]);
        if (i == FILTER_MIN_WIDTH)
            // Requires clipping based on the upper limit
            lower[i] = upper[i] - increment[i] * multiples;
        else
            upper[i] = lower[i] + increment[i] * multiples;
    }
    for (int i = 0; i < stats[0].length; i++) {
        lower[i] = Maths.round(lower[i]);
        upper[i] = Maths.round(upper[i]);
        min[i] = Maths.round(min[i]);
        max[i] = Maths.round(max[i]);
        increment[i] = Maths.round(increment[i]);
        sb.append("\t").append(min[i]).append(':').append(lower[i]).append('-').append(upper[i]).append(':').append(max[i]);
    }
    // Disable some filters
    increment[FILTER_SIGNAL] = Double.POSITIVE_INFINITY;
    //increment[FILTER_SHIFT] = Double.POSITIVE_INFINITY;
    increment[FILTER_ESHIFT] = Double.POSITIVE_INFINITY;
    wo.tile();
    sb.append("\t").append(Utils.timeToString(runTime / 1000000.0));
    summaryTable.append(sb.toString());
    if (saveFilterRange) {
        GlobalSettings gs = SettingsManager.loadSettings();
        FilterSettings filterSettings = gs.getFilterSettings();
        String filename = (silent) ? filterSettings.filterSetFilename : Utils.getFilename("Filter_range_file", filterSettings.filterSetFilename);
        if (filename == null)
            return;
        // Remove extension to store the filename
        filename = Utils.replaceExtension(filename, ".xml");
        filterSettings.filterSetFilename = filename;
        // Create a filter set using the ranges
        ArrayList<Filter> filters = new ArrayList<Filter>(3);
        filters.add(new MultiFilter2(lower[0], (float) lower[1], lower[2], lower[3], lower[4], lower[5], lower[6]));
        filters.add(new MultiFilter2(upper[0], (float) upper[1], upper[2], upper[3], upper[4], upper[5], upper[6]));
        filters.add(new MultiFilter2(increment[0], (float) increment[1], increment[2], increment[3], increment[4], increment[5], increment[6]));
        if (saveFilters(filename, filters))
            SettingsManager.saveSettings(gs);
        // Create a filter set using the min/max and the initial bounds.
        // Set sensible limits
        min[FILTER_SIGNAL] = Math.max(min[FILTER_SIGNAL], 30);
        max[FILTER_PRECISION] = Math.min(max[FILTER_PRECISION], 100);
        // Commented this out so that the 4-set filters are the same as the 3-set filters.
        // The difference leads to differences when optimising.
        //			// Use half the initial bounds (hoping this is a good starting guess for the optimum)
        //			final boolean[] limitToLower = new boolean[min.length];
        //			limitToLower[FILTER_SIGNAL] = true;
        //			limitToLower[FILTER_SNR] = true;
        //			limitToLower[FILTER_MIN_WIDTH] = true;
        //			limitToLower[FILTER_MAX_WIDTH] = false;
        //			limitToLower[FILTER_SHIFT] = false;
        //			limitToLower[FILTER_ESHIFT] = false;
        //			limitToLower[FILTER_PRECISION] = true;
        //			for (int i = 0; i < limitToLower.length; i++)
        //			{
        //				final double range = (upper[i] - lower[i]) / 2;
        //				if (limitToLower[i])
        //					upper[i] = lower[i] + range;
        //				else
        //					lower[i] = upper[i] - range;
        //			}
        filters = new ArrayList<Filter>(4);
        filters.add(new MultiFilter2(min[0], (float) min[1], min[2], min[3], min[4], min[5], min[6]));
        filters.add(new MultiFilter2(lower[0], (float) lower[1], lower[2], lower[3], lower[4], lower[5], lower[6]));
        filters.add(new MultiFilter2(upper[0], (float) upper[1], upper[2], upper[3], upper[4], upper[5], upper[6]));
        filters.add(new MultiFilter2(max[0], (float) max[1], max[2], max[3], max[4], max[5], max[6]));
        saveFilters(Utils.replaceExtension(filename, ".4.xml"), filters);
    }
}
Also used : ArrayList(java.util.ArrayList) TIntHashSet(gnu.trove.set.hash.TIntHashSet) MultiPathFitResult(gdsc.smlm.results.filter.MultiPathFitResult) FractionalAssignment(gdsc.core.match.FractionalAssignment) PeakFractionalAssignment(gdsc.smlm.results.filter.PeakFractionalAssignment) ImmutableFractionalAssignment(gdsc.core.match.ImmutableFractionalAssignment) FractionClassificationResult(gdsc.core.match.FractionClassificationResult) BasePreprocessedPeakResult(gdsc.smlm.results.filter.BasePreprocessedPeakResult) PreprocessedPeakResult(gdsc.smlm.results.filter.PreprocessedPeakResult) SignalFilter(gdsc.smlm.results.filter.SignalFilter) FilterSettings(gdsc.smlm.ij.settings.FilterSettings) ScoredSpot(gdsc.smlm.ij.plugins.BenchmarkSpotFilter.ScoredSpot) FastCorrelator(gdsc.core.utils.FastCorrelator) Plot(ij.gui.Plot) StoredDataStatistics(gdsc.core.utils.StoredDataStatistics) PlotWindow(ij.gui.PlotWindow) GlobalSettings(gdsc.smlm.ij.settings.GlobalSettings) WindowOrganiser(ij.plugin.WindowOrganiser) PeakResultPoint(gdsc.smlm.ij.plugins.ResultsMatchCalculator.PeakResultPoint) BasePoint(gdsc.core.match.BasePoint) PeakFractionalAssignment(gdsc.smlm.results.filter.PeakFractionalAssignment) FractionScoreStore(gdsc.smlm.results.filter.MultiPathFilter.FractionScoreStore) SimpleRegression(org.apache.commons.math3.stat.regression.SimpleRegression) SignalFilter(gdsc.smlm.results.filter.SignalFilter) DirectFilter(gdsc.smlm.results.filter.DirectFilter) ShiftFilter(gdsc.smlm.results.filter.ShiftFilter) PrecisionFilter(gdsc.smlm.results.filter.PrecisionFilter) Filter(gdsc.smlm.results.filter.Filter) EShiftFilter(gdsc.smlm.results.filter.EShiftFilter) WidthFilter(gdsc.smlm.results.filter.WidthFilter) SNRFilter(gdsc.smlm.results.filter.SNRFilter) MultiPathFilter(gdsc.smlm.results.filter.MultiPathFilter) MaximaSpotFilter(gdsc.smlm.filters.MaximaSpotFilter) MultiFilter2(gdsc.smlm.results.filter.MultiFilter2) MultiPathFitResults(gdsc.smlm.results.filter.MultiPathFitResults) MultiPathFilter(gdsc.smlm.results.filter.MultiPathFilter)
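
The range construction near the end of the method snaps each filter range to a per-parameter increment and caps it at 8 multiples (roughly +/- 4 steps around the centre). The sketch below isolates that rounding as a hypothetical helper; floorTo/ceilTo stand in for Maths.floor/Maths.ceil with an interval, and all names are illustrative rather than GDSC-SMLM API.

// Round a [lower, upper] range to an interval and derive a step size capped at
// 8 multiples, mirroring the range construction above. Illustrative code only.
public class RangeRounder {
    static double floorTo(double value, double interval) {
        return Math.floor(value / interval) * interval;
    }
    static double ceilTo(double value, double interval) {
        return Math.ceil(value / interval) * interval;
    }

    /** Returns {lower, upper, increment}. */
    public static double[] round(double lower, double upper, double interval) {
        lower = floorTo(lower, interval);
        upper = ceilTo(upper, interval);
        // Cap the number of steps at 8 (and use at least 1 to avoid a zero-width range)
        double multiples = Math.min(8, Math.max(1, Math.ceil((upper - lower) / interval)));
        final double increment = ceilTo((upper - lower) / multiples, interval);
        upper = lower + increment * multiples;
        return new double[] { lower, upper, increment };
    }
}

For example, round(3.2, 41.7, 5) returns {0, 80, 10}: the range is first snapped to [0, 45], the step of 45 / 8 is rounded up to the next multiple of 5 (10), and the upper limit is widened to keep a whole number of steps.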

Example 5 with PreprocessedPeakResult

Use of gdsc.smlm.results.filter.PreprocessedPeakResult in project GDSC-SMLM by aherbert.

In the class BenchmarkSpotFit, the method run:

private void run() {
    // Extract all the results in memory into a list per frame. This can be cached
    boolean refresh = false;
    if (lastId != simulationParameters.id) {
        // Do not get integer coordinates
        // The Coordinate objects will be PeakResultPoint objects that store the original PeakResult
        // from the MemoryPeakResults
        actualCoordinates = ResultsMatchCalculator.getCoordinates(results.getResults(), false);
        lastId = simulationParameters.id;
        refresh = true;
    }
    // Extract all the candidates into a list per frame. This can be cached if the settings have not changed
    final int width = (config.isIncludeNeighbours()) ? config.getRelativeFitting() : 0;
    final Settings settings = new Settings(BenchmarkSpotFilter.filterResult.id, fractionPositives, fractionNegativesAfterAllPositives, negativesAfterAllPositives, width);
    if (refresh || !settings.equals(lastSettings)) {
        filterCandidates = subsetFilterResults(BenchmarkSpotFilter.filterResult.filterResults, width);
        lastSettings = settings;
        lastFilterId = BenchmarkSpotFilter.filterResult.id;
    }
    stopWatch = StopWatch.createStarted();
    final ImageStack stack = imp.getImageStack();
    clearFitResults();
    // Save results to memory
    MemoryPeakResults peakResults = new MemoryPeakResults();
    peakResults.copySettings(this.results);
    peakResults.setName(TITLE);
    MemoryPeakResults.addResults(peakResults);
    // Create a pool of workers
    final int nThreads = Prefs.getThreads();
    BlockingQueue<Integer> jobs = new ArrayBlockingQueue<Integer>(nThreads * 2);
    List<Worker> workers = new LinkedList<Worker>();
    List<Thread> threads = new LinkedList<Thread>();
    for (int i = 0; i < nThreads; i++) {
        Worker worker = new Worker(jobs, stack, actualCoordinates, filterCandidates, peakResults);
        Thread t = new Thread(worker);
        workers.add(worker);
        threads.add(t);
        t.start();
    }
    // Fit the frames
    long runTime = System.nanoTime();
    totalProgress = stack.getSize();
    stepProgress = Utils.getProgressInterval(totalProgress);
    progress = 0;
    for (int i = 1; i <= totalProgress; i++) {
        put(jobs, i);
    }
    // Finish all the worker threads by passing in a null job
    for (int i = 0; i < threads.size(); i++) {
        put(jobs, -1);
    }
    // Wait for all to finish
    for (int i = 0; i < threads.size(); i++) {
        try {
            threads.get(i).join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    threads.clear();
    IJ.showProgress(1);
    runTime = System.nanoTime() - runTime;
    if (Utils.isInterrupted()) {
        return;
    }
    stopWatch.stop();
    final String timeString = stopWatch.toString();
    IJ.log("Spot fit time : " + timeString);
    IJ.showStatus("Collecting results ...");
    fitResultsId++;
    fitResults = new TIntObjectHashMap<FilterCandidates>();
    for (Worker w : workers) {
        fitResults.putAll(w.results);
    }
    // Assign a unique ID to each result
    int count = 0;
    // Materialise into an array since we use it twice
    FilterCandidates[] candidates = fitResults.values(new FilterCandidates[fitResults.size()]);
    for (FilterCandidates result : candidates) {
        for (int i = 0; i < result.fitResult.length; i++) {
            final MultiPathFitResult fitResult = result.fitResult[i];
            count += count(fitResult.getSingleFitResult());
            count += count(fitResult.getMultiFitResult());
            count += count(fitResult.getDoubletFitResult());
            count += count(fitResult.getMultiDoubletFitResult());
        }
    }
    PreprocessedPeakResult[] preprocessedPeakResults = new PreprocessedPeakResult[count];
    count = 0;
    for (FilterCandidates result : candidates) {
        for (int i = 0; i < result.fitResult.length; i++) {
            final MultiPathFitResult fitResult = result.fitResult[i];
            count = store(fitResult.getSingleFitResult(), count, preprocessedPeakResults);
            count = store(fitResult.getMultiFitResult(), count, preprocessedPeakResults);
            count = store(fitResult.getDoubletFitResult(), count, preprocessedPeakResults);
            count = store(fitResult.getMultiDoubletFitResult(), count, preprocessedPeakResults);
        }
    }
    summariseResults(fitResults, runTime, preprocessedPeakResults, count);
    IJ.showStatus("");
}
Also used : ImageStack(ij.ImageStack) PeakResultPoint(gdsc.smlm.ij.plugins.ResultsMatchCalculator.PeakResultPoint) BasePoint(gdsc.core.match.BasePoint) LinkedList(java.util.LinkedList) MultiPathFitResult(gdsc.smlm.results.filter.MultiPathFitResult) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) BasePreprocessedPeakResult(gdsc.smlm.results.filter.BasePreprocessedPeakResult) PreprocessedPeakResult(gdsc.smlm.results.filter.PreprocessedPeakResult) FitWorker(gdsc.smlm.engine.FitWorker) MemoryPeakResults(gdsc.smlm.results.MemoryPeakResults) FilterSettings(gdsc.smlm.ij.settings.FilterSettings) Settings(gdsc.core.utils.Settings) GlobalSettings(gdsc.smlm.ij.settings.GlobalSettings)
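
The frame loop above is a standard producer/consumer pattern: frame indices are submitted to a bounded queue, each worker takes jobs until it sees a negative index (a poison pill), and the caller joins the threads before collecting results. A minimal, runnable sketch of the same pattern in plain Java follows (independent of the plugin classes; the frame processing body is left empty):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

// Bounded job queue with one poison pill (-1) per worker, mirroring the job
// handling in BenchmarkSpotFit.run(). Illustrative code only.
public class FramePool {
    public static void main(String[] args) throws InterruptedException {
        final int nThreads = 4;
        final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<>(nThreads * 2);
        final List<Thread> threads = new ArrayList<>();
        for (int i = 0; i < nThreads; i++) {
            Thread t = new Thread(() -> {
                try {
                    while (true) {
                        final int frame = jobs.take();
                        if (frame < 0)
                            break;          // poison pill: stop this worker
                        // ... process the frame here ...
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            });
            threads.add(t);
            t.start();
        }
        for (int frame = 1; frame <= 100; frame++)
            jobs.put(frame);                // submit all frames
        for (int i = 0; i < nThreads; i++)
            jobs.put(-1);                   // one poison pill per worker
        for (Thread t : threads)
            t.join();                       // wait for all workers to finish
    }
}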

Aggregations

PreprocessedPeakResult (gdsc.smlm.results.filter.PreprocessedPeakResult): 7 uses
BasePreprocessedPeakResult (gdsc.smlm.results.filter.BasePreprocessedPeakResult): 6 uses
FractionalAssignment (gdsc.core.match.FractionalAssignment): 3 uses
MultiPathFilter (gdsc.smlm.results.filter.MultiPathFilter): 3 uses
MultiPathFitResult (gdsc.smlm.results.filter.MultiPathFitResult): 3 uses
PeakFractionalAssignment (gdsc.smlm.results.filter.PeakFractionalAssignment): 3 uses
BasePoint (gdsc.core.match.BasePoint): 2 uses
FractionClassificationResult (gdsc.core.match.FractionClassificationResult): 2 uses
MaximaSpotFilter (gdsc.smlm.filters.MaximaSpotFilter): 2 uses
PeakResultPoint (gdsc.smlm.ij.plugins.ResultsMatchCalculator.PeakResultPoint): 2 uses
FilterSettings (gdsc.smlm.ij.settings.FilterSettings): 2 uses
GlobalSettings (gdsc.smlm.ij.settings.GlobalSettings): 2 uses
MemoryPeakResults (gdsc.smlm.results.MemoryPeakResults): 2 uses
DirectFilter (gdsc.smlm.results.filter.DirectFilter): 2 uses
TIntHashSet (gnu.trove.set.hash.TIntHashSet): 2 uses
ArrayList (java.util.ArrayList): 2 uses
SimpleRegression (org.apache.commons.math3.stat.regression.SimpleRegression): 2 uses
ImmutableFractionalAssignment (gdsc.core.match.ImmutableFractionalAssignment): 1 use
FastCorrelator (gdsc.core.utils.FastCorrelator): 1 use
Settings (gdsc.core.utils.Settings): 1 use