
Example 1 with SummaryStatistics

Use of org.apache.commons.math3.stat.descriptive.SummaryStatistics in project cassandra by apache.

The class TokenAllocation, method allocateTokens:

public static Collection<Token> allocateTokens(final TokenMetadata tokenMetadata, final AbstractReplicationStrategy rs, final InetAddress endpoint, int numTokens) {
    TokenMetadata tokenMetadataCopy = tokenMetadata.cloneOnlyTokenMap();
    StrategyAdapter strategy = getStrategy(tokenMetadataCopy, rs, endpoint);
    Collection<Token> tokens = create(tokenMetadata, strategy).addUnit(endpoint, numTokens);
    tokens = adjustForCrossDatacenterClashes(tokenMetadata, strategy, tokens);
    if (logger.isWarnEnabled()) {
        logger.warn("Selected tokens {}", tokens);
        SummaryStatistics os = replicatedOwnershipStats(tokenMetadataCopy, rs, endpoint);
        tokenMetadataCopy.updateNormalTokens(tokens, endpoint);
        SummaryStatistics ns = replicatedOwnershipStats(tokenMetadataCopy, rs, endpoint);
        logger.warn("Replicated node load in datacentre before allocation {}", statToString(os));
        logger.warn("Replicated node load in datacentre after allocation {}", statToString(ns));
        // TODO: Is it worth doing the replicated ownership calculation always to be able to raise this alarm?
        if (ns.getStandardDeviation() > os.getStandardDeviation())
            logger.warn("Unexpected growth in standard deviation after allocation.");
    }
    return tokens;
}
Also used: SummaryStatistics (org.apache.commons.math3.stat.descriptive.SummaryStatistics), Token (org.apache.cassandra.dht.Token), TokenMetadata (org.apache.cassandra.locator.TokenMetadata)
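
Every snippet on this page leans on the same small SummaryStatistics workflow: push values in one at a time with addValue, then read the moments back. A minimal, self-contained sketch of that workflow (plain commons-math3 calls, independent of the Cassandra code above):

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class SummaryStatisticsSketch {
    public static void main(String[] args) {
        SummaryStatistics stats = new SummaryStatistics();
        // SummaryStatistics keeps running moments rather than storing the values.
        for (double ownership : new double[] { 0.9, 1.1, 1.0, 0.95, 1.05 }) {
            stats.addValue(ownership);
        }
        System.out.printf("n=%d mean=%.3f sd=%.3f min=%.3f max=%.3f%n",
                stats.getN(), stats.getMean(), stats.getStandardDeviation(),
                stats.getMin(), stats.getMax());
    }
}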

Example 2 with SummaryStatistics

Use of org.apache.commons.math3.stat.descriptive.SummaryStatistics in project cassandra by apache.

The class AbstractReplicationAwareTokenAllocatorTest, method updateSummary:

private void updateSummary(ReplicationAwareTokenAllocator<Unit> t, Summary su, Summary st, boolean print) {
    int size = t.sortedTokens.size();
    double inverseAverage = 1.0 * size / t.strategy.replicas();
    Map<Unit, Double> ownership = evaluateReplicatedOwnership(t);
    SummaryStatistics unitStat = new SummaryStatistics();
    for (Map.Entry<Unit, Double> en : ownership.entrySet())
        unitStat.addValue(en.getValue() * inverseAverage / t.unitToTokens.get(en.getKey()).size());
    su.update(unitStat);
    SummaryStatistics tokenStat = new SummaryStatistics();
    for (Token tok : t.sortedTokens.keySet())
        tokenStat.addValue(replicatedTokenOwnership(tok, t.sortedTokens, t.strategy) * inverseAverage);
    st.update(tokenStat);
    if (print) {
        System.out.format("Size %d(%d)   \tunit %s  token %s   %s\n", t.unitCount(), size, mms(unitStat), mms(tokenStat), t.strategy);
        System.out.format("Worst intermediate unit\t%s  token %s\n", su, st);
    }
}
Also used: SummaryStatistics (org.apache.commons.math3.stat.descriptive.SummaryStatistics), Token (org.apache.cassandra.dht.Token)
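
The scaling in updateSummary is easier to see with concrete numbers: with size tokens on the ring and replicas-way replication, a perfectly balanced unit owning tokensPerUnit tokens would own replicas * tokensPerUnit / size of the ring, so multiplying the measured ownership by size / replicas and dividing by the unit's token count maps that ideal case to 1.0. A small sketch of that arithmetic with toy numbers (the figures are illustrative, not taken from the test):

public class OwnershipNormalizationSketch {
    public static void main(String[] args) {
        int size = 160;          // total tokens on the ring
        int replicas = 3;        // replication factor
        int tokensPerUnit = 16;  // vnodes owned by one unit

        double inverseAverage = 1.0 * size / replicas;
        // Ownership a perfectly balanced unit would have under 'replicas'-way replication.
        double idealOwnership = (double) replicas * tokensPerUnit / size;
        double normalized = idealOwnership * inverseAverage / tokensPerUnit;
        System.out.println(normalized); // ~1.0 (up to floating-point rounding)
    }
}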

Example 3 with SummaryStatistics

Use of org.apache.commons.math3.stat.descriptive.SummaryStatistics in project cassandra by apache.

The class BootStrapperTest, method testAllocateTokensMultipleKeyspaces:

@Test
public void testAllocateTokensMultipleKeyspaces() throws UnknownHostException {
    // TODO: This scenario isn't supported very well. Investigate a multi-keyspace version of the algorithm.
    int vn = 16;
    // RF = 3
    String ks3 = "BootStrapperTestKeyspace4";
    // RF = 2
    String ks2 = "BootStrapperTestKeyspace5";
    TokenMetadata tm = new TokenMetadata();
    generateFakeEndpoints(tm, 10, vn);
    InetAddress dcaddr = FBUtilities.getBroadcastAddress();
    SummaryStatistics os3 = TokenAllocation.replicatedOwnershipStats(tm, Keyspace.open(ks3).getReplicationStrategy(), dcaddr);
    SummaryStatistics os2 = TokenAllocation.replicatedOwnershipStats(tm, Keyspace.open(ks2).getReplicationStrategy(), dcaddr);
    String cks = ks3;
    String nks = ks2;
    for (int i = 11; i <= 20; ++i) {
        allocateTokensForNode(vn, cks, tm, InetAddress.getByName("127.0.0." + (i + 1)));
        String t = cks;
        cks = nks;
        nks = t;
    }
    SummaryStatistics ns3 = TokenAllocation.replicatedOwnershipStats(tm, Keyspace.open(ks3).getReplicationStrategy(), dcaddr);
    SummaryStatistics ns2 = TokenAllocation.replicatedOwnershipStats(tm, Keyspace.open(ks2).getReplicationStrategy(), dcaddr);
    verifyImprovement(os3, ns3);
    verifyImprovement(os2, ns2);
}
Also used: SummaryStatistics (org.apache.commons.math3.stat.descriptive.SummaryStatistics), TokenMetadata (org.apache.cassandra.locator.TokenMetadata), InetAddress (java.net.InetAddress), Test (org.junit.Test)
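
The verifyImprovement helper is not included in this excerpt; judging from how it is called, it presumably checks that the spread of replicated ownership did not get worse after the new nodes were allocated. A hypothetical version of such a check, written only to show how the before/after SummaryStatistics pairs might be compared (the method name and the exact condition are assumptions, not the project's code):

// Hypothetical helper, not the actual BootStrapperTest implementation.
private static void verifyImprovementSketch(SummaryStatistics before, SummaryStatistics after) {
    // Expect the normalized ownership to be no more spread out after allocation.
    org.junit.Assert.assertTrue(
            "Standard deviation should not grow: " + before.getStandardDeviation()
                    + " -> " + after.getStandardDeviation(),
            after.getStandardDeviation() <= before.getStandardDeviation());
}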

Example 4 with SummaryStatistics

Use of org.apache.commons.math3.stat.descriptive.SummaryStatistics in project GDSC-SMLM by aherbert.

The class TraceMolecules, method getBlinkingRate:

private double getBlinkingRate(Trace[] traces) {
    SummaryStatistics stats = new SummaryStatistics();
    for (Trace trace : traces) stats.addValue(trace.getNBlinks());
    double blinkingRate = stats.getMean();
    return blinkingRate;
}
Also used: Trace (gdsc.smlm.results.Trace), SummaryStatistics (org.apache.commons.math3.stat.descriptive.SummaryStatistics)

Example 5 with SummaryStatistics

Use of org.apache.commons.math3.stat.descriptive.SummaryStatistics in project GDSC-SMLM by aherbert.

The class CreateData, method drawImage:

//StoredDataStatistics rawPhotons = new StoredDataStatistics();
//StoredDataStatistics drawPhotons = new StoredDataStatistics();
//	private synchronized void addRaw(double d)
//	{
//		//rawPhotons.add(d);
//	}
//
//	private synchronized void addDraw(double d)
//	{
//		//drawPhotons.add(d);
//	}
/**
 * Create an image from the localisations using the configured PSF width. Draws a new stack
 * image.
 * <p>
 * Note that the localisations are filtered using the signal. The input list of localisations will be updated.
 *
 * @param localisationSets the localisation sets to draw
 * @return The localisations that were drawn (i.e. those that passed the signal filters)
 */
private List<LocalisationModel> drawImage(final List<LocalisationModelSet> localisationSets) {
    if (localisationSets.isEmpty())
        return null;
    // Create a new list for all localisations that are drawn (i.e. pass the signal filters)
    List<LocalisationModelSet> newLocalisations = Collections.synchronizedList(new ArrayList<LocalisationModelSet>(localisationSets.size()));
    photonsRemoved = new AtomicInteger();
    t1Removed = new AtomicInteger();
    tNRemoved = new AtomicInteger();
    photonStats = new SummaryStatistics();
    // Add drawn spots to memory
    results = new MemoryPeakResults();
    Calibration c = new Calibration(settings.pixelPitch, settings.getTotalGain(), settings.exposureTime);
    c.setEmCCD((settings.getEmGain() > 1));
    c.setBias(settings.bias);
    c.setReadNoise(settings.readNoise * ((settings.getCameraGain() > 0) ? settings.getCameraGain() : 1));
    c.setAmplification(settings.getAmplification());
    results.setCalibration(c);
    results.setSortAfterEnd(true);
    results.begin();
    maxT = localisationSets.get(localisationSets.size() - 1).getTime();
    // Display image
    ImageStack stack = new ImageStack(settings.size, settings.size, maxT);
    final double psfSD = getPsfSD();
    if (psfSD <= 0)
        return null;
    ImagePSFModel imagePSFModel = null;
    if (imagePSF) {
        // Create one Image PSF model that can be copied
        imagePSFModel = createImagePSF(localisationSets);
        if (imagePSFModel == null)
            return null;
    }
    IJ.showStatus("Drawing image ...");
    // Multi-thread for speed
    // Note that the default Executors.newCachedThreadPool() will continue to make threads if
    // new tasks are added. We need to limit the tasks that can be added using a fixed size
    // blocking queue.
    // http://stackoverflow.com/questions/1800317/impossible-to-make-a-cached-thread-pool-with-a-size-limit
    // ExecutorService threadPool = Executors.newCachedThreadPool();
    ExecutorService threadPool = Executors.newFixedThreadPool(Prefs.getThreads());
    List<Future<?>> futures = new LinkedList<Future<?>>();
    // Count all the frames to process
    frame = 0;
    totalFrames = maxT;
    // Collect statistics on the number of photons actually simulated
    // Process all frames
    int i = 0;
    int lastT = -1;
    for (LocalisationModelSet l : localisationSets) {
        if (Utils.isInterrupted())
            break;
        if (l.getTime() != lastT) {
            lastT = l.getTime();
            futures.add(threadPool.submit(new ImageGenerator(localisationSets, newLocalisations, i, lastT, createPSFModel(imagePSFModel), results, stack, poissonNoise, new RandomDataGenerator(createRandomGenerator()))));
        }
        i++;
    }
    // Finish processing data
    Utils.waitForCompletion(futures);
    futures.clear();
    if (Utils.isInterrupted()) {
        IJ.showProgress(1);
        return null;
    }
    // Do all the frames that had no localisations
    for (int t = 1; t <= maxT; t++) {
        if (Utils.isInterrupted())
            break;
        if (stack.getPixels(t) == null) {
            futures.add(threadPool.submit(new ImageGenerator(localisationSets, newLocalisations, maxT, t, null, results, stack, poissonNoise, new RandomDataGenerator(createRandomGenerator()))));
        }
    }
    // Finish
    Utils.waitForCompletion(futures);
    threadPool.shutdown();
    IJ.showProgress(1);
    if (Utils.isInterrupted()) {
        return null;
    }
    results.end();
    // Clear memory
    imagePSFModel = null;
    threadPool = null;
    futures.clear();
    futures = null;
    if (photonsRemoved.get() > 0)
        Utils.log("Removed %d localisations with less than %.1f rendered photons", photonsRemoved.get(), settings.minPhotons);
    if (t1Removed.get() > 0)
        Utils.log("Removed %d localisations with no neighbours @ SNR %.2f", t1Removed.get(), settings.minSNRt1);
    if (tNRemoved.get() > 0)
        Utils.log("Removed %d localisations with valid neighbours @ SNR %.2f", tNRemoved.get(), settings.minSNRtN);
    if (photonStats.getN() > 0)
        Utils.log("Average photons rendered = %s +/- %s", Utils.rounded(photonStats.getMean()), Utils.rounded(photonStats.getStandardDeviation()));
    //System.out.printf("rawPhotons = %f\n", rawPhotons.getMean());
    //System.out.printf("drawPhotons = %f\n", drawPhotons.getMean());
    //Utils.showHistogram("draw photons", drawPhotons, "photons", true, 0, 1000);
    // Update with all those localisations that have been drawn
    localisationSets.clear();
    localisationSets.addAll(newLocalisations);
    newLocalisations = null;
    IJ.showStatus("Displaying image ...");
    ImageStack newStack = stack;
    if (!settings.rawImage) {
        // Get the global limits and ensure all values can be represented
        Object[] imageArray = stack.getImageArray();
        float[] limits = Maths.limits((float[]) imageArray[0]);
        for (int j = 1; j < imageArray.length; j++) limits = Maths.limits(limits, (float[]) imageArray[j]);
        // Leave bias in place
        limits[0] = 0;
        // Check if the image will fit in a 16-bit range
        if ((limits[1] - limits[0]) < 65535) {
            // Convert to 16-bit
            newStack = new ImageStack(stack.getWidth(), stack.getHeight(), stack.getSize());
            // Account for rounding
            final float min = (float) (limits[0] - 0.5);
            for (int j = 0; j < imageArray.length; j++) {
                float[] image = (float[]) imageArray[j];
                short[] pixels = new short[image.length];
                for (int k = 0; k < pixels.length; k++) {
                    pixels[k] = (short) (image[k] - min);
                }
                newStack.setPixels(pixels, j + 1);
                // Free memory
                imageArray[j] = null;
                // Attempt to stay within memory (check vs 32MB)
                if (MemoryPeakResults.freeMemory() < 33554432L)
                    MemoryPeakResults.runGCOnce();
            }
        } else {
            // Keep as 32-bit but round to whole numbers
            for (int j = 0; j < imageArray.length; j++) {
                float[] pixels = (float[]) imageArray[j];
                for (int k = 0; k < pixels.length; k++) {
                    pixels[k] = Math.round(pixels[k]);
                }
            }
        }
    }
    // Show image
    ImagePlus imp = Utils.display(CREATE_DATA_IMAGE_TITLE, newStack);
    ij.measure.Calibration cal = new ij.measure.Calibration();
    String unit = "nm";
    double unitPerPixel = settings.pixelPitch;
    if (unitPerPixel > 100) {
        unit = "um";
        unitPerPixel /= 1000.0;
    }
    cal.setUnit(unit);
    cal.pixelHeight = cal.pixelWidth = unitPerPixel;
    imp.setCalibration(cal);
    imp.setDimensions(1, 1, newStack.getSize());
    imp.resetDisplayRange();
    imp.updateAndDraw();
    saveImage(imp);
    results.setSource(new IJImageSource(imp));
    results.setName(CREATE_DATA_IMAGE_TITLE + " (" + TITLE + ")");
    results.setConfiguration(createConfiguration((float) psfSD));
    results.setBounds(new Rectangle(0, 0, settings.size, settings.size));
    MemoryPeakResults.addResults(results);
    setBenchmarkResults(imp, results);
    if (benchmarkMode && benchmarkParameters != null)
        benchmarkParameters.setPhotons(results);
    List<LocalisationModel> localisations = toLocalisations(localisationSets);
    savePulses(localisations, results, CREATE_DATA_IMAGE_TITLE);
    // Save the fixed and moving localisations into different datasets
    saveFixedAndMoving(results, CREATE_DATA_IMAGE_TITLE);
    return localisations;
}
Also used: Rectangle (java.awt.Rectangle), MemoryPeakResults (gdsc.smlm.results.MemoryPeakResults), ImagePSFModel (gdsc.smlm.model.ImagePSFModel), ImageStack (ij.ImageStack), RandomDataGenerator (org.apache.commons.math3.random.RandomDataGenerator), SummaryStatistics (org.apache.commons.math3.stat.descriptive.SummaryStatistics), Calibration (gdsc.smlm.results.Calibration), ImagePlus (ij.ImagePlus), LinkedList (java.util.LinkedList), IJImageSource (gdsc.smlm.ij.IJImageSource), LocalisationModel (gdsc.smlm.model.LocalisationModel), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), ExecutorService (java.util.concurrent.ExecutorService), Future (java.util.concurrent.Future), LocalisationModelSet (gdsc.smlm.model.LocalisationModelSet)
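
One detail worth flagging in drawImage: SummaryStatistics is not thread-safe, yet photonStats is presumably updated while the ImageGenerator tasks run on the thread pool (the accumulation itself is not shown in this excerpt, but the commented-out addRaw/addDraw methods above follow a synchronized pattern). Two ways such an accumulation is commonly made safe, sketched here as assumptions rather than the plugin's actual code:

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.apache.commons.math3.stat.descriptive.SynchronizedSummaryStatistics;

public class PhotonStatsSketch {
    // Option 1: guard a plain SummaryStatistics behind a synchronized accumulator,
    // mirroring the commented-out addRaw/addDraw pattern.
    private final SummaryStatistics photonStats = new SummaryStatistics();

    private synchronized void addPhotons(double photons) {
        photonStats.addValue(photons);
    }

    // Option 2: use the thread-safe decorator shipped with commons-math3,
    // which synchronizes every call internally.
    private final SummaryStatistics threadSafeStats = new SynchronizedSummaryStatistics();
}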

Aggregations

SummaryStatistics (org.apache.commons.math3.stat.descriptive.SummaryStatistics): 17 uses
HashMap (java.util.HashMap): 3 uses
Map (java.util.Map): 3 uses
PeakResult (gdsc.smlm.results.PeakResult): 2 uses
Rectangle (java.awt.Rectangle): 2 uses
InetAddress (java.net.InetAddress): 2 uses
Token (org.apache.cassandra.dht.Token): 2 uses
TokenMetadata (org.apache.cassandra.locator.TokenMetadata): 2 uses
ClusterPoint (gdsc.core.clustering.ClusterPoint): 1 use
DensityManager (gdsc.core.clustering.DensityManager): 1 use
IJImageSource (gdsc.smlm.ij.IJImageSource): 1 use
ImagePSFModel (gdsc.smlm.model.ImagePSFModel): 1 use
LocalisationModel (gdsc.smlm.model.LocalisationModel): 1 use
LocalisationModelSet (gdsc.smlm.model.LocalisationModelSet): 1 use
Calibration (gdsc.smlm.results.Calibration): 1 use
MemoryPeakResults (gdsc.smlm.results.MemoryPeakResults): 1 use
Trace (gdsc.smlm.results.Trace): 1 use
ImagePlus (ij.ImagePlus): 1 use
ImageStack (ij.ImageStack): 1 use
IOException (java.io.IOException): 1 use