Use of org.HdrHistogram.ShortCountsHistogram in project rest.li by LinkedIn.
From the class TestLatencyMetric, method testShortCountHistorgramOverflow.
@Test
public void testShortCountHistorgramOverflow() {
    ShortCountsHistogram histogram = new ShortCountsHistogram(LatencyMetric.LOWEST_DISCERNIBLE_VALUE, LatencyMetric.HIGHEST_TRACKABLE_VALUE, LatencyMetric.NUMBER_OF_SIGNIFICANT_VALUE_DIGITS);
    // Saturate one bucket: a 16-bit count holds at most Short.MAX_VALUE (32,767) recordings.
    for (int i = 0; i < Short.MAX_VALUE; i++) {
        histogram.recordValue(1000);
    }
    // The next recording of the same value must overflow the short count and throw.
    IllegalStateException expectedException = null;
    try {
        histogram.recordValue(1000);
    } catch (IllegalStateException e) {
        expectedException = e;
    }
    assertNotNull(expectedException);
}
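ShortCountsHistogram trades count range for memory: each bucket count is a 16-bit short, so the 32,768th recording of the same value throws IllegalStateException, which is exactly what the test asserts. If that ceiling is a concern, HdrHistogram's IntCountsHistogram offers 32-bit bucket counts with the same value-tracking interface. A minimal standalone sketch of that alternative; the bounds (1 us to 60 s at 3 significant digits) are illustrative, not rest.li's actual LatencyMetric constants:

import org.HdrHistogram.IntCountsHistogram;

public class OverflowSafeRecording {
    public static void main(String[] args) {
        // Same constructor shape as ShortCountsHistogram, but 32-bit bucket counts.
        IntCountsHistogram histogram = new IntCountsHistogram(1, 60_000_000L, 3);
        // Recording the same value Short.MAX_VALUE + 1 times no longer throws.
        for (int i = 0; i <= Short.MAX_VALUE; i++) {
            histogram.recordValue(1000);
        }
        System.out.println("total count: " + histogram.getTotalCount());
    }
}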
Use of org.HdrHistogram.ShortCountsHistogram in project narchy by automenta.
From the class NAR, method stats.
/**
 * Creates a snapshot statistics object.
 * TODO extract a Method Object holding the snapshot stats, with the instances created below as its fields
 */
public SortedMap<String, Object> stats() {
    LongSummaryStatistics beliefs = new LongSummaryStatistics();
    LongSummaryStatistics goals = new LongSummaryStatistics();
    LongSummaryStatistics questions = new LongSummaryStatistics();
    LongSummaryStatistics quests = new LongSummaryStatistics();
    Histogram termlinkCount = new Histogram(1);
    Histogram tasklinkCount = new Histogram(1);
    // Frequency complexity = new Frequency();
    HashBag clazz = new HashBag();
    HashBag policy = new HashBag();
    HashBag rootOp = new HashBag();
    // ShortCountsHistogram keeps the footprint small: 16-bit counts, 2 significant digits.
    ShortCountsHistogram volume = new ShortCountsHistogram(2);
    // AtomicInteger i = new AtomicInteger(0);
    // LongSummaryStatistics termlinksCap = new LongSummaryStatistics();
    // LongSummaryStatistics tasklinksCap = new LongSummaryStatistics();
    SortedMap<String, Object> x = new TreeMap<>();
    synchronized (exe) {
        // Walk every concept (skipping Functors) and accumulate per-concept statistics.
        concepts().filter(xx -> !(xx instanceof Functor)).forEach(c -> {
            // complexity.addValue(c.complexity());
            volume.recordValue(c.volume());
            rootOp.add(c.op());
            clazz.add(c.getClass().toString());
            ConceptState p = c.state();
            policy.add(p != null ? p.toString() : "null");
            // termlinksCap.accept(c.termlinks().capacity());
            termlinkCount.recordValue(c.termlinks().size());
            // tasklinksCap.accept(c.tasklinks().capacity());
            tasklinkCount.recordValue(c.tasklinks().size());
            beliefs.accept(c.beliefs().size());
            goals.accept(c.goals().size());
            questions.accept(c.questions().size());
            quests.accept(c.quests().size());
        });
        // x.put("time real", new Date());
        if (loop.isRunning()) {
            loop.stats("loop", x);
        }
        x.put("time", time());
        // x.put("term index", terms.summary());
        x.put("concept count", concepts.size());
    }
    x.put("belief count", ((double) beliefs.getSum()));
    x.put("goal count", ((double) goals.getSum()));
    Util.decode(tasklinkCount, "tasklink count", 4, x::put);
    // x.put("tasklink usage", ((double) tasklinkCount.getTotalCount()) / tasklinksCap.getSum());
    x.put("tasklink total", ((double) tasklinkCount.getTotalCount()));
    Util.decode(termlinkCount, "termlink count", 4, x::put);
    // x.put("termlink usage", ((double) termlinkCount.getTotalCount()) / termlinksCap.getSum());
    x.put("termlink total", ((double) termlinkCount.getTotalCount()));
    // DoubleSummaryStatistics pos = new DoubleSummaryStatistics();
    // DoubleSummaryStatistics neg = new DoubleSummaryStatistics();
    // causes.forEach(c -> pos.accept(c.pos()));
    // causes.forEach(c -> neg.accept(c.neg()));
    // x.put("value count", pos.getCount());
    // x.put("value pos mean", pos.getAverage());
    // x.put("value pos min", pos.getMin());
    // x.put("value pos max", pos.getMax());
    // x.put("value neg mean", neg.getAverage());
    // x.put("value neg min", neg.getMin());
    // x.put("value neg max", neg.getMax());
    // x.put("volume mean", volume.);
    //
    // x.put("termLinksCapacity", termlinksCap);
    // x.put("taskLinksUsed", tasklinksUsed);
    // x.put("taskLinksCapacity", tasklinksCap);
    Util.toMap(policy, "concept state", x::put);
    Util.toMap(rootOp, "concept op", x::put);
    Util.decode(volume, "concept volume", 4, x::put);
    Util.toMap(clazz, "concept class", x::put);
    x.put("term cache (eternal)", Op.cache.summary());
    x.put("term cache (temporal)", Op.cacheTemporal.summary());
    return x;
}
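Util.decode and Util.toMap are narchy's own helpers for flattening a histogram or bag into map entries; the summary data they draw on is already exposed by the HdrHistogram API itself. A minimal standalone sketch of reading comparable figures directly from a ShortCountsHistogram like the volume instance above; the recorded values are made up for illustration:

import org.HdrHistogram.ShortCountsHistogram;

public class VolumeSummary {
    public static void main(String[] args) {
        // Auto-ranging histogram with 2 significant digits, as in NAR.stats().
        ShortCountsHistogram volume = new ShortCountsHistogram(2);
        for (long v : new long[]{3, 5, 5, 8, 13, 21}) {  // made-up concept volumes
            volume.recordValue(v);
        }
        System.out.println("count  = " + volume.getTotalCount());
        System.out.println("mean   = " + volume.getMean());
        System.out.println("median = " + volume.getValueAtPercentile(50));
        System.out.println("p90    = " + volume.getValueAtPercentile(90));
        System.out.println("max    = " + volume.getMaxValue());
    }
}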