Use of cern.jet.random.Normal in the project stream-lib by addthis: class QDigestTest, method testComprehensiveOnMixture.
@Test
public void testComprehensiveOnMixture() {
    RandomEngine engine = new MersenneTwister64(0);
    Normal[] distributions = new Normal[] {
            new Normal(100, 50, engine),
            new Normal(150, 20, engine),
            new Normal(500, 300, engine),
            new Normal(10000, 10000, engine),
            new Normal(1200, 300, engine)
    };
    for (int numSamples : new int[] { 1, 10, 100, 1000, 10000 }) {
        // Draw numSamples values from each distribution, clamping negatives to zero.
        long[][] samples = new long[distributions.length][];
        for (int i = 0; i < distributions.length; ++i) {
            samples[i] = new long[numSamples];
            for (int j = 0; j < samples[i].length; ++j) {
                samples[i][j] = (long) Math.max(0, distributions[i].nextDouble());
            }
        }
        double compressionFactor = 1000;
        // logCapacity counts the doublings of scale needed to cover the largest sample.
        long max = 0;
        for (long[] sample : samples) {
            for (long value : sample) {
                max = Math.max(max, value);
            }
        }
        int logCapacity = 1;
        double scale = 1;
        while (scale < max) {
            scale *= 2;
            logCapacity++;
        }
        double eps = logCapacity / compressionFactor;
        // One digest per distribution; its stored count must match the sample count exactly.
        QDigest[] digests = new QDigest[distributions.length];
        for (int i = 0; i < digests.length; ++i) {
            digests[i] = new QDigest(compressionFactor);
            for (long x : samples[i]) {
                digests[i].offer(x);
            }
            assertEquals(samples[i].length, digests[i].computeActualSize());
        }
        int numTotal = 0;
        for (int i = 0; i < digests.length; ++i) {
            // Every quantile query on the i-th digest must land within eps of the true rank.
            for (double q = 0; q <= 1; q += 0.01) {
                long res = digests[i].getQuantile(q);
                double[] actualRank = actualRankOf(res, samples[i]);
                assertTrue(actualRank[0] + " .. " + actualRank[1] + " outside error bound for " + q,
                        q >= actualRank[0] - eps && q <= actualRank[1] + eps);
            }
            // Test the same on the union of all distributions up to i-th
            numTotal += samples[i].length;
            long[] total = new long[numTotal];
            int offset = 0;
            QDigest totalDigest = new QDigest(compressionFactor);
            long expectedSize = 0;
            for (int j = 0; j <= i; ++j) {
                System.arraycopy(samples[j], 0, total, offset, samples[j].length);
                offset += samples[j].length;
                totalDigest = QDigest.unionOf(totalDigest, digests[j]);
                expectedSize += samples[j].length;
            }
            assertEquals(expectedSize, totalDigest.computeActualSize());
            for (double q = 0; q <= 1; q += 0.01) {
                long res = totalDigest.getQuantile(q);
                double[] actualRank = actualRankOf(res, total);
                assertTrue(actualRank[0] + " .. " + actualRank[1] + " outside error bound for " + q,
                        q >= actualRank[0] - eps && q <= actualRank[1] + eps);
            }
        }
    }
}
Use of cern.jet.random.Normal in the project tetrad by cmu-phil: class NormalityTests, method kolmogorovSmirnov.
/**
 * Calculates the Kolmogorov-Smirnov statistics for a variable.
 *
 * @param dataSet relevant data set
 * @param variable continuous variable whose normality is in question
 *
 * @return Kolmogorov-Smirnov statistics: index 0 is the D_n value, 1-5 are the critical values at alpha = .2, .15, .10, .05, and .01 respectively.
 */
public static double[] kolmogorovSmirnov(DataSet dataSet, ContinuousVariable variable) {
    int n = dataSet.getNumRows();
    int columnIndex = dataSet.getColumn(variable);
    Normal idealDistribution = getNormal(dataSet, variable);
    double[] ks = new double[6];
    // Critical values at alpha = .2, .15, .10, .05, .01.
    // NOTE(review): these use the row count n, not the non-missing count below — confirm intended.
    for (int i = 1; i < 6; i++) {
        ks[i] = estimateKSCriticalValue(i, n);
    }
    double[] _data = dataSet.getDoubleData().getColumn(columnIndex).toArray();
    // Drop missing (NaN) entries before building the empirical distribution.
    List<Double> _leaveOutMissing = new ArrayList<>();
    for (double value : _data) {
        if (!Double.isNaN(value)) {
            _leaveOutMissing.add(value);
        }
    }
    double[] data = new double[_leaveOutMissing.size()];
    for (int i = 0; i < _leaveOutMissing.size(); i++) data[i] = _leaveOutMissing.get(i);
    Arrays.sort(data);
    // d = sup x |Fn(X) - i / m| -- the greatest distance between the ideal cdf and the edf.
    // BUGFIX: iterate over the m retained (non-missing) values, not the full row count n;
    // with any NaN present the old loop read past the end of data and the EDF denominator
    // overstated the sample size.
    int m = data.length;
    double d = 0.0;
    for (int i = 1; i <= m; i++) {
        double x = data[i - 1];
        double idealValue = idealDistribution.cdf(x);
        double difference = Math.abs(idealValue - ((double) i / m));
        if (difference > d) {
            d = difference;
        }
    }
    ks[0] = d;
    return ks;
}
Use of cern.jet.random.Normal in the project micrometer by micrometer-metrics: class CounterSample, method main.
public static void main(String[] args) {
    MeterRegistry registry = SampleConfig.myMonitoringSystem();
    // Counter incremented directly on each simulated event.
    Counter counter = registry.counter("counter", "method", "actual");
    // The same event count tracked via a function counter backed by an AtomicInteger.
    AtomicInteger eventCount = new AtomicInteger(0);
    registry.more().counter("counter", Tags.of("method", "function"), eventCount);
    RandomEngine engine = new MersenneTwister64(0);
    Normal gaussian = new Normal(0, 1, engine);
    // Every 10 ms, register an event whenever the shifted normal sample is positive.
    Flux.interval(Duration.ofMillis(10))
            .doOnEach(tick -> {
                if (gaussian.nextDouble() + 0.1 > 0) {
                    counter.increment();
                    eventCount.incrementAndGet();
                }
            })
            .blockLast();
}
Use of cern.jet.random.Normal in the project micrometer by micrometer-metrics: class TimerMaximumThroughputSample, method main.
public static void main(String[] args) {
    MeterRegistry registry = SampleConfig.myMonitoringSystem();
    Timer timer = Timer.builder("timer")
            .publishPercentileHistogram()
            .sla(Duration.ofMillis(275), Duration.ofMillis(300), Duration.ofMillis(500))
            .distributionStatisticExpiry(Duration.ofSeconds(10))
            .distributionStatisticBufferLength(3)
            .register(registry);
    RandomEngine engine = new MersenneTwister64(0);
    Normal latencyDistribution = new Normal(250, 50, engine);
    // Latency applied to all recordings within the current one-second window.
    AtomicInteger latencyForThisSecond = new AtomicInteger(latencyDistribution.nextInt());
    // Re-sample the window latency once per second.
    Flux.interval(Duration.ofSeconds(1))
            .doOnEach(tick -> latencyForThisSecond.set(latencyDistribution.nextInt()))
            .subscribe();
    // Record timings as fast as possible on four parallel rails.
    Stream<Integer> cyclingStream = Stream.iterate(0, i -> (i + 1) % 1000);
    Flux.fromStream(cyclingStream)
            .parallel(4)
            .runOn(Schedulers.parallel())
            .doOnEach(tick -> timer.record(latencyForThisSecond.get(), TimeUnit.MILLISECONDS))
            .subscribe();
    Flux.never().blockLast();
}
Use of cern.jet.random.Normal in the project micrometer by micrometer-metrics: class TimerSample, method main.
public static void main(String[] args) {
    MeterRegistry registry = SampleConfig.myMonitoringSystem();
    Timer timer = Timer.builder("timer")
            .publishPercentileHistogram()
            .publishPercentiles(0.5, 0.95, 0.99)
            .sla(Duration.ofMillis(275), Duration.ofMillis(300), Duration.ofMillis(500))
            .distributionStatisticExpiry(Duration.ofSeconds(10))
            .distributionStatisticBufferLength(3)
            .register(registry);
    FunctionTimer.builder("ftimer", timer, Timer::count, t -> t.totalTime(TimeUnit.SECONDS), TimeUnit.SECONDS)
            .register(registry);
    RandomEngine engine = new MersenneTwister64(0);
    Normal incomingRequests = new Normal(0, 1, engine);
    Normal latencyDistribution = new Normal(250, 50, engine);
    // Latency applied to all requests within the current one-second window.
    AtomicInteger latencyForThisSecond = new AtomicInteger(latencyDistribution.nextInt());
    // Re-sample the window latency once per second.
    Flux.interval(Duration.ofSeconds(1))
            .doOnEach(tick -> latencyForThisSecond.set(latencyDistribution.nextInt()))
            .subscribe();
    // the potential for an "incoming request" every 10 ms
    Flux.interval(Duration.ofMillis(10))
            .doOnEach(tick -> {
                if (incomingRequests.nextDouble() + 0.4 > 0) {
                    // pretend the request took some amount of time, such that the time is
                    // distributed normally with a mean of 250ms
                    timer.record(latencyForThisSecond.get(), TimeUnit.MILLISECONDS);
                }
            })
            .blockLast();
}
Aggregations