Use of org.apache.commons.math3.distribution.NormalDistribution in the broadinstitute/gatk project — class MathUtilsUnitTest, method testNormalDistribution:
@Test
public void testNormalDistribution() {
    final double requiredPrecision = 1E-10;
    // Check MathUtils against a reference implementation over a grid of (mu, sigma, x).
    // TODO: GATK3 uses the cern.jet.random Normal implementation to verify this;
    // since it was only used for test verification, rather than introducing a new
    // GATK4 dependency we just use the apache NormalDistribution
    for (final double mu : new double[] { -5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977 }) {
        for (final double sigma : new double[] { 1.2, 3.0, 5.8977 }) {
            // The reference distribution depends only on (mu, sigma), so construct it
            // once per parameter pair instead of once per evaluation point.
            final NormalDistribution n = new NormalDistribution(mu, sigma);
            for (final double x : new double[] { -5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977 }) {
                Assert.assertEquals(n.density(x), MathUtils.normalDistribution(mu, sigma, x), requiredPrecision);
                Assert.assertEquals(Math.log10(n.density(x)), MathUtils.normalDistributionLog10(mu, sigma, x), requiredPrecision);
            }
        }
    }
}
Use of org.apache.commons.math3.distribution.NormalDistribution in the broadinstitute/gatk project — class CoverageDropoutDetectorTest, method getUnivariateGaussianTargets:
/**
 * Builds a synthetic single-sample coverage data set drawn from N(1, sigma),
 * where the final 2000 of 10000 targets are shifted up by 0.5 to simulate a
 * coverage event, paired with two pre-defined modeled segments.
 *
 * @param sigma standard deviation of the simulated coverage noise
 * @return a single data-provider row: { HashedListTargetCollection, List<ModeledSegment> }
 */
private Object[][] getUnivariateGaussianTargets(final double sigma) {
    // Fixed seed so the simulated data set is reproducible across runs.
    final RandomGenerator randomGenerator = RandomGeneratorFactory.createRandomGenerator(new Random(337));
    final NormalDistribution gaussian = new NormalDistribution(randomGenerator, 1, sigma);

    final int numDataPoints = 10000;
    final int numEventPoints = 2000;
    final int numBaselinePoints = numDataPoints - numEventPoints;

    final List<ReadCountRecord.SingleSampleRecord> records = new ArrayList<>();
    for (int i = 0; i < numDataPoints; i++) {
        // Baseline targets sample the raw Gaussian; event targets are offset by +0.5.
        final double shift = (i < numBaselinePoints) ? 0.0 : 0.5;
        final SimpleInterval interval = new SimpleInterval("chr1", 100 + 2 * i, 101 + 2 * i);
        records.add(new ReadCountRecord.SingleSampleRecord(
                new Target("arbitrary_name", interval), shift + gaussian.sample()));
    }

    final HashedListTargetCollection<ReadCountRecord.SingleSampleRecord> targets =
            new HashedListTargetCollection<>(records);

    final List<ModeledSegment> segments = new ArrayList<>();
    segments.add(new ModeledSegment(new SimpleInterval("chr1", 100, 16050), 8000, 1));
    segments.add(new ModeledSegment(new SimpleInterval("chr1", 16100, 20200), 2000, 1.5));

    return new Object[][] { { targets, segments } };
}
Use of org.apache.commons.math3.distribution.NormalDistribution in the broadinstitute/gatk-protected project — class AdaptiveMetropolisSampler, method sample:
/**
 * Draws one adaptive Metropolis sample from {@code logPDF}, mutating the sampler's
 * step size toward the target acceptance rate as a side effect.
 *
 * @param rng    source of randomness for the proposal and the accept/reject draw
 * @param logPDF log of the (unnormalized) target density
 * @return the accepted proposal, or the current state if the proposal was rejected
 */
public double sample(final RandomGenerator rng, final Function<Double, Double> logPDF) {
    Utils.nonNull(rng);
    Utils.nonNull(logPDF);
    // Draw the standard-normal deviate directly from the generator instead of
    // allocating a fresh NormalDistribution(rng, 0, 1) on every call — same
    // N(0,1) proposal noise, no per-call object construction.
    final double proposal = xCurrent + stepSize * rng.nextGaussian();
    // Proposals outside [lowerBound, upperBound] are rejected outright; otherwise
    // use the standard Metropolis ratio (symmetric proposal, so no Hastings term).
    final double acceptanceProbability = (proposal < lowerBound || upperBound < proposal) ? 0
            : Math.min(1, Math.exp(logPDF.apply(proposal) - logPDF.apply(xCurrent)));
    // Adjust stepSize larger/smaller to decrease/increase the acceptance rate;
    // the (timeScale / (timeScale + iteration)) factor damps adaptation over time.
    final double correctionFactor = (acceptanceProbability - optimalAcceptanceRate) * adjustmentRate * (timeScale / (timeScale + iteration));
    stepSize *= Math.exp(correctionFactor);
    iteration++;
    return rng.nextDouble() < acceptanceProbability ? proposal : xCurrent;
}
Use of org.apache.commons.math3.distribution.NormalDistribution in the broadinstitute/gatk-protected project — class CoverageDropoutDetectorTest, method getUnivariateGaussianTargets:
/**
 * Generates a reproducible single-sample coverage fixture: 10000 targets whose
 * coverage is drawn from N(1, sigma), with the last 2000 shifted by +0.5 to mimic
 * a dropout/event region, plus two fixed modeled segments spanning the targets.
 *
 * @param sigma standard deviation of the simulated coverage values
 * @return one data-provider row of { target collection, segment list }
 */
private Object[][] getUnivariateGaussianTargets(final double sigma) {
    final int numDataPoints = 10000;
    final int numEventPoints = 2000;
    final int eventStart = numDataPoints - numEventPoints;

    // Seeded generator keeps the fixture deterministic.
    final RandomGenerator randomGenerator = RandomGeneratorFactory.createRandomGenerator(new Random(337));
    final NormalDistribution noise = new NormalDistribution(randomGenerator, 1, sigma);

    final List<ReadCountRecord.SingleSampleRecord> targetRecords = new ArrayList<>(numDataPoints);
    for (int i = 0; i < numDataPoints; i++) {
        final Target target = new Target("arbitrary_name",
                new SimpleInterval("chr1", 100 + 2 * i, 101 + 2 * i));
        // Targets at index >= eventStart get an extra +0.5 of coverage.
        final double coverage = (i >= eventStart ? 0.5 : 0.0) + noise.sample();
        targetRecords.add(new ReadCountRecord.SingleSampleRecord(target, coverage));
    }

    final List<ModeledSegment> segments = new ArrayList<>();
    segments.add(new ModeledSegment(new SimpleInterval("chr1", 100, 16050), 8000, 1));
    segments.add(new ModeledSegment(new SimpleInterval("chr1", 16100, 20200), 2000, 1.5));

    return new Object[][] {
            { new HashedListTargetCollection<>(targetRecords), segments }
    };
}
Use of org.apache.commons.math3.distribution.NormalDistribution in the virtualcell/vcell project — class BrownianDynamics2DSolver, method step:
/**
 * Advances every particle by one Brownian-dynamics time step with reflecting
 * boundaries at the domain edges, then advances the current time.
 *
 * Each coordinate receives an independent Gaussian displacement:
 *   x(n+1) = x(n) + N(0, sqrt(2*D*dt))   (per-axis variance 2*D*dt)
 *   y(n+1) = y(n) + N(0, sqrt(2*D*dt))
 * Or try http://simul.iro.umontreal.ca/ssj/doc/html/umontreal/iro/lecuyer/stochprocess/BrownianMotion.html ???
 *
 * @param diffusion diffusion coefficient D
 * @param deltaT    time step dt
 */
public void step(double diffusion, double deltaT) {
    // BUG FIX: NormalDistribution's third constructor argument is the STANDARD
    // DEVIATION, not the variance. The per-axis variance of a Brownian step is
    // 2*D*dt, so the standard deviation must be sqrt(2*D*dt); passing 2*D*dt
    // directly over-disperses the particles.
    NormalDistribution normalDistribution =
            new NormalDistribution(simulationRng, 0.0, Math.sqrt(2 * diffusion * deltaT));
    double maxX = origin.getX() + extent.getX();
    double minX = origin.getX();
    double maxY = origin.getY() + extent.getY();
    double minY = origin.getY();
    for (int p = 0; p < numParticles; p++) {
        double displacementX = normalDistribution.sample();
        double displacementY = normalDistribution.sample();
        particleX[p] = reflectIntoBounds(particleX[p] + displacementX, minX, maxX);
        particleY[p] = reflectIntoBounds(particleY[p] + displacementY, minY, maxY);
    }
    currTime += deltaT;
}

/**
 * Mirrors a position back inside [min, max] (one reflection per edge; assumes the
 * displacement is small relative to the domain, as in the original code).
 */
private static double reflectIntoBounds(double pos, double min, double max) {
    if (pos < min) {
        pos = pos + 2 * (min - pos);
    }
    if (pos > max) {
        pos = pos - 2 * (pos - max);
    }
    return pos;
}
Aggregations