Use of org.apache.commons.math3.distribution.UniformRealDistribution in project GDSC-SMLM by aherbert,
in the class CreateData, method createPhotonDistribution.
/**
 * Creates the photon distribution selected in the settings.
 *
 * <p>For a custom distribution the values are loaded from a text file (one floating-point value
 * per line) and scaled so that their mean equals {@code settings.photonsPerSecond}. Uniform and
 * gamma distributions are constructed directly from the settings. Returns {@code null} when no
 * sampling distribution is required (correlated photons, or fall-back to a fixed photon count).
 *
 * @return A photon distribution with the specified population mean, or null to use a fixed value.
 */
private RealDistribution createPhotonDistribution() {
    if (PHOTON_DISTRIBUTION[PHOTON_CUSTOM].equals(settings.photonDistribution)) {
        // Get the distribution file
        String filename = Utils.getFilename("Photon_distribution", settings.photonDistributionFile);
        if (filename != null) {
            settings.photonDistributionFile = filename;
            // try-with-resources closes the stream even if the UnicodeReader constructor throws
            // while reading the byte-order mark; the previous code leaked the FileInputStream in
            // that case because only the BufferedReader was closed.
            try (InputStream is = new FileInputStream(new File(settings.photonDistributionFile));
                    BufferedReader in = new BufferedReader(new UnicodeReader(is, null))) {
                StoredDataStatistics stats = new StoredDataStatistics();
                String str;
                while ((str = in.readLine()) != null) {
                    stats.add(Double.parseDouble(str));
                }
                if (stats.getSum() > 0) {
                    // Update the statistics to the desired mean.
                    double scale = (double) settings.photonsPerSecond / stats.getMean();
                    double[] values = stats.getValues();
                    for (int i = 0; i < values.length; i++) {
                        values[i] *= scale;
                    }
                    // TODO - Investigate the limits of this distribution.
                    // How far above and below the input data will values be generated.
                    // Create the distribution using the recommended number of bins
                    final int binCount = stats.getN() / 10;
                    EmpiricalDistribution dist = new EmpiricalDistribution(binCount, createRandomGenerator());
                    dist.load(values);
                    return dist;
                }
            } catch (IOException | NullArgumentException | NumberFormatException e) {
                // Ignore (bad file, empty line, or parse failure) and fall through to the
                // failure message below, then default to a fixed photon count.
            }
        }
        Utils.log("Failed to load custom photon distribution from file: %s. Default to fixed.", settings.photonDistributionFile);
    } else if (PHOTON_DISTRIBUTION[PHOTON_UNIFORM].equals(settings.photonDistribution)) {
        // Uniform is only valid when the range is non-empty.
        if (settings.photonsPerSecond < settings.photonsPerSecondMaximum) {
            UniformRealDistribution dist = new UniformRealDistribution(createRandomGenerator(), settings.photonsPerSecond, settings.photonsPerSecondMaximum);
            return dist;
        }
    } else if (PHOTON_DISTRIBUTION[PHOTON_GAMMA].equals(settings.photonDistribution)) {
        // Gamma with mean = shape * scale, i.e. scale = mean / shape.
        final double scaleParameter = settings.photonsPerSecond / settings.photonShape;
        GammaDistribution dist = new GammaDistribution(createRandomGenerator(), settings.photonShape, scaleParameter, ExponentialDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
        return dist;
    } else if (PHOTON_DISTRIBUTION[PHOTON_CORRELATED].equals(settings.photonDistribution)) {
        // No distribution required
        return null;
    }
    // Anything else (or a failed custom/uniform set-up) reverts to a fixed photon count.
    settings.photonDistribution = PHOTON_DISTRIBUTION[PHOTON_FIXED];
    return null;
}
Use of org.apache.commons.math3.distribution.UniformRealDistribution in project gatk-protected by broadinstitute,
in the class CoverageDropoutDetectorTest, method getUnivariateGaussianTargetsWithDropout.
/**
 * Builds a synthetic data set of Gaussian-distributed target coverages where a random fraction
 * of targets has its coverage reduced ("dropout"), plus two modeled segments covering the data.
 *
 * @param sigma standard deviation of the per-target coverage noise
 * @param dropoutRate probability that any given target is reduced by 25%-75%
 * @return a single TestNG data-provider row: { target collection, segment list }
 */
private Object[][] getUnivariateGaussianTargetsWithDropout(final double sigma, final double dropoutRate) {
    final Random seededRandom = new Random(337);
    final RandomGenerator generator = RandomGeneratorFactory.createRandomGenerator(seededRandom);
    final NormalDistribution gaussian = new NormalDistribution(generator, 1, sigma);
    final int totalTargets = 10000;
    final int eventTargets = 2000;
    // Randomly select dropoutRate of targets and reduce by 25%-75% (uniformly distributed)
    final UniformRealDistribution uniform = new UniformRealDistribution(generator, 0, 1.0);
    final List<ReadCountRecord.SingleSampleRecord> records = new ArrayList<>();
    for (int i = 0; i < totalTargets; i++) {
        // The last eventTargets targets carry an extra +0.5 coverage "event".
        final boolean inEvent = i >= totalTargets - eventTargets;
        double coverage = gaussian.sample() + (inEvent ? 0.5 : 0.0);
        if (uniform.sample() < dropoutRate) {
            coverage *= .25 + uniform.sample() / 2;
        }
        final SimpleInterval interval = new SimpleInterval("chr1", 100 + 2 * i, 101 + 2 * i);
        records.add(new ReadCountRecord.SingleSampleRecord(new Target("arbitrary_name", interval), coverage));
    }
    final HashedListTargetCollection<ReadCountRecord.SingleSampleRecord> targets = new HashedListTargetCollection<>(records);
    final List<ModeledSegment> segments = new ArrayList<>();
    segments.add(new ModeledSegment(new SimpleInterval("chr1", 100, 16050), 8000, 1));
    segments.add(new ModeledSegment(new SimpleInterval("chr1", 16100, 20200), 2000, 1.5));
    return new Object[][] { { targets, segments } };
}
Use of org.apache.commons.math3.distribution.UniformRealDistribution in project gatk by broadinstitute,
in the class CoverageDropoutDetectorTest, method getUnivariateGaussianTargetsWithDropout.
/**
 * Creates a TestNG data-provider row of synthetic coverage data: 10000 targets with Gaussian
 * noise around 1.0, the final 2000 shifted up by 0.5, and a random subset scaled down by a
 * uniform factor in [0.25, 0.75) to simulate coverage dropout.
 *
 * @param sigma standard deviation of the Gaussian coverage noise
 * @param dropoutRate fraction of targets expected to suffer dropout
 * @return one data-provider row containing the target collection and the modeled segments
 */
private Object[][] getUnivariateGaussianTargetsWithDropout(final double sigma, final double dropoutRate) {
    final RandomGenerator rg = RandomGeneratorFactory.createRandomGenerator(new Random(337));
    final NormalDistribution noise = new NormalDistribution(rg, 1, sigma);
    final int targetCount = 10000;
    final int eventCount = 2000;
    // Randomly select dropoutRate of targets and reduce by 25%-75% (uniformly distributed)
    final UniformRealDistribution unit = new UniformRealDistribution(rg, 0, 1.0);
    final List<ReadCountRecord.SingleSampleRecord> recordList = new ArrayList<>();
    for (int index = 0; index < targetCount; index++) {
        // Baseline noise; targets in the trailing event region get an extra 0.5.
        double value = noise.sample() + (index < targetCount - eventCount ? 0.0 : 0.5);
        if (unit.sample() < dropoutRate) {
            final double shrink = .25 + unit.sample() / 2;
            value = value * shrink;
        }
        recordList.add(new ReadCountRecord.SingleSampleRecord(
                new Target("arbitrary_name", new SimpleInterval("chr1", 100 + 2 * index, 101 + 2 * index)),
                value));
    }
    final HashedListTargetCollection<ReadCountRecord.SingleSampleRecord> collection =
            new HashedListTargetCollection<>(recordList);
    final List<ModeledSegment> segmentList = new ArrayList<>();
    segmentList.add(new ModeledSegment(new SimpleInterval("chr1", 100, 16050), 8000, 1));
    segmentList.add(new ModeledSegment(new SimpleInterval("chr1", 16100, 20200), 2000, 1.5));
    return new Object[][] { { collection, segmentList } };
}
Use of org.apache.commons.math3.distribution.UniformRealDistribution in project druid by druid-io,
in the class ColumnValueGenerator, method initDistribution.
/**
 * Initializes the {@code distribution} field from the column schema's distribution type, then
 * reseeds it with {@code seed} so generated data is reproducible.
 *
 * @throws UnsupportedOperationException if the schema specifies an unknown distribution type
 */
private void initDistribution() {
    GeneratorColumnSchema.ValueDistribution distributionType = schema.getDistributionType();
    ValueType type = schema.getType();
    List<Object> enumeratedValues = schema.getEnumeratedValues();
    List<Double> enumeratedProbabilities = schema.getEnumeratedProbabilities();
    List<Pair<Object, Double>> probabilities = new ArrayList<>();
    switch (distributionType) {
        case SEQUENTIAL:
            // not random, just cycle through numbers from start to end, or cycle through enumerated values if provided
            distribution = new SequentialDistribution(schema.getStartInt(), schema.getEndInt(), schema.getEnumeratedValues());
            break;
        case UNIFORM:
            distribution = new UniformRealDistribution(schema.getStartDouble(), schema.getEndDouble());
            break;
        case DISCRETE_UNIFORM:
            if (enumeratedValues == null) {
                // No explicit values: enumerate the half-open integer range [startInt, endInt).
                enumeratedValues = new ArrayList<>();
                for (int i = schema.getStartInt(); i < schema.getEndInt(); i++) {
                    Object val = convertType(i, type);
                    enumeratedValues.add(val);
                }
            }
            // give them all equal probability, the library will normalize probabilities to sum to 1.0
            for (Object enumeratedValue : enumeratedValues) {
                probabilities.add(new Pair<>(enumeratedValue, 0.1));
            }
            distribution = new EnumeratedTreeDistribution<>(probabilities);
            break;
        case NORMAL:
            distribution = new NormalDistribution(schema.getMean(), schema.getStandardDeviation());
            break;
        case ROUNDED_NORMAL:
            NormalDistribution normalDist = new NormalDistribution(schema.getMean(), schema.getStandardDeviation());
            distribution = new RealRoundingDistribution(normalDist);
            break;
        case ZIPF:
            // ZipfDistribution's support is {1..cardinality}, so rank i maps to probability(i + 1).
            // The previous code used probability(i), which is 0 for rank 0 — the first value could
            // never be generated and every remaining value received the mass of the rank above it.
            int cardinality;
            if (enumeratedValues == null) {
                Integer startInt = schema.getStartInt();
                cardinality = schema.getEndInt() - startInt;
                ZipfDistribution zipf = new ZipfDistribution(cardinality, schema.getZipfExponent());
                for (int i = 0; i < cardinality; i++) {
                    probabilities.add(new Pair<>((Object) (i + startInt), zipf.probability(i + 1)));
                }
            } else {
                cardinality = enumeratedValues.size();
                ZipfDistribution zipf = new ZipfDistribution(enumeratedValues.size(), schema.getZipfExponent());
                for (int i = 0; i < cardinality; i++) {
                    probabilities.add(new Pair<>(enumeratedValues.get(i), zipf.probability(i + 1)));
                }
            }
            distribution = new EnumeratedTreeDistribution<>(probabilities);
            break;
        case LAZY_ZIPF:
            // Defer sampling to the library's ZipfDistribution instead of materializing probabilities.
            int lazyCardinality;
            Integer startInt = schema.getStartInt();
            lazyCardinality = schema.getEndInt() - startInt;
            distribution = new ZipfDistribution(lazyCardinality, schema.getZipfExponent());
            break;
        case LAZY_DISCRETE_UNIFORM:
            distribution = new UniformIntegerDistribution(schema.getStartInt(), schema.getEndInt());
            break;
        case ENUMERATED:
            for (int i = 0; i < enumeratedValues.size(); i++) {
                probabilities.add(new Pair<>(enumeratedValues.get(i), enumeratedProbabilities.get(i)));
            }
            distribution = new EnumeratedTreeDistribution<>(probabilities);
            break;
        default:
            throw new UnsupportedOperationException("Unknown distribution type: " + distributionType);
    }
    // Reseed through the concrete base type so the generator sequence is deterministic.
    if (distribution instanceof AbstractIntegerDistribution) {
        ((AbstractIntegerDistribution) distribution).reseedRandomGenerator(seed);
    } else if (distribution instanceof AbstractRealDistribution) {
        ((AbstractRealDistribution) distribution).reseedRandomGenerator(seed);
    } else {
        ((EnumeratedDistribution) distribution).reseedRandomGenerator(seed);
    }
}
Use of org.apache.commons.math3.distribution.UniformRealDistribution in project beam by apache,
in the class SyntheticOptionsTest, method testRealDistributionDeserializerWithUniformDistribution.
/**
 * Verifies that a delay-distribution spec of type "uniform" deserializes into a
 * UniformRealDistribution whose support matches the configured lower/upper bounds.
 */
@Test
public void testRealDistributionDeserializerWithUniformDistribution() throws Exception {
    String syntheticOptions = "{\"seed\":12345," + "\"delayDistribution\":{\"type\":\"uniform\",\"lower\":0,\"upper\":100}}";
    SyntheticOptions sourceOptions = optionsFromString(syntheticOptions, SyntheticOptions.class);
    // Cast once and reuse rather than repeating the double cast in each assertion.
    final UniformRealDistribution delay =
            (UniformRealDistribution) sourceOptions.delayDistribution.getDistribution();
    assertEquals(0, (long) delay.getSupportLowerBound());
    assertEquals(100, (long) delay.getSupportUpperBound());
}
Aggregations