use of org.apache.commons.math3.stat.descriptive.summary.Sum in project narchy by automenta.
the class Optimize method run.
public Result<X> run(int maxIterations, int repeats, FloatFunction<Supplier<X>> eval) {
    assert (repeats >= 1);
    final int dim = tweaks.size();
    double[] mid = new double[dim];
    // double[] sigma = new double[n];
    double[] min = new double[dim];
    double[] max = new double[dim];
    double[] inc = new double[dim];
    // double[] range = new double[dim];
    X example = subject.get();
    int i = 0;
    for (Tweak w : tweaks) {
        TweakFloat s = (TweakFloat) w;
        // initial guess: get from sample, otherwise midpoint of min/max range
        Object guess = s.get(example);
        mid[i] = guess != null ? ((float) guess) : ((s.getMax() + s.getMin()) / 2f);
        min[i] = (s.getMin());
        max[i] = (s.getMax());
        inc[i] = s.getInc();
        // range[i] = max[i] - min[i];
        // sigma[i] = Math.abs(max[i] - min[i]) * 0.75f; //(s.getInc());
        i++;
    }
    FasterList<DoubleObjectPair<double[]>> experiments = new FasterList<>(maxIterations);
    final double[] maxScore = { Double.NEGATIVE_INFINITY };
    ObjectiveFunction func = new ObjectiveFunction(point -> {
        double score;
        try {
            double sum = 0;
            for (int r = 0; r < repeats; r++) {
                Supplier<X> x = () -> subject(point);
                sum += eval.floatValueOf(x);
            }
            score = sum / repeats;
        } catch (Exception e) {
            logger.error("{} {} {}", this, point, e);
            score = Float.NEGATIVE_INFINITY;
        }
        if (trace)
            csv.out(ArrayUtils.add(point, (int) 0, score));
        maxScore[0] = Math.max(maxScore[0], score);
        // System.out.println(
        //     n4(score) + " / " + n4(maxScore[0]) + "\t" + n4(point)
        // );
        experiments.add(pair(score, point));
        experimentIteration(point, score);
        return score;
    });
    if (trace)
        csv = new CSVOutput(System.out, Stream.concat(Stream.of("score"), tweaks.stream().map(t -> t.id)).toArray(String[]::new));
    experimentStart();
    try {
        solve(dim, func, mid, min, max, inc, maxIterations);
    } catch (Throwable t) {
        logger.info("solve {} {}", func, t);
    }
    return new Result<>(experiments, tweaks);
}
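None of the arithmetic above touches the aggregated class directly; the per-point score is averaged with a bare double accumulator. As a minimal sketch (not part of the narchy source), the same averaging step could be written against Sum itself; evalOnce() is a hypothetical stand-in for eval.floatValueOf(() -> subject(point)):

import org.apache.commons.math3.stat.descriptive.summary.Sum;

Sum sum = new Sum();                          // storeless running total
for (int r = 0; r < repeats; r++) {
    sum.increment(evalOnce());                // accumulate one evaluation
}
double score = sum.getResult() / repeats;     // same mean as the loop above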
use of org.apache.commons.math3.stat.descriptive.summary.Sum in project chordatlas by twak.
the class FacadeTool method renderFacades.
private void renderFacades(Node gNode, String blockName, FacadeFinder ff) {
    Thread thread = new Thread() {

        @Override
        public void run() {
            File blockFile = new File(Tweed.DATA + File.separator + FeatureCache.FEATURE_FOLDER + File.separator + blockName);
            if (GISGen.mode == Mode.RENDER_SELECTED_BLOCK)
                try {
                    FileUtils.deleteDirectory(blockFile);
                } catch (IOException e1) {
                    e1.printStackTrace();
                }
            for (int mfi = 0; mfi < ff.results.size(); mfi++) {
                ToProjMega tpm = ff.results.get(mfi);
                // note: the lambda ignores x, so the stream adds tpm.size() once per
                // element; the second clause is therefore equivalent to tpm.size() == 0
                if (tpm.size() == 0 || tpm.stream().mapToInt(x -> tpm.size()).sum() == 0)
                    continue;
                File megaFolder = new File(blockFile, "" + mfi);
                megaFolder.mkdirs();
                try {
                    new XStream().toXML(tpm.megafacade, new FileOutputStream(new File(megaFolder, LINE_XML)));
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
                // print a list of panoramas on this side.
                // List<Double> rots = new ArrayList();
                //
                // for (ToProject tp : tpm)
                //     for (Pano pano : tp.toProject) {
                //         ImagePlaneGen pg = new ImagePlaneGen(tweed, (float) tp.e.x, (float) tp.e.y, (float) tp.s.x, (float) tp.s.y, (float) tp.minHeight, (float) tp.maxHeight, tp.toProject);
                //         pg.fudgeToDepth(pixelsPerMeter, pano, rots);
                //     }
                //
                double rot = 0;
                // if (!rots.isEmpty())
                //     rot = biggestClusterMean(rots); // rots.stream().mapToDouble(x -> x).average().getAsDouble();
                // System.out.println("avg rot was " + rot);
                List<BufferedImage> images = new ArrayList<>();
                for (int fc = 0; fc < tpm.size(); fc++) {
                    // if (mfi != 2 || fc != 0)
                    //     continue;
                    ToProject tp = tpm.get(fc);
                    // if (!tp.toProject.iterator().next().name.contains("hfhGoIsR24hezjXpuIqklw"))
                    //     continue;
                    System.out.println("mega " + mfi + " pano " + fc);
                    File imageFolder;
                    String imageFilename = null;
                    // if (GISGen.mode == Mode.RENDER_SELECTED_FACADE) {
                    imageFolder = new File(megaFolder, "" + fc);
                    imageFilename = FeatureCache.RENDERED_IMAGE;
                    // }
                    // else
                    //     imageFolder = new File(blockName);
                    imageFolder.mkdirs();
                    if (tp.toProject.size() != 1)
                        throw new Error();
                    ImagePlaneGen pg = new ImagePlaneGen(tweed, (float) tp.e.x, (float) tp.e.y, (float) tp.s.x, (float) tp.s.y, (float) tp.minHeight, (float) tp.maxHeight, tp.toProject);
                    if (GISGen.mode != Mode.RENDER_ALL_BLOCKS)
                        tweed.frame.addGen(pg, true);
                    for (Pano pano_ : tp.toProject) {
                        Pano pano = new Pano(pano_);
                        pano.set(pano.oa1 - (float) rot, pano.oa2, pano.oa3);
                        if (imageFilename == null)
                            imageFilename = new File(pano.name).getName() + "_" + tpm.megafacade.start + "_" + tpm.megafacade.end;
                        BufferedImage bi = pg.render(imageFolder, pixelsPerMeter, pano, tpm.megafacade, imageFilename);
                        if (GISGen.mode == Mode.RENDER_SELECTED_BLOCK)
                            images.add(bi);
                        try {
                            FileWriter out = new FileWriter(new File(imageFolder, "meta.txt"));
                            out.write(pixelsPerMeter * 10 + " " + (tp.s.distance(tp.e) * pixelsPerMeter - pixelsPerMeter * 20) + " " + (tp.maxHeight - tp.minHeight) * pixelsPerMeter + "\n");
                            out.write(pg.toString() + "\n");
                            out.write(pano.orig.getName() + "\n");
                            Point2d cen = tpm.megafacade.project(new Point2d(pano.location.x, pano.location.z), false);
                            out.write(tp.s.x + " " + tp.s.y + " " + tp.e.x + " " + tp.e.y + " " + cen.x + " " + cen.y + " " + pano.location.x + " " + pano.location.z + "\n");
                            out.close();
                        } catch (Throwable th) {
                            th.printStackTrace();
                        }
                    }
                }
                if (GISGen.mode == Mode.RENDER_SELECTED_BLOCK)
                    Imagez.writeSummary(new File(megaFolder, "summary.png"), images);
            }
        }
    };
    if (GISGen.mode == Mode.RENDER_SELECTED_BLOCK)
        thread.start();
    else
        thread.run();
}
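The emptiness guard above is presumably meant to skip megafacades with no panoramas to project. A hedged sketch of that intent using Sum, assuming each ToProject exposes the toProject collection the loop body relies on later:

import org.apache.commons.math3.stat.descriptive.summary.Sum;

Sum panos = new Sum();
for (ToProject tp : tpm)
    panos.increment(tp.toProject.size());     // count this side's panoramas
if (tpm.size() == 0 || panos.getResult() == 0)
    continue;                                 // nothing to render here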
use of org.apache.commons.math3.stat.descriptive.summary.Sum in project OpenTripPlanner by opentripplanner.
the class PropagatedTimesStore method setFromArray.
/**
 * @param times for each search iteration (varying departure time), an array of travel times to each destination.
 * @param includeInAverages for each iteration, whether that iteration should be included in average calculations.
 *                          In RaptorWorker's Monte Carlo code we also include minima and maxima, which should
 *                          not be included in averages.
 *                          Iterations that are not included in averages are still used to determine extrema.
 */
public void setFromArray(int[][] times, boolean[] includeInAverages, ConfidenceCalculationMethod confidenceCalculationMethod) {
    if (times.length == 0)
        // nothing to do
        return;
    // assume array is rectangular
    int nTargets = times[0].length;
    // cache random numbers. This should be fine as we're mixing it with the number of minutes
    // at which each destination is accessible, which is sometimes not 120, as well as the stop
    // position in the list (note that we have cleverly chosen a number which is a prime
    // so is not divisible by the number of iterations on the bootstrap). Finally recall that
    // the maximum number of times we're sampling from is generally 120 and we modulo this,
    // so the pigeonhole principle applies.
    // this is effectively a "random number generator" with period 10007
    int[] randomNumbers = random.ints().limit(10007).map(Math::abs).toArray();
    int nextRandom = 0;
    int effectiveIterations = 0;
    for (int i = 0; i < includeInAverages.length; i++) {
        if (includeInAverages[i])
            effectiveIterations++;
    }
    // loop over targets on the outside so we can bootstrap
    TARGETS: for (int target = 0; target < nTargets; target++) {
        // compute the average
        int sum = 0;
        int count = 0;
        TIntList timeList = new TIntArrayList();
        TIntList avgList = new TIntArrayList();
        ITERATIONS: for (int i = 0; i < times.length; i++) {
            if (times[i][target] == RaptorWorker.UNREACHED)
                continue ITERATIONS;
            if (includeInAverages[i]) {
                avgList.add(times[i][target]);
                sum += times[i][target];
                count++;
            }
            timeList.add(times[i][target]);
        }
        // never reachable
        if (count == 0)
            continue TARGETS;
        // only record an average if the target was reached in enough iterations
        if (count >= effectiveIterations * req.reachabilityThreshold)
            avgs[target] = sum / count;
        // TODO: correctly handle partial accessibility for bootstrap and percentile options.
        switch (confidenceCalculationMethod) {
            case BOOTSTRAP:
                // now bootstrap out a 95% confidence interval on the time
                int[] bootMeans = new int[N_BOOTSTRAPS];
                // prevent overflow
                nextRandom += N_BOOTSTRAPS * count % randomNumbers.length;
                final int randOff = nextRandom;
                final int finalCount = count;
                IntStream.range(0, N_BOOTSTRAPS).parallel().forEach(boot -> {
                    int bsum = 0;
                    // sample from the Monte Carlo distribution with replacement
                    for (int iter = 0; iter < finalCount; iter++) {
                        bsum += avgList.get(randomNumbers[(randOff + boot * iter) % randomNumbers.length] % avgList.size());
                        // bsum += timeList.get(random.nextInt(count));
                    }
                    bootMeans[boot] = bsum / finalCount;
                });
                Arrays.sort(bootMeans);
                // 2.5 percentile of distribution of means
                mins[target] = bootMeans[N_BOOTSTRAPS / 40];
                // 97.5 percentile of distribution of means
                maxs[target] = bootMeans[N_BOOTSTRAPS - N_BOOTSTRAPS / 40];
                break;
            case PERCENTILE:
                timeList.sort();
                mins[target] = timeList.get(timeList.size() / 40);
                maxs[target] = timeList.get(39 * timeList.size() / 40);
                break;
            case NONE:
                mins[target] = maxs[target] = avgs[target];
                break;
            case MIN_MAX:
            default:
                mins[target] = timeList.min();
                // NB not using count here as it doesn't count iterations that are not included in averages
                if (timeList.size() == times.length)
                    maxs[target] = timeList.max();
                break;
        }
    }
}
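The bootstrap's inner accumulation maps directly onto the aggregated class. A sketch of one replicate's mean via Sum, reusing the snippet's variable names (illustrative only, not the OpenTripPlanner implementation):

import org.apache.commons.math3.stat.descriptive.summary.Sum;

Sum bsum = new Sum();
for (int iter = 0; iter < finalCount; iter++) {
    int idx = randomNumbers[(randOff + boot * iter) % randomNumbers.length] % avgList.size();
    bsum.increment(avgList.get(idx));         // resample with replacement
}
bootMeans[boot] = (int) (bsum.getResult() / finalCount);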
use of org.apache.commons.math3.stat.descriptive.summary.Sum in project tutorials by eugenp.
the class FractionUnitTest method whenFractionAdd_thenCorrect.
@Test
public void whenFractionAdd_thenCorrect() {
    Fraction lhs = new Fraction(1, 3);
    Fraction rhs = new Fraction(2, 5);
    // 1/3 + 2/5 = 5/15 + 6/15 = 11/15
    Fraction sum = lhs.add(rhs);
    Assert.assertEquals(11, sum.getNumerator());
    Assert.assertEquals(15, sum.getDenominator());
}
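Since the test exercises exact rational arithmetic, a hedged companion check (not in the tutorials source) could cross-validate the exact Fraction total against a floating-point Sum over the same values:

import org.apache.commons.math3.fraction.Fraction;
import org.apache.commons.math3.stat.descriptive.summary.Sum;

Fraction[] parts = { new Fraction(1, 3), new Fraction(2, 5) };
Fraction total = Fraction.ZERO;
double[] asDoubles = new double[parts.length];
for (int i = 0; i < parts.length; i++) {
    total = total.add(parts[i]);              // exact: 11/15
    asDoubles[i] = parts[i].doubleValue();
}
Assert.assertEquals(new Sum().evaluate(asDoubles), total.doubleValue(), 1e-9);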
use of org.apache.commons.math3.stat.descriptive.summary.Sum in project tetrad by cmu-phil.
the class Ling method allEigenvaluesAreSmallerThanOneInModulus.
private static boolean allEigenvaluesAreSmallerThanOneInModulus(TetradMatrix mat) {
    EigenDecomposition dec = new EigenDecomposition(mat.getRealMatrix());
    double[] realEigenvalues = dec.getRealEigenvalues();
    double[] imagEigenvalues = dec.getImagEigenvalues();
    double sum = 0.0;
    // boolean allEigenvaluesSmallerThanOneInModulus = true;
    for (int i = 0; i < realEigenvalues.length; i++) {
        double realEigenvalue = realEigenvalues[i];
        double imagEigenvalue = imagEigenvalues[i];
        double modulus = Math.sqrt(Math.pow(realEigenvalue, 2) + Math.pow(imagEigenvalue, 2));
        // double argument = Math.atan(imagEigenvalue / realEigenvalue);
        // double modulusCubed = Math.pow(modulus, 3);
        // System.out.println("eigenvalue #" + i + " = " + realEigenvalue + "+" + imagEigenvalue + "i");
        // System.out.println("eigenvalue #" + i + " has argument = " + argument);
        // System.out.println("eigenvalue #" + i + " has modulus = " + modulus);
        // System.out.println("eigenvalue #" + i + " has modulus^3 = " + modulusCubed);
        sum += modulus;
        // NB: despite the method name, the cutoff actually used here is 1.5, not 1.0
        if (modulus >= 1.5) {
            return false;
            // allEigenvaluesSmallerThanOneInModulus = false;
        }
    }
    return true;
    // return allEigenvaluesSmallerThanOneInModulus;
    // return sum / realEigenvalues.size() < 1;
}
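The commented-out alternative return suggests an average-modulus criterion. A sketch of that variant with Sum (note realEigenvalues is a double[], so length replaces the size() call in the comment; Math.hypot computes the modulus directly):

import org.apache.commons.math3.stat.descriptive.summary.Sum;

Sum moduli = new Sum();
for (int i = 0; i < realEigenvalues.length; i++) {
    moduli.increment(Math.hypot(realEigenvalues[i], imagEigenvalues[i]));
}
boolean averageModulusBelowOne = moduli.getResult() / realEigenvalues.length < 1;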