use of org.apache.commons.math3.stat.descriptive.rank.Max in project MPW by shineangelic.
the class ChartUtils method drawHashrateHistory.
static void drawHashrateHistory(TextView titleTextView, LinkedMap<Date, HomeStatsChartData> storia, LineView chart, GranularityEnum grane) {
    SummaryStatistics stats = new SummaryStatistics();
    ArrayList<Float> dataList = new ArrayList<>();
    ArrayList<Float> dataListMax = new ArrayList<>();
    ArrayList<Float> dataListMin = new ArrayList<>();
    ArrayList<String> labelsArr = new ArrayList<>();
    List<Date> dates = storia.asList();
    // HomeStats campione = storia.values().iterator().next();
    for (Date date2 : dates) {
        labelsArr.add(getLabelFormat(grane, date2));
        dataList.add(Utils.condenseHashRate(storia.get(date2).getHashrate()));
        dataListMax.add(Utils.condenseHashRate(storia.get(date2).getHashrateMax()));
        dataListMin.add(Utils.condenseHashRate(storia.get(date2).getHashrateMin()));
        stats.addValue(storia.get(date2).getHashrate());
        stats.addValue(storia.get(date2).getHashrateMax());
        stats.addValue(storia.get(date2).getHashrateMin());
    }
    String curHashRateTxt = "--";
    try {
        curHashRateTxt = Utils.formatHashrate(storia.get(dates.get(dataList.size() - 1)).getHashrate());
    } catch (Exception re) {
        Log.w(TAG, "history too short?", re);
    }
    titleTextView.setText("Hashrate History chart " + "(avg: " + Utils.formatHashrate((long) stats.getMean())
            + ", max: " + Utils.formatHashrate((long) stats.getMax())
            + ", min: " + Utils.formatHashrate((long) stats.getMin())
            + ", now: " + curHashRateTxt
            + ", std dev: " + Utils.formatHashrate((long) stats.getStandardDeviation()) + ")");
    ArrayList<ArrayList<Float>> dataLists = new ArrayList<>();
    List<Integer> colArr = new ArrayList<>();
    dataLists.add(dataListMax);
    colArr.add(ResourcesCompat.getColor(titleTextView.getResources(), R.color.colorPrimaryAlpha, null));
    dataLists.add(dataListMin);
    colArr.add(ResourcesCompat.getColor(titleTextView.getResources(), R.color.colorAccentAlpha, null));
    dataLists.add(dataList);
    chart.setShowPopup(LineView.SHOW_POPUPS_MAXMIN_ONLY);
    // optional
    chart.setDrawDotLine(false);
    chart.setBottomTextList(labelsArr);
    colArr.add(Color.DKGRAY);
    /*colorArray = new int[]{ ResourcesCompat.getColor(titleTextView.getResources(), enableMax ? R.color.colorPrimaryAlpha : android.R.color.transparent, null),
            ResourcesCompat.getColor(titleTextView.getResources(), enableMin ? R.color.colorAccentAlpha : android.R.color.transparent, null),
            Color.DKGRAY, };*/
    int[] ret = new int[colArr.size()];
    int i = 0;
    for (Integer e : colArr)
        ret[i++] = e.intValue();
    chart.setColorArray(ret);
    Assert.assertTrue(dataLists.size() == colArr.size());
    // or lineView.setFloatDataList(floatDataLists)
    chart.setFloatDataList(dataLists);
    chart.requestLayout();
    chart.invalidate();
}
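The chart title above is built entirely from SummaryStatistics, whose running max and min accumulators are the org.apache.commons.math3.stat.descriptive.rank.Max and Min implementations this page indexes. A minimal, self-contained sketch of that aggregation, with hypothetical hashrate samples standing in for the HomeStatsChartData values:

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class HashrateStatsSketch {
    public static void main(String[] args) {
        // Hypothetical hashrate samples in H/s; the real values come from HomeStatsChartData.
        long[] samples = { 25_000_000L, 27_500_000L, 24_100_000L, 26_800_000L };
        SummaryStatistics stats = new SummaryStatistics();
        for (long s : samples)
            stats.addValue(s);
        // getMax()/getMin() delegate to the storeless rank.Max/rank.Min statistics.
        System.out.printf("avg=%.0f max=%.0f min=%.0f stddev=%.0f%n",
                stats.getMean(), stats.getMax(), stats.getMin(), stats.getStandardDeviation());
    }
}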
use of org.apache.commons.math3.stat.descriptive.rank.Max in project MPW by shineangelic.
the class ChartUtils method drawWalletHashRateHistory.
public static void drawWalletHashRateHistory(TextView titleTextView, LineView chart, LinkedMap<Date, Wallet> dateWalletLinkedMap, GranularityEnum grane) {
    SummaryStatistics stats = new SummaryStatistics();
    ArrayList<Float> dataList = new ArrayList<>();
    ArrayList<String> labelsArr = new ArrayList<>();
    List<Date> dates = dateWalletLinkedMap.asList();
    Wallet campione = dateWalletLinkedMap.values().iterator().next();
    for (Date date2 : dates) {
        labelsArr.add(getLabelFormat(grane, date2));
        dataList.add(Utils.condenseHashRate(dateWalletLinkedMap.get(date2).getHashrate()));
        stats.addValue(dateWalletLinkedMap.get(date2).getHashrate());
    }
    titleTextView.setText("Wallet Hashrate History " + "(avg: " + Utils.formatHashrate((long) stats.getMean())
            + ", max: " + Utils.formatHashrate((long) stats.getMax())
            + ", min: " + Utils.formatHashrate((long) stats.getMin())
            + ", now: " + Utils.formatHashrate(dateWalletLinkedMap.get(dates.get(dataList.size() - 1)).getHashrate())
            + ", std dev: " + Utils.formatHashrate((long) stats.getStandardDeviation()) + ")");
    ArrayList<ArrayList<Float>> dataLists = new ArrayList<>();
    dataLists.add(dataList);
    chart.setShowPopup(LineView.SHOW_POPUPS_All);
    // optional
    chart.setDrawDotLine(false);
    chart.setBottomTextList(labelsArr);
    chart.setColorArray(new int[] { Color.DKGRAY, Color.CYAN });
    // or lineView.setFloatDataList(floatDataLists)
    chart.setFloatDataList(dataLists);
}
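Both chart methods also depend on the LinkedMap preserving insertion order, so dates.get(dataList.size() - 1) resolves to the most recent sample. A small sketch of that assumption, written against the commons-collections4 LinkedMap (the MPW sources may use a different collections version; the dates and hashrate values are made up):

import java.util.Date;
import java.util.List;
import org.apache.commons.collections4.map.LinkedMap;

public class LinkedMapOrderSketch {
    public static void main(String[] args) {
        LinkedMap<Date, Long> history = new LinkedMap<>();
        long now = System.currentTimeMillis();
        // Insert three samples oldest-first; the values are illustrative.
        history.put(new Date(now - 120_000), 24_000_000L);
        history.put(new Date(now - 60_000), 25_500_000L);
        history.put(new Date(now), 26_200_000L);
        // asList() exposes the keys in insertion order, so the last entry is the newest.
        List<Date> dates = history.asList();
        Date latest = dates.get(dates.size() - 1);
        System.out.println("latest sample: " + latest + " -> " + history.get(latest));
    }
}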
use of org.apache.commons.math3.stat.descriptive.rank.Max in project chordatlas by twak.
the class Prof method findProfileLines.
/**
 * We find an initial base offset. Then we cluster the start points of all
 * (clean) profiles. If any cluster sits a good distance from the initial base,
 * we add it as its own profile line.
 *
 * The original line is offset by the remaining data.
 */
public static List<SuperLine> findProfileLines(Collection<Prof> profiles, Line3d line) {
    List<SuperLine> out = new ArrayList<>();
    // PaintThing.debug.clear();
    SuperLine superLine = new SuperLine(line.start.x, line.start.z, line.end.x, line.end.z);
    double outLen = superLine.length();
    double min = Double.MAX_VALUE, max = -Double.MAX_VALUE;
    Cache<Prof, Double> vLength = new Cache<Prof, Double>() {
        @Override
        public Double create(Prof i) {
            return i.verticalLength(0.5);
        }
    };
    double vLen = profiles.stream().mapToDouble(p -> vLength.get(p)).sum();
    boolean useVertical = vLen / profiles.size() > 1;
    class Wrapper implements Clusterable {
        double[] pt;

        public Wrapper(Point2d pt) {
            this.pt = new double[] { pt.x, pt.y };
        }

        @Override
        public double[] getPoint() {
            return pt;
        }
    }
    List<Wrapper> toCluster = new ArrayList<>();
    List<Double> baseLineOffset = new ArrayList<>();
    for (Prof p : profiles) {
        if (useVertical && vLength.get(p) < 1) // vLen / (5*profiles.size()))
            continue;
        Prof clean = p.parameterize();
        Point2d pt = clean.get(0);
        Point3d pt3 = clean.to3d(pt);
        double ppram = superLine.findPPram(new Point2d(pt3.x, pt3.z));
        baseLineOffset.add(pt.x);
        toCluster.add(new Wrapper(new Point2d(pt.x, ppram * outLen)));
        min = Math.min(min, ppram);
        max = Math.max(max, ppram);
    }
    if (min == max || toCluster.isEmpty())
        return out;
    if (true) {
        baseLineOffset.sort(Double::compareTo);
        double modeBaselineOffset = baseLineOffset.get(baseLineOffset.size() / 2);
        DBSCANClusterer<Wrapper> cr = new DBSCANClusterer<>(1.5, 0);
        List<Cluster<Wrapper>> results = cr.cluster(toCluster);
        Iterator<Cluster<Wrapper>> cit = results.iterator();
        while (cit.hasNext()) {
            Cluster<Wrapper> cw = cit.next();
            if (cw.getPoints().size() < 2 / TweedSettings.settings.profileHSampleDist) {
                cit.remove();
                double cMeanY = cw.getPoints().stream().mapToDouble(x -> x.pt[1]).average().getAsDouble();
                double bestDist = Double.MAX_VALUE;
                Cluster<Wrapper> bestWrapper = null;
                for (Cluster<Wrapper> near : results) {
                    double meanY = near.getPoints().stream().mapToDouble(x -> x.pt[1]).average().getAsDouble();
                    double dist = Math.abs(meanY - cMeanY);
                    if (dist < bestDist) {
                        bestDist = dist;
                        bestWrapper = near;
                    }
                }
                if (bestWrapper != null)
                    bestWrapper.getPoints().addAll(cw.getPoints());
            }
        }
        {
            baseLineOffset.clear();
            int c = 0;
            for (Cluster<Wrapper> cw : results) {
                double[] minMax = cw.getPoints().stream().map(p -> new double[] { p.pt[1] }).collect(new InAxDoubleArray());
                double[] offsetA = cw.getPoints().stream().mapToDouble(p -> p.pt[0]).sorted().toArray();
                double offset = offsetA[offsetA.length / 2];
                if (offset - modeBaselineOffset < 1) {
                    for (Wrapper w : cw.getPoints())
                        baseLineOffset.add(w.pt[0]);
                    continue;
                }
                SuperLine sl = new SuperLine(superLine.fromPPram(minMax[0] / outLen), superLine.fromPPram(minMax[1] / outLen));
                sl.moveLeft(offset);
                out.add(sl);
                List<Point2d> pts = cw.getPoints().stream().map(w -> new Point2d(w.pt[0], w.pt[1])).collect(Collectors.toList());
                PaintThing.debug(Rainbow.getColour(c++), 1, pts);
            }
        }
    }
    Point2d nStart = superLine.fromPPram(min), nEnd = superLine.fromPPram(max);
    superLine.start = nStart;
    superLine.end = nEnd;
    baseLineOffset.sort(Double::compare);
    if (!baseLineOffset.isEmpty())
        superLine.moveLeft(baseLineOffset.get(baseLineOffset.size() / 2));
    out.add(0, superLine);
    return out;
}
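The clustering step above is plain commons-math3: wrap each 2D start point in a Clusterable and hand the list to DBSCANClusterer. A stripped-down sketch of that pattern with made-up points, reusing the eps = 1.5, minPts = 0 parameters from findProfileLines:

import java.util.Arrays;
import java.util.List;
import org.apache.commons.math3.ml.clustering.Cluster;
import org.apache.commons.math3.ml.clustering.Clusterable;
import org.apache.commons.math3.ml.clustering.DBSCANClusterer;

public class ProfileClusterSketch {

    // Same idea as the Wrapper class above: expose a 2D point to the clusterer.
    static class Pt implements Clusterable {
        final double[] pt;
        Pt(double x, double y) { this.pt = new double[] { x, y }; }
        @Override public double[] getPoint() { return pt; }
    }

    public static void main(String[] args) {
        List<Pt> pts = Arrays.asList(
                new Pt(0.0, 0.1), new Pt(0.2, 0.0), new Pt(0.1, 0.3),   // one dense group
                new Pt(10.0, 10.2), new Pt(10.1, 9.9));                 // a second group far away

        // eps = 1.5, minPts = 0, matching the call in findProfileLines.
        DBSCANClusterer<Pt> clusterer = new DBSCANClusterer<>(1.5, 0);
        List<Cluster<Pt>> clusters = clusterer.cluster(pts);

        for (Cluster<Pt> c : clusters)
            System.out.println("cluster of " + c.getPoints().size() + " points");
    }
}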
use of org.apache.commons.math3.stat.descriptive.rank.Max in project chordatlas by twak.
the class SkelFootprint method mergeOnProfiles.
private void mergeOnProfiles(HalfMesh2 mesh, List<Line> footprint) {
    System.out.println("merging over profiles...");
    TreeSet<HalfFace> togo = new TreeSet<>((HalfFace o1, HalfFace o2) -> Double.compare(o1.area(), o2.area()));
    togo.addAll(mesh.faces);
    int count = 0;
    while (!togo.isEmpty()) {
        HalfFace f = togo.pollFirst();
        Cache<HalfEdge, MutableDouble> crossedBy = new Cach<>(e -> new MutableDouble(0));
        for (HalfEdge e : f) {
            SuperEdge se = (SuperEdge) e;
            if (se.profLine != null) {
                MegaFacade mf = ((SuperLine) se.profLine).mega;
                if (mf != null)
                    for (Prof p : mf.getTween(se.start, se.end, 0)) {
                        Line proj = new Line(Pointz.to2(p.to3d(p.get(0))), Pointz.to2(p.to3d(p.get(p.size() - 1))));
                        for (HalfEdge e2 : f) {
                            SuperEdge se2 = (SuperEdge) e2;
                            if (se2.profLine == null
                                    && (se2.over == null || ((SuperEdge) se2.over).profLine == null)
                                    && e2.over != null
                                    && e2.line().intersects(proj) != null
                                    && Mathz.inRange(e2.line().absAngle(proj), 0.25 * Math.PI, 0.75 * Math.PI)) {
                                crossedBy.get(e2).d += TweedSettings.settings.profileHSampleDist;
                            }
                        }
                    }
            }
        }
        count += crossedBy.cache.size();
        Optional<Map.Entry<HalfEdge, MutableDouble>> longestO = crossedBy.cache.entrySet().stream()
                .filter(e1 -> ((SuperEdge) e1.getKey()).profLine == null && e1.getValue().d > 0)
                .max((e1, e2) -> Double.compare(e1.getValue().d, e2.getValue().d));
        if (longestO.isPresent()) {
            Map.Entry<HalfEdge, MutableDouble> longest = longestO.get();
            if (longest.getValue().d > 0.6 * longest.getKey().length()) {
                HalfFace tgf = longest.getKey().over.face;
                togo.remove(tgf);
                longest.getKey().face.merge(mesh, tgf);
                ((SuperFace) longest.getKey().face).mergeFrom((SuperFace) tgf);
                togo.add(f);
            }
        }
    }
    System.out.println("found crossings " + count);
    killDoubleEdges(mesh);
}
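Stripped of the mesh types, the merge decision above is: accumulate a crossing weight per edge, pick the entry with the maximum weight, and merge only when that weight covers enough of the winning edge. A simplified sketch of that selection over a plain map (the edge names, lengths and weights are hypothetical):

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class LongestCrossingSketch {
    public static void main(String[] args) {
        // Hypothetical accumulated crossing lengths per edge id.
        Map<String, Double> crossedBy = new HashMap<>();
        crossedBy.put("edgeA", 0.4);
        crossedBy.put("edgeB", 2.8);
        crossedBy.put("edgeC", 1.1);

        double winningEdgeLength = 3.5; // stands in for longest.getKey().length()

        // Keep only edges that were crossed at all, then take the maximum by weight,
        // mirroring the filter(...).max(...) chain in mergeOnProfiles.
        Optional<Map.Entry<String, Double>> longest = crossedBy.entrySet().stream()
                .filter(e -> e.getValue() > 0)
                .max(Map.Entry.comparingByValue());

        // Merge only when the crossings cover more than 60% of the winning edge.
        longest.ifPresent(e -> {
            if (e.getValue() > 0.6 * winningEdgeLength)
                System.out.println("merge across " + e.getKey());
        });
    }
}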
use of org.apache.commons.math3.stat.descriptive.rank.Max in project ontrack by nemerosa.
the class JobScatteringTest method scatteringInSameType.
@Test
public void scatteringInSameType() {
    // Scheduler
    DefaultJobScheduler scheduler = new DefaultJobScheduler(NOPJobDecorator.INSTANCE, new SynchronousScheduledExecutorService(), NOPJobListener.INSTANCE, false, true, 1.0);
    // Creates a list of jobs with a weak key
    List<TestJob> jobs = TestUtils.range(1, 100).stream().map(i -> TestJob.of(String.format("%d", i))).collect(Collectors.toList());
    // Orchestration of all those jobs every 6 hours
    Collection<JobOrchestratorSupplier> jobOrchestratorSupplier = Collections.singletonList(() -> jobs.stream().map(j -> JobRegistration.of(j).everyMinutes(6 * 60)));
    // Orchestrator
    JobOrchestrator orchestrator = new JobOrchestrator(scheduler, "Orchestrator", jobOrchestratorSupplier);
    // Scheduling the orchestrator (manual mode)
    scheduler.schedule(orchestrator, Schedule.NONE);
    // Launching the orchestrator (manually)
    orchestrator.orchestrate(JobRunListener.out());
    // Getting the actual schedules of the jobs
    List<Schedule> actualSchedules = jobs.stream().map(job -> scheduler.getJobStatus(job.getKey())).filter(Optional::isPresent).map(Optional::get).map(JobStatus::getActualSchedule).collect(Collectors.toList());
    List<Long> initialPeriods = actualSchedules.stream().map(Schedule::getInitialPeriod).collect(Collectors.toList());
    initialPeriods.forEach(l -> System.out.format("--> %d%n", l));
    // Checks that all jobs have been scheduled
    assertEquals("All jobs have been scheduled", jobs.size(), initialPeriods.size());
    // Checks that the schedules are more or less different
    DescriptiveStatistics stats = new DescriptiveStatistics();
    initialPeriods.forEach(stats::addValue);
    // Gets the std deviation and max
    double standardDeviation = stats.getStandardDeviation();
    double max = stats.getMax();
    // Converts to minutes (the raw values are in ms)
    double stdDevMinutes = TimeUnit.MINUTES.convert((long) standardDeviation, TimeUnit.MILLISECONDS);
    double maxMinutes = TimeUnit.MINUTES.convert((long) max, TimeUnit.MILLISECONDS);
    // It must be >> 0
    assertTrue("Std deviation must be >> 0", stdDevMinutes > 60.0);
    System.out.println("Max = " + maxMinutes);
    assertTrue("Max is <= period", maxMinutes <= 6 * 60.0);
}
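The assertion logic reduces to feeding the initial delays into DescriptiveStatistics and checking its standard deviation and max, where getMax() is again backed by the rank.Max statistic. A minimal sketch of that check with illustrative delays (not taken from an actual run):

import java.util.concurrent.TimeUnit;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class ScatterCheckSketch {
    public static void main(String[] args) {
        // Illustrative initial delays in milliseconds, as if scattered over a 6-hour period.
        long[] initialPeriodsMs = {
                TimeUnit.MINUTES.toMillis(12),
                TimeUnit.MINUTES.toMillis(95),
                TimeUnit.MINUTES.toMillis(210),
                TimeUnit.MINUTES.toMillis(330)
        };

        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (long ms : initialPeriodsMs)
            stats.addValue(ms);

        long stdDevMinutes = TimeUnit.MILLISECONDS.toMinutes((long) stats.getStandardDeviation());
        long maxMinutes = TimeUnit.MILLISECONDS.toMinutes((long) stats.getMax());

        // Same shape as the test: scattering should be well above zero and within the period.
        System.out.println("std dev (min) = " + stdDevMinutes + ", max (min) = " + maxMinutes);
    }
}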