use of com.serotonin.m2m2.db.dao.BatchPointValue in project ma-core-public by infiniteautomation.
the class NumericPointValueDaoTestHelper method before.
/**
 * Insert some test data.
 * Call before every test.
 */
public void before() {
    List<BatchPointValue<PointValueTime>> values = new ArrayList<>();

    // Start back 30 days
    endTs = System.currentTimeMillis();
    startTs = endTs - (30L * 24L * 60L * 60L * 1000L);

    // Insert a few samples for series 2 before our time
    series2StartTs = startTs - (1000 * 60 * 15);
    long time = series2StartTs;
    PointValueTime p2vt = new PointValueTime(-3.0, time);
    values.add(new BatchPointValueImpl<PointValueTime>(vo2, p2vt));
    time = startTs - (1000 * 60 * 10);
    p2vt = new PointValueTime(-2.0, time);
    values.add(new BatchPointValueImpl<PointValueTime>(vo2, p2vt));
    time = startTs - (1000 * 60 * 5);
    p2vt = new PointValueTime(-1.0, time);
    values.add(new BatchPointValueImpl<PointValueTime>(vo2, p2vt));
    time = startTs;

    // Insert a sample every 5 minutes
    double value = 0.0;
    while (time < endTs) {
        PointValueTime pvt = new PointValueTime(value, time);
        values.add(new BatchPointValueImpl<PointValueTime>(vo1, pvt));
        values.add(new BatchPointValueImpl<PointValueTime>(vo2, pvt));
        time = time + 1000 * 60 * 5;
        totalSampleCount++;
        value++;
    }

    // Add a few more samples for series 2 after our time
    p2vt = new PointValueTime(value++, time);
    values.add(new BatchPointValueImpl<PointValueTime>(vo2, p2vt));
    time = time + (1000 * 60 * 5);
    p2vt = new PointValueTime(value++, time);
    values.add(new BatchPointValueImpl<PointValueTime>(vo2, p2vt));
    time = time + (1000 * 60 * 5);
    p2vt = new PointValueTime(value, time);
    values.add(new BatchPointValueImpl<PointValueTime>(vo2, p2vt));
    this.series2EndTs = time;

    dao.savePointValues(values.stream().peek(v -> {
        data.computeIfAbsent(v.getPoint().getSeriesId(), k -> new ArrayList<>()).add(v.getValue());
    }), 10000);
}
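For reference, a minimal sketch of the same write path in isolation: each sample is wrapped in a BatchPointValueImpl and the whole stream is handed to savePointValues together with a chunk size. The helper method below is illustrative only (it is not part of the project), it assumes dao is a PointValueDao, and imports are omitted to match the snippets on this page.

// Illustrative sketch (not from the project): write a single numeric sample
// through the same API surface used by the helper above.
public void insertSingleSample(PointValueDao dao, DataPointVO vo, double value, long timestamp) {
    PointValueTime pvt = new PointValueTime(value, timestamp);
    BatchPointValue<PointValueTime> batchValue = new BatchPointValueImpl<>(vo, pvt);
    // A chunk size of 1 is enough for one value; the helper above passes 10000
    // because it inserts tens of thousands of samples in a single call.
    dao.savePointValues(Stream.of(batchValue), 1);
}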
use of com.serotonin.m2m2.db.dao.BatchPointValue in project ma-core-public by infiniteautomation.
the class StatisticsAggregatorTest method aggregateWithInitialValue.
@Test
public void aggregateWithInitialValue() {
    PointValueTime initialValue = new PointValueTime(1.0D, from.minusHours(1L).toInstant().toEpochMilli());
    PointValueGenerator generator = new ConstantPointValueGenerator(from.toInstant(), to.toInstant(), pollPeriod, new NumericValue(0.0D));
    var stream = generator.apply(new DataPointVO()).map(BatchPointValue::getValue);
    BucketCalculator bucketCalc = new TemporalAmountBucketCalculator(from, to, aggregatePeriod);
    List<NumericAggregate> aggregates = StatisticsAggregator
            .aggregate(Stream.concat(Stream.of(initialValue), stream), new AnalogStatisticsQuantizer(bucketCalc))
            .collect(Collectors.toList());
    Assert.assertEquals(expectedAggregateValues, aggregates.size());
    for (var aggregate : aggregates) {
        assertEquals(180L, aggregate.getCount());
        assertEquals(0.0D, aggregate.getArithmeticMean(), 0.0D);
    }
}
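The only difference from the plain aggregate test below is the value stamped one hour before the query range that is concatenated onto the front of the stream. The assertions are identical in both tests, which suggests the pre-range value only seeds the starting value of the first bucket and is not counted as a sample. A hedged sketch of the seeding pattern, where inRangeValues is a hypothetical stream of values inside the query range:

// Hedged pattern (mirrors the test above): prepend the latest value recorded before
// the query range so the first bucket has a defined starting value.
PointValueTime seed = new PointValueTime(1.0D, from.minusHours(1L).toInstant().toEpochMilli());
Stream<PointValueTime> seeded = Stream.concat(Stream.of(seed), inRangeValues);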
use of com.serotonin.m2m2.db.dao.BatchPointValue in project ma-core-public by infiniteautomation.
the class StatisticsAggregatorTest method aggregate.
@Test
public void aggregate() {
    PointValueGenerator generator = new ConstantPointValueGenerator(from.toInstant(), to.toInstant(), pollPeriod, new NumericValue(0.0D));
    var stream = generator.apply(new DataPointVO()).map(BatchPointValue::getValue);
    BucketCalculator bucketCalc = new TemporalAmountBucketCalculator(from, to, aggregatePeriod);
    List<NumericAggregate> aggregates = StatisticsAggregator
            .aggregate(stream, new AnalogStatisticsQuantizer(bucketCalc))
            .collect(Collectors.toList());
    Assert.assertEquals(expectedAggregateValues, aggregates.size());
    for (var aggregate : aggregates) {
        assertEquals(180L, aggregate.getCount());
        assertEquals(0.0D, aggregate.getArithmeticMean(), 0.0D);
    }
}
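Both tests exercise the same pipeline: a stream of PointValueTime values is quantized into fixed-size buckets and reduced to one NumericAggregate per bucket. Below is a hedged sketch of that pipeline as a standalone helper. The method itself is illustrative, the bound and period types (ZonedDateTime, Duration, TemporalAmount) are assumptions based on how the fixture fields are used above, and imports are omitted as elsewhere on this page.

// Illustrative helper (not from the project): aggregate a constant series into
// per-bucket statistics, mirroring the pipeline used by the tests above.
public List<NumericAggregate> aggregateConstantSeries(ZonedDateTime from, ZonedDateTime to,
        Duration pollPeriod, TemporalAmount aggregatePeriod) {
    // Generate a 0.0 sample every pollPeriod between from and to.
    PointValueGenerator generator =
            new ConstantPointValueGenerator(from.toInstant(), to.toInstant(), pollPeriod, new NumericValue(0.0D));
    Stream<PointValueTime> values = generator.apply(new DataPointVO()).map(BatchPointValue::getValue);
    // Split [from, to) into buckets of aggregatePeriod and compute analog statistics per bucket.
    BucketCalculator bucketCalculator = new TemporalAmountBucketCalculator(from, to, aggregatePeriod);
    return StatisticsAggregator.aggregate(values, new AnalogStatisticsQuantizer(bucketCalculator))
            .collect(Collectors.toList());
}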
use of com.serotonin.m2m2.db.dao.BatchPointValue in project ma-core-public by MangoAutomation.
the class NumericPointValueDaoTestHelper method before.
/**
 * Insert some test data.
 * Call before every test.
 */
public void before() {
    List<BatchPointValue> values = new ArrayList<>();

    // Start back 30 days
    endTs = System.currentTimeMillis();
    startTs = endTs - (30L * 24L * 60L * 60L * 1000L);

    // Insert a few samples for series 2 before our time
    series2StartTs = startTs - (1000 * 60 * 15);
    long time = series2StartTs;
    PointValueTime p2vt = new PointValueTime(-3.0, time);
    values.add(new BatchPointValueImpl(vo2, p2vt));
    time = startTs - (1000 * 60 * 10);
    p2vt = new PointValueTime(-2.0, time);
    values.add(new BatchPointValueImpl(vo2, p2vt));
    time = startTs - (1000 * 60 * 5);
    p2vt = new PointValueTime(-1.0, time);
    values.add(new BatchPointValueImpl(vo2, p2vt));
    time = startTs;

    // Insert a sample every 5 minutes
    double value = 0.0;
    while (time < endTs) {
        PointValueTime pvt = new PointValueTime(value, time);
        values.add(new BatchPointValueImpl(vo1, pvt));
        values.add(new BatchPointValueImpl(vo2, pvt));
        time = time + 1000 * 60 * 5;
        totalSampleCount++;
        value++;
    }

    // Add a few more samples for series 2 after our time
    p2vt = new PointValueTime(value++, time);
    values.add(new BatchPointValueImpl(vo2, p2vt));
    time = time + (1000 * 60 * 5);
    p2vt = new PointValueTime(value++, time);
    values.add(new BatchPointValueImpl(vo2, p2vt));
    time = time + (1000 * 60 * 5);
    p2vt = new PointValueTime(value, time);
    values.add(new BatchPointValueImpl(vo2, p2vt));
    this.series2EndTs = time;

    dao.savePointValues(values.stream().peek(v -> {
        data.computeIfAbsent(v.getVo().getSeriesId(), k -> new ArrayList<>()).add(v.getPointValue());
    }), 10000);
}
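As a rough sanity check on the data set built by both versions of this helper (illustrative arithmetic, not code from the project): the main loop inserts one sample every 5 minutes across exactly 30 days for both series, while series 2 additionally receives three samples before startTs and three after endTs.

// Expected sample counts for the data inserted above (illustrative only).
long samplesPerSeries = (30L * 24L * 60L) / 5L; // 8640 in-range samples, one every 5 minutes for 30 days
long series1Total = samplesPerSeries;           // vo1 only receives the in-range samples
long series2Total = samplesPerSeries + 3 + 3;   // vo2 also gets 3 samples before startTs and 3 after endTs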