Use of com.infiniteautomation.mango.rest.latest.model.pointValue.DataPointValueTime in project ma-modules-public by infiniteautomation.
The class MultiDataPointStatisticsQuantizerStream, method finish: closes out every quantizer at the end of the query, fast-forwarding through any trailing empty periods when the points are combined into a single array, before delegating to super.finish(writer).
@Override
public void finish(PointValueTimeWriter writer) throws QueryCancelledException, IOException {
    if (info.isSingleArray() && voMap.size() > 1) {
        // Fast forward to end to fill any gaps at the end and stream out data in time
        BucketCalculator bc;
        if (this.info.getTimePeriod() == null) {
            bc = new BucketsBucketCalculator(ZonedDateTime.ofInstant(Instant.ofEpochMilli(lastFullPeriodToMillis), info.getZoneId()), info.getTo(), 1);
        } else {
            bc = new TimePeriodBucketCalculator(ZonedDateTime.ofInstant(Instant.ofEpochMilli(lastFullPeriodToMillis), info.getZoneId()), info.getTo(), TimePeriodType.convertFrom(this.info.getTimePeriod().getType()), this.info.getTimePeriod().getPeriods());
        }
        Instant currentPeriodTo = bc.getStartTime().toInstant();
        Instant end = bc.getEndTime().toInstant();
        while (currentPeriodTo.isBefore(end)) {
            long nextTo = currentPeriodTo.toEpochMilli();
            for (DataPointStatisticsQuantizer<?> quant : this.quantizerMap.values()) {
                quant.fastForward(nextTo);
            }
            currentPeriodTo = bc.getNextPeriodTo().toInstant();
        }
        for (DataPointStatisticsQuantizer<?> quant : this.quantizerMap.values()) {
            if (!quant.isDone()) {
                quant.done();
            }
        }
        // TODO This is likely not necessary
        Iterator<Long> it = this.periodStats.keySet().iterator();
        while (it.hasNext()) {
            List<DataPointValueTime> entries = this.periodStats.get(it.next());
            writePeriodStats(entries);
            it.remove();
        }
    } else {
        // The last data point may not have been done() as well as any with 0 data
        if (currentDataPointId != -1) {
            DataPointStatisticsQuantizer<?> quant = this.quantizerMap.get(currentDataPointId);
            if (!quant.isDone()) {
                quant.done();
            }
        }
        // For any with 0 data TODO Check for is open?
        for (DataPointStatisticsQuantizer<?> q : this.quantizerMap.values()) {
            if (!q.isDone()) {
                q.done();
            }
        }
    }
    super.finish(writer);
}
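The gap-filling loop above walks a bucket calculator from the last fully emitted period up to the query's end time, fast-forwarding every quantizer so trailing periods with no samples are still produced. Below is a minimal, self-contained sketch of that iteration pattern using only java.time; the period length, start, and end values are hypothetical stand-ins for what the BucketCalculator derives from the query, not the project's API.

import java.time.Duration;
import java.time.Instant;

public class TrailingPeriodSketch {
    public static void main(String[] args) {
        // Hypothetical stand-ins for lastFullPeriodToMillis and info.getTo()
        Instant lastFullPeriodTo = Instant.parse("2023-01-01T10:00:00Z");
        Instant queryEnd = Instant.parse("2023-01-01T10:05:00Z");
        Duration period = Duration.ofMinutes(1); // one rollup period

        Instant currentPeriodTo = lastFullPeriodTo;
        while (currentPeriodTo.isBefore(queryEnd)) {
            long nextTo = currentPeriodTo.toEpochMilli();
            // In the real stream each quantizer is fast-forwarded to nextTo here,
            // which emits an empty statistics period if it saw no samples.
            System.out.println("fast forward to " + nextTo);
            currentPeriodTo = currentPeriodTo.plus(period);
        }
    }
}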
Use of com.infiniteautomation.mango.rest.latest.model.pointValue.DataPointValueTime in project ma-modules-public by infiniteautomation.
The class MultiDataPointDefaultRollupStatisticsQuantizerStream, method streamData: runs the wide bookend query, finishes the quantizers, then writes the processed values either as one time-ordered array or as one array per point.
@Override
public void streamData(PointValueTimeWriter writer) throws IOException, QueryCancelledException {
    if (!useSimplify) {
        super.streamData(writer);
        return;
    }
    createQuantizerMap();
    Collection<? extends DataPointVO> vos = new ArrayList<>(voMap.values());
    if (info.isSingleArray()) {
        dao.wideBookendQueryCombined(vos, info.getFromMillis(), info.getToMillis(), null, this);
    } else {
        dao.wideBookendQueryPerPoint(vos, info.getFromMillis(), info.getToMillis(), null, this);
    }
    // Fast forward to end to fill any gaps at the end
    for (DataPointStatisticsQuantizer<?> quant : this.quantizerMap.values()) {
        if (!quant.isDone()) {
            quant.done();
        }
    }
    boolean singleArray = info.isSingleArray() && voMap.size() > 1;
    // Process the data into lists per data point, limit and simplify if necessary
    Map<DataPointVO, List<DataPointValueTime>> processed = process(singleArray ? null : info.getLimit());
    if (singleArray) {
        // Combine into a single array
        List<DataPointValueTime> values = new ArrayList<>();
        Iterator<DataPointVO> it = processed.keySet().iterator();
        while (it.hasNext()) {
            values.addAll(processed.get(it.next()));
        }
        // Sort by time
        Collections.sort(values);
        // Limit the entire list
        if (info.getLimit() != null) {
            values = values.subList(0, info.getLimit());
        }
        // Reset the current time and write out
        if (values.size() > 0) {
            long currentTime = values.get(0).getTime();
            List<DataPointValueTime> currentValues = new ArrayList<>();
            for (DataPointValueTime value : values) {
                if (currentTime == value.getTime()) {
                    currentValues.add(value);
                } else {
                    if (currentValues.size() > 0) {
                        writer.writeDataPointValues(currentValues, currentValues.get(0).getTime());
                        currentValues.clear();
                    }
                    currentTime = value.getTime();
                    currentValues.add(value);
                }
            }
            // Finish the current values
            if (currentValues.size() > 0) {
                writer.writeDataPointValues(currentValues, currentValues.get(0).getTime());
            }
        }
    } else {
        Iterator<DataPointVO> it = processed.keySet().iterator();
        while (it.hasNext()) {
            DataPointVO key = it.next();
            List<DataPointValueTime> values = processed.get(key);
            if (!info.isSingleArray()) {
                this.writer.writeStartArray(key.getXid());
            }
            for (DataPointValueTime value : values) {
                writer.writeDataPointValue(value);
                count++;
            }
            if (!info.isSingleArray()) {
                writer.writeEndArray();
            }
        }
    }
}
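When the points are combined into a single array, the writer loop above batches consecutive values that share a timestamp and flushes each batch as one row. A minimal sketch of that grouping pattern follows; the Sample record and the list contents are hypothetical stand-ins for the sorted DataPointValueTime list, and printing stands in for the writer call.

import java.util.ArrayList;
import java.util.List;

public class GroupByTimestampSketch {
    // Hypothetical stand-in for DataPointValueTime: a value with a timestamp
    record Sample(long time, double value) { }

    public static void main(String[] args) {
        // Already sorted by time, as the stream sorts before writing
        List<Sample> values = List.of(
                new Sample(1000, 1.0), new Sample(1000, 2.0),
                new Sample(2000, 3.0), new Sample(3000, 4.0));

        long currentTime = values.get(0).time();
        List<Sample> currentValues = new ArrayList<>();
        for (Sample value : values) {
            if (currentTime != value.time()) {
                // Timestamp changed: flush the batch collected so far
                System.out.println(currentTime + " -> " + currentValues);
                currentValues.clear();
                currentTime = value.time();
            }
            currentValues.add(value);
        }
        // Flush the final batch
        if (!currentValues.isEmpty()) {
            System.out.println(currentTime + " -> " + currentValues);
        }
    }
}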
Use of com.infiniteautomation.mango.rest.latest.model.pointValue.DataPointValueTime in project ma-modules-public by infiniteautomation.
The class MultiDataPointDefaultRollupStatisticsQuantizerStream, method process: builds the value list for each data point, applies the limit, and simplifies where the point is configured to do so.
/**
 * Process the data into lists per data point, simplify if necessary
 */
private Map<DataPointVO, List<DataPointValueTime>> process(Integer limit) {
    Map<DataPointVO, List<DataPointValueTime>> processed = new LinkedHashMap<>();
    for (DataPointVO vo : voMap.values()) {
        List<DataPointStatisticsGenerator> generators = valueMap.get(vo.getId());
        List<DataPointValueTime> values = new ArrayList<>();
        if (generators.get(0).getGenerator() instanceof NoStatisticsGenerator) {
            // Iterate and combine into an array
            for (DataPointStatisticsGenerator gen : generators) {
                NoStatisticsGenerator noGen = (NoStatisticsGenerator) gen.getGenerator();
                for (IValueTime value : noGen.getValues()) {
                    values.add(new DataPointVOPointValueTimeBookend(vo, (IdPointValueTime) value));
                }
            }
        } else {
            for (DataPointStatisticsGenerator generator : generators) {
                values.add(new DataPointRollupPeriodValue(generator, RollupEnum.convertTo(vo.getRollup())));
            }
        }
        if (values.size() > 0) {
            // As with the other endpoints, limit before simplification
            if (limit != null) {
                values = values.subList(0, limit);
            }
            if (vo.isSimplifyDataSets()) {
                if (vo.getSimplifyType() == DataPointVO.SimplifyTypes.TARGET) {
                    values = SimplifyUtility.simplify(null, vo.getSimplifyTarget(), true, true, values);
                } else {
                    values = SimplifyUtility.simplify(vo.getSimplifyTolerance(), null, true, true, values);
                }
            }
            processed.put(vo, values);
        }
    }
    return processed;
}
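process applies the row limit before simplification, so SimplifyUtility only ever sees at most limit values per point. The prefix limit is taken with List.subList(0, limit), which assumes the list holds at least limit entries (a guard the calling code may enforce upstream). A purely illustrative, defensive version of that prefix limit, not the project's code, is sketched below.

import java.util.List;

public final class PrefixLimitSketch {
    // Returns at most 'limit' leading entries; a null limit means "no limit".
    // Unlike a bare subList(0, limit), this cannot throw when the list is shorter.
    static <T> List<T> limitPrefix(List<T> values, Integer limit) {
        if (limit == null || values.size() <= limit) {
            return values;
        }
        return values.subList(0, limit);
    }

    public static void main(String[] args) {
        System.out.println(limitPrefix(List.of(1, 2, 3, 4, 5), 3)); // [1, 2, 3]
        System.out.println(limitPrefix(List.of(1, 2), 5));          // [1, 2]
    }
}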