Use of org.openremote.model.datapoint.ValueDatapoint in the openremote project (by openremote).
The class EnergyOptimisationService, method get24HAttributeValues:
protected double[] get24HAttributeValues(String assetId, Attribute<Double> attribute, double intervalSize, int intervalCount, Instant optimisationTime) {

    double[] values = new double[intervalCount];

    if (attribute == null) {
        return values;
    }

    AttributeRef ref = new AttributeRef(assetId, attribute.getName());

    if (attribute.hasMeta(MetaItemType.HAS_PREDICTED_DATA_POINTS)) {
        LocalDateTime timestamp = LocalDateTime.ofInstant(optimisationTime, ZoneId.systemDefault());
        ValueDatapoint<?>[] predictedData = assetPredictedDatapointService.getValueDatapoints(
            ref,
            DatapointInterval.MINUTE,
            (int) (intervalSize * 60),
            timestamp,
            timestamp.plus(24, HOURS).minus((long) (intervalSize * 60), ChronoUnit.MINUTES));

        if (predictedData.length != values.length) {
            LOG.warning("Returned predicted data point count does not match interval count: Ref=" + ref + ", expected=" + values.length + ", actual=" + predictedData.length);
        } else {
            IntStream.range(0, predictedData.length).forEach(i -> {
                if (predictedData[i].getValue() != null) {
                    values[i] = (double) (Object) predictedData[i].getValue();
                } else {
                    // Fill gaps by averaging the nearest non-null values before and after this
                    // interval - this fixes issues with resolution differences between stored
                    // predicted data and the optimisation interval
                    Double previous = null;
                    Double next = null;
                    int j = i - 1;
                    while (previous == null && j >= 0) {
                        previous = (Double) predictedData[j].getValue();
                        j--;
                    }
                    j = i + 1;
                    while (next == null && j < predictedData.length) {
                        next = (Double) predictedData[j].getValue();
                        j++;
                    }
                    // If only one side has a value, use it for both
                    if (next == null) {
                        next = previous;
                    }
                    if (previous == null) {
                        previous = next;
                    }
                    if (next != null) {
                        values[i] = (previous + next) / 2;
                    }
                }
            });
        }
    }

    // The first interval always reflects the attribute's current value
    values[0] = attribute.getValue().orElse(0d);
    return values;
}
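
The gap-filling step above is easiest to follow in isolation. Below is a minimal, self-contained sketch of the same nearest-neighbour averaging strategy, using a plain Double[] in place of ValueDatapoint<?>[] for brevity; the class and variable names are illustrative, not from openremote.

import java.util.Arrays;
import java.util.stream.IntStream;

public class GapFillDemo {
    public static void main(String[] args) {
        // Simulated per-interval predicted values with gaps (nulls)
        Double[] predicted = {10.0, null, null, 16.0, null};
        double[] values = new double[predicted.length];

        IntStream.range(0, predicted.length).forEach(i -> {
            if (predicted[i] != null) {
                values[i] = predicted[i];
            } else {
                // Scan outwards for the nearest non-null neighbours, then average;
                // if only one side has a value, that side is used for both
                Double previous = null;
                Double next = null;
                for (int j = i - 1; previous == null && j >= 0; j--) {
                    previous = predicted[j];
                }
                for (int j = i + 1; next == null && j < predicted.length; j++) {
                    next = predicted[j];
                }
                if (next == null) {
                    next = previous;
                }
                if (previous == null) {
                    previous = next;
                }
                if (next != null) {
                    values[i] = (previous + next) / 2;
                }
            }
        });

        // Prints: [10.0, 13.0, 13.0, 16.0, 16.0]
        System.out.println(Arrays.toString(values));
    }
}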
Use of org.openremote.model.datapoint.ValueDatapoint in the openremote project (by openremote).
The class AbstractDatapointService, method getValueDatapoints:
public ValueDatapoint<?>[] getValueDatapoints(String assetId, Attribute<?> attribute, DatapointInterval datapointInterval, final Integer stepSize, LocalDateTime fromTimestamp, LocalDateTime toTimestamp) {

    AttributeRef attributeRef = new AttributeRef(assetId, attribute.getName());

    getLogger().finer("Getting datapoints for: " + attributeRef);

    return persistenceService.doReturningTransaction(entityManager ->
        entityManager.unwrap(Session.class).doReturningWork(new AbstractReturningWork<ValueDatapoint<?>[]>() {
            @Override
            public ValueDatapoint<?>[] execute(Connection connection) throws SQLException {

                Class<?> attributeType = attribute.getType().getType();
                boolean isNumber = Number.class.isAssignableFrom(attributeType);
                boolean isBoolean = Boolean.class.isAssignableFrom(attributeType);
                StringBuilder query = new StringBuilder();

                // Only numeric and boolean values can be averaged into buckets;
                // everything else is returned as raw datapoints
                boolean downsample = isNumber || isBoolean;
                String truncate = null;
                String part = null;
                String interval = null;
                String stepStr = null;
                String partQuery = "date_part(?, ?)::int";
                String partQuery2 = "date_part(?, TIMESTAMP)::int";
                int step = 1;

                if (downsample) {

                    // Pick the date_trunc/date_part fields for the bucket expression and
                    // clamp the step size to the number of units in the parent period
                    switch (datapointInterval) {
                        case MINUTE:
                            // This works with minutes of the day so not constrained to step size < 60
                            step = stepSize == null ? 1 : Math.max(1, Math.min(1440, stepSize));
                            truncate = "day";
                            part = "min";
                            interval = "min";
                            partQuery = "(date_part('hour', ?)::int * 60 + date_part(?, ?)::int)";
                            partQuery2 = "(date_part('hour', TIMESTAMP)::int * 60 + date_part(?, TIMESTAMP)::int)";
                            break;
                        case HOUR:
                            step = stepSize == null ? 1 : Math.max(1, Math.min(24, stepSize));
                            truncate = "day";
                            part = "hour";
                            interval = "hour";
                            break;
                        case DAY:
                            step = stepSize == null ? 1 : Math.max(1, Math.min(365, stepSize));
                            truncate = "year";
                            part = "doy";
                            interval = "day";
                            break;
                        case WEEK:
                            step = stepSize == null ? 1 : Math.max(1, Math.min(53, stepSize));
                            truncate = "year";
                            part = "week";
                            interval = "week";
                            break;
                        case MONTH:
                            step = stepSize == null ? 1 : Math.max(1, Math.min(12, stepSize));
                            truncate = "year";
                            part = "month";
                            interval = "month";
                            break;
                        case YEAR:
                            step = stepSize == null ? 1 : Math.max(1, stepSize);
                            truncate = "decade";
                            part = "year";
                            interval = "year";
                            break;
                        default:
                            throw new UnsupportedOperationException("Can't handle interval: " + datapointInterval);
                    }

                    stepStr = step + " " + interval;

                    // TODO: Change this to use something like the max/min decimation algorithm at
                    // https://knowledge.ni.com/KnowledgeArticleDetails?id=kA00Z0000019YLKSA2&l=en-GB

                    // generate_series produces one row per bucket over the requested range; the
                    // left join fills each bucket with the average of its raw datapoints
                    query.append("select PERIOD as X, AVG_VALUE as Y " +
                        "from generate_series(date_trunc(?, ?) + " + partQuery + " / ? * ?, date_trunc(?, ?) + " + partQuery + " / ? * ?, ?) PERIOD " +
                        "left join ( " +
                        "select (date_trunc(?, TIMESTAMP) + " + partQuery2 + " / ? * ?)::timestamp as TS, ");

                    if (isNumber) {
                        query.append(" AVG(VALUE::text::numeric) as AVG_VALUE ");
                    } else {
                        // Booleans are averaged as 1/0 so each bucket holds the fraction of true values
                        query.append(" AVG(case when VALUE::text::boolean is true then 1 else 0 end) as AVG_VALUE ");
                    }

                    query.append("from " + getDatapointTableName() +
                        " where TIMESTAMP >= date_trunc(?, ?) and TIMESTAMP < (date_trunc(?, ?) + ?) and ENTITY_ID = ? and ATTRIBUTE_NAME = ? " +
                        "group by TS) DP on DP.TS = PERIOD order by PERIOD asc");
                } else {
                    query.append("select distinct TIMESTAMP AS X, value AS Y from " + getDatapointTableName() +
                        " where TIMESTAMP >= ? and TIMESTAMP <= ? and ENTITY_ID = ? and ATTRIBUTE_NAME = ?");
                }

                try (PreparedStatement st = connection.prepareStatement(query.toString())) {

                    if (downsample) {
                        int counter = 1;
                        // The MINUTE variant of partQuery contains an extra timestamp placeholder
                        // (for the 'hour' part), so the from/to timestamps are bound once more
                        boolean isMinute = datapointInterval == DatapointInterval.MINUTE;

                        // generate_series start: date_trunc(truncate, from) + part(from) / step * step-interval
                        st.setString(counter++, truncate);
                        st.setObject(counter++, fromTimestamp);
                        if (isMinute) {
                            st.setObject(counter++, fromTimestamp);
                        }
                        st.setString(counter++, part);
                        st.setObject(counter++, fromTimestamp);
                        st.setInt(counter++, step);
                        st.setObject(counter++, new PGInterval(stepStr));

                        // generate_series end: the same expression evaluated at the 'to' timestamp
                        st.setString(counter++, truncate);
                        st.setObject(counter++, toTimestamp);
                        if (isMinute) {
                            st.setObject(counter++, toTimestamp);
                        }
                        st.setString(counter++, part);
                        st.setObject(counter++, toTimestamp);
                        st.setInt(counter++, step);
                        st.setObject(counter++, new PGInterval(stepStr));

                        // generate_series step
                        st.setObject(counter++, new PGInterval(stepStr));

                        // Bucketing expression in the sub-select
                        st.setString(counter++, truncate);
                        st.setString(counter++, part);
                        st.setInt(counter++, step);
                        st.setObject(counter++, new PGInterval(stepStr));

                        // Sub-select where clause: time range plus attribute identity
                        st.setString(counter++, interval);
                        st.setObject(counter++, fromTimestamp);
                        st.setString(counter++, interval);
                        st.setObject(counter++, toTimestamp);
                        st.setObject(counter++, new PGInterval(stepStr));
                        st.setString(counter++, attributeRef.getId());
                        st.setString(counter++, attributeRef.getName());
                    } else {
                        st.setObject(1, fromTimestamp);
                        st.setObject(2, toTimestamp);
                        st.setString(3, attributeRef.getId());
                        st.setString(4, attributeRef.getName());
                    }

                    try (ResultSet rs = st.executeQuery()) {
                        List<ValueDatapoint<?>> result = new ArrayList<>();
                        while (rs.next()) {
                            Object value = null;
                            if (rs.getObject(2) != null) {
                                if (downsample) {
                                    value = ValueUtil.getValueCoerced(rs.getObject(2), Double.class).orElse(null);
                                } else if (rs.getObject(2) instanceof PGobject) {
                                    value = ValueUtil.parse(((PGobject) rs.getObject(2)).getValue()).orElse(null);
                                } else {
                                    value = ValueUtil.getValueCoerced(rs.getObject(2), JsonNode.class).orElse(null);
                                }
                            }
                            result.add(new ValueDatapoint<>(rs.getTimestamp(1).getTime(), value));
                        }
                        return result.toArray(new ValueDatapoint<?>[0]);
                    }
                }
            }
        }));
}
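
As a rough usage sketch (not code from the openremote repository), the returned ValueDatapoint<?>[] can be consumed as below. It assumes ValueDatapoint exposes getTimestamp()/getValue() accessors matching the constructor arguments used above; the class and variable names are illustrative.

import java.time.Instant;
import org.openremote.model.datapoint.ValueDatapoint;

public class DatapointPrintDemo {
    public static void main(String[] args) {
        // Build datapoints the same way getValueDatapoints does: epoch millis plus
        // a possibly-null value (a bucket with no raw datapoints yields null)
        long now = System.currentTimeMillis();
        ValueDatapoint<?>[] datapoints = {
            new ValueDatapoint<>(now, 42.5),
            new ValueDatapoint<>(now + 3_600_000L, null)
        };

        for (ValueDatapoint<?> dp : datapoints) {
            System.out.println(Instant.ofEpochMilli(dp.getTimestamp()) + " -> " + dp.getValue());
        }
    }
}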