Use of org.hibernate.jdbc.AbstractReturningWork in project openremote by openremote.
The example below is from the class AbstractDatapointService, method getValueDatapoints.
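For context, AbstractReturningWork is Hibernate's base class for work that runs plain JDBC on the Session's underlying Connection and returns a value from Session.doReturningWork(...). A minimal sketch of that pattern before the full example, assuming an open Session named session and a hypothetical asset_datapoint table:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.Session;
import org.hibernate.jdbc.AbstractReturningWork;

// ... inside a method with an open Session in scope:
long count = session.doReturningWork(new AbstractReturningWork<Long>() {
    @Override
    public Long execute(Connection connection) throws SQLException {
        // Plain JDBC on the connection Hibernate already holds; the value
        // returned here becomes the return value of doReturningWork
        try (PreparedStatement st = connection.prepareStatement("select count(*) from asset_datapoint");
             ResultSet rs = st.executeQuery()) {
            rs.next();
            return rs.getLong(1);
        }
    }
});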
public ValueDatapoint<?>[] getValueDatapoints(String assetId, Attribute<?> attribute, DatapointInterval datapointInterval, final Integer stepSize, LocalDateTime fromTimestamp, LocalDateTime toTimestamp) {

    AttributeRef attributeRef = new AttributeRef(assetId, attribute.getName());
    getLogger().finer("Getting datapoints for: " + attributeRef);

    return persistenceService.doReturningTransaction(entityManager ->
        entityManager.unwrap(Session.class).doReturningWork(new AbstractReturningWork<ValueDatapoint<?>[]>() {
            @Override
            public ValueDatapoint<?>[] execute(Connection connection) throws SQLException {

                Class<?> attributeType = attribute.getType().getType();
                boolean isNumber = Number.class.isAssignableFrom(attributeType);
                boolean isBoolean = Boolean.class.isAssignableFrom(attributeType);
                StringBuilder query = new StringBuilder();
                // Only numeric and boolean values can be averaged into time buckets;
                // everything else is returned as raw datapoints
                boolean downsample = isNumber || isBoolean;
                String truncate = null;
                String part = null;
                String interval = null;
                String stepStr = null;
                // Default date_part expressions; MINUTE overrides these below to
                // work with minutes of the day
                String partQuery = "date_part(?, ?)::int";
                String partQuery2 = "date_part(?, TIMESTAMP)::int";
                int step = 1;
                if (downsample) {
                    switch (datapointInterval) {
                        case MINUTE:
                            // This works with minutes of the day so not constrained to step size < 60
                            step = stepSize == null ? 1 : Math.max(1, Math.min(1440, stepSize));
                            truncate = "day";
                            part = "min";
                            interval = "min";
                            partQuery = "(date_part('hour', ?)::int * 60 + date_part(?, ?)::int)";
                            partQuery2 = "(date_part('hour', TIMESTAMP)::int * 60 + date_part(?, TIMESTAMP)::int)";
                            break;
                        case HOUR:
                            step = stepSize == null ? 1 : Math.max(1, Math.min(24, stepSize));
                            truncate = "day";
                            part = "hour";
                            interval = "hour";
                            break;
                        case DAY:
                            step = stepSize == null ? 1 : Math.max(1, Math.min(365, stepSize));
                            truncate = "year";
                            part = "doy";
                            interval = "day";
                            break;
                        case WEEK:
                            step = stepSize == null ? 1 : Math.max(1, Math.min(53, stepSize));
                            truncate = "year";
                            part = "week";
                            interval = "week";
                            break;
                        case MONTH:
                            step = stepSize == null ? 1 : Math.max(1, Math.min(12, stepSize));
                            truncate = "year";
                            part = "month";
                            interval = "month";
                            break;
                        case YEAR:
                            step = stepSize == null ? 1 : Math.max(1, stepSize);
                            truncate = "decade";
                            part = "year";
                            interval = "year";
                            break;
                        default:
                            throw new UnsupportedOperationException("Can't handle interval: " + datapointInterval);
                    }
                    stepStr = step + " " + interval;
                    // TODO: Change this to use something like this max min decimation algorithm https://knowledge.ni.com/KnowledgeArticleDetails?id=kA00Z0000019YLKSA2&l=en-GB)
                    // Generate the full series of bucket timestamps, then left join the
                    // averaged values so that empty buckets still appear (with a null Y)
                    query.append("select PERIOD as X, AVG_VALUE as Y from generate_series(" +
                        "date_trunc(?, ?) + " + partQuery + " / ? * ?, " +
                        "date_trunc(?, ?) + " + partQuery + " / ? * ?, ?) PERIOD " +
                        "left join (select (date_trunc(?, TIMESTAMP) + " + partQuery2 + " / ? * ?)::timestamp as TS, ");

                    if (isNumber) {
                        query.append(" AVG(VALUE::text::numeric) as AVG_VALUE ");
                    } else {
                        // Booleans are averaged as 0/1, giving the fraction of true samples per bucket
                        query.append(" AVG(case when VALUE::text::boolean is true then 1 else 0 end) as AVG_VALUE ");
                    }

                    query.append("from " + getDatapointTableName() +
                        " where TIMESTAMP >= date_trunc(?, ?) and TIMESTAMP < (date_trunc(?, ?) + ?)" +
                        " and ENTITY_ID = ? and ATTRIBUTE_NAME = ? group by TS) DP on DP.TS = PERIOD order by PERIOD asc");
                } else {
                    // Non-aggregatable types: return the raw datapoints in the window
                    query.append("select distinct TIMESTAMP AS X, value AS Y from " + getDatapointTableName() +
                        " where TIMESTAMP >= ? and TIMESTAMP <= ? and ENTITY_ID = ? and ATTRIBUTE_NAME = ?");
                }
                try (PreparedStatement st = connection.prepareStatement(query.toString())) {

                    if (downsample) {
                        // Bind parameters in the same order as the placeholders in the
                        // assembled SQL; for MINUTE the composite partQuery contains an
                        // extra timestamp placeholder (the hour component), so the
                        // from/to timestamps are each bound one extra time
                        int counter = 1;
                        boolean isMinute = datapointInterval == DatapointInterval.MINUTE;
                        st.setString(counter++, truncate);
                        st.setObject(counter++, fromTimestamp);
                        if (isMinute) {
                            st.setObject(counter++, fromTimestamp);
                        }
                        st.setString(counter++, part);
                        st.setObject(counter++, fromTimestamp);
                        st.setInt(counter++, step);
                        st.setObject(counter++, new PGInterval(stepStr));
                        st.setString(counter++, truncate);
                        st.setObject(counter++, toTimestamp);
                        if (isMinute) {
                            st.setObject(counter++, toTimestamp);
                        }
                        st.setString(counter++, part);
                        st.setObject(counter++, toTimestamp);
                        st.setInt(counter++, step);
                        st.setObject(counter++, new PGInterval(stepStr));
                        st.setObject(counter++, new PGInterval(stepStr));
                        st.setString(counter++, truncate);
                        st.setString(counter++, part);
                        st.setInt(counter++, step);
                        st.setObject(counter++, new PGInterval(stepStr));
                        st.setString(counter++, interval);
                        st.setObject(counter++, fromTimestamp);
                        st.setString(counter++, interval);
                        st.setObject(counter++, toTimestamp);
                        st.setObject(counter++, new PGInterval(stepStr));
                        st.setString(counter++, attributeRef.getId());
                        st.setString(counter++, attributeRef.getName());
                    } else {
                        st.setObject(1, fromTimestamp);
                        st.setObject(2, toTimestamp);
                        st.setString(3, attributeRef.getId());
                        st.setString(4, attributeRef.getName());
                    }
                    try (ResultSet rs = st.executeQuery()) {
                        List<ValueDatapoint<?>> result = new ArrayList<>();
                        while (rs.next()) {
                            Object value = null;
                            if (rs.getObject(2) != null) {
                                if (downsample) {
                                    // Averaged buckets are always numeric
                                    value = ValueUtil.getValueCoerced(rs.getObject(2), Double.class).orElse(null);
                                } else {
                                    // Raw values come back as JSON: parse PGobject column
                                    // values, otherwise coerce to a JsonNode
                                    if (rs.getObject(2) instanceof PGobject) {
                                        value = ValueUtil.parse(((PGobject) rs.getObject(2)).getValue()).orElse(null);
                                    } else {
                                        value = ValueUtil.getValueCoerced(rs.getObject(2), JsonNode.class).orElse(null);
                                    }
                                }
                            }
                            result.add(new ValueDatapoint<>(rs.getTimestamp(1).getTime(), value));
                        }
                        return result.toArray(new ValueDatapoint<?>[0]);
                    }
                }
            }
        }));
}
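A hypothetical call site, assuming a concrete subclass of AbstractDatapointService (here named assetDatapointService) and an asset and attribute in scope; the getTimestamp()/getValue() accessors on ValueDatapoint are assumptions for illustration:

// Hourly averages over the last 24 hours (names are illustrative)
LocalDateTime to = LocalDateTime.now();
LocalDateTime from = to.minusDays(1);
ValueDatapoint<?>[] points = assetDatapointService.getValueDatapoints(
    asset.getId(), attribute, DatapointInterval.HOUR, 1, from, to);
for (ValueDatapoint<?> dp : points) {
    // Each datapoint pairs a bucket timestamp (epoch millis) with the
    // averaged value, or null for buckets with no samples
    System.out.println(dp.getTimestamp() + " -> " + dp.getValue());
}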