Use of org.openremote.model.attribute.AttributeRef in project openremote by openremote.
The class SimulatorProtocol, method doLinkAttribute:
@Override
protected void doLinkAttribute(String assetId, Attribute<?> attribute, SimulatorAgentLink agentLink) {
    // Only attributes whose agent link carries replay data need any setup here.
    agentLink.getReplayData().ifPresent(replayDatapoints -> {
        LOG.info("Simulator replay data found for linked attribute: " + attribute);
        AttributeRef ref = new AttributeRef(assetId, attribute.getName());
        ScheduledFuture<?> replayFuture = scheduleReplay(ref, replayDatapoints);
        if (replayFuture == null) {
            LOG.warning("Failed to schedule replay update value for simulator replay attribute: " + attribute);
        }
        // Record the entry even when scheduling failed (null future), so the
        // attribute is still tracked in the replay map.
        replayMap.put(ref, replayFuture);
    });
}
Use of org.openremote.model.attribute.AttributeRef in project openremote by openremote.
The class SNMPProtocol, method doStart:
@Override
protected void doStart(Container container) throws Exception {
    // A bind host is mandatory; fail fast with a descriptive error if the agent lacks one.
    String host = agent.getBindHost().orElseThrow(() -> {
        String msg = "No SNMP bind host provided for protocol: " + this;
        LOG.info(msg);
        return new IllegalArgumentException(msg);
    });
    // Defaults: standard SNMP trap port (162) and version 2c.
    Integer port = agent.getBindPort().orElse(162);
    SNMPAgent.SNMPVersion version = agent.getSNMPVersion().orElse(SNMPAgent.SNMPVersion.V2c);
    String uri = String.format("snmp:%s:%d?protocol=udp&type=TRAP&snmpVersion=%d", host, port, version.getVersion());

    messageBrokerContext.addRoutes(new RouteBuilder() {
        @Override
        public void configure() {
            from(uri)
                .routeId(getProtocolName() + getAgent().getId())
                .process(exchange -> {
                    SnmpMessage message = exchange.getIn(SnmpMessage.class);
                    LOG.fine(String.format("Message received: %s", message));
                    PDU pdu = message.getSnmpMessage();

                    // An attribute linked with the "*" OID receives the whole trap
                    // as a single JSON object of OID -> value-string pairs.
                    AttributeRef wildcardRef = oidMap.get("*");
                    if (wildcardRef != null) {
                        ObjectNode trapJson = ValueUtil.createJsonObject();
                        pdu.getVariableBindings().forEach(binding ->
                            trapJson.put(binding.getOid().format(), binding.toValueString()));
                        updateLinkedAttribute(new AttributeState(wildcardRef, trapJson));
                    }

                    // Each individually mapped OID updates its own linked attribute.
                    pdu.getVariableBindings().forEach(binding -> {
                        AttributeRef ref = oidMap.get(binding.getOid().format());
                        if (ref != null) {
                            updateLinkedAttribute(new AttributeState(ref, binding.toValueString()));
                        }
                    });
                });
        }
    });

    setConnectionStatus(ConnectionStatus.CONNECTED);
}
Use of org.openremote.model.attribute.AttributeRef in project openremote by openremote.
The class SimulatorService, method getSimulatorState:
/**
 * Builds a snapshot of all attributes linked to the given simulator protocol
 * instance, resolved to user-friendly asset names (for frontend usage).
 */
protected SimulatorState getSimulatorState(SimulatorProtocol protocolInstance) {
    return withLockReturning(protocolInstance.getProtocolInstanceUri() + "::getSimulatorInfo", () -> {
        LOG.info("Getting simulator info for protocol instance: " + protocolInstance);

        // Resolve asset IDs to names - the frontend shows names, not identifiers.
        List<String> assetIds = protocolInstance.getLinkedAttributes().keySet().stream()
            .map(AttributeRef::getId)
            .distinct()
            .collect(Collectors.toList());
        List<String> assetNames = assetStorageService.findNames(assetIds.toArray(new String[0]));

        if (assetNames.size() != assetIds.size()) {
            LOG.warning("Retrieved asset names don't match requested asset IDs");
            return null;
        }

        SimulatorAttributeInfo[] attributeInfos = protocolInstance.getLinkedAttributes().entrySet().stream()
            .map(entry -> {
                AttributeRef ref = entry.getKey();
                // NOTE(review): assumes findNames returns names in request order - confirm.
                String assetName = assetNames.get(assetIds.indexOf(ref.getId()));
                boolean replaying = protocolInstance.getReplayMap().containsKey(ref);
                return new SimulatorAttributeInfo(assetName, ref.getId(), entry.getValue(), replaying);
            })
            .toArray(SimulatorAttributeInfo[]::new);

        return new SimulatorState(protocolInstance.getAgent().getId(), attributeInfos);
    });
}
Use of org.openremote.model.attribute.AttributeRef in project openremote by openremote.
The class AssetDatapointService, method purgeDataPoints:
/**
 * Daily maintenance task that deletes aged {@code AssetDatapoint} rows.
 * Attributes carrying a {@code DATA_POINTS_MAX_AGE_DAYS} meta item are purged
 * against their own configured max age; all other attributes fall back to the
 * service-wide {@code maxDatapointAgeDays} default. Any failure is logged and
 * does not abort the task.
 */
protected void purgeDataPoints() {
LOG.info("Starting data points purge daily task");
try {
// Get list of attributes that have custom durations
List<Asset<?>> assets = assetStorageService.findAll(new AssetQuery().attributes(new AttributePredicate().meta(new NameValuePredicate(MetaItemType.DATA_POINTS_MAX_AGE_DAYS, null))));
// Flatten to (assetId, attribute) pairs for the attributes that actually carry the meta item
List<Pair<String, Attribute<?>>> attributes = assets.stream().map(asset -> asset.getAttributes().stream().filter(assetAttribute -> assetAttribute.hasMeta(MetaItemType.DATA_POINTS_MAX_AGE_DAYS)).map(assetAttribute -> new Pair<String, Attribute<?>>(asset.getId(), assetAttribute)).collect(toList())).flatMap(List::stream).collect(toList());
// Purge data points not in the above list using default duration
LOG.fine("Purging data points of attributes that use default max age days of " + maxDatapointAgeDays);
// buildWhereClause(..., true) excludes the custom-age attributes from this default-age delete
persistenceService.doTransaction(em -> em.createQuery("delete from AssetDatapoint dp " + "where dp.timestamp < :dt" + buildWhereClause(attributes, true)).setParameter("dt", Date.from(timerService.getNow().truncatedTo(DAYS).minus(maxDatapointAgeDays, DAYS))).executeUpdate());
if (!attributes.isEmpty()) {
// Purge data points that have specific age constraints
// Group attributes by their configured max age so one delete runs per distinct age value
Map<Integer, List<Pair<String, Attribute<?>>>> ageAttributeRefMap = attributes.stream().collect(groupingBy(attributeRef -> attributeRef.value.getMetaValue(MetaItemType.DATA_POINTS_MAX_AGE_DAYS).orElse(maxDatapointAgeDays)));
ageAttributeRefMap.forEach((age, attrs) -> {
LOG.fine("Purging data points of " + attrs.size() + " attributes that use a max age of " + age);
try {
// buildWhereClause(..., false) restricts this delete to exactly these attributes
persistenceService.doTransaction(em -> em.createQuery("delete from AssetDatapoint dp " + "where dp.timestamp < :dt" + buildWhereClause(attrs, false)).setParameter("dt", Date.from(timerService.getNow().truncatedTo(DAYS).minus(age, DAYS))).executeUpdate());
} catch (Exception e) {
// One failing age group must not stop the remaining groups from being purged
LOG.log(Level.SEVERE, "An error occurred whilst deleting data points, this should not happen", e);
}
});
}
} catch (Exception e) {
LOG.log(Level.WARNING, "Failed to run data points purge", e);
}
LOG.info("Finished data points purge daily task");
}
Use of org.openremote.model.attribute.AttributeRef in project openremote by openremote.
The class EnergyOptimisationService, method get24HAttributeValues:
/**
 * Returns one value per optimisation interval covering the next 24h for the
 * given attribute. Intervals are filled from predicted data points when the
 * attribute has {@code HAS_PREDICTED_DATA_POINTS} meta; interval 0 is always
 * the attribute's current actual value (or 0 when absent). A null attribute
 * yields an all-zero array.
 */
protected double[] get24HAttributeValues(String assetId, Attribute<Double> attribute, double intervalSize, int intervalCount, Instant optimisationTime) {
    double[] values = new double[intervalCount];
    if (attribute == null) {
        return values;
    }

    AttributeRef ref = new AttributeRef(assetId, attribute.getName());

    if (attribute.hasMeta(MetaItemType.HAS_PREDICTED_DATA_POINTS)) {
        LocalDateTime windowStart = LocalDateTime.ofInstant(optimisationTime, ZoneId.systemDefault());
        LocalDateTime windowEnd = windowStart.plus(24, HOURS).minus((long) (intervalSize * 60), ChronoUnit.MINUTES);
        ValueDatapoint<?>[] predicted = assetPredictedDatapointService.getValueDatapoints(ref, DatapointInterval.MINUTE, (int) (intervalSize * 60), windowStart, windowEnd);

        if (predicted.length != values.length) {
            LOG.warning("Returned predicted data point count does not match interval count: Ref=" + ref + ", expected=" + values.length + ", actual=" + predicted.length);
        } else {
            for (int i = 0; i < predicted.length; i++) {
                Object raw = predicted[i].getValue();
                if (raw != null) {
                    values[i] = (double) raw;
                    continue;
                }
                // Gap in the predicted data: average the nearest non-null neighbour on
                // each side. This smooths over resolution differences between stored
                // predicted data and the optimisation interval. If only one side has a
                // value it is used alone; if neither does, the interval stays 0.
                Double before = null;
                for (int j = i - 1; j >= 0 && before == null; j--) {
                    before = (Double) predicted[j].getValue();
                }
                Double after = null;
                for (int j = i + 1; j < predicted.length && after == null; j++) {
                    after = (Double) predicted[j].getValue();
                }
                if (after == null) {
                    after = before;
                }
                if (before == null) {
                    before = after;
                }
                if (after != null) {
                    values[i] = (before + after) / 2;
                }
            }
        }
    }

    // The first interval always reflects the attribute's live value, overriding
    // any predicted value for that slot.
    values[0] = attribute.getValue().orElse(0d);
    return values;
}
Aggregations