Use of org.openremote.model.attribute.Attribute in project openremote by openremote.
The class ZWaveProtocol, method doLinkAttribute.
@Override
protected synchronized void doLinkAttribute(String assetId, Attribute<?> attribute, ZWaveAgentLink agentLink) {
    if (network == null) {
        return;
    }
    int nodeId = agentLink.getDeviceNodeId().orElse(0);
    int endpoint = agentLink.getDeviceEndpoint().orElse(0);
    String linkName = agentLink.getDeviceValue().orElse("");
    AttributeRef attributeRef = new AttributeRef(assetId, attribute.getName());
    Class<?> clazz = (attribute == null ? null : attribute.getType().getType());
    Consumer<Value> sensorValueConsumer = value ->
        updateLinkedAttribute(new AttributeState(attributeRef, toAttributeValue(value, clazz)));
    sensorValueConsumerMap.put(attributeRef, sensorValueConsumer);
    network.addSensorValueConsumer(nodeId, endpoint, linkName, sensorValueConsumer);
}
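For completeness, the matching unlink step would remove the consumer registered above so that sensor updates stop flowing to the unlinked attribute. A minimal sketch follows; the removeSensorValueConsumer call is an assumption about the ZWaveNetwork API and is not taken from the snippet above.

@Override
protected synchronized void doUnlinkAttribute(String assetId, Attribute<?> attribute, ZWaveAgentLink agentLink) {
    if (network == null) {
        return;
    }
    AttributeRef attributeRef = new AttributeRef(assetId, attribute.getName());
    // Remove the consumer stored by doLinkAttribute; removeSensorValueConsumer is assumed here.
    Consumer<Value> sensorValueConsumer = sensorValueConsumerMap.remove(attributeRef);
    if (sensorValueConsumer != null) {
        network.removeSensorValueConsumer(sensorValueConsumer);
    }
}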
Use of org.openremote.model.attribute.Attribute in project openremote by openremote.
The class AssetStorageService, method publishModificationEvents.
protected void publishModificationEvents(PersistenceEvent<Asset<?>> persistenceEvent) {
    Asset<?> asset = persistenceEvent.getEntity();
    switch (persistenceEvent.getCause()) {
        case CREATE:
            // Fully load the asset
            Asset<?> loadedAsset = find(new AssetQuery().ids(asset.getId()));
            if (loadedAsset == null) {
                return;
            }
            if (LOG.isLoggable(Level.FINER)) {
                LOG.finer("Asset created: " + loadedAsset.toStringAll());
            } else {
                LOG.fine("Asset created: " + loadedAsset);
            }
            clientEventService.publishEvent(new AssetEvent(AssetEvent.Cause.CREATE, loadedAsset, null));
            break;
        case UPDATE:
            String[] updatedProperties = persistenceEvent.getPropertyNames();
            boolean attributesChanged = Arrays.asList(updatedProperties).contains("attributes");
            // String[] updatedProperties = Arrays.stream(persistenceEvent.getPropertyNames()).filter(propertyName -> {
            //     Object oldValue = persistenceEvent.getPreviousState(propertyName);
            //     Object newValue = persistenceEvent.getCurrentState(propertyName);
            //     return !Objects.deepEquals(oldValue, newValue);
            // }).toArray(String[]::new);
            // Fully load the asset
            loadedAsset = find(new AssetQuery().ids(asset.getId()));
            if (loadedAsset == null) {
                return;
            }
            LOG.finer("Asset updated: " + persistenceEvent);
            clientEventService.publishEvent(new AssetEvent(AssetEvent.Cause.UPDATE, loadedAsset, updatedProperties));
            // Did any attributes change? If so, raise attribute events on the event bus
            if (attributesChanged) {
                AttributeMap oldAttributes = persistenceEvent.getPreviousState("attributes");
                AttributeMap newAttributes = persistenceEvent.getCurrentState("attributes");
                // Get removed attributes and raise an attribute event with the deleted flag in the attribute state
                oldAttributes.stream()
                    .filter(oldAttribute -> newAttributes.stream()
                        .noneMatch(newAttribute -> oldAttribute.getName().equals(newAttribute.getName())))
                    .forEach(obsoleteAttribute -> clientEventService.publishEvent(
                        AttributeEvent.deletedAttribute(asset.getId(), obsoleteAttribute.getName())));
                // Get new or modified attributes
                getAddedOrModifiedAttributes(oldAttributes.values(), newAttributes.values())
                    .forEach(newOrModifiedAttribute -> publishAttributeEvent(asset, newOrModifiedAttribute));
            }
            break;
        case DELETE:
            if (LOG.isLoggable(Level.FINER)) {
                LOG.finer("Asset deleted: " + asset.toStringAll());
            } else {
                LOG.fine("Asset deleted: " + asset);
            }
            clientEventService.publishEvent(new AssetEvent(AssetEvent.Cause.DELETE, asset, null));
            // Raise an attribute event with the deleted flag for each attribute
            AttributeMap deletedAttributes = asset.getAttributes();
            deletedAttributes.forEach(obsoleteAttribute -> clientEventService.publishEvent(
                AttributeEvent.deletedAttribute(asset.getId(), obsoleteAttribute.getName())));
            break;
    }
}
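The getAddedOrModifiedAttributes helper used above is not shown in this snippet. Below is a minimal sketch of the kind of comparison it performs, assuming attributes are matched by name and compared by value; the real helper may also take timestamps into account.

// Illustrative sketch only: attributes that are new, or whose value differs from the old state.
static Stream<Attribute<?>> addedOrModified(Collection<Attribute<?>> oldAttributes, Collection<Attribute<?>> newAttributes) {
    return newAttributes.stream().filter(newAttribute ->
        oldAttributes.stream()
            .filter(oldAttribute -> oldAttribute.getName().equals(newAttribute.getName()))
            .findFirst()
            .map(oldAttribute -> !Objects.equals(oldAttribute.getValue(), newAttribute.getValue()))
            // No attribute with that name existed before, so it counts as added
            .orElse(true));
}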
Use of org.openremote.model.attribute.Attribute in project openremote by openremote.
The class StorageSimulatorProtocol, method doLinkedAttributeWrite.
@SuppressWarnings("unchecked")
@Override
protected void doLinkedAttributeWrite(Attribute<?> attribute, StorageSimulatorAgentLink agentLink, AttributeEvent event, Object processedValue) {
    // Power attribute is updated only by this protocol, not by clients
    if (attribute.getName().equals(POWER.getName())) {
        return;
    }
    updateLinkedAttribute(new AttributeState(event.getAttributeRef(), processedValue));
    // Push the written value into the asset and update it
    String assetId = event.getAssetId();
    ((Attribute<Object>) attribute).setValue(processedValue);
    ElectricityStorageAsset asset = assetService.findAsset(assetId, ElectricityStorageAsset.class);
    asset.addOrReplaceAttributes(attribute);
    updateStorageAsset(asset);
}
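For context, the AttributeEvent handed to this method represents a client write to the linked attribute. A hypothetical example follows; the asset id, attribute name and constructor form are illustrative assumptions, not taken from the snippet above.

// Hypothetical write event: a client setting the energy level of a simulated storage asset.
AttributeEvent writeEvent = new AttributeEvent(
    new AttributeRef("3fAs8GYyKQVk7jTnLp1WdX", "energyLevel"), // made-up asset id and attribute name
    75.0);                                                     // raw value before value-type coercion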
Use of org.openremote.model.attribute.Attribute in project openremote by openremote.
The class AbstractProtocol, method start.
@Override
public void start(Container container) throws Exception {
    timerService = container.getService(TimerService.class);
    executorService = container.getExecutorService();
    assetService = container.getService(ProtocolAssetService.class);
    predictedAssetService = container.getService(ProtocolPredictedAssetService.class);
    messageBrokerContext = container.getService(MessageBrokerService.class).getContext();
    withLock(getProtocolName() + "::start", () -> {
        try {
            messageBrokerContext.addRoutes(new RouteBuilder() {
                @Override
                public void configure() throws Exception {
                    from(ACTUATOR_TOPIC)
                        .routeId("Actuator-" + getProtocolName() + getAgent().getId())
                        .process(exchange -> {
                            Protocol<?> protocolInstance = exchange.getIn().getHeader(ACTUATOR_TOPIC_TARGET_PROTOCOL, Protocol.class);
                            if (protocolInstance != AbstractProtocol.this) {
                                return;
                            }
                            AttributeEvent event = exchange.getIn().getBody(AttributeEvent.class);
                            Attribute<?> linkedAttribute = getLinkedAttributes().get(event.getAttributeRef());
                            if (linkedAttribute == null) {
                                LOG.info("Attempt to write to attribute that is not actually linked to this protocol '" + AbstractProtocol.this + "': " + event.getAttributeRef());
                                return;
                            }
                            processLinkedAttributeWrite(event);
                        });
                }
            });
            doStart(container);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    });
    this.producerTemplate = container.getService(MessageBrokerService.class).getProducerTemplate();
}
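Writes reach the route configured above by being published to the actuator topic with the target-protocol header set. Below is a minimal sketch of that handoff using the Camel ProducerTemplate obtained at the end of start(); the helper method name is illustrative and the real AbstractProtocol already provides equivalent plumbing.

// Sketch only: hand an attribute write to the actuator route above.
protected void sendWrite(AttributeEvent event) {
    producerTemplate.sendBodyAndHeader(
        ACTUATOR_TOPIC,                  // endpoint the route consumes from
        event,                           // read back via exchange.getIn().getBody(AttributeEvent.class)
        ACTUATOR_TOPIC_TARGET_PROTOCOL,  // header checked so only this protocol instance handles the write
        this);
}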
Use of org.openremote.model.attribute.Attribute in project openremote by openremote.
The class AssetDatapointService, method purgeDataPoints.
protected void purgeDataPoints() {
    LOG.info("Starting data points purge daily task");
    try {
        // Get list of attributes that have custom durations
        List<Asset<?>> assets = assetStorageService.findAll(
            new AssetQuery().attributes(
                new AttributePredicate().meta(
                    new NameValuePredicate(MetaItemType.DATA_POINTS_MAX_AGE_DAYS, null))));
        List<Pair<String, Attribute<?>>> attributes = assets.stream()
            .map(asset -> asset.getAttributes().stream()
                .filter(assetAttribute -> assetAttribute.hasMeta(MetaItemType.DATA_POINTS_MAX_AGE_DAYS))
                .map(assetAttribute -> new Pair<String, Attribute<?>>(asset.getId(), assetAttribute))
                .collect(toList()))
            .flatMap(List::stream)
            .collect(toList());
        // Purge data points not in the above list using the default duration
        LOG.fine("Purging data points of attributes that use default max age days of " + maxDatapointAgeDays);
        persistenceService.doTransaction(em -> em.createQuery(
                "delete from AssetDatapoint dp where dp.timestamp < :dt" + buildWhereClause(attributes, true))
            .setParameter("dt", Date.from(timerService.getNow().truncatedTo(DAYS).minus(maxDatapointAgeDays, DAYS)))
            .executeUpdate());
        if (!attributes.isEmpty()) {
            // Purge data points that have specific age constraints
            Map<Integer, List<Pair<String, Attribute<?>>>> ageAttributeRefMap = attributes.stream()
                .collect(groupingBy(attributeRef -> attributeRef.value
                    .getMetaValue(MetaItemType.DATA_POINTS_MAX_AGE_DAYS)
                    .orElse(maxDatapointAgeDays)));
            ageAttributeRefMap.forEach((age, attrs) -> {
                LOG.fine("Purging data points of " + attrs.size() + " attributes that use a max age of " + age);
                try {
                    persistenceService.doTransaction(em -> em.createQuery(
                            "delete from AssetDatapoint dp where dp.timestamp < :dt" + buildWhereClause(attrs, false))
                        .setParameter("dt", Date.from(timerService.getNow().truncatedTo(DAYS).minus(age, DAYS)))
                        .executeUpdate());
                } catch (Exception e) {
                    LOG.log(Level.SEVERE, "An error occurred whilst deleting data points, this should not happen", e);
                }
            });
        }
    } catch (Exception e) {
        LOG.log(Level.WARNING, "Failed to run data points purge", e);
    }
    LOG.info("Finished data points purge daily task");
}
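The buildWhereClause helper called in both delete queries is not shown in this snippet. Below is a plausible sketch, assuming it appends a filter on the (assetId, attributeName) pairs collected above and that the JPA provider accepts row-value IN lists; the real implementation may differ.

// Illustrative sketch: restrict the delete to (or, when negate is true, exclude) the given attribute pairs.
protected String buildWhereClause(List<Pair<String, Attribute<?>>> attributes, boolean negate) {
    if (attributes.isEmpty()) {
        return "";
    }
    String pairs = attributes.stream()
        .map(attributeRef -> "('" + attributeRef.key + "','" + attributeRef.value.getName() + "')")
        .collect(joining(","));
    return " and (dp.assetId, dp.attributeName) " + (negate ? "not " : "") + "in (" + pairs + ")";
}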