Usage of org.openremote.model.query.filter.NameValuePredicate in the openremote project.
Example: the startAgent method of the AgentService class.
/**
 * Starts the protocol instance for the given agent and links all attributes that reference it.
 * <p>
 * The protocol instance is created via {@link Agent#getProtocolInstance}, registered in
 * {@code protocolInstanceMap}, started, and then every asset attribute carrying an
 * {@code AGENT_LINK} meta item pointing at this agent's ID is linked to it. On any failure the
 * protocol is stopped (best effort), unregistered, and the agent's status attribute is set to
 * {@link ConnectionStatus#ERROR}.
 *
 * @param agent the agent whose protocol instance should be started; must not be {@code null}
 */
protected void startAgent(Agent<?, ?, ?> agent) {
    withLock(getClass().getSimpleName() + "::startAgent", () -> {
        Protocol<?> protocol = null;
        try {
            protocol = agent.getProtocolInstance();
            // Register before starting so the instance is discoverable while it spins up
            protocolInstanceMap.put(agent.getId(), protocol);

            LOG.fine("Starting protocol instance: " + protocol);
            protocol.start(container);
            // FIX: added missing space after the colon for consistency with the other log lines
            LOG.fine("Started protocol instance: " + protocol);

            LOG.finer("Linking attributes to protocol instance: " + protocol);
            // Get all assets that have attributes with agent link meta for this agent
            List<Asset<?>> assets = assetStorageService.findAll(
                new AssetQuery().attributes(
                    new AttributePredicate().meta(
                        new NameValuePredicate(
                            AGENT_LINK,
                            new StringPredicate(agent.getId()),
                            false,
                            new NameValuePredicate.Path("id")))));
            LOG.finer("Found '" + assets.size() + "' asset(s) with attributes linked to this protocol instance: " + protocol);

            // For each asset, group its agent-linked attributes by agent and link the ones
            // whose AGENT_LINK meta actually targets this agent (the query may over-match)
            assets.forEach(asset ->
                getGroupedAgentLinkAttributes(
                    asset.getAttributes().stream(),
                    assetAttribute -> assetAttribute.getMetaValue(AGENT_LINK)
                        .map(agentLink -> agentLink.getId().equals(agent.getId()))
                        .orElse(false))
                .forEach((agnt, attributes) -> linkAttributes(agnt, asset.getId(), attributes)));
        } catch (Exception e) {
            // Best-effort cleanup: stop the half-started protocol, ignoring secondary failures
            if (protocol != null) {
                try {
                    protocol.stop(container);
                } catch (Exception ignored) {
                }
            }
            protocolInstanceMap.remove(agent.getId());
            LOG.log(Level.SEVERE, "Failed to start protocol instance for agent: " + agent, e);
            // Surface the failure on the agent's status attribute
            sendAttributeEvent(new AttributeEvent(agent.getId(), Agent.STATUS.getName(), ConnectionStatus.ERROR));
        }
    });
}
Usage of org.openremote.model.query.filter.NameValuePredicate in the openremote project.
Example: the purgeDataPoints method of the AssetDatapointService class.
/**
 * Daily maintenance task that deletes aged asset data points.
 * <p>
 * Attributes carrying a {@code DATA_POINTS_MAX_AGE_DAYS} meta item keep their data points for
 * that attribute-specific number of days; all other attributes fall back to the service-wide
 * {@code maxDatapointAgeDays}. The default purge excludes the custom-age attributes, which are
 * then purged in per-age batches. Failures are logged and never propagate to the scheduler.
 */
protected void purgeDataPoints() {
    LOG.info("Starting data points purge daily task");
    try {
        // Find every asset holding at least one attribute with a custom retention period
        List<Asset<?>> assetsWithCustomAge = assetStorageService.findAll(
            new AssetQuery().attributes(
                new AttributePredicate().meta(
                    new NameValuePredicate(MetaItemType.DATA_POINTS_MAX_AGE_DAYS, null))));

        // Flatten to (assetId, attribute) pairs for the attributes that actually carry the meta
        List<Pair<String, Attribute<?>>> customAgeAttributes = assetsWithCustomAge.stream()
            .flatMap(asset -> asset.getAttributes().stream()
                .filter(attr -> attr.hasMeta(MetaItemType.DATA_POINTS_MAX_AGE_DAYS))
                .map(attr -> new Pair<String, Attribute<?>>(asset.getId(), attr)))
            .collect(toList());

        // Default purge: everything except the custom-age attributes collected above
        LOG.fine("Purging data points of attributes that use default max age days of " + maxDatapointAgeDays);
        persistenceService.doTransaction(em ->
            em.createQuery("delete from AssetDatapoint dp " + "where dp.timestamp < :dt" + buildWhereClause(customAgeAttributes, true))
                .setParameter("dt", Date.from(timerService.getNow().truncatedTo(DAYS).minus(maxDatapointAgeDays, DAYS)))
                .executeUpdate());

        if (!customAgeAttributes.isEmpty()) {
            // Batch the custom-age attributes by their retention period so each distinct
            // age needs only a single delete statement
            Map<Integer, List<Pair<String, Attribute<?>>>> attributesByAge = customAgeAttributes.stream()
                .collect(groupingBy(ref -> ref.value.getMetaValue(MetaItemType.DATA_POINTS_MAX_AGE_DAYS).orElse(maxDatapointAgeDays)));

            attributesByAge.forEach((age, refs) -> {
                LOG.fine("Purging data points of " + refs.size() + " attributes that use a max age of " + age);
                try {
                    persistenceService.doTransaction(em ->
                        em.createQuery("delete from AssetDatapoint dp " + "where dp.timestamp < :dt" + buildWhereClause(refs, false))
                            .setParameter("dt", Date.from(timerService.getNow().truncatedTo(DAYS).minus(age, DAYS)))
                            .executeUpdate());
                } catch (Exception e) {
                    LOG.log(Level.SEVERE, "An error occurred whilst deleting data points, this should not happen", e);
                }
            });
        }
    } catch (Exception e) {
        LOG.log(Level.WARNING, "Failed to run data points purge", e);
    }
    LOG.info("Finished data points purge daily task");
}
Aggregations