Usage of org.openremote.model.asset.Asset in the openremote project — class RulesService, method fireDeploymentsWithPredictedDataForAsset.
/**
 * Trigger rules engines which have the {@link org.openremote.model.value.MetaItemDescriptor} {@link org.openremote.model.rules.Ruleset#TRIGGER_ON_PREDICTED_DATA}
 * and contain {@link AssetState} of the specified asset id. Use this when {@link PredictedDatapoints} has changed for this asset.
 *
 * @param assetId of the asset which has new predicted data points.
 */
public void fireDeploymentsWithPredictedDataForAsset(String assetId) {
    // All asset states currently loaded for this asset; empty when the asset is not in scope of any engine
    List<AssetState<?>> assetStates = getAssetStatesInScope(assetId);
    if (!assetStates.isEmpty()) {
        // Every state of a single asset shares the same realm, so the first entry is representative
        String realm = assetStates.get(0).getRealm();
        // Collect the full asset path(s) so engines scoped to parents/ancestors are also matched
        String[] assetPaths = assetStates.stream()
            .flatMap(assetState -> Arrays.stream(assetState.getPath()))
            .toArray(String[]::new);
        for (RulesEngine<?> rulesEngine : getEnginesInScope(realm, assetPaths)) {
            rulesEngine.fireAllDeploymentsWithPredictedData();
        }
    }
}
Usage of org.openremote.model.asset.Asset in the openremote project — class EnergyOptimisationService, method getStoragePowerSetpoints.
/**
 * Calculates the power setpoints (one per optimisation interval) for the specified storage asset.
 * For producers power demand will only ever be negative, for consumers it will only ever be
 * positive and for storage assets that support export (i.e. supports producer and consumer) it can
 * be positive or negative at a given interval. For this to work the supplied parameters should be
 * updated when the system changes and not replaced so that references maintained by the calculator
 * are valid and up to date.
 *
 * @return one setpoint per interval; positive values import/charge, negative values export/discharge
 */
protected double[] getStoragePowerSetpoints(OptimisationInstance optimisationInstance, ElectricityStorageAsset storageAsset, double[] normalisedEnergyLevelMins, double[] energyLevelMaxs, double[] powerNets, double[] importPowerLimits, double[] exportPowerLimits, double[] costImports, double[] costExports) {
    EnergyOptimiser optimiser = optimisationInstance.energyOptimiser;
    String optimisationAssetId = optimisationInstance.optimisationAsset.getId();
    int intervalCount = optimiser.get24HourIntervalCount();
    boolean supportsExport = storageAsset.isSupportsExport().orElse(false);
    boolean supportsImport = storageAsset.isSupportsImport().orElse(false);
    LOG.finer(getLogPrefix(optimisationAssetId) + "Optimising storage asset: " + storageAsset);
    double energyCapacity = storageAsset.getEnergyCapacity().orElse(0d);
    // Clamp the reported energy level to capacity; -1 marks an unknown level
    double energyLevel = Math.min(energyCapacity, storageAsset.getEnergyLevel().orElse(-1d));
    // Export power is expressed as a negative value.
    // NOTE(review): Double.MIN_VALUE is the smallest POSITIVE double, not the most negative value;
    // -Double.MAX_VALUE looks like the intended "unlimited export" default — confirm before changing.
    double powerExportMax = storageAsset.getPowerExportMax().map(power -> -1 * power).orElse(Double.MIN_VALUE);
    double powerImportMax = storageAsset.getPowerImportMax().orElse(Double.MAX_VALUE);
    boolean isConnected = storageAssetConnected(storageAsset);
    // TODO: Make these a function of energy level
    // A disconnected asset cannot physically import/export in the current (first) interval
    Function<Integer, Double> powerImportMaxCalculator = interval -> interval == 0 && !isConnected ? 0 : powerImportMax;
    Function<Integer, Double> powerExportMaxCalculator = interval -> interval == 0 && !isConnected ? 0 : powerExportMax;
    double[][] exportCostAndPower = null;
    double[][] importCostAndPower = null;
    double[] powerSetpoints = new double[intervalCount];
    // Energy level at the start of an interval = current level + energy moved by all earlier setpoints
    Function<Integer, Double> energyLevelCalculator = interval -> energyLevel + IntStream.range(0, interval).mapToDouble(j -> powerSetpoints[j] * optimiser.getIntervalSize()).sum();
    // Find opportunities to export energy to save/earn, taking into consideration the cost of exporting from this asset
    if (supportsExport) {
        LOG.finer(getLogPrefix(optimisationAssetId) + "Storage asset supports export so calculating export cost and power levels for each interval: " + storageAsset.getId());
        // Find intervals that save/earn by exporting energy from this storage asset by looking at power levels
        BiFunction<Integer, Double, double[]> exportOptimiser = optimiser.getExportOptimiser(powerNets, exportPowerLimits, costImports, costExports, storageAsset.getTariffExport().orElse(0d));
        exportCostAndPower = IntStream.range(0, intervalCount).mapToObj(it -> exportOptimiser.apply(it, powerExportMax)).toArray(double[][]::new);
    }
    // Calculate import cost/power so that min energy demands are met.
    if (supportsImport) {
        // Fixed copy-paste error: this branch calculates IMPORT (not export) cost and power levels
        LOG.finer(getLogPrefix(optimisationAssetId) + "Storage asset supports import so calculating import cost and power levels for each interval: " + storageAsset.getId());
        BiFunction<Integer, double[], double[]> importOptimiser = optimiser.getImportOptimiser(powerNets, importPowerLimits, costImports, costExports, storageAsset.getTariffImport().orElse(0d));
        importCostAndPower = IntStream.range(0, intervalCount).mapToObj(it -> importOptimiser.apply(it, new double[] { 0d, powerImportMax })).toArray(double[][]::new);
        // Only bother applying min-energy imports when at least one interval actually requires energy
        boolean hasEnergyMinRequirement = Arrays.stream(normalisedEnergyLevelMins).anyMatch(el -> el > 0);
        if (hasEnergyMinRequirement) {
            LOG.finer(getLogPrefix(optimisationAssetId) + "Applying imports to achieve min energy level requirements for storage asset: " + storageAsset.getId());
            optimiser.applyEnergyMinImports(importCostAndPower, normalisedEnergyLevelMins, powerSetpoints, energyLevelCalculator, importOptimiser, powerImportMaxCalculator);
            if (LOG.isLoggable(Level.FINEST)) {
                LOG.finest(getLogPrefix(optimisationAssetId) + "Setpoints to achieve min energy level requirements for storage asset '" + storageAsset.getId() + "': " + Arrays.toString(powerSetpoints));
            }
        }
    }
    // Finally layer earning opportunities (cheap import / profitable export) on top of the mandatory setpoints
    optimiser.applyEarningOpportunities(importCostAndPower, exportCostAndPower, normalisedEnergyLevelMins, energyLevelMaxs, powerSetpoints, energyLevelCalculator, powerImportMaxCalculator, powerExportMaxCalculator);
    if (LOG.isLoggable(Level.FINER)) {
        LOG.finer(getLogPrefix(optimisationAssetId) + "Calculated earning opportunity power set points for storage asset '" + storageAsset.getId() + "': " + Arrays.toString(powerSetpoints));
    }
    return powerSetpoints;
}
Usage of org.openremote.model.asset.Asset in the openremote project — class GatewayService, method processGatewayChange.
/**
 * Process a create/update/delete persistence event for a {@link GatewayAsset}: maintains the
 * gateway's connector registration (in {@code gatewayConnectorMap}) and its service user.
 */
protected void processGatewayChange(GatewayAsset gateway, PersistenceEvent<Asset<?>> persistenceEvent) {
    switch(persistenceEvent.getCause()) {
        case CREATE:
            // Provision the gateway's service user, then register a connector for it
            createUpdateGatewayServiceUser(gateway);
            synchronized (gatewayConnectorMap) {
                GatewayConnector connector = new GatewayConnector(assetStorageService, assetProcessingService, executorService, gateway);
                // Connector map keys are lower-cased gateway IDs
                gatewayConnectorMap.put(gateway.getId().toLowerCase(Locale.ROOT), connector);
            }
            break;
        case UPDATE:
            // Check if this gateway has a connector
            // NOTE(review): this read is not synchronized on gatewayConnectorMap, unlike the put/remove
            // paths below — confirm the map implementation is thread-safe
            GatewayConnector connector = gatewayConnectorMap.get(gateway.getId().toLowerCase(Locale.ROOT));
            if (connector == null) {
                break;
            }
            // Keep the connector's view of the gateway asset up to date
            connector.gateway = gateway;
            // Check if disabled
            boolean isNowDisabled = gateway.getDisabled().orElse(false);
            if (isNowDisabled) {
                // Tell the remote gateway it has been disabled so it disconnects
                connector.sendMessageToGateway(new GatewayDisconnectEvent(GatewayDisconnectEvent.Reason.DISABLED));
            }
            connector.setDisabled(isNowDisabled);
            // Locate the 'attributes' entry in the event's changed-property list (-1 when absent)
            int attributeIndex = persistenceEvent.getPropertyNames() != null ? IntStream.range(0, persistenceEvent.getPropertyNames().length).filter(i -> "attributes".equals(persistenceEvent.getPropertyNames()[i])).findFirst().orElse(-1) : -1;
            if (attributeIndex >= 0) {
                // Check if disabled attribute has changed
                // NOTE(review): assumes the previous 'attributes' state is non-null whenever the
                // property is listed as changed — a null here would NPE; confirm upstream guarantee
                AttributeMap oldAttributes = persistenceEvent.getPreviousState("attributes");
                boolean wasDisabled = oldAttributes.getValue(GatewayAsset.DISABLED).orElse(false);
                if (wasDisabled != isNowDisabled) {
                    // Disabled flag flipped, so refresh the gateway's service user state
                    createUpdateGatewayServiceUser(gateway);
                }
            }
            break;
        case DELETE:
            // Check if this gateway has a connector
            connector = gatewayConnectorMap.get(gateway.getId().toLowerCase(Locale.ROOT));
            if (connector == null) {
                break;
            }
            synchronized (gatewayConnectorMap) {
                // Re-fetch under lock so we disconnect the instance actually removed from the map
                connector = gatewayConnectorMap.remove(gateway.getId().toLowerCase(Locale.ROOT));
                if (connector != null) {
                    connector.disconnect();
                }
            }
            removeGatewayServiceUser(gateway);
            break;
    }
}
Usage of org.openremote.model.asset.Asset in the openremote project — class ValueUtil, method doInitialise.
/**
 * Initialise the asset model and throw an {@link IllegalStateException} exception if a problem is detected; this
 * can be called by applications at startup to fail hard and fast if the asset model is un-usable.
 * Populates the static registries (asset info/type maps, agent link map, descriptor lists) and the
 * shared {@code JSON} object mapper from all registered {@code AssetModelProvider}s.
 */
protected static void doInitialise() throws IllegalStateException {
    // Reset all static registries so (re-)initialisation starts from a clean slate
    assetInfoMap = new HashMap<>();
    assetTypeMap = new HashMap<>();
    agentLinkMap = new HashMap<>();
    metaItemDescriptors = new ArrayList<>();
    valueDescriptors = new ArrayList<>();
    generator = null;
    // Provide basic Object Mapper and enhance once asset model is initialised
    JSON = configureObjectMapper(new ObjectMapper());
    LOG.info("Initialising asset model...");
    // Keyed by asset class; TreeMap ordered by ClassHierarchyComparator
    // NOTE(review): exact ordering semantics depend on ClassHierarchyComparator (not visible here) — confirm
    Map<Class<? extends Asset<?>>, List<NameHolder>> assetDescriptorProviders = new TreeMap<>(new ClassHierarchyComparator());
    // Seed with the base Asset class' own descriptor fields
    // noinspection RedundantCast
    assetDescriptorProviders.put((Class<? extends Asset<?>>) (Class<?>) Asset.class, new ArrayList<>(getDescriptorFields(Asset.class)));
    // Gather descriptors contributed by every registered model provider
    getModelProviders().forEach(assetModelProvider -> {
        LOG.fine("Processing asset model provider: " + assetModelProvider.getClass().getSimpleName());
        LOG.fine("Auto scan = " + assetModelProvider.useAutoScan());
        if (assetModelProvider.useAutoScan()) {
            // Classpath-scan for asset classes co-located with the provider and register their descriptor fields
            Set<Class<? extends Asset<?>>> assetClasses = getAssetClasses(assetModelProvider);
            LOG.fine("Found " + assetClasses.size() + " asset class(es)");
            assetClasses.forEach(assetClass -> assetDescriptorProviders.computeIfAbsent(assetClass, aClass -> new ArrayList<>(getDescriptorFields(aClass))));
            // @ModelDescriptors annotations on the provider contribute descriptors from named provider classes
            ModelDescriptors modelDescriptors = assetModelProvider.getClass().getAnnotation(ModelDescriptors.class);
            if (modelDescriptors != null) {
                for (ModelDescriptor modelDescriptor : modelDescriptors.value()) {
                    Class<? extends Asset<?>> assetClass = (Class<? extends Asset<?>>) modelDescriptor.assetType();
                    assetDescriptorProviders.compute(assetClass, (aClass, list) -> {
                        if (list == null) {
                            list = new ArrayList<>();
                        }
                        list.addAll(getDescriptorFields(modelDescriptor.provider()));
                        return list;
                    });
                }
            }
        }
        // Programmatically supplied asset descriptors
        if (assetModelProvider.getAssetDescriptors() != null) {
            for (AssetDescriptor<?> assetDescriptor : assetModelProvider.getAssetDescriptors()) {
                Class<? extends Asset<?>> assetClass = assetDescriptor.getType();
                assetDescriptorProviders.compute(assetClass, (aClass, list) -> {
                    if (list == null) {
                        list = new ArrayList<>();
                    }
                    list.add(assetDescriptor);
                    return list;
                });
            }
        }
        // Programmatically supplied attribute descriptors, keyed by asset class
        if (assetModelProvider.getAttributeDescriptors() != null) {
            assetModelProvider.getAttributeDescriptors().forEach((assetClass, attributeDescriptors) -> assetDescriptorProviders.compute(assetClass, (aClass, list) -> {
                if (list == null) {
                    list = new ArrayList<>();
                }
                list.addAll(attributeDescriptors);
                return list;
            }));
        }
        // Programmatically supplied meta item descriptors, keyed by asset class
        if (assetModelProvider.getMetaItemDescriptors() != null) {
            assetModelProvider.getMetaItemDescriptors().forEach((assetClass, metaDescriptors) -> assetDescriptorProviders.compute(assetClass, (aClass, list) -> {
                if (list == null) {
                    list = new ArrayList<>();
                }
                list.addAll(metaDescriptors);
                return list;
            }));
        }
        // Programmatically supplied value descriptors, keyed by asset class
        if (assetModelProvider.getValueDescriptors() != null) {
            assetModelProvider.getValueDescriptors().forEach((assetClass, valueDescriptors) -> assetDescriptorProviders.compute(assetClass, (aClass, list) -> {
                if (list == null) {
                    list = new ArrayList<>();
                }
                list.addAll(valueDescriptors);
                return list;
            }));
        }
    });
    // Build each asset info checking that no conflicts occur
    Map<Class<? extends Asset<?>>, List<NameHolder>> copy = new HashMap<>(assetDescriptorProviders);
    assetDescriptorProviders.forEach((assetClass, descriptors) -> {
        // Skip abstract classes as a start point - they should be in the class hierarchy of concrete class
        if (!Modifier.isAbstract(assetClass.getModifiers())) {
            AssetTypeInfo assetInfo = buildAssetInfo(assetClass, copy);
            assetInfoMap.put(assetClass, assetInfo);
            assetTypeMap.put(assetInfo.getAssetDescriptor().getName(), assetClass);
            if (assetInfo.getAssetDescriptor() instanceof AgentDescriptor) {
                AgentDescriptor<?, ?, ?> agentDescriptor = (AgentDescriptor<?, ?, ?>) assetInfo.getAssetDescriptor();
                // Agent link classes are registered by simple name (see registerSubtypes below), so it must be unique
                String agentLinkName = agentDescriptor.getAgentLinkClass().getSimpleName();
                if (agentLinkMap.containsKey(agentLinkName) && agentLinkMap.get(agentLinkName) != agentDescriptor.getAgentLinkClass()) {
                    throw new IllegalStateException("AgentLink simple class name must be unique, duplicate found for: " + agentDescriptor.getAgentLinkClass());
                }
                agentLinkMap.put(agentLinkName, agentDescriptor.getAgentLinkClass());
            }
        }
    });
    // Check each value type implements serializable interface
    List<ValueDescriptor<?>> nonSerializableValueDescriptors = new ArrayList<>();
    valueDescriptors.forEach(vd -> {
        if (!Serializable.class.isAssignableFrom(vd.getType())) {
            nonSerializableValueDescriptors.add(vd);
        }
    });
    if (!nonSerializableValueDescriptors.isEmpty()) {
        // Fail fast listing every offending value descriptor, not just the first
        String vds = nonSerializableValueDescriptors.stream().map(ValueDescriptor::toString).collect(Collectors.joining(",\n"));
        throw new IllegalStateException("One or more value types do not implement java.io.Serializable: " + vds);
    }
    // Call on finished on each provider
    assetModelProviders.forEach(AssetModelProvider::onAssetModelFinished);
    // Add agent link sub types to object mapper (need to avoid circular dependency)
    NamedType[] agentLinkSubTypes = Arrays.stream(getAgentLinkClasses()).map(agentLinkClass -> new NamedType(agentLinkClass, agentLinkClass.getSimpleName())).toArray(NamedType[]::new);
    JSON.registerSubtypes(agentLinkSubTypes);
    doSchemaInit();
}
Usage of org.openremote.model.asset.Asset in the openremote project — class ValueUtil, method getAssetClasses.
/**
 * Discovers concrete {@link Asset} subclasses located alongside the supplied model provider,
 * excluding any class annotated with {@code @ModelIgnore}.
 */
protected static Set<Class<? extends Asset<?>>> getAssetClasses(AssetModelProvider assetModelProvider) {
    // Scan the classpath location (JAR/directory) that contains the provider class itself
    Reflections scanner = new Reflections(new ConfigurationBuilder()
        .setUrls(ClasspathHelper.forClass(assetModelProvider.getClass()))
        .setScanners(new SubTypesScanner(true)));
    LOG.fine("Scanning for Asset classes");
    Set<Class<? extends Asset<?>>> assetClasses = scanner.getSubTypesOf(Asset.class)
        .stream()
        .map(cls -> (Class<? extends Asset<?>>) cls)
        .filter(cls -> cls.getAnnotation(ModelIgnore.class) == null)
        .collect(Collectors.toSet());
    LOG.fine("Found asset class count = " + assetClasses.size());
    return assetClasses;
}
Aggregations