Use of org.influxdb.dto.Serie in the openhab1-addons project (by openhab):
class InfluxDBPersistenceService, method store.
/**
* {@inheritDoc}
*/
@Override
public void store(Item item, String alias) {
if (item.getState() instanceof UnDefType) {
return;
}
if (!isProperlyConfigured) {
logger.warn("Configuration for influxdb08 not yet loaded or broken.");
return;
}
if (!isConnected()) {
logger.warn("InfluxDB is not yet connected");
return;
}
String realName = item.getName();
String name = (alias != null) ? alias : realName;
State state = null;
if (item instanceof DimmerItem || item instanceof RollershutterItem) {
state = item.getStateAs(PercentType.class);
} else if (item instanceof ColorItem) {
state = item.getStateAs(HSBType.class);
} else {
// All other items should return the best format by default
state = item.getState();
}
Object value = stateToObject(state);
logger.trace("storing {} in influxdb08 {}", name, value);
// For now time is calculated by influxdb08, may be this should be configurable?
Serie serie = new Serie.Builder(name).columns(VALUE_COLUMN_NAME).values(value).build();
try {
influxDB.write(dbName, TimeUnit.MILLISECONDS, serie);
} catch (RuntimeException e) {
logger.error("storing failed with exception for item: {}", name);
handleDatabaseException(e);
}
}
Use of org.influxdb.dto.Serie in the openhab1-addons project (by openhab):
class InfluxDBPersistenceService, method query.
@Override
public Iterable<HistoricItem> query(FilterCriteria filter) {
logger.debug("got a query");
if (!isProperlyConfigured) {
logger.warn("Configuration for influxdb08 not yet loaded or broken.");
return Collections.emptyList();
}
if (!isConnected()) {
logger.warn("InfluxDB is not yet connected");
return Collections.emptyList();
}
List<HistoricItem> historicItems = new ArrayList<HistoricItem>();
StringBuffer query = new StringBuffer();
query.append("select ");
query.append(VALUE_COLUMN_NAME);
query.append(", ");
query.append(TIME_COLUMN_NAME);
query.append(" ");
query.append("from ");
if (filter.getItemName() != null) {
query.append(filter.getItemName());
} else {
query.append("/.*/");
}
logger.trace("filter itemname: {}", filter.getItemName());
logger.trace("filter ordering: {}", filter.getOrdering().toString());
logger.trace("filter state: {}", filter.getState());
logger.trace("filter operator: {}", filter.getOperator());
logger.trace("filter getBeginDate: {}", filter.getBeginDate());
logger.trace("filter getEndDate: {}", filter.getEndDate());
logger.trace("filter getPageSize: {}", filter.getPageSize());
logger.trace("filter getPageNumber: {}", filter.getPageNumber());
if ((filter.getState() != null && filter.getOperator() != null) || filter.getBeginDate() != null || filter.getEndDate() != null) {
query.append(" where ");
boolean foundState = false;
boolean foundBeginDate = false;
if (filter.getState() != null && filter.getOperator() != null) {
String value = stateToString(filter.getState());
if (value != null) {
foundState = true;
query.append(VALUE_COLUMN_NAME);
query.append(" ");
query.append(filter.getOperator().toString());
query.append(" ");
query.append(value);
}
}
if (filter.getBeginDate() != null) {
foundBeginDate = true;
if (foundState) {
query.append(" and");
}
query.append(" ");
query.append(TIME_COLUMN_NAME);
query.append(" > ");
query.append(getTimeFilter(filter.getBeginDate()));
query.append(" ");
}
if (filter.getEndDate() != null) {
if (foundState || foundBeginDate) {
query.append(" and");
}
query.append(" ");
query.append(TIME_COLUMN_NAME);
query.append(" < ");
query.append(getTimeFilter(filter.getEndDate()));
query.append(" ");
}
}
// http://influxdb.com/docs/v0.7/api/query_language.html#select-and-time-ranges
if (filter.getOrdering() == Ordering.ASCENDING) {
query.append(" order asc");
}
int limit = (filter.getPageNumber() + 1) * filter.getPageSize();
query.append(" limit " + limit);
logger.trace("appending limit {}", limit);
int totalEntriesAffected = ((filter.getPageNumber() + 1) * filter.getPageSize());
int startEntryNum = totalEntriesAffected - (totalEntriesAffected - (filter.getPageSize() * filter.getPageNumber()));
logger.trace("startEntryNum {}", startEntryNum);
logger.debug("query string: {}", query.toString());
List<Serie> results = Collections.emptyList();
try {
results = influxDB.query(dbName, query.toString(), TimeUnit.MILLISECONDS);
} catch (RuntimeException e) {
logger.error("query failed with database error");
handleDatabaseException(e);
}
for (Serie result : results) {
String historicItemName = result.getName();
logger.trace("item name {}", historicItemName);
int entryCount = 0;
for (Map<String, Object> row : result.getRows()) {
entryCount++;
if (entryCount >= startEntryNum) {
Double rawTime = (Double) row.get(TIME_COLUMN_NAME);
Object rawValue = row.get(VALUE_COLUMN_NAME);
logger.trace("adding historic item {}: time {} value {}", historicItemName, rawTime, rawValue);
Date time = new Date(rawTime.longValue());
State value = objectToState(rawValue, historicItemName);
historicItems.add(new InfluxdbItem(historicItemName, value, time));
} else {
logger.trace("omitting item value for {}", historicItemName);
}
}
}
return historicItems;
}
Use of org.influxdb.dto.Serie in the fabric8 project (by jboss-fuse):
class InfluxDBMetricsStorage, method store.
@Override
public void store(String type, long timestamp, QueryResult queryResult) {
assertValid();
if (influxDB == null) {
throw new IllegalStateException("No influxDB available!");
}
List<Serie> series = new LinkedList<>();
Map<String, Result<?>> results = queryResult.getResults();
if (results != null) {
Map<String, Object> data = new HashMap<>();
Set<Map.Entry<String, Result<?>>> entries = results.entrySet();
for (Map.Entry<String, Result<?>> entry : entries) {
String key = entry.getKey();
Result<?> result = entry.getValue();
if (result instanceof MBeanOpersResult) {
MBeanOpersResult opersResult = (MBeanOpersResult) result;
List<MBeanOperResult> operResults = opersResult.getResults();
if (operResults != null) {
for (MBeanOperResult operResult : operResults) {
Object value = operResult.getValue();
Double doubleValue = toDouble(value);
if (doubleValue != null) {
String id = Metrics.metricId(type, opersResult.getRequest());
data.put(id, doubleValue);
}
}
}
} else if (result instanceof MBeanAttrsResult) {
MBeanAttrsResult attrsResult = (MBeanAttrsResult) result;
List<MBeanAttrResult> attrResults = attrsResult.getResults();
if (attrResults != null) {
for (MBeanAttrResult attrResult : attrResults) {
Map<String, Object> attrs = attrResult.getAttrs();
if (attrs != null) {
Set<Map.Entry<String, Object>> attrEntries = attrs.entrySet();
for (Map.Entry<String, Object> attrEntry : attrEntries) {
String attributeName = attrEntry.getKey();
Object value = attrEntry.getValue();
Double doubleValue = toDouble(value);
if (doubleValue != null) {
String id = Metrics.metricId(type, attrsResult.getRequest(), attributeName);
data.put(id, doubleValue);
}
}
}
}
}
}
if (!data.isEmpty()) {
data.put("time", timestamp);
series.add(new Serie.Builder("insight").columns(data.keySet().toArray(new String[data.size()])).values(data.values().toArray(new Object[data.size()])).build());
}
}
if (!series.isEmpty()) {
influxDB.get().write("fabric", TimeUnit.MILLISECONDS, series.toArray(new Serie[series.size()]));
if (LOG.isDebugEnabled()) {
LOG.debug("added " + series.size() + " metrics");
}
}
}
}
Aggregations