Use of org.influxdb.dto.QueryResult.Series in project openhab1-addons (by openhab): class InfluxDBPersistenceService, method query.
/**
 * Queries historic item states from InfluxDB according to the given filter.
 *
 * Builds an InfluxQL SELECT over the configured retention policy, optionally
 * restricted by item name, state comparison, and begin/end date, then converts
 * each returned series row into an {@code InfluxdbItem}.
 *
 * @param filter query criteria (item name, state/operator, time range, paging)
 * @return matching historic items; an empty list when the service is not
 *         configured or not connected
 */
@Override
public Iterable<HistoricItem> query(FilterCriteria filter) {
    logger.debug("got a query");
    if (!isProperlyConfigured) {
        logger.warn("Configuration for influxdb not yet loaded or broken.");
        return Collections.emptyList();
    }
    if (!isConnected()) {
        logger.warn("InfluxDB is not yet connected");
        return Collections.emptyList();
    }
    List<HistoricItem> historicItems = new ArrayList<HistoricItem>();
    // StringBuilder instead of StringBuffer: this builder is method-local and
    // never shared between threads, so StringBuffer's synchronization is waste.
    StringBuilder query = new StringBuilder();
    query.append("select ").append(VALUE_COLUMN_NAME).append(' ').append("from \"").append(retentionPolicy)
            .append("\".");
    if (filter.getItemName() != null) {
        query.append('"').append(filter.getItemName()).append('"');
    } else {
        // no item name given: match every measurement via regex
        query.append("/.*/");
    }
    logger.trace("Filter: itemname: {}, ordering: {}, state: {}, operator: {}, getBeginDate: {}, getEndDate: {}, getPageSize: {}, getPageNumber: {}", filter.getItemName(), filter.getOrdering().toString(), filter.getState(), filter.getOperator(), filter.getBeginDate(), filter.getEndDate(), filter.getPageSize(), filter.getPageNumber());
    if ((filter.getState() != null && filter.getOperator() != null) || filter.getBeginDate() != null
            || filter.getEndDate() != null) {
        query.append(" where ");
        boolean foundState = false;
        boolean foundBeginDate = false;
        if (filter.getState() != null && filter.getOperator() != null) {
            String value = stateToString(filter.getState());
            if (value != null) {
                foundState = true;
                query.append(VALUE_COLUMN_NAME);
                query.append(' ');
                query.append(filter.getOperator().toString());
                query.append(' ');
                query.append(value);
            }
        }
        if (filter.getBeginDate() != null) {
            foundBeginDate = true;
            if (foundState) {
                query.append(" and");
            }
            query.append(' ');
            query.append(TIME_COLUMN_NAME);
            query.append(" > ");
            query.append(getTimeFilter(filter.getBeginDate()));
            query.append(' ');
        }
        if (filter.getEndDate() != null) {
            if (foundState || foundBeginDate) {
                query.append(" and");
            }
            query.append(' ');
            query.append(TIME_COLUMN_NAME);
            query.append(" < ");
            query.append(getTimeFilter(filter.getEndDate()));
            query.append(' ');
        }
    }
    if (filter.getOrdering() == Ordering.DESCENDING) {
        query.append(String.format(" ORDER BY %s DESC", TIME_COLUMN_NAME));
        logger.debug("descending ordering ");
    }
    // The limit must cover all pages up to and including the requested one;
    // page-offset skipping is not pushed into the query here.
    int limit = (filter.getPageNumber() + 1) * filter.getPageSize();
    query.append(" limit ").append(limit);
    logger.trace("appending limit {}", limit);
    // First entry index of the requested page. The original computed this as
    // total - (total - pageSize*pageNumber), which reduces exactly to
    // pageSize*pageNumber; the value is only used for the trace output.
    int startEntryNum = filter.getPageSize() * filter.getPageNumber();
    logger.trace("startEntryNum {}", startEntryNum);
    logger.debug("query string: {}", query.toString());
    Query influxdbQuery = new Query(query.toString(), dbName);
    // The original pre-initialized this with Collections.emptyList() and
    // immediately overwrote it — the dead assignment has been removed.
    List<Result> results = influxDB.query(influxdbQuery, timeUnit).getResults();
    for (Result result : results) {
        // check the per-result error before touching its series
        if (result.getError() != null) {
            logger.error(result.getError());
            continue;
        }
        List<Series> seriess = result.getSeries();
        if (seriess == null) {
            logger.debug("query returned no series");
        } else {
            for (Series series : seriess) {
                logger.trace("series {}", series.toString());
                String historicItemName = series.getName();
                List<List<Object>> valuess = series.getValues();
                if (valuess == null) {
                    logger.debug("query returned no values");
                } else {
                    // locate the time and value columns by name; their position
                    // within the series is not guaranteed
                    List<String> columns = series.getColumns();
                    logger.trace("columns {}", columns);
                    Integer timestampColumn = null;
                    Integer valueColumn = null;
                    for (int i = 0; i < columns.size(); i++) {
                        String columnName = columns.get(i);
                        if (columnName.equals(TIME_COLUMN_NAME)) {
                            timestampColumn = i;
                        } else if (columnName.equals(VALUE_COLUMN_NAME)) {
                            valueColumn = i;
                        }
                    }
                    if (valueColumn == null || timestampColumn == null) {
                        throw new RuntimeException("missing column");
                    }
                    for (int i = 0; i < valuess.size(); i++) {
                        // the time column arrives as a Double of epoch millis
                        // (NOTE(review): depends on the configured timeUnit — confirm)
                        Double rawTime = (Double) valuess.get(i).get(timestampColumn);
                        Date time = new Date(rawTime.longValue());
                        State value = objectToState(valuess.get(i).get(valueColumn), historicItemName);
                        logger.trace("adding historic item {}: time {} value {}", historicItemName, time, value);
                        historicItems.add(new InfluxdbItem(historicItemName, value, time));
                    }
                }
            }
        }
    }
    return historicItems;
}
Use of org.influxdb.dto.QueryResult.Series in project openems (by OpenEMS): class InfluxdbQueryWrapper, method queryData.
/**
 * Queries historic channel data from InfluxDB and converts it into a JsonArray
 * of per-timestamp objects shaped as
 * {"time": "...", "channels": {thingId: {channelId: value}}}.
 *
 * @param influxdb   connected InfluxDB client
 * @param fems       optional fems id; when present, rows are filtered by it
 * @param fromDate   lower bound of the time range (exclusive, epoch seconds)
 * @param toDate     upper bound of the time range (exclusive, epoch seconds)
 * @param channels   requested channels as {thingId: [channelId, ...]}
 * @param resolution GROUP BY time-bucket size in seconds
 * @param dbName     InfluxDB database to query
 * @return one JsonObject per time bucket, in series order
 * @throws OpenemsException if the query fails
 */
private static JsonArray queryData(InfluxDB influxdb, Optional<Integer> fems, ZonedDateTime fromDate, ZonedDateTime toDate, JsonObject channels, int resolution, String dbName) throws OpenemsException {
// Prepare query string
StringBuilder query = new StringBuilder("SELECT ");
query.append(toChannelAddressList(channels));
query.append(" FROM data WHERE ");
if (fems.isPresent()) {
query.append("fems = '");
query.append(fems.get());
query.append("' AND ");
}
// time bounds use strict comparison and epoch seconds with the "s" suffix
query.append("time > ");
query.append(String.valueOf(fromDate.toEpochSecond()));
query.append("s");
query.append(" AND time < ");
query.append(String.valueOf(toDate.toEpochSecond()));
query.append("s");
// bucket rows into resolution-second groups; fill(previous) repeats the last
// known value for buckets that would otherwise be empty
query.append(" GROUP BY time(");
query.append(resolution);
query.append("s) fill(previous)");
QueryResult queryResult = executeQuery(influxdb, query.toString(), dbName);
JsonArray j = new JsonArray();
for (Result result : queryResult.getResults()) {
// getSeries() is null when the result carries no series
List<Series> seriess = result.getSeries();
if (seriess != null) {
for (Series series : seriess) {
// create thing/channel index
// column 0 is "time", so addressIndex[i] maps to series column i+1
ArrayList<ChannelAddress> addressIndex = new ArrayList<>();
for (String column : series.getColumns()) {
if (column.equals("time")) {
continue;
}
addressIndex.add(ChannelAddress.fromString(column));
}
// first: create empty timestamp objects
for (List<Object> values : series.getValues()) {
JsonObject jTimestamp = new JsonObject();
// get timestamp
// the time column arrives as a Double of epoch milliseconds here
// (NOTE(review): depends on the client's epoch precision — confirm)
Instant timestampInstant = Instant.ofEpochMilli((long) ((Double) values.get(0)).doubleValue());
ZonedDateTime timestamp = ZonedDateTime.ofInstant(timestampInstant, fromDate.getZone());
String timestampString = timestamp.format(DateTimeFormatter.ISO_INSTANT);
jTimestamp.addProperty("time", timestampString);
// add empty channels by copying "channels" parameter
JsonObject jChannels = new JsonObject();
for (Entry<String, JsonElement> entry : channels.entrySet()) {
String thingId = entry.getKey();
JsonObject jThing = new JsonObject();
JsonArray channelIds = JsonUtils.getAsJsonArray(entry.getValue());
for (JsonElement channelElement : channelIds) {
String channelId = JsonUtils.getAsString(channelElement);
// placeholder; filled in by the data pass below
jThing.add(channelId, JsonNull.INSTANCE);
}
jChannels.add(thingId, jThing);
}
jTimestamp.add("channels", jChannels);
j.add(jTimestamp);
}
// then: add all data
// columnIndex starts at 1 to skip "time"; timeIndex matches the
// insertion order of the jTimestamp objects created above
for (int columnIndex = 1; columnIndex < series.getColumns().size(); columnIndex++) {
for (int timeIndex = 0; timeIndex < series.getValues().size(); timeIndex++) {
Double value = (Double) series.getValues().get(timeIndex).get(columnIndex);
ChannelAddress address = addressIndex.get(columnIndex - 1);
j.get(timeIndex).getAsJsonObject().get("channels").getAsJsonObject().get(address.getThingId()).getAsJsonObject().addProperty(address.getChannelId(), value);
}
}
}
}
}
return j;
}
Use of org.influxdb.dto.QueryResult.Series in project openems (by OpenEMS): class InfluxdbUtils, method queryHistoricData.
/**
 * Queries historic channel data from InfluxDB and converts it into a JsonArray
 * of per-timestamp objects shaped as
 * {"time": "...", "channels": {thingId: {channelId: value}}}.
 *
 * @param influxdb    connected InfluxDB client
 * @param database    InfluxDB database to query
 * @param influxIdOpt optional fems id; when present, rows are filtered by it
 * @param fromDate    lower bound of the time range (exclusive, epoch seconds)
 * @param toDate      upper bound of the time range (exclusive, epoch seconds)
 * @param channels    requested channels as {thingId: [channelId, ...]}
 * @param resolution  GROUP BY time-bucket size in seconds
 * @return one JsonObject per time bucket, in series order
 * @throws OpenemsException if the query fails
 */
public static JsonArray queryHistoricData(InfluxDB influxdb, String database, Optional<Integer> influxIdOpt, ZonedDateTime fromDate, ZonedDateTime toDate, JsonObject channels, int resolution) throws OpenemsException {
// Prepare query string
StringBuilder query = new StringBuilder("SELECT ");
query.append(toChannelAddressList(channels));
query.append(" FROM data WHERE ");
if (influxIdOpt.isPresent()) {
query.append("fems = '" + influxIdOpt.get() + "' AND ");
}
// time bounds use strict comparison and epoch seconds with the "s" suffix
query.append("time > ");
query.append(String.valueOf(fromDate.toEpochSecond()));
query.append("s");
query.append(" AND time < ");
query.append(String.valueOf(toDate.toEpochSecond()));
query.append("s");
// bucket rows into resolution-second groups; fill(null) leaves empty buckets
// as null values (unlike the fill(previous) variant elsewhere)
query.append(" GROUP BY time(");
query.append(resolution);
query.append("s) fill(null)");
QueryResult queryResult = executeQuery(influxdb, database, query.toString());
JsonArray j = new JsonArray();
for (Result result : queryResult.getResults()) {
// getSeries() is null when the result carries no series
List<Series> seriess = result.getSeries();
if (seriess != null) {
for (Series series : seriess) {
// create thing/channel index
// column 0 is "time", so addressIndex[i] maps to series column i+1
ArrayList<ChannelAddress> addressIndex = new ArrayList<>();
for (String column : series.getColumns()) {
if (column.equals("time")) {
continue;
}
addressIndex.add(ChannelAddress.fromString(column));
}
// first: create empty timestamp objects
for (List<Object> values : series.getValues()) {
JsonObject jTimestamp = new JsonObject();
// get timestamp
// the time column arrives as a Double of epoch milliseconds here
// (NOTE(review): depends on the client's epoch precision — confirm)
Instant timestampInstant = Instant.ofEpochMilli((long) ((Double) values.get(0)).doubleValue());
ZonedDateTime timestamp = ZonedDateTime.ofInstant(timestampInstant, fromDate.getZone());
String timestampString = timestamp.format(DateTimeFormatter.ISO_INSTANT);
jTimestamp.addProperty("time", timestampString);
// add empty channels by copying "channels" parameter
JsonObject jChannels = new JsonObject();
for (Entry<String, JsonElement> entry : channels.entrySet()) {
String thingId = entry.getKey();
JsonObject jThing = new JsonObject();
JsonArray channelIds = JsonUtils.getAsJsonArray(entry.getValue());
for (JsonElement channelElement : channelIds) {
String channelId = JsonUtils.getAsString(channelElement);
// placeholder; filled in by the data pass below
jThing.add(channelId, JsonNull.INSTANCE);
}
jChannels.add(thingId, jThing);
}
jTimestamp.add("channels", jChannels);
j.add(jTimestamp);
}
// then: add all data
// columnIndex starts at 1 to skip "time"; timeIndex matches the
// insertion order of the jTimestamp objects created above; with
// fill(null) the cell may be null, which ends up as JSON null
for (int columnIndex = 1; columnIndex < series.getColumns().size(); columnIndex++) {
for (int timeIndex = 0; timeIndex < series.getValues().size(); timeIndex++) {
Double value = (Double) series.getValues().get(timeIndex).get(columnIndex);
ChannelAddress address = addressIndex.get(columnIndex - 1);
j.get(timeIndex).getAsJsonObject().get("channels").getAsJsonObject().get(address.getThingId()).getAsJsonObject().addProperty(address.getChannelId(), value);
}
}
}
}
}
return j;
}
Use of org.influxdb.dto.QueryResult.Series in project beam (by Apache): class InfluxDbIOTest, method mockResult.
/**
 * Builds a mock InfluxDB {@link QueryResult} containing a single series named
 * {@code metricName} with {@code numberOfRecords} rows of (time, value), where
 * time is the formatted current date and value is a random int in [0, 100).
 *
 * @param metricName      name of the mocked series
 * @param numberOfRecords number of (time, value) rows to generate
 * @return a fully populated QueryResult with one Result holding one Series
 */
private QueryResult mockResult(String metricName, int numberOfRecords) {
    QueryResult queryResult = new QueryResult();
    QueryResult.Series series = new Series();
    series.setName(metricName);
    series.setColumns(Arrays.asList("time", "value"));
    // Hoisted out of the loop: the original constructed a new SimpleDateFormat
    // and a new Random on every iteration, which is pure overhead.
    SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ssZ");
    Random random = new Random();
    List<List<Object>> values = new ArrayList<>();
    for (int i = 0; i < numberOfRecords; i++) {
        List<Object> metricData = new ArrayList<>();
        metricData.add(simpleDateFormat.format(new Date()));
        metricData.add(random.nextInt(100));
        values.add(metricData);
    }
    series.setValues(values);
    List<QueryResult.Series> queryResultSeries = new ArrayList<>();
    queryResultSeries.add(series);
    QueryResult.Result result = new QueryResult.Result();
    result.setSeries(queryResultSeries);
    List<QueryResult.Result> listResult = new ArrayList<>();
    listResult.add(result);
    queryResult.setResults(listResult);
    return queryResult;
}
Use of org.influxdb.dto.QueryResult.Series in project beam (by Apache): class InfluxDbIO, method getDBShardedInformation.
/**
 * Runs "SHOW SHARDS" against InfluxDB and returns the shard information for
 * the given database, sorted by shard start date.
 *
 * @param database                     database whose shards are requested
 * @param configuration                connection configuration
 * @param disableCertificateValidation whether to skip TLS certificate checks
 * @return shard information sorted by start date (possibly empty)
 */
private static List<ShardInformation> getDBShardedInformation(String database, DataSourceConfiguration configuration, boolean disableCertificateValidation) {
    String query = "SHOW SHARDS";
    DBShardInformation dbInfo = new DBShardInformation();
    try (InfluxDB connection = getConnection(configuration, disableCertificateValidation)) {
        QueryResult queryResult = connection.query(new Query(query));
        List<Result> results = queryResult.getResults();
        for (Result result : results) {
            // Result.getSeries() returns null when the result carries no series;
            // the original iterated it unguarded and would throw an NPE here.
            List<Series> seriess = result.getSeries();
            if (seriess == null) {
                continue;
            }
            for (Series series : seriess) {
                dbInfo.loadShardInformation(database, series);
            }
        }
    }
    Collections.sort(dbInfo.getShardInformation(database), new ShardInformationByStartDate());
    return dbInfo.getShardInformation(database);
}
Aggregations