Use of com.influxdb.query.FluxRecord in project IginX by thulab — class InfluxDBQueryRowStream, method next().
@Override
public Row next() throws PhysicalException {
long timestamp = Long.MAX_VALUE;
for (int i = 0; i < this.tables.size(); i++) {
int index = indices[i];
FluxTable table = this.tables.get(i);
List<FluxRecord> records = table.getRecords();
if (index == records.size()) {
// 数据已经消费完毕了
continue;
}
FluxRecord record = records.get(index);
timestamp = Math.min(record.getTime().toEpochMilli(), timestamp);
}
if (timestamp == Long.MAX_VALUE) {
return null;
}
Object[] values = new Object[this.tables.size()];
for (int i = 0; i < this.tables.size(); i++) {
int index = indices[i];
FluxTable table = this.tables.get(i);
List<FluxRecord> records = table.getRecords();
if (index == records.size()) {
// 数据已经消费完毕了
continue;
}
FluxRecord record = records.get(index);
if (record.getTime().toEpochMilli() == timestamp) {
DataType dataType = header.getField(i).getType();
Object value = record.getValue();
if (dataType == DataType.BINARY) {
value = ((String) value).getBytes();
}
values[i] = value;
indices[i]++;
if (indices[i] == records.size()) {
hasMoreRecords--;
}
}
}
return new Row(header, timestamp, values);
}
Use of com.influxdb.query.FluxRecord in project oner365-cloud by xiaozhao32 — class InfluxdbTest, method queryData().
/**
* 查询
*/
public static void queryData(InfluxDBClient client, String bucket, String org) {
String query = String.format("from(bucket: \"%s\") |> range(start: 0) |> filter(fn: (r) => r._measurement == \"%s\")", bucket, "mem");
List<FluxTable> tables = client.getQueryApi().query(query, org);
for (FluxTable table : tables) {
for (FluxRecord record : table.getRecords()) {
LOGGER.info("Table:{}, Measurement:{}, Field:{}, Value:{}", record.getTable(), record.getMeasurement(), record.getField(), record.getValue());
}
}
}
Use of com.influxdb.query.FluxRecord in project openhab-addons by openhab — class InfluxDBClientFacadeMock, method query().
@Override
public void query(String queryString, BiConsumer<Cancellable, FluxRecord> onNext, Consumer<? super Throwable> onError, Runnable onComplete) {
if (!connected) {
throw new DatabaseException("Client not connected");
}
if (INVALID_QUERY.equals(queryString)) {
onError.accept(new RuntimeException("Invalid query"));
} else if (EMPTY_QUERY.equals(queryString)) {
onComplete.run();
} else if (SCALAR_QUERY.equals(queryString)) {
FluxRecord scalar = new FluxRecord(0);
scalar.getValues().put("result", "_result");
scalar.getValues().put("table", 0);
scalar.getValues().put(VALUE_COLUMN, SCALAR_RESULT);
onNext.accept(mock(Cancellable.class), scalar);
onComplete.run();
} else if (MULTIPLE_ROWS_QUERY.equals(queryString)) {
onNext.accept(mock(Cancellable.class), createRowRecord(0, MULTIPLE_ROWS_VALUE_PREFIX + 1));
onNext.accept(mock(Cancellable.class), createRowRecord(0, MULTIPLE_ROWS_VALUE_PREFIX + 2));
onNext.accept(mock(Cancellable.class), createRowRecord(1, MULTIPLE_ROWS_VALUE_PREFIX + 3));
onComplete.run();
}
}
Use of com.influxdb.query.FluxRecord in project openhab-addons by openhab — class InfluxDBClientFacadeMock, method createRowRecord().
private static FluxRecord createRowRecord(int table, String value) {
FluxRecord record = new FluxRecord(0);
record.getValues().put("result", "_result");
record.getValues().put("table", table);
record.getValues().put(VALUE_COLUMN, value);
record.getValues().put(TIME_COLUMN, Instant.now());
record.getValues().put("_start", Instant.now());
record.getValues().put("_stop", Instant.now());
record.getValues().put("_measurement", "measurementName");
return record;
}
Use of com.influxdb.query.FluxRecord in project nifi-influxdb-bundle by influxdata — class AbstractTestGetInfluxDatabaseRecord_2, method before().
@Before
public void before() throws IOException, GeneralSecurityException, InitializationException {
InfluxDBClient mockInfluxDBClient = Mockito.mock(InfluxDBClient.class);
QueryApi mockQueryApi = Mockito.mock(QueryApi.class);
Mockito.doAnswer(invocation -> mockQueryApi).when(mockInfluxDBClient).getQueryApi();
Mockito.doAnswer(invocation -> {
if (queryOnErrorValue != null) {
// noinspection unchecked
Consumer<Exception> onError = invocation.getArgument(3, Consumer.class);
onError.accept(queryOnErrorValue);
}
queryOnResponseRecords.forEach(record -> {
// noinspection unchecked
BiConsumer<Cancellable, FluxRecord> onRecord = invocation.getArgument(2, BiConsumer.class);
onRecord.accept(Mockito.mock(Cancellable.class), record);
});
boolean wasException = queryOnErrorValue != null;
try {
return queryAnswer.answer(invocation);
} catch (Exception e) {
wasException = true;
throw e;
} finally {
if (!wasException) {
Runnable onComplete = invocation.getArgument(4, Runnable.class);
onComplete.run();
}
}
}).when(mockQueryApi).query(Mockito.any(Query.class), Mockito.anyString(), Mockito.any(), Mockito.any(), Mockito.any(Runnable.class));
processor = Mockito.spy(new GetInfluxDatabaseRecord_2());
runner = TestRunners.newTestRunner(processor);
runner.setProperty(GetInfluxDatabaseRecord_2.ORG, "my-org");
runner.setProperty(GetInfluxDatabaseRecord_2.QUERY, "from(bucket:\"my-bucket\") |> range(start: 0) |> last()");
runner.setProperty(GetInfluxDatabaseRecord_2.INFLUX_DB_SERVICE, "influxdb-service");
runner.setProperty(GetInfluxDatabaseRecord_2.WRITER_FACTORY, "writer");
InfluxDatabaseService_2 influxDatabaseService = Mockito.spy(new StandardInfluxDatabaseService_2());
Mockito.doAnswer(invocation -> mockInfluxDBClient).when(influxDatabaseService).create();
runner.addControllerService("influxdb-service", influxDatabaseService);
runner.setProperty(influxDatabaseService, INFLUX_DB_ACCESS_TOKEN, "my-token");
runner.enableControllerService(influxDatabaseService);
writer = new ArrayListRecordWriter(null) {
@Override
public RecordSchema getSchema(final Map<String, String> variables, final RecordSchema readSchema) {
return readSchema;
}
};
runner.addControllerService("writer", writer);
runner.enableControllerService(writer);
MockProcessContext context = new MockProcessContext(processor);
MockProcessorInitializationContext initContext = new MockProcessorInitializationContext(processor, context);
logger = initContext.getLogger();
processor.initialize(initContext);
processor.onScheduled(runner.getProcessContext());
processor.initWriterFactory(runner.getProcessContext());
}
Aggregations