Use of com.influxdb.client.QueryApi in the influxdata project nifi-influxdb-bundle:
class AbstractITInfluxDB_2, method init().
/**
 * Provisions a fresh InfluxDB 2.x test environment: finds the "my-org"
 * organization, creates a per-run bucket, mints a read/write token scoped to
 * that bucket, and reconnects the client with the restricted token.
 *
 * @throws Exception if provisioning against the InfluxDB instance fails
 */
protected void init() throws Exception {
// Bootstrap with the admin token to provision org, bucket and authorization.
influxDBClient = InfluxDBClientFactory.create(INFLUX_DB_2, "my-token".toCharArray());
organization = influxDBClient.getOrganizationsApi()
        .findOrganizations()
        .stream()
        .filter(org -> org.getName().equals("my-org"))
        .findFirst()
        .orElseThrow(IllegalStateException::new);
// Unique bucket name per run so repeated test executions never collide.
bucketName = "nifi-bucket-" + System.currentTimeMillis();
Bucket testBucket = influxDBClient.getBucketsApi().createBucket(bucketName, null, organization);
// Restrict the permission resource to exactly this bucket.
PermissionResource bucketResource = new PermissionResource();
bucketResource.setId(testBucket.getId());
bucketResource.setOrgID(organization.getId());
bucketResource.setType(PermissionResource.TypeEnum.BUCKETS);
// Grant read and write on the bucket.
Permission readPermission = new Permission();
readPermission.setResource(bucketResource);
readPermission.setAction(Permission.ActionEnum.READ);
Permission writePermission = new Permission();
writePermission.setResource(bucketResource);
writePermission.setAction(Permission.ActionEnum.WRITE);
Authorization authorization = influxDBClient.getAuthorizationsApi()
        .createAuthorization(organization, Arrays.asList(readPermission, writePermission));
String scopedToken = authorization.getToken();
// Reconnect with the restricted token so the tests run with least privilege.
influxDBClient.close();
InfluxDBClientOptions options = InfluxDBClientOptions.builder()
        .url(INFLUX_DB_2)
        .authenticateToken(scopedToken.toCharArray())
        .org(organization.getId())
        .bucket(testBucket.getId())
        .build();
influxDBClient = InfluxDBClientFactory.create(options);
queryApi = influxDBClient.getQueryApi();
}
Use of com.influxdb.client.QueryApi in the smarthomej project addons:
class InfluxDB2RepositoryImpl, method getStoredItemsCount().
/**
 * Returns all stored item names together with the number of points stored for
 * each.
 *
 * @return map of item name to stored point count; empty map when the query
 *         API is unavailable
 */
@Override
public Map<String, Integer> getStoredItemsCount() {
final QueryApi currentQueryAPI = queryAPI;
if (currentQueryAPI == null) {
logger.warn("Returning empty result because queryAPI isn't present");
return Collections.emptyMap();
}
Map<String, Integer> result = new LinkedHashMap<>();
// Hand-written Flux query, see https://github.com/influxdata/influxdb-client-java/issues/75
String query = "from(bucket: \"" + configuration.getRetentionPolicy() + "\")\n" + " |> range(start:-365d)\n" + " |> filter(fn: (r) => exists r." + TAG_ITEM_NAME + " )\n" + " |> group(columns: [\"" + TAG_ITEM_NAME + "\"], mode:\"by\")\n" + " |> count()\n" + " |> group()";
List<FluxTable> queryResult = currentQueryAPI.query(query);
// The final group() collapses everything into a single table; fall back to an
// empty table when the query returned nothing. Objects.requireNonNull was
// redundant here: orElse(...) can never yield null.
queryResult.stream().findFirst().orElse(new FluxTable()).getRecords().forEach(row -> {
result.put((String) row.getValueByKey(TAG_ITEM_NAME), ((Number) row.getValue()).intValue());
});
return result;
}
Use of com.influxdb.client.QueryApi in the apache project zeppelin:
class InfluxDBInterpreter, method internalInterpret().
/**
 * Executes the given Flux query asynchronously and renders each result table
 * as a Zeppelin %table block, blocking until the query completes or fails.
 *
 * @param query Flux source to execute (trimmed before submission)
 * @param context current interpreter context, used to look up the client
 * @return SUCCESS with the rendered tables, or ERROR carrying the failure message
 * @throws InterpreterException if the calling thread is interrupted while waiting
 */
@Override
protected InterpreterResult internalInterpret(String query, InterpreterContext context) throws InterpreterException {
LOGGER.debug("Run Flux command '{}'", query);
query = query.trim();
QueryApi queryService = getInfluxDBClient(context);
// Index of the flux table currently being rendered; -1 forces a header row
// for the very first table.
final int[] actualIndex = { -1 };
AtomicReference<InterpreterResult> resultRef = new AtomicReference<>();
CountDownLatch countDownLatch = new CountDownLatch(1);
StringBuilder result = new StringBuilder();
queryService.query(query, // process record
(cancellable, fluxRecord) -> {
Integer tableIndex = fluxRecord.getTable();
if (actualIndex[0] != tableIndex) {
// A new table started: emit the %table magic tag and the column headers.
result.append(NEWLINE);
result.append(TABLE_MAGIC_TAG);
actualIndex[0] = tableIndex;
// add column names to table header
StringJoiner joiner = new StringJoiner(TAB);
fluxRecord.getValues().keySet().forEach(c -> joiner.add(replaceReservedChars(c)));
result.append(joiner.toString());
result.append(NEWLINE);
}
// Render the record as one tab-separated row; nulls become the placeholder.
StringJoiner rowsJoiner = new StringJoiner(TAB);
for (Object value : fluxRecord.getValues().values()) {
if (value == null) {
value = EMPTY_COLUMN_VALUE;
}
rowsJoiner.add(replaceReservedChars(value.toString()));
}
result.append(rowsJoiner.toString());
result.append(NEWLINE);
}, throwable -> {
// on error: record the failure and release the waiting thread
LOGGER.error(throwable.getMessage(), throwable);
resultRef.set(new InterpreterResult(InterpreterResult.Code.ERROR, throwable.getMessage()));
countDownLatch.countDown();
}, () -> {
// on complete: publish the accumulated table text
InterpreterResult intpResult = new InterpreterResult(InterpreterResult.Code.SUCCESS);
intpResult.add(result.toString());
resultRef.set(intpResult);
countDownLatch.countDown();
});
try {
countDownLatch.await();
} catch (InterruptedException e) {
// Restore the interrupt status so callers up the stack can observe it
// (the original code swallowed it).
Thread.currentThread().interrupt();
throw new InterpreterException(e);
}
return resultRef.get();
}
Use of com.influxdb.client.QueryApi in the openhab project openhab-addons:
class InfluxDB2RepositoryImpl, method getStoredItemsCount().
/**
 * Returns all stored item names together with the number of points stored for
 * each.
 *
 * @return map of item name to stored point count; empty map when the query
 *         API is unavailable
 */
@Override
public Map<String, Integer> getStoredItemsCount() {
final QueryApi currentQueryAPI = queryAPI;
if (currentQueryAPI == null) {
logger.warn("Returning empty result because queryAPI isn't present");
return Collections.emptyMap();
}
// Hand-written Flux query, see https://github.com/influxdata/influxdb-client-java/issues/75
String query = "from(bucket: \"" + configuration.getRetentionPolicy() + "\")\n"
        + " |> range(start:-365d)\n"
        + " |> filter(fn: (r) => exists r." + TAG_ITEM_NAME + " )\n"
        + " |> group(columns: [\"" + TAG_ITEM_NAME + "\"], mode:\"by\")\n"
        + " |> count()\n"
        + " |> group()";
List<FluxTable> tables = currentQueryAPI.query(query);
// The trailing group() merges everything into a single table; use an empty
// table when the query produced no output at all.
Map<String, Integer> counts = new LinkedHashMap<>();
tables.stream().findFirst().orElse(new FluxTable()).getRecords().forEach(row -> counts
        .put((String) row.getValueByKey(TAG_ITEM_NAME), ((Number) row.getValue()).intValue()));
return counts;
}
Use of com.influxdb.client.QueryApi in the influxdata project nifi-influxdb-bundle:
class AbstractTestGetInfluxDatabaseRecord_2, method before().
@Before
public void before() throws IOException, GeneralSecurityException, InitializationException {
// Mock the InfluxDB client so no real server is required; the processor under
// test obtains its QueryApi from this client.
InfluxDBClient mockInfluxDBClient = Mockito.mock(InfluxDBClient.class);
QueryApi mockQueryApi = Mockito.mock(QueryApi.class);
Mockito.doAnswer(invocation -> mockQueryApi).when(mockInfluxDBClient).getQueryApi();
// Stub the async query(query, org, onRecord, onError, onComplete) call so it
// drives the callbacks synchronously from the test's configured fixtures.
// Argument indices below must match that signature: 2 = onRecord,
// 3 = onError, 4 = onComplete.
Mockito.doAnswer(invocation -> {
// Simulate a query failure first, if the test configured one.
if (queryOnErrorValue != null) {
// noinspection unchecked
Consumer<Exception> onError = invocation.getArgument(3, Consumer.class);
onError.accept(queryOnErrorValue);
}
// Replay each pre-canned FluxRecord through the record callback.
queryOnResponseRecords.forEach(record -> {
// noinspection unchecked
BiConsumer<Cancellable, FluxRecord> onRecord = invocation.getArgument(2, BiConsumer.class);
onRecord.accept(Mockito.mock(Cancellable.class), record);
});
boolean wasException = queryOnErrorValue != null;
try {
return queryAnswer.answer(invocation);
} catch (Exception e) {
wasException = true;
throw e;
} finally {
// Mirror the real client: onComplete fires only when no error occurred.
if (!wasException) {
Runnable onComplete = invocation.getArgument(4, Runnable.class);
onComplete.run();
}
}
}).when(mockQueryApi).query(Mockito.any(Query.class), Mockito.anyString(), Mockito.any(), Mockito.any(), Mockito.any(Runnable.class));
// Wire the processor into a NiFi test runner with the mocked InfluxDB service.
processor = Mockito.spy(new GetInfluxDatabaseRecord_2());
runner = TestRunners.newTestRunner(processor);
runner.setProperty(GetInfluxDatabaseRecord_2.ORG, "my-org");
runner.setProperty(GetInfluxDatabaseRecord_2.QUERY, "from(bucket:\"my-bucket\") |> range(start: 0) |> last()");
runner.setProperty(GetInfluxDatabaseRecord_2.INFLUX_DB_SERVICE, "influxdb-service");
runner.setProperty(GetInfluxDatabaseRecord_2.WRITER_FACTORY, "writer");
// Controller service whose create() hands back the mocked client.
InfluxDatabaseService_2 influxDatabaseService = Mockito.spy(new StandardInfluxDatabaseService_2());
Mockito.doAnswer(invocation -> mockInfluxDBClient).when(influxDatabaseService).create();
runner.addControllerService("influxdb-service", influxDatabaseService);
runner.setProperty(influxDatabaseService, INFLUX_DB_ACCESS_TOKEN, "my-token");
runner.enableControllerService(influxDatabaseService);
// Record writer that captures written records and passes the read schema through.
writer = new ArrayListRecordWriter(null) {
@Override
public RecordSchema getSchema(final Map<String, String> variables, final RecordSchema readSchema) {
return readSchema;
}
};
runner.addControllerService("writer", writer);
runner.enableControllerService(writer);
// Initialize and schedule the processor exactly as the framework would.
MockProcessContext context = new MockProcessContext(processor);
MockProcessorInitializationContext initContext = new MockProcessorInitializationContext(processor, context);
logger = initContext.getLogger();
processor.initialize(initContext);
processor.onScheduled(runner.getProcessContext());
processor.initWriterFactory(runner.getProcessContext());
}
Aggregations