Use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.
The class PreviewDataPipelineTest, method getTotalMetric.
private long getTotalMetric(Map<String, String> tags, String metricName, PreviewManager previewManager) {
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                              AggregationFunction.SUM, tags, new ArrayList<>());
  Collection<MetricTimeSeries> result = previewManager.getMetricsQueryHelper().getMetricStore().query(query);
  if (result.isEmpty()) {
    return 0;
  }
  List<TimeValue> timeValues = result.iterator().next().getTimeValues();
  if (timeValues.isEmpty()) {
    return 0;
  }
  return timeValues.get(0).getValue();
}
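A minimal sketch of how this helper might be called from a preview test; the tag keys reuse Constants.Metrics.Tag as in the other examples on this page, while the concrete tag values and the metric name are placeholders, not values taken from the project:

// Hypothetical call site; the tag values and the metric name are illustrative placeholders.
Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "MyPreviewApp");
long outputRecords = getTotalMetric(tags, "user.records.out", previewManager);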
Use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.
The class AppFabricTestBase, method getTotalMetric.
protected long getTotalMetric(String metricName, Map<String, String> tags) {
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, "system." + metricName,
                                              AggregationFunction.SUM, tags, Collections.emptyList());
  Collection<MetricTimeSeries> results = metricStore.query(query);
  if (results.isEmpty()) {
    return 0;
  }
  // since it is a totals query and no groupBy is specified, we know there is exactly one time series
  List<TimeValue> timeValues = results.iterator().next().getTimeValues();
  if (timeValues.isEmpty()) {
    return 0;
  }
  // since it is a totals query, there is only one value
  return timeValues.get(0).getValue();
}
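Passing 0 for both the start and end time together with a resolution of Integer.MAX_VALUE makes this a "totals" (aggregate) query, which is why at most one time series with a single value comes back. For contrast, the sketch below queries the same kind of total grouped by application by supplying a group-by tag in the final constructor argument; it is illustrative only and assumes the same metricStore field plus MetricTimeSeries.getTagValues() for reading back the grouped tag value:

// Illustrative sketch (not from the project): totals for one metric, one entry per application.
protected Map<String, Long> getTotalsByApp(String metricName, Map<String, String> tags) {
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, "system." + metricName,
                                              AggregationFunction.SUM, tags,
                                              Collections.singletonList(Constants.Metrics.Tag.APP));
  Map<String, Long> totals = new HashMap<>();
  for (MetricTimeSeries series : metricStore.query(query)) {
    String app = series.getTagValues().get(Constants.Metrics.Tag.APP);
    List<TimeValue> timeValues = series.getTimeValues();
    totals.put(app, timeValues.isEmpty() ? 0L : timeValues.get(0).getValue());
  }
  return totals;
}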
Use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.
The class WorkerProgramRunnerTest, method testWorkerDatasetWithMetrics.
@Test
public void testWorkerDatasetWithMetrics() throws Throwable {
  final ApplicationWithPrograms app =
    AppFabricTestHelper.deployApplicationWithManager(AppWithWorker.class, TEMP_FOLDER_SUPPLIER);
  ProgramController controller = startProgram(app, AppWithWorker.TableWriter.class);

  // validate worker wrote the "initialize" and "run" rows
  final TransactionExecutor executor = txExecutorFactory.createExecutor(datasetCache);

  // wait at most 5 seconds until the "RUN" row is set (indicates the worker has started running)
  Tasks.waitFor(AppWithWorker.RUN, new Callable<String>() {
    @Override
    public String call() throws Exception {
      return executor.execute(new Callable<String>() {
        @Override
        public String call() throws Exception {
          KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
          return Bytes.toString(kvTable.read(AppWithWorker.RUN));
        }
      });
    }
  }, 5, TimeUnit.SECONDS);

  stopProgram(controller);

  txExecutorFactory.createExecutor(datasetCache.getTransactionAwares()).execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
      Assert.assertEquals(AppWithWorker.RUN, Bytes.toString(kvTable.read(AppWithWorker.RUN)));
      Assert.assertEquals(AppWithWorker.INITIALIZE, Bytes.toString(kvTable.read(AppWithWorker.INITIALIZE)));
      Assert.assertEquals(AppWithWorker.STOP, Bytes.toString(kvTable.read(AppWithWorker.STOP)));
    }
  });

  // validate that the table emitted metrics
  Tasks.waitFor(3L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      Collection<MetricTimeSeries> metrics = metricStore.query(new MetricDataQuery(
        0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
        "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
        ImmutableMap.of(
          Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getEntityName(),
          Constants.Metrics.Tag.APP, AppWithWorker.NAME,
          Constants.Metrics.Tag.WORKER, AppWithWorker.WORKER,
          Constants.Metrics.Tag.DATASET, AppWithWorker.DATASET),
        Collections.<String>emptyList()));
      if (metrics.isEmpty()) {
        return 0L;
      }
      Assert.assertEquals(1, metrics.size());
      MetricTimeSeries ts = metrics.iterator().next();
      Assert.assertEquals(1, ts.getTimeValues().size());
      return ts.getTimeValues().get(0).getValue();
    }
  }, 5L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
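The nested anonymous Callable classes above can be written more compactly with Java 8 lambdas. A behavior-equivalent sketch of the first wait, assuming the same executor, datasetCache and AppWithWorker constants are in scope:

// Sketch only: the same wait expressed with lambdas; the inner lambda returns a value,
// so it binds to TransactionExecutor.execute(Callable) rather than the Subroutine overload.
Tasks.waitFor(AppWithWorker.RUN, () -> executor.execute(() -> {
  KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
  return Bytes.toString(kvTable.read(AppWithWorker.RUN));
}), 5, TimeUnit.SECONDS);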
Use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.
The class MetricsQueryHelper, method executeQuery.
private MetricQueryResult executeQuery(MetricQueryRequest queryRequest) throws Exception {
  if (queryRequest.getMetrics().size() == 0) {
    throw new IllegalArgumentException("Missing metrics parameter in the query");
  }
  MetricQueryRequest.TimeRange timeRange = queryRequest.getTimeRange();
  AggregationOption aggregation = timeRange.getAggregation();
  if (timeRange.getCount() <= 0) {
    throw new IllegalArgumentException("Invalid metrics aggregation request, the limit must be greater than 0");
  }
  Map<String, String> tagsSliceBy = humanToTagNames(transformTagMap(queryRequest.getTags()));
  MetricDataQuery query = new MetricDataQuery(timeRange.getStart(), timeRange.getEnd(),
                                              timeRange.getResolutionInSeconds(), timeRange.getCount(),
                                              toMetrics(queryRequest.getMetrics()), tagsSliceBy,
                                              transformGroupByTags(queryRequest.getGroupBy()),
                                              aggregation, timeRange.getInterpolate());
  Collection<MetricTimeSeries> queryResult = metricStore.query(query);
  long endTime = timeRange.getEnd();
  if (timeRange.getResolutionInSeconds() == Integer.MAX_VALUE && endTime == 0) {
    // for an aggregate query, set the end time to the query time (current time)
    endTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
  }
  return decorate(queryResult, timeRange.getStart(), endTime, timeRange.getResolutionInSeconds());
}
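executeQuery uses the richer MetricDataQuery constructor that also carries a limit, multiple metrics, group-by tags, an AggregationOption and an interpolator. For a plain time series, the shorter constructor seen in the other examples on this page also accepts a real time range and a finite resolution; a sketch, with the namespace value as a placeholder and the same metricStore field assumed:

// Illustrative sketch (not from the project): per-minute SUM of dataset operations over the last hour.
long end = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
long start = end - TimeUnit.HOURS.toSeconds(1);
MetricDataQuery query = new MetricDataQuery(start, end, 60,
                                            "system." + Constants.Metrics.Name.Dataset.OP_COUNT,
                                            AggregationFunction.SUM,
                                            ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default"),
                                            Collections.emptyList());
long total = 0;
for (MetricTimeSeries series : metricStore.query(query)) {
  for (TimeValue timeValue : series.getTimeValues()) {
    total += timeValue.getValue();
  }
}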
Use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.
The class SparkMetricsIntegrationTestRun, method getTotalCounter.
private long getTotalCounter(Map<String, String> context, String metricName) throws Exception {
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                              AggregationFunction.SUM, context, new ArrayList<String>());
  try {
    Collection<MetricTimeSeries> result = getMetricsManager().query(query);
    if (result.isEmpty()) {
      return 0;
    }
    // since it is a totals query and no groupBy is specified, we know there is exactly one time series
    List<TimeValue> timeValues = result.iterator().next().getTimeValues();
    if (timeValues.isEmpty()) {
      return 0;
    }
    // since it is a totals query, there is only one value
    return timeValues.get(0).getValue();
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
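A minimal sketch of a call site for this helper; the app name and the metric name are placeholders rather than names from the project, and program-level tags (for example the Spark program name) could be added to narrow the context further:

// Hypothetical usage; "SparkTestApp" and "system.reads" are illustrative placeholders.
Map<String, String> context = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "SparkTestApp");
long reads = getTotalCounter(context, "system.reads");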