Use of io.cdap.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
The class LocalMRJobInfoFetcher, method getAggregates.
private void getAggregates(Map<String, String> tags, Map<String, String> metricsToCounters,
                           Map<String, Long> result) throws IOException {
  Collection<MetricTimeSeries> query = metricsSystemClient.query(tags, metricsToCounters.keySet());
  // initialize all counters to zero, so every counter appears in the result even if no data was emitted
  for (String counterName : metricsToCounters.values()) {
    result.put(counterName, 0L);
  }
  for (MetricTimeSeries metricTimeSeries : query) {
    List<TimeValue> timeValues = metricTimeSeries.getTimeValues();
    // an aggregate (totals) query returns exactly one time value per series
    TimeValue timeValue = Iterables.getOnlyElement(timeValues);
    result.put(metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue());
  }
}
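A hedged usage sketch of the helper above (a fragment that would live inside LocalMRJobInfoFetcher; the metric names, counter names, and tag values are placeholders, not actual CDAP identifiers): callers supply a mapping from metric names to the counter names they want reported, and the zero-initialization guarantees an entry for every counter even when the metrics system returned no series for it.
// Hypothetical invocation of getAggregates; all names below are placeholders.
// A real caller would also scope the query with run and task-type tags.
Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "SomeApp");
Map<String, String> metricsToCounters = ImmutableMap.of(
    "some.input.metric", "INPUT_RECORDS",
    "some.output.metric", "OUTPUT_RECORDS");
Map<String, Long> counters = new HashMap<>();
getAggregates(tags, metricsToCounters, counters);
// every counter name is present, defaulting to 0L if no data was emitted for its metric
long inputRecords = counters.get("INPUT_RECORDS");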
Use of io.cdap.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
The class LocalMRJobInfoFetcher, method queryGroupedAggregates.
// queries the MetricStore for one metric across all tasks of a certain TaskType,
// grouping by instance id so that each task yields its own time series
private void queryGroupedAggregates(Map<String, String> tags, Table<String, String, Long> allTaskMetrics,
                                    Map<String, String> metricsToCounters) throws IOException {
  Collection<MetricTimeSeries> query = metricsSystemClient.query(
    tags, metricsToCounters.keySet(), Collections.singleton(Constants.Metrics.Tag.INSTANCE_ID));
  for (MetricTimeSeries metricTimeSeries : query) {
    List<TimeValue> timeValues = metricTimeSeries.getTimeValues();
    // an aggregate query returns exactly one time value per series
    TimeValue timeValue = Iterables.getOnlyElement(timeValues);
    // the instance id tag identifies which task this series belongs to
    String taskId = metricTimeSeries.getTagValues().get(Constants.Metrics.Tag.INSTANCE_ID);
    allTaskMetrics.put(taskId, metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue());
  }
}
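The grouped variant fills a Guava Table whose rows are task instance ids and whose columns are counter names. A hedged sketch of how it might be consumed (again a fragment inside LocalMRJobInfoFetcher, with placeholder names):
// Hypothetical invocation of queryGroupedAggregates; metric/counter names and tag values are placeholders.
Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "SomeApp");
Table<String, String, Long> allTaskMetrics = HashBasedTable.create();
queryGroupedAggregates(tags, allTaskMetrics, ImmutableMap.of("some.task.metric", "TASK_RECORDS"));
// one row per task instance id, one column per counter name
for (Table.Cell<String, String, Long> cell : allTaskMetrics.cellSet()) {
  System.out.println("task " + cell.getRowKey() + ": " + cell.getColumnKey() + " = " + cell.getValue());
}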
Use of io.cdap.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
The class PreviewDataPipelineTest, method getTotalMetric.
private long getTotalMetric(Map<String, String> tags, String metricName, PreviewManager previewManager) {
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                              AggregationFunction.SUM, tags, new ArrayList<>());
  Collection<MetricTimeSeries> result = previewManager.getMetricsQueryHelper().getMetricStore().query(query);
  if (result.isEmpty()) {
    return 0;
  }
  // a totals query with no group-by tags yields at most one time series with a single value
  List<TimeValue> timeValues = result.iterator().next().getTimeValues();
  if (timeValues.isEmpty()) {
    return 0;
  }
  return timeValues.get(0).getValue();
}
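The MetricDataQuery with start 0, end 0, and resolution Integer.MAX_VALUE asks for a single aggregated (totals) value, summed over all data matching the given tags. A rough usage sketch, with a placeholder tag map and metric name that are not taken from the actual test:
// Hypothetical usage inside a preview test; tag values and the metric name are placeholders.
Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "preview-app-id");
long recordsOut = getTotalMetric(tags, "user.source.records.out", previewManager);
// a test would typically assert this against the number of records fed into the preview run
Assert.assertEquals(3L, recordsOut);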
Use of io.cdap.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
The class AppFabricTestBase, method getTotalMetric.
protected long getTotalMetric(String metricName, Map<String, String> tags) {
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, "system." + metricName,
                                              AggregationFunction.SUM, tags, Collections.emptyList());
  Collection<MetricTimeSeries> results = metricStore.query(query);
  if (results.isEmpty()) {
    return 0;
  }
  // since it is a totals query with no group-by specified, there is exactly one time series
  List<TimeValue> timeValues = results.iterator().next().getTimeValues();
  if (timeValues.isEmpty()) {
    return 0;
  }
  // since it is a totals query, there is only one value
  return timeValues.get(0).getValue();
}
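A test extending AppFabricTestBase could assert on a system metric through this helper; note that the helper prepends the "system." prefix itself, so callers pass the bare metric name. A hedged sketch using only tag and metric constants that already appear in these examples, with placeholder tag values and an arbitrary expected count:
// Hypothetical assertion; tag values and the expected count are placeholders.
Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "SomeApp",
    Constants.Metrics.Tag.DATASET, "myTable");
// pass the bare metric name; getTotalMetric prepends "system."
long opCount = getTotalMetric(Constants.Metrics.Name.Dataset.OP_COUNT, tags);
Assert.assertEquals(3L, opCount);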
Use of io.cdap.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
The class WorkerProgramRunnerTest, method testWorkerDatasetWithMetrics.
@Test
public void testWorkerDatasetWithMetrics() throws Throwable {
  final ApplicationWithPrograms app =
    AppFabricTestHelper.deployApplicationWithManager(AppWithWorker.class, TEMP_FOLDER_SUPPLIER);
  ProgramController controller = startProgram(app, AppWithWorker.TableWriter.class);

  // validate worker wrote the "initialize" and "run" rows
  final TransactionExecutor executor = txExecutorFactory.createExecutor(datasetCache);

  // wait at most 5 seconds until the "RUN" row is set (indicates the worker has started running)
  Tasks.waitFor(AppWithWorker.RUN, new Callable<String>() {
    @Override
    public String call() throws Exception {
      return executor.execute(new Callable<String>() {
        @Override
        public String call() throws Exception {
          KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
          return Bytes.toString(kvTable.read(AppWithWorker.RUN));
        }
      });
    }
  }, 5, TimeUnit.SECONDS);

  stopProgram(controller);

  txExecutorFactory.createExecutor(datasetCache.getTransactionAwares()).execute(
    new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
        Assert.assertEquals(AppWithWorker.RUN, Bytes.toString(kvTable.read(AppWithWorker.RUN)));
        Assert.assertEquals(AppWithWorker.INITIALIZE, Bytes.toString(kvTable.read(AppWithWorker.INITIALIZE)));
        Assert.assertEquals(AppWithWorker.STOP, Bytes.toString(kvTable.read(AppWithWorker.STOP)));
      }
    });

  // validate that the table emitted metrics
  Tasks.waitFor(3L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      Collection<MetricTimeSeries> metrics = metricStore.query(new MetricDataQuery(
        0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
        "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
        ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getEntityName(),
                        Constants.Metrics.Tag.APP, AppWithWorker.NAME,
                        Constants.Metrics.Tag.WORKER, AppWithWorker.WORKER,
                        Constants.Metrics.Tag.DATASET, AppWithWorker.DATASET),
        Collections.<String>emptyList()));
      if (metrics.isEmpty()) {
        return 0L;
      }
      Assert.assertEquals(1, metrics.size());
      MetricTimeSeries ts = metrics.iterator().next();
      Assert.assertEquals(1, ts.getTimeValues().size());
      return ts.getTimeValues().get(0).getValue();
    }
  }, 5L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
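Because dataset metrics are emitted asynchronously, the test polls with Tasks.waitFor until the summed op-count reaches 3. As a hedged sketch (a possible refactoring, not part of the original test), the metric lookup could be factored into a helper that uses only names already present above, which would keep the waitFor callable short:
// Hypothetical helper; mirrors the totals query in the test above.
private long getDatasetOpCount() {
  Collection<MetricTimeSeries> metrics = metricStore.query(new MetricDataQuery(
      0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
      "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
      ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getEntityName(),
                      Constants.Metrics.Tag.APP, AppWithWorker.NAME,
                      Constants.Metrics.Tag.WORKER, AppWithWorker.WORKER,
                      Constants.Metrics.Tag.DATASET, AppWithWorker.DATASET),
      Collections.<String>emptyList()));
  if (metrics.isEmpty()) {
    return 0L;
  }
  // a totals query returns a single series with a single value
  List<TimeValue> timeValues = metrics.iterator().next().getTimeValues();
  return timeValues.isEmpty() ? 0L : timeValues.get(0).getValue();
}
The wait could then be written as Tasks.waitFor(3L, this::getDatasetOpCount, 5L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS).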