Use of co.cask.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.
From the class FlowTest, method getPending:
private static long getPending(Map<String, String> tags) throws Exception {
  // Aggregate the "system.queue.pending" gauge over all time (0 .. Integer.MAX_VALUE).
  // A resolution of Integer.MAX_VALUE collapses the range into a single data point;
  // the empty group-by list means one series is returned for the whole tag slice.
  MetricDataQuery metricDataQuery =
    new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.queue.pending",
                        AggregationFunction.SUM, tags, ImmutableList.<String>of());
  Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery);
  if (query.isEmpty()) {
    return 0;
  }
  MetricTimeSeries timeSeries = Iterables.getOnlyElement(query);
  List<TimeValue> timeValues = timeSeries.getTimeValues();
  TimeValue timeValue = Iterables.getOnlyElement(timeValues);
  return timeValue.getValue();
}
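A minimal sketch of how such a helper might be used in a test, polling until a flow's input queue drains. The tag names and values here are illustrative assumptions, not taken from FlowTest itself; the six-argument Tasks.waitFor overload is the same one used in the SparkTestRun example further down.

  // Hedged sketch: poll until the pending-events gauge for a hypothetical flow
  // reaches zero. The tag keys/values below are assumptions for illustration.
  final Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "WordCountApp",
    Constants.Metrics.Tag.FLOW, "WordCountFlow");
  Tasks.waitFor(0L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      return getPending(tags);
    }
  }, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);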
Use of co.cask.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.
From the class LocalMRJobInfoFetcher, method getAggregates:
private void getAggregates(Map<String, String> tags, Map<String, String> metricsToCounters,
                           Map<String, Long> result) {
  Map<String, AggregationFunction> metrics = Maps.newHashMap();
  // all map-reduce metrics are gauges
  for (String metric : metricsToCounters.keySet()) {
    metrics.put(metric, AggregationFunction.LATEST);
  }
  MetricDataQuery metricDataQuery =
    new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags,
                        ImmutableList.<String>of());
  Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery);
  // initialize elements to zero
  for (String counterName : metricsToCounters.values()) {
    result.put(counterName, 0L);
  }
  for (MetricTimeSeries metricTimeSeries : query) {
    List<TimeValue> timeValues = metricTimeSeries.getTimeValues();
    TimeValue timeValue = Iterables.getOnlyElement(timeValues);
    result.put(metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue());
  }
}
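A hedged sketch of a call site follows. The metric-to-counter mapping shown is hypothetical; the real mappings are defined elsewhere in LocalMRJobInfoFetcher and may use different names. The 'tags' map identifying the MapReduce run is assumed to be in scope.

  // Hypothetical metric-to-counter mapping, for illustration only.
  Map<String, String> metricsToCounters = ImmutableMap.of(
    "process.entries.in", "MAP_INPUT_RECORDS",
    "process.entries.out", "MAP_OUTPUT_RECORDS");
  Map<String, Long> result = Maps.newHashMap();
  getAggregates(tags, metricsToCounters, result);
  // result now maps each counter name to its latest gauge value, or 0 if
  // the metric store has no data for that metric yet.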
Use of co.cask.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.
From the class LocalMRJobInfoFetcher, method queryGroupedAggregates:
// queries MetricStore for one metric across all tasks of a certain TaskType, using GroupBy InstanceId
private void queryGroupedAggregates(Map<String, String> tags, Table<String, String, Long> allTaskMetrics,
                                    Map<String, String> metricsToCounters) {
  Map<String, AggregationFunction> metrics = Maps.newHashMap();
  // all map-reduce metrics are gauges
  for (String metric : metricsToCounters.keySet()) {
    metrics.put(metric, AggregationFunction.LATEST);
  }
  MetricDataQuery metricDataQuery =
    new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags,
                        ImmutableList.of(Constants.Metrics.Tag.INSTANCE_ID));
  Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery);
  for (MetricTimeSeries metricTimeSeries : query) {
    List<TimeValue> timeValues = metricTimeSeries.getTimeValues();
    TimeValue timeValue = Iterables.getOnlyElement(timeValues);
    String taskId = metricTimeSeries.getTagValues().get(Constants.Metrics.Tag.INSTANCE_ID);
    allTaskMetrics.put(taskId, metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue());
  }
}
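Because the query groups by Constants.Metrics.Tag.INSTANCE_ID, the store returns one MetricTimeSeries per task instance, each carrying its instance id in the series' tag values. A minimal sketch of reading back the populated Guava Table, assuming 'tags' and 'metricsToCounters' from the previous example are in scope:

  // Row key = task instance id, column key = counter name,
  // value = the task's latest gauge reading.
  Table<String, String, Long> allTaskMetrics = HashBasedTable.create();
  queryGroupedAggregates(tags, allTaskMetrics, metricsToCounters);
  for (Table.Cell<String, String, Long> cell : allTaskMetrics.cellSet()) {
    System.out.printf("task %s: %s = %d%n",
                      cell.getRowKey(), cell.getColumnKey(), cell.getValue());
  }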
Use of co.cask.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.
From the class SparkTestRun, method testSparkWithObjectStore:
@Test
public void testSparkWithObjectStore() throws Exception {
  ApplicationManager applicationManager = deploy(SparkAppUsingObjectStore.class);
  DataSetManager<ObjectStore<String>> keysManager = getDataset("keys");
  prepareInputData(keysManager);
  SparkManager sparkManager =
    applicationManager.getSparkManager(CharCountProgram.class.getSimpleName()).start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 1, TimeUnit.MINUTES);
  DataSetManager<KeyValueTable> countManager = getDataset("count");
  checkOutputData(countManager);
  // validate that the table emitted metrics
  // one read + one write in beforeSubmit(), increment (= read + write) in main -> 4
  Tasks.waitFor(4L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      Collection<MetricTimeSeries> metrics = getMetricsManager().query(
        new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                            "system." + Constants.Metrics.Name.Dataset.OP_COUNT,
                            AggregationFunction.SUM,
                            ImmutableMap.of(
                              Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                              Constants.Metrics.Tag.APP, SparkAppUsingObjectStore.class.getSimpleName(),
                              Constants.Metrics.Tag.SPARK, CharCountProgram.class.getSimpleName(),
                              Constants.Metrics.Tag.DATASET, "totals"),
                            Collections.<String>emptyList()));
      if (metrics.isEmpty()) {
        return 0L;
      }
      Assert.assertEquals(1, metrics.size());
      MetricTimeSeries ts = metrics.iterator().next();
      Assert.assertEquals(1, ts.getTimeValues().size());
      return ts.getTimeValues().get(0).getValue();
    }
  }, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
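The Callable above repeats the same single-value pattern as the earlier examples: expect at most one series with exactly one data point, and treat an empty result as zero. That pattern could be factored into a small helper, sketched here under the assumption that it lives in the same test class:

  // Hedged helper, extracted from the pattern above: returns the single
  // aggregated value of a total-aggregation query, or 0 if no data exists yet.
  private long getSingleValue(MetricDataQuery query) {
    Collection<MetricTimeSeries> metrics = getMetricsManager().query(query);
    if (metrics.isEmpty()) {
      return 0L;
    }
    MetricTimeSeries ts = Iterables.getOnlyElement(metrics);
    return Iterables.getOnlyElement(ts.getTimeValues()).getValue();
  }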
Use of co.cask.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.
From the class AbstractStreamService, method getStreamEventsSize:
/**
 * Gets the size of events ingested by a stream since its creation, in bytes.
 *
 * @param streamId id of the stream
 * @return size of events ingested by the stream since its creation
 * @throws IOException if an error occurs while retrieving the metric
 */
protected long getStreamEventsSize(StreamId streamId) throws IOException {
  MetricDataQuery metricDataQuery =
    new MetricDataQuery(0L, TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis()),
                        Integer.MAX_VALUE, "system.collect.bytes", AggregationFunction.SUM,
                        ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, streamId.getNamespace(),
                                        Constants.Metrics.Tag.STREAM, streamId.getEntityName()),
                        ImmutableList.<String>of());
  try {
    Collection<MetricTimeSeries> metrics = metricStore.query(metricDataQuery);
    if (metrics == null || metrics.isEmpty()) {
      // Data is not yet available, which means no data has been ingested by the stream yet
      return 0L;
    }
    MetricTimeSeries metric = metrics.iterator().next();
    List<TimeValue> timeValues = metric.getTimeValues();
    if (timeValues == null || timeValues.size() != 1) {
      throw new IOException("Should collect exactly one time value");
    }
    return timeValues.get(0).getValue();
  } catch (Exception e) {
    Throwables.propagateIfInstanceOf(e, IOException.class);
    throw new IOException(e);
  }
}
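A minimal sketch of a call site, assuming a hypothetical stream in the default namespace; the stream name is an illustrative assumption:

  // Hypothetical call site: check how many bytes a stream has ingested so far.
  StreamId streamId = new StreamId("default", "purchaseStream");
  long ingestedBytes = getStreamEventsSize(streamId);
  // ingestedBytes stays 0 until the stream collects its first events.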