use of co.cask.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
the class PreviewDataStreamsTest method getTotalMetric.
private long getTotalMetric(Map<String, String> tags, String metricName, PreviewRunner runner) {
  // Totals query: start and end of 0 with a resolution of Integer.MAX_VALUE aggregates everything into a single value.
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                              AggregationFunction.SUM, tags, new ArrayList<String>());
  Collection<MetricTimeSeries> result = runner.getMetricsQueryHelper().getMetricStore().query(query);
  if (result.isEmpty()) {
    return 0;
  }
  // No group-by tags, so the query returns at most one time series.
  List<TimeValue> timeValues = result.iterator().next().getTimeValues();
  if (timeValues.isEmpty()) {
    return 0;
  }
  return timeValues.get(0).getValue();
}
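A minimal usage sketch, assuming a test that already holds a PreviewRunner; the tag values, the metric name, and the previewRunner and expectedRecords variables are illustrative assumptions, not taken from the CDAP source.
// Hypothetical call site: sum the records emitted by a preview source stage.
Map<String, String> tags = ImmutableMap.of(
  Constants.Metrics.Tag.NAMESPACE, "default",
  Constants.Metrics.Tag.APP, "MyPreviewApp");
long recordsOut = getTotalMetric(tags, "user.source.records.out", previewRunner);
Assert.assertEquals(expectedRecords, recordsOut);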
use of co.cask.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
the class PreviewDataPipelineTest method getTotalMetric.
private long getTotalMetric(Map<String, String> tags, String metricName, PreviewRunner runner) {
  // Totals query: start and end of 0 with a resolution of Integer.MAX_VALUE aggregates everything into a single value.
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                              AggregationFunction.SUM, tags, new ArrayList<String>());
  Collection<MetricTimeSeries> result = runner.getMetricsQueryHelper().getMetricStore().query(query);
  if (result.isEmpty()) {
    return 0;
  }
  // No group-by tags, so the query returns at most one time series.
  List<TimeValue> timeValues = result.iterator().next().getTimeValues();
  if (timeValues.isEmpty()) {
    return 0;
  }
  return timeValues.get(0).getValue();
}
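The helper always issues a totals query (start and end of 0, resolution Integer.MAX_VALUE). A small variation, assuming the same MetricDataQuery constructor, that limits the query to the last hour at 60-second resolution might look like this; the time window and resolution are illustrative, not taken from the source.
// Sketch only: same query shape, restricted to a recent time window.
long end = System.currentTimeMillis() / 1000L;
long start = end - TimeUnit.HOURS.toSeconds(1);
MetricDataQuery lastHour = new MetricDataQuery(start, end, 60, metricName,
                                               AggregationFunction.SUM, tags,
                                               new ArrayList<String>());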
use of co.cask.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
the class SparkTestRun method testSparkWithObjectStore.
@Test
public void testSparkWithObjectStore() throws Exception {
  ApplicationManager applicationManager = deploy(SparkAppUsingObjectStore.class);
  DataSetManager<ObjectStore<String>> keysManager = getDataset("keys");
  prepareInputData(keysManager);
  SparkManager sparkManager =
    applicationManager.getSparkManager(CharCountProgram.class.getSimpleName()).start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 1, TimeUnit.MINUTES);
  DataSetManager<KeyValueTable> countManager = getDataset("count");
  checkOutputData(countManager);
  // validate that the table emitted metrics
  // one read + one write in beforeSubmit(), increment (= read + write) in main -> 4
  Tasks.waitFor(4L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      Collection<MetricTimeSeries> metrics = getMetricsManager().query(new MetricDataQuery(
        0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
        "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
        ImmutableMap.of(
          Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
          Constants.Metrics.Tag.APP, SparkAppUsingObjectStore.class.getSimpleName(),
          Constants.Metrics.Tag.SPARK, CharCountProgram.class.getSimpleName(),
          Constants.Metrics.Tag.DATASET, "totals"),
        Collections.<String>emptyList()));
      if (metrics.isEmpty()) {
        return 0L;
      }
      Assert.assertEquals(1, metrics.size());
      MetricTimeSeries ts = metrics.iterator().next();
      Assert.assertEquals(1, ts.getTimeValues().size());
      return ts.getTimeValues().get(0).getValue();
    }
  }, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
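When a query is not guaranteed to return exactly one series, a small helper that sums every value across all returned series can replace the single-series assertion above. This is a sketch, not part of the CDAP test.
// Sketch: aggregate across all series and all data points.
private long sumAllValues(Collection<MetricTimeSeries> series) {
  long total = 0;
  for (MetricTimeSeries ts : series) {
    for (TimeValue tv : ts.getTimeValues()) {
      total += tv.getValue();
    }
  }
  return total;
}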
use of co.cask.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
the class CDAPTransactions method collect.
@Override
public void collect() throws Exception {
  // Query the transaction manager's system metrics for committing/committed change-set sizes.
  Collection<MetricTimeSeries> collection =
    metricStore.query(new MetricDataQuery(0, 0, Integer.MAX_VALUE, Integer.MAX_VALUE, METRICS,
                                          Constants.Metrics.TRANSACTION_MANAGER_CONTEXT,
                                          Collections.<String>emptyList(), null));
  for (MetricTimeSeries metricTimeSeries : collection) {
    if (metricTimeSeries.getMetricName().equals("system.committing.size")) {
      numCommittingChangeSets = (int) aggregateMetricValue(metricTimeSeries);
    }
    if (metricTimeSeries.getMetricName().equals("system.committed.size")) {
      numCommittedChangeSets = (int) aggregateMetricValue(metricTimeSeries);
    }
  }
  // Start a short transaction only to read the current transaction state, then abort it.
  Transaction transaction = txClient.startShort();
  readPointer = transaction.getReadPointer();
  writePointer = transaction.getWritePointer();
  numInProgressTx = transaction.getInProgress().length;
  numInvalidTx = transaction.getInvalids().length;
  txClient.abort(transaction);
}
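The method relies on aggregateMetricValue, which is not shown here. A plausible sketch, assuming it simply sums the data points of the series; the real CDAPTransactions implementation may differ.
// Assumed implementation sketch, not the actual CDAP source.
private long aggregateMetricValue(MetricTimeSeries timeSeries) {
  long total = 0;
  for (TimeValue timeValue : timeSeries.getTimeValues()) {
    total += timeValue.getValue();
  }
  return total;
}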
use of co.cask.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.
the class MetricStoreRequestExecutor method queryTimeSeries.
private Iterator<TimeValue> queryTimeSeries(MetricDataQuery query) throws Exception {
  Collection<MetricTimeSeries> result = metricStore.query(query);
  if (result.isEmpty()) {
    return new ArrayList<TimeValue>().iterator();
  }
  // Since there is no group-by condition, the query always returns a single time series.
  MetricTimeSeries timeSeries = result.iterator().next();
  return Iterables.transform(timeSeries.getTimeValues(), new Function<TimeValue, TimeValue>() {
    @Override
    public TimeValue apply(TimeValue input) {
      return new TimeValue(input.getTimestamp(), input.getValue());
    }
  }).iterator();
}
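A hypothetical caller inside the same class might drain the returned iterator into a total; the query variable and the surrounding context are assumptions, not taken from MetricStoreRequestExecutor.
// Illustrative only: consume the iterator to compute an aggregate.
Iterator<TimeValue> values = queryTimeSeries(query);
long total = 0;
while (values.hasNext()) {
  total += values.next().getValue();
}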