
Example 16 with MetricDataQuery

Use of co.cask.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.

From the class FlowTest, method getPending.

private static long getPending(Map<String, String> tags) throws Exception {
    // Sum the "system.queue.pending" counter over all time (timestamps are in seconds);
    // a resolution of Integer.MAX_VALUE requests a single total bucket rather than a time series.
    MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE,
                                                          "system.queue.pending", AggregationFunction.SUM,
                                                          tags, ImmutableList.<String>of());
    Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery);
    if (query.isEmpty()) {
        return 0;
    }
    MetricTimeSeries timeSeries = Iterables.getOnlyElement(query);
    List<TimeValue> timeValues = timeSeries.getTimeValues();
    TimeValue timeValue = Iterables.getOnlyElement(timeValues);
    return timeValue.getValue();
}
Also used: MetricTimeSeries (co.cask.cdap.api.metrics.MetricTimeSeries), MetricDataQuery (co.cask.cdap.api.metrics.MetricDataQuery), TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue)
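
The query above collapses everything into one total per series. As a rough sketch, the same pattern can be generalized into a reusable helper; the name queryTotal and the explicit MetricStore parameter are illustrative assumptions, not part of the CDAP API, and the imports are the ones shown throughout this page.

private static long queryTotal(MetricStore metricStore, String metricName, Map<String, String> tags) {
    MetricDataQuery query = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE,
                                                metricName, AggregationFunction.SUM,
                                                tags, ImmutableList.<String>of());
    Collection<MetricTimeSeries> result = metricStore.query(query);
    if (result.isEmpty()) {
        // Nothing has been emitted for this metric/tag combination yet.
        return 0;
    }
    // With no group-by tags there is at most one series, holding a single total value.
    MetricTimeSeries series = Iterables.getOnlyElement(result);
    return Iterables.getOnlyElement(series.getTimeValues()).getValue();
}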

Example 17 with MetricDataQuery

Use of co.cask.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.

From the class LocalMRJobInfoFetcher, method getAggregates.

private void getAggregates(Map<String, String> tags, Map<String, String> metricsToCounters, Map<String, Long> result) {
    Map<String, AggregationFunction> metrics = Maps.newHashMap();
    // all MapReduce metrics are gauges, so take the latest value rather than summing
    for (String metric : metricsToCounters.keySet()) {
        metrics.put(metric, AggregationFunction.LATEST);
    }
    MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE,
                                                          metrics, tags, ImmutableList.<String>of());
    Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery);
    // initialize elements to zero
    for (String counterName : metricsToCounters.values()) {
        result.put(counterName, 0L);
    }
    for (MetricTimeSeries metricTimeSeries : query) {
        List<TimeValue> timeValues = metricTimeSeries.getTimeValues();
        TimeValue timeValue = Iterables.getOnlyElement(timeValues);
        result.put(metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue());
    }
}
Also used: AggregationFunction (co.cask.cdap.api.dataset.lib.cube.AggregationFunction), MetricTimeSeries (co.cask.cdap.api.metrics.MetricTimeSeries), MetricDataQuery (co.cask.cdap.api.metrics.MetricDataQuery), TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue)
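
For context, a hedged sketch of how this method might be driven from inside the same class; the tag values and the metric-to-counter names below are purely illustrative placeholders, not names guaranteed to be emitted by the MapReduce runtime.

// Hypothetical usage sketch (illustrative names only).
Map<String, String> tags = ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default",
                                           Constants.Metrics.Tag.APP, "SomeMapReduceApp");
Map<String, String> metricsToCounters = ImmutableMap.of("system.process.entries.in", "TASK_INPUT_RECORDS",
                                                        "system.process.entries.out", "TASK_OUTPUT_RECORDS");
Map<String, Long> result = Maps.newHashMap();
getAggregates(tags, metricsToCounters, result);
// result now maps each counter name to its latest gauge value, or 0 if the metric was never emitted.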

Example 18 with MetricDataQuery

Use of co.cask.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.

From the class LocalMRJobInfoFetcher, method queryGroupedAggregates.

// Queries the MetricStore for one metric across all tasks of a given TaskType, grouping by instance id.
private void queryGroupedAggregates(Map<String, String> tags, Table<String, String, Long> allTaskMetrics, Map<String, String> metricsToCounters) {
    Map<String, AggregationFunction> metrics = Maps.newHashMap();
    // all MapReduce metrics are gauges, so take the latest value rather than summing
    for (String metric : metricsToCounters.keySet()) {
        metrics.put(metric, AggregationFunction.LATEST);
    }
    // Grouping by INSTANCE_ID yields one time series per task instance instead of a single aggregate.
    MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE,
                                                          metrics, tags,
                                                          ImmutableList.of(Constants.Metrics.Tag.INSTANCE_ID));
    Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery);
    for (MetricTimeSeries metricTimeSeries : query) {
        List<TimeValue> timeValues = metricTimeSeries.getTimeValues();
        TimeValue timeValue = Iterables.getOnlyElement(timeValues);
        String taskId = metricTimeSeries.getTagValues().get(Constants.Metrics.Tag.INSTANCE_ID);
        allTaskMetrics.put(taskId, metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue());
    }
}
Also used: AggregationFunction (co.cask.cdap.api.dataset.lib.cube.AggregationFunction), MetricTimeSeries (co.cask.cdap.api.metrics.MetricTimeSeries), MetricDataQuery (co.cask.cdap.api.metrics.MetricDataQuery), TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue)
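
The only difference from getAggregates is the final argument: an empty group-by list returns at most one aggregated series per metric, while listing Constants.Metrics.Tag.INSTANCE_ID returns one series per task instance, identifiable through the series' tag values. A minimal sketch of reading such a grouped result, assuming a metricStore field as in the examples above and an illustrative metric name:

MetricDataQuery grouped = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE,
                                              "system.process.entries.in",            // illustrative metric name
                                              AggregationFunction.LATEST, tags,
                                              ImmutableList.of(Constants.Metrics.Tag.INSTANCE_ID));
for (MetricTimeSeries series : metricStore.query(grouped)) {
    // Each returned series carries the group-by tag values that identify it.
    String instanceId = series.getTagValues().get(Constants.Metrics.Tag.INSTANCE_ID);
    long latest = Iterables.getOnlyElement(series.getTimeValues()).getValue();
    System.out.println("instance " + instanceId + " -> latest value " + latest);
}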

Example 19 with MetricDataQuery

Use of co.cask.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.

From the class SparkTestRun, method testSparkWithObjectStore.

@Test
public void testSparkWithObjectStore() throws Exception {
    ApplicationManager applicationManager = deploy(SparkAppUsingObjectStore.class);
    DataSetManager<ObjectStore<String>> keysManager = getDataset("keys");
    prepareInputData(keysManager);
    SparkManager sparkManager = applicationManager.getSparkManager(CharCountProgram.class.getSimpleName()).start();
    sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 1, TimeUnit.MINUTES);
    DataSetManager<KeyValueTable> countManager = getDataset("count");
    checkOutputData(countManager);
    // validate that the table emitted metrics
    // one read + one write in beforeSubmit(), plus an increment (= one read + one write) in the main program, for a total of 4 dataset operations
    Tasks.waitFor(4L, new Callable<Long>() {

        @Override
        public Long call() throws Exception {
            Collection<MetricTimeSeries> metrics = getMetricsManager().query(
                new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                                    "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                                    ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                                                    Constants.Metrics.Tag.APP, SparkAppUsingObjectStore.class.getSimpleName(),
                                                    Constants.Metrics.Tag.SPARK, CharCountProgram.class.getSimpleName(),
                                                    Constants.Metrics.Tag.DATASET, "totals"),
                                    Collections.<String>emptyList()));
            if (metrics.isEmpty()) {
                return 0L;
            }
            Assert.assertEquals(1, metrics.size());
            MetricTimeSeries ts = metrics.iterator().next();
            Assert.assertEquals(1, ts.getTimeValues().size());
            return ts.getTimeValues().get(0).getValue();
        }
    }, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
Also used: ApplicationManager (co.cask.cdap.test.ApplicationManager), ObjectStore (co.cask.cdap.api.dataset.lib.ObjectStore), SparkAppUsingObjectStore (co.cask.cdap.spark.app.SparkAppUsingObjectStore), SparkManager (co.cask.cdap.test.SparkManager), MetricTimeSeries (co.cask.cdap.api.metrics.MetricTimeSeries), IOException (java.io.IOException), KeyValueTable (co.cask.cdap.api.dataset.lib.KeyValueTable), Collection (java.util.Collection), MetricDataQuery (co.cask.cdap.api.metrics.MetricDataQuery), Test (org.junit.Test)
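
The anonymous Callable above bundles the metric query with the polling. If the surrounding test base (which provides getMetricsManager()) is compiled with Java 8, the same check could be factored into a small helper; the name getTotalDatasetOps is hypothetical, a sketch of the pattern rather than code from the project.

private long getTotalDatasetOps(String appName, String sparkName, String datasetName) {
    Collection<MetricTimeSeries> metrics = getMetricsManager().query(
        new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                            "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                            ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                                            Constants.Metrics.Tag.APP, appName,
                                            Constants.Metrics.Tag.SPARK, sparkName,
                                            Constants.Metrics.Tag.DATASET, datasetName),
                            Collections.<String>emptyList()));
    // Return the single total value, or 0 if nothing has been emitted yet.
    return metrics.isEmpty() ? 0L : Iterables.getOnlyElement(metrics).getTimeValues().get(0).getValue();
}

// The polling in the test would then reduce to:
// Tasks.waitFor(4L, () -> getTotalDatasetOps(SparkAppUsingObjectStore.class.getSimpleName(),
//                                            CharCountProgram.class.getSimpleName(), "totals"),
//               10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);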

Example 20 with MetricDataQuery

Use of co.cask.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.

From the class AbstractStreamService, method getStreamEventsSize.

/**
 * Get the size of events ingested by a stream since its creation, in bytes.
 * @param streamId id of the stream
 * @return Size of events ingested by a stream since its creation
 * @throws IOException if an error occurs while retrieving the metric
 */
protected long getStreamEventsSize(StreamId streamId) throws IOException {
    // Sum the "system.collect.bytes" counter from time 0 up to now (timestamps are in seconds).
    MetricDataQuery metricDataQuery = new MetricDataQuery(0L, TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis()),
                                                          Integer.MAX_VALUE, "system.collect.bytes", AggregationFunction.SUM,
                                                          ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, streamId.getNamespace(),
                                                                          Constants.Metrics.Tag.STREAM, streamId.getEntityName()),
                                                          ImmutableList.<String>of());
    try {
        Collection<MetricTimeSeries> metrics = metricStore.query(metricDataQuery);
        if (metrics == null || metrics.isEmpty()) {
            // Data is not yet available, which means no data has been ingested by the stream yet
            return 0L;
        }
        MetricTimeSeries metric = metrics.iterator().next();
        List<TimeValue> timeValues = metric.getTimeValues();
        if (timeValues == null || timeValues.size() != 1) {
            throw new IOException("Should collect exactly one time value");
        }
        return timeValues.get(0).getValue();
    } catch (Exception e) {
        Throwables.propagateIfInstanceOf(e, IOException.class);
        throw new IOException(e);
    }
}
Also used: MetricTimeSeries (co.cask.cdap.api.metrics.MetricTimeSeries), IOException (java.io.IOException), MetricDataQuery (co.cask.cdap.api.metrics.MetricDataQuery), TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue)
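
The query above asks for a single total over the stream's entire lifetime. The same store can also be asked for per-interval values by bounding the time range and choosing a finer resolution; a minimal sketch, assuming the metrics system maintains the usual second/minute/hour aggregations (Integer.MAX_VALUE, as used above, selects the single-total resolution):

long nowSecs = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
// Last hour of ingestion, one data point per 60-second bucket.
MetricDataQuery lastHour = new MetricDataQuery(nowSecs - 3600, nowSecs, 60,
                                               "system.collect.bytes", AggregationFunction.SUM,
                                               ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, streamId.getNamespace(),
                                                               Constants.Metrics.Tag.STREAM, streamId.getEntityName()),
                                               ImmutableList.<String>of());
long bytesLastHour = 0;
for (MetricTimeSeries series : metricStore.query(lastHour)) {
    for (TimeValue bucket : series.getTimeValues()) {
        bytesLastHour += bucket.getValue();   // bytes ingested in that 60-second bucket
    }
}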

Aggregations

MetricDataQuery (co.cask.cdap.api.metrics.MetricDataQuery): 42
Test (org.junit.Test): 25
MetricTimeSeries (co.cask.cdap.api.metrics.MetricTimeSeries): 21
TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue): 11
IOException (java.io.IOException): 5
ArrayList (java.util.ArrayList): 5
Map (java.util.Map): 5
KeyValueTable (co.cask.cdap.api.dataset.lib.KeyValueTable): 4
ApplicationManager (co.cask.cdap.test.ApplicationManager): 4
AggregationFunction (co.cask.cdap.api.dataset.lib.cube.AggregationFunction): 3
ImmutableMap (com.google.common.collect.ImmutableMap): 3
Collection (java.util.Collection): 3
ObjectStore (co.cask.cdap.api.dataset.lib.ObjectStore): 2
MetricStore (co.cask.cdap.api.metrics.MetricStore): 2
QueueName (co.cask.cdap.common.queue.QueueName): 2
DatasetFramework (co.cask.cdap.data2.dataset2.DatasetFramework): 2
SparkAppUsingObjectStore (co.cask.cdap.spark.app.SparkAppUsingObjectStore): 2
SparkManager (co.cask.cdap.test.SparkManager): 2
ListenableFuture (com.google.common.util.concurrent.ListenableFuture): 2
LoggerContext (ch.qos.logback.classic.LoggerContext): 1