Example 11 with MetricTimeSeries

Use of co.cask.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.

From class SparkMetricsIntegrationTestRun, method getTotalCounter:

private long getTotalCounter(Map<String, String> context, String metricName) throws Exception {
    MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                                AggregationFunction.SUM, context, new ArrayList<String>());
    try {
        Collection<MetricTimeSeries> result = getMetricsManager().query(query);
        if (result.isEmpty()) {
            return 0;
        }
        // since this is a totals query with no groupBy specified, we know there is exactly one time series
        List<TimeValue> timeValues = result.iterator().next().getTimeValues();
        if (timeValues.isEmpty()) {
            return 0;
        }
        // since it is a totals query, we know there is only one value
        return timeValues.get(0).getValue();
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
Also used: MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
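
The helper above issues what its own comments call a totals query: with a resolution of Integer.MAX_VALUE and no groupBy tags, at most one time series holding a single aggregated value comes back. A minimal usage sketch follows; the tag keys mirror the other examples on this page, while the app, program, and metric names are hypothetical:

// Hypothetical call site for getTotalCounter; the Constants.Metrics.Tag keys
// follow the usage shown in the other examples on this page.
Map<String, String> context = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    // hypothetical app and program names
    Constants.Metrics.Tag.APP, "SparkApp",
    Constants.Metrics.Tag.SPARK, "SparkProgram");
long written = getTotalCounter(context, "user.records.written");  // hypothetical metric name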

Example 12 with MetricTimeSeries

Use of co.cask.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.

From class MapReduceProgramRunnerTest, method testMapreduceWithDynamicDatasets:

@Test
public void testMapreduceWithDynamicDatasets() throws Exception {
    DatasetId rtInput1 = DefaultId.NAMESPACE.dataset("rtInput1");
    DatasetId rtInput2 = DefaultId.NAMESPACE.dataset("rtInput2");
    DatasetId rtOutput1 = DefaultId.NAMESPACE.dataset("rtOutput1");
    // create the datasets here because they are not created by the app
    dsFramework.addInstance("fileSet", rtInput1, FileSetProperties.builder()
        .setBasePath("rtInput1")
        .setInputFormat(TextInputFormat.class)
        .setOutputFormat(TextOutputFormat.class)
        .setOutputProperty(TextOutputFormat.SEPERATOR, ":")
        .build());
    dsFramework.addInstance("fileSet", rtOutput1, FileSetProperties.builder()
        .setBasePath("rtOutput1")
        .setInputFormat(TextInputFormat.class)
        .setOutputFormat(TextOutputFormat.class)
        .setOutputProperty(TextOutputFormat.SEPERATOR, ":")
        .build());
    // build runtime args for app
    Map<String, String> runtimeArguments = Maps.newHashMap();
    // Make sure there is only one mapper running at a time since this test has the Mapper writing
    // to a dataset using increment and the in-memory table doesn't really support concurrent increment
    runtimeArguments.put("mr.job.conf.mapreduce.local.map.tasks.maximum", "1");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_NAME, "rtInput1");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_PATHS, "abc, xyz");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_NAME, "rtOutput1");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_PATH, "a001");
    // test reading and writing distinct datasets, reading more than one path
    testMapreduceWithFile("rtInput1", "abc, xyz", "rtOutput1", "a001",
                          AppWithMapReduceUsingRuntimeDatasets.class,
                          AppWithMapReduceUsingRuntimeDatasets.ComputeSum.class,
                          runtimeArguments, AppWithMapReduceUsingRuntimeDatasets.COUNTERS, null);
    // validate that the table emitted metrics
    Collection<MetricTimeSeries> metrics = metricStore.query(new MetricDataQuery(
        0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
        "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
        ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                        Constants.Metrics.Tag.APP, AppWithMapReduceUsingRuntimeDatasets.APP_NAME,
                        Constants.Metrics.Tag.MAPREDUCE, AppWithMapReduceUsingRuntimeDatasets.MR_NAME,
                        Constants.Metrics.Tag.DATASET, "rtt"),
        Collections.<String>emptyList()));
    Assert.assertEquals(1, metrics.size());
    MetricTimeSeries ts = metrics.iterator().next();
    Assert.assertEquals(1, ts.getTimeValues().size());
    Assert.assertEquals(1, ts.getTimeValues().get(0).getValue());
    // test reading and writing same dataset
    dsFramework.addInstance("fileSet", rtInput2, FileSetProperties.builder()
        .setBasePath("rtInput2")
        .setInputFormat(TextInputFormat.class)
        .setOutputFormat(TextOutputFormat.class)
        .setOutputProperty(TextOutputFormat.SEPERATOR, ":")
        .build());
    runtimeArguments = Maps.newHashMap();
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_NAME, "rtInput2");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_PATHS, "zzz");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_NAME, "rtInput2");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_PATH, "f123");
    testMapreduceWithFile("rtInput2", "zzz", "rtInput2", "f123",
                          AppWithMapReduceUsingRuntimeDatasets.class,
                          AppWithMapReduceUsingRuntimeDatasets.ComputeSum.class,
                          runtimeArguments, AppWithMapReduceUsingRuntimeDatasets.COUNTERS, null);
}
Also used: TextOutputFormat(org.apache.hadoop.mapreduce.lib.output.TextOutputFormat) MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) DatasetId(co.cask.cdap.proto.id.DatasetId) Test(org.junit.Test)
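
The metric assertions in this test (exactly one series holding exactly one time value) rest on the same totals-query contract as Example 11. A small helper capturing that shape could read as follows; this is a sketch, not part of the project:

// Sketch: collapse a totals-query result to its single aggregated value,
// asserting the one-series/one-value shape a totals query guarantees.
private static long totalsValue(Collection<MetricTimeSeries> metrics) {
    Assert.assertEquals(1, metrics.size());
    List<TimeValue> timeValues = metrics.iterator().next().getTimeValues();
    Assert.assertEquals(1, timeValues.size());
    return timeValues.get(0).getValue();
}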

Example 13 with MetricTimeSeries

Use of co.cask.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.

From class AbstractStreamService, method getStreamEventsSize:

/**
   * Get the size of events ingested by a stream since its creation, in bytes.
   * @param streamId id of the stream
   * @return size of events ingested by the stream since its creation, in bytes
   * @throws IOException if an error occurs while retrieving the metric
   */
protected long getStreamEventsSize(StreamId streamId) throws IOException {
    MetricDataQuery metricDataQuery = new MetricDataQuery(
        0L, TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis()), Integer.MAX_VALUE,
        "system.collect.bytes", AggregationFunction.SUM,
        ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, streamId.getNamespace(),
                        Constants.Metrics.Tag.STREAM, streamId.getEntityName()),
        ImmutableList.<String>of());
    try {
        Collection<MetricTimeSeries> metrics = metricStore.query(metricDataQuery);
        if (metrics == null || metrics.isEmpty()) {
            // Data is not yet available, which means no data has been ingested by the stream yet
            return 0L;
        }
        MetricTimeSeries metric = metrics.iterator().next();
        List<TimeValue> timeValues = metric.getTimeValues();
        if (timeValues == null || timeValues.size() != 1) {
            throw new IOException("Should collect exactly one time value");
        }
        return timeValues.get(0).getValue();
    } catch (Exception e) {
        Throwables.propagateIfInstanceOf(e, IOException.class);
        throw new IOException(e);
    }
}
Also used: MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) IOException(java.io.IOException) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
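
A hedged sketch of a call site follows; constructing a StreamId directly from namespace and stream names is an assumption about the id API rather than something taken from this class, and both names are hypothetical:

// Hypothetical caller: report how many bytes a stream has ingested so far.
StreamId streamId = new StreamId("default", "purchaseStream");
long ingestedBytes = getStreamEventsSize(streamId);
if (ingestedBytes == 0L) {
    // nothing ingested yet: the query above found no data points
}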

Example 14 with MetricTimeSeries

Use of co.cask.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.

From class WorkflowStatsSLAHttpHandler, method getSparkDetails:

private Map<String, Long> getSparkDetails(ProgramId sparkProgram, String runId) throws Exception {
    Map<String, String> context = new HashMap<>();
    context.put(Constants.Metrics.Tag.NAMESPACE, sparkProgram.getNamespace());
    context.put(Constants.Metrics.Tag.APP, sparkProgram.getApplication());
    context.put(Constants.Metrics.Tag.SPARK, sparkProgram.getProgram());
    context.put(Constants.Metrics.Tag.RUN_ID, runId);
    List<TagValue> tags = new ArrayList<>();
    for (Map.Entry<String, String> entry : context.entrySet()) {
        tags.add(new TagValue(entry.getKey(), entry.getValue()));
    }
    MetricSearchQuery metricSearchQuery = new MetricSearchQuery(0, 0, Integer.MAX_VALUE, tags);
    Collection<String> metricNames = metricStore.findMetricNames(metricSearchQuery);
    Map<String, Long> overallResult = new HashMap<>();
    for (String metricName : metricNames) {
        Collection<MetricTimeSeries> resultPerQuery = metricStore.query(
            new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM,
                                context, new ArrayList<String>()));
        for (MetricTimeSeries metricTimeSeries : resultPerQuery) {
            overallResult.put(metricTimeSeries.getMetricName(), metricTimeSeries.getTimeValues().get(0).getValue());
        }
    }
    return overallResult;
}
Also used: HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) MetricSearchQuery(co.cask.cdap.api.metrics.MetricSearchQuery) TagValue(co.cask.cdap.api.metrics.TagValue) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) Map(java.util.Map)
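
Note that getTimeValues().get(0) in the loop above assumes every returned series carries at least one data point. A slightly more defensive variant of that loop, as a sketch rather than the project's code:

// Sketch: skip series that come back without data points instead of
// letting get(0) throw IndexOutOfBoundsException.
for (MetricTimeSeries series : resultPerQuery) {
    List<TimeValue> values = series.getTimeValues();
    if (!values.isEmpty()) {
        overallResult.put(series.getMetricName(), values.get(0).getValue());
    }
}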

Example 15 with MetricTimeSeries

Use of co.cask.cdap.api.metrics.MetricTimeSeries in project cdap by caskdata.

From class WorkerProgramRunnerTest, method testWorkerDatasetWithMetrics:

@Test
public void testWorkerDatasetWithMetrics() throws Throwable {
    final ApplicationWithPrograms app = AppFabricTestHelper.deployApplicationWithManager(AppWithWorker.class, TEMP_FOLDER_SUPPLIER);
    ProgramController controller = startProgram(app, AppWithWorker.TableWriter.class);
    // validate worker wrote the "initialize" and "run" rows
    final TransactionExecutor executor = txExecutorFactory.createExecutor(datasetCache);
    // wait at most 5 seconds until the "RUN" row is set (indicates the worker has started running)
    Tasks.waitFor(AppWithWorker.RUN, new Callable<String>() {

        @Override
        public String call() throws Exception {
            return executor.execute(new Callable<String>() {

                @Override
                public String call() throws Exception {
                    KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
                    return Bytes.toString(kvTable.read(AppWithWorker.RUN));
                }
            });
        }
    }, 5, TimeUnit.SECONDS);
    stopProgram(controller);
    txExecutorFactory.createExecutor(datasetCache.getTransactionAwares()).execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
            Assert.assertEquals(AppWithWorker.RUN, Bytes.toString(kvTable.read(AppWithWorker.RUN)));
            Assert.assertEquals(AppWithWorker.INITIALIZE, Bytes.toString(kvTable.read(AppWithWorker.INITIALIZE)));
            Assert.assertEquals(AppWithWorker.STOP, Bytes.toString(kvTable.read(AppWithWorker.STOP)));
        }
    });
    // validate that the table emitted metrics
    Tasks.waitFor(3L, new Callable<Long>() {

        @Override
        public Long call() throws Exception {
            Collection<MetricTimeSeries> metrics = metricStore.query(new MetricDataQuery(
                0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getEntityName(),
                                Constants.Metrics.Tag.APP, AppWithWorker.NAME,
                                Constants.Metrics.Tag.WORKER, AppWithWorker.WORKER,
                                Constants.Metrics.Tag.DATASET, AppWithWorker.DATASET),
                Collections.<String>emptyList()));
            if (metrics.isEmpty()) {
                return 0L;
            }
            Assert.assertEquals(1, metrics.size());
            MetricTimeSeries ts = metrics.iterator().next();
            Assert.assertEquals(1, ts.getTimeValues().size());
            return ts.getTimeValues().get(0).getValue();
        }
    }, 5L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
Also used: ProgramController(co.cask.cdap.app.runtime.ProgramController) MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) TransactionExecutor(org.apache.tephra.TransactionExecutor) AppWithWorker(co.cask.cdap.AppWithWorker) IOException(java.io.IOException) Callable(java.util.concurrent.Callable) ApplicationWithPrograms(co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) Collection(java.util.Collection) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) Test(org.junit.Test)
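
On Java 8 and later, the anonymous Callable in the final wait collapses to a lambda. A sketch of the same polling wait, where query stands for the MetricDataQuery built above and the size assertions from the original are omitted for brevity:

// Sketch: the same metric wait written with a lambda (Java 8+),
// polling every 50 ms for up to 5 seconds until the value reaches 3.
Tasks.waitFor(3L, () -> {
    Collection<MetricTimeSeries> metrics = metricStore.query(query);
    return metrics.isEmpty() ? 0L
                             : metrics.iterator().next().getTimeValues().get(0).getValue();
}, 5L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);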

Aggregations

MetricTimeSeries (co.cask.cdap.api.metrics.MetricTimeSeries): 24 uses
MetricDataQuery (co.cask.cdap.api.metrics.MetricDataQuery): 19 uses
TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue): 13 uses
AggregationFunction (co.cask.cdap.api.dataset.lib.cube.AggregationFunction): 4 uses
IOException (java.io.IOException): 4 uses
Test (org.junit.Test): 4 uses
KeyValueTable (co.cask.cdap.api.dataset.lib.KeyValueTable): 3 uses
Function (com.google.common.base.Function): 2 uses
ArrayList (java.util.ArrayList): 2 uses
Collection (java.util.Collection): 2 uses
Map (java.util.Map): 2 uses
AppWithWorker (co.cask.cdap.AppWithWorker): 1 use
ObjectStore (co.cask.cdap.api.dataset.lib.ObjectStore): 1 use
TimeSeries (co.cask.cdap.api.dataset.lib.cube.TimeSeries): 1 use
TopicNotFoundException (co.cask.cdap.api.messaging.TopicNotFoundException): 1 use
MetricSearchQuery (co.cask.cdap.api.metrics.MetricSearchQuery): 1 use
MetricStore (co.cask.cdap.api.metrics.MetricStore): 1 use
MetricsContext (co.cask.cdap.api.metrics.MetricsContext): 1 use
NoopMetricsContext (co.cask.cdap.api.metrics.NoopMetricsContext): 1 use
TagValue (co.cask.cdap.api.metrics.TagValue): 1 use