Example 16 with MetricDataQuery

use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.

the class MapReduceProgramRunnerTest method testMapreduceWithDynamicDatasets.

@Test
public void testMapreduceWithDynamicDatasets() throws Exception {
    DatasetId rtInput1 = DefaultId.NAMESPACE.dataset("rtInput1");
    DatasetId rtInput2 = DefaultId.NAMESPACE.dataset("rtInput2");
    DatasetId rtOutput1 = DefaultId.NAMESPACE.dataset("rtOutput1");
    // create the datasets here because they are not created by the app
    dsFramework.addInstance("fileSet", rtInput1, FileSetProperties.builder().setBasePath("rtInput1").setInputFormat(TextInputFormat.class).setOutputFormat(TextOutputFormat.class).setOutputProperty(TextOutputFormat.SEPERATOR, ":").build());
    dsFramework.addInstance("fileSet", rtOutput1, FileSetProperties.builder().setBasePath("rtOutput1").setInputFormat(TextInputFormat.class).setOutputFormat(TextOutputFormat.class).setOutputProperty(TextOutputFormat.SEPERATOR, ":").build());
    // build runtime args for app
    Map<String, String> runtimeArguments = Maps.newHashMap();
    // Make sure there is only one mapper running at a time since this test has the Mapper writing
    // to a dataset using increment and the in-memory table doesn't really support concurrent increment
    runtimeArguments.put("mr.job.conf.mapreduce.local.map.tasks.maximum", "1");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_NAME, "rtInput1");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_PATHS, "abc, xyz");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_NAME, "rtOutput1");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_PATH, "a001");
    // test reading and writing distinct datasets, reading more than one path
    testMapreduceWithFile("rtInput1", "abc, xyz", "rtOutput1", "a001", AppWithMapReduceUsingRuntimeDatasets.class, AppWithMapReduceUsingRuntimeDatasets.ComputeSum.class, runtimeArguments, AppWithMapReduceUsingRuntimeDatasets.COUNTERS, null);
    // validate that the table emitted metrics
    Collection<MetricTimeSeries> metrics = metricStore.query(
        new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                            "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                            ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                                            Constants.Metrics.Tag.APP, AppWithMapReduceUsingRuntimeDatasets.APP_NAME,
                                            Constants.Metrics.Tag.MAPREDUCE, AppWithMapReduceUsingRuntimeDatasets.MR_NAME,
                                            Constants.Metrics.Tag.DATASET, "rtt"),
                            Collections.<String>emptyList()));
    Assert.assertEquals(1, metrics.size());
    MetricTimeSeries ts = metrics.iterator().next();
    Assert.assertEquals(1, ts.getTimeValues().size());
    Assert.assertEquals(1, ts.getTimeValues().get(0).getValue());
    // test reading and writing same dataset
    dsFramework.addInstance("fileSet", rtInput2, FileSetProperties.builder().setBasePath("rtInput2").setInputFormat(TextInputFormat.class).setOutputFormat(TextOutputFormat.class).setOutputProperty(TextOutputFormat.SEPERATOR, ":").build());
    runtimeArguments = Maps.newHashMap();
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_NAME, "rtInput2");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_PATHS, "zzz");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_NAME, "rtInput2");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_PATH, "f123");
    testMapreduceWithFile("rtInput2", "zzz", "rtInput2", "f123", AppWithMapReduceUsingRuntimeDatasets.class, AppWithMapReduceUsingRuntimeDatasets.ComputeSum.class, runtimeArguments, AppWithMapReduceUsingRuntimeDatasets.COUNTERS, null);
}
Also used : TextOutputFormat (org.apache.hadoop.mapreduce.lib.output.TextOutputFormat), MetricTimeSeries (io.cdap.cdap.api.metrics.MetricTimeSeries), MetricDataQuery (io.cdap.cdap.api.metrics.MetricDataQuery), DatasetId (io.cdap.cdap.proto.id.DatasetId), Test (org.junit.Test)
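
The positional MetricDataQuery constructor used above is dense, so here is a minimal annotated sketch of the same call, assuming the example's imports plus java.util.Map, List, and Collections; the local variable names are illustrative and not part of the test:

// Annotated sketch of the query above; variable names are illustrative.
long startTs = 0;                                // start of the queried time range, in seconds
long endTs = System.currentTimeMillis() / 1000L; // end of the range, in seconds
int resolution = Integer.MAX_VALUE;              // total-aggregation resolution: one bucket for the whole range
String metric = "system." + Constants.Metrics.Name.Dataset.OP_COUNT;
Map<String, String> sliceBy = ImmutableMap.of(   // tags that narrow the query to one program and dataset
    Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
    Constants.Metrics.Tag.APP, AppWithMapReduceUsingRuntimeDatasets.APP_NAME,
    Constants.Metrics.Tag.MAPREDUCE, AppWithMapReduceUsingRuntimeDatasets.MR_NAME,
    Constants.Metrics.Tag.DATASET, "rtt");
List<String> groupBy = Collections.emptyList();  // no group-by tags, so at most one time series is returned
MetricDataQuery query = new MetricDataQuery(startTs, endTs, resolution, metric,
                                            AggregationFunction.SUM, sliceBy, groupBy);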

Example 17 with MetricDataQuery

use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.

the class Spark2Test method testSparkWithObjectStore.

@Test
public void testSparkWithObjectStore() throws Exception {
    ApplicationManager applicationManager = deploy(NamespaceId.DEFAULT, SparkAppUsingObjectStore.class);
    DataSetManager<ObjectStore<String>> keysManager = getDataset("keys");
    prepareInputData(keysManager);
    SparkManager sparkManager = applicationManager.getSparkManager(CharCountProgram.class.getSimpleName()).start();
    sparkManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
    sparkManager.waitForStopped(60, TimeUnit.SECONDS);
    DataSetManager<KeyValueTable> countManager = getDataset("count");
    checkOutputData(countManager);
    // validate that the table emitted metrics
    // one read + one write in beforeSubmit(), increment (= read + write) in main -> 4
    Tasks.waitFor(4L, new Callable<Long>() {

        @Override
        public Long call() throws Exception {
            Collection<MetricTimeSeries> metrics = getMetricsManager().query(
                new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                                    "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                                    ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                                                    Constants.Metrics.Tag.APP, SparkAppUsingObjectStore.class.getSimpleName(),
                                                    Constants.Metrics.Tag.SPARK, CharCountProgram.class.getSimpleName(),
                                                    Constants.Metrics.Tag.DATASET, "totals"),
                                    Collections.<String>emptyList()));
            if (metrics.isEmpty()) {
                return 0L;
            }
            Assert.assertEquals(1, metrics.size());
            MetricTimeSeries ts = metrics.iterator().next();
            Assert.assertEquals(1, ts.getTimeValues().size());
            return ts.getTimeValues().get(0).getValue();
        }
    }, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
Also used : ApplicationManager (io.cdap.cdap.test.ApplicationManager), ObjectStore (io.cdap.cdap.api.dataset.lib.ObjectStore), SparkAppUsingObjectStore (io.cdap.cdap.spark.app.SparkAppUsingObjectStore), SparkManager (io.cdap.cdap.test.SparkManager), MetricTimeSeries (io.cdap.cdap.api.metrics.MetricTimeSeries), Assume.assumeNoException (org.junit.Assume.assumeNoException), IOException (java.io.IOException), KeyValueTable (io.cdap.cdap.api.dataset.lib.KeyValueTable), Collection (java.util.Collection), MetricDataQuery (io.cdap.cdap.api.metrics.MetricDataQuery), Test (org.junit.Test)
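
Dataset metrics are emitted asynchronously, which is why this test polls with Tasks.waitFor instead of asserting once. A reusable sketch of that polling pattern with a lambda in place of the anonymous Callable; the helper name waitForMetric is hypothetical, and the imports are assumed to match the example above:

// Hypothetical helper: polls a single aggregated metric until it reaches the expected value.
private void waitForMetric(long expected, String metric, Map<String, String> tags) throws Exception {
    Tasks.waitFor(expected, () -> {
        Collection<MetricTimeSeries> series = getMetricsManager().query(
            new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                                metric, AggregationFunction.SUM, tags, Collections.emptyList()));
        // Treat "no data yet" as zero so the poll simply retries.
        return series.isEmpty() ? 0L : series.iterator().next().getTimeValues().get(0).getValue();
    }, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}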

Example 18 with MetricDataQuery

use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.

the class PreviewDataStreamsTest method getTotalMetric.

private long getTotalMetric(Map<String, String> tags, String metricName, PreviewManager previewManager) {
    MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM, tags, new ArrayList<String>());
    Collection<MetricTimeSeries> result = previewManager.getMetricsQueryHelper().getMetricStore().query(query);
    if (result.isEmpty()) {
        return 0;
    }
    List<TimeValue> timeValues = result.iterator().next().getTimeValues();
    if (timeValues.isEmpty()) {
        return 0;
    }
    return timeValues.get(0).getValue();
}
Also used : MetricTimeSeries (io.cdap.cdap.api.metrics.MetricTimeSeries), MetricDataQuery (io.cdap.cdap.api.metrics.MetricDataQuery), TimeValue (io.cdap.cdap.api.dataset.lib.cube.TimeValue)
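
The new ArrayList<String>() passed as the last argument is the group-by tag list. A hedged sketch of the same query grouped by dataset, reusing the helper's parameters and assuming that MetricTimeSeries exposes its grouping tags via getTagValues() and that Constants and java.util.Collections are imported; the loop body is illustrative:

// One MetricTimeSeries is returned per distinct combination of group-by tag values.
MetricDataQuery grouped = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                              AggregationFunction.SUM, tags,
                                              Collections.singletonList(Constants.Metrics.Tag.DATASET));
for (MetricTimeSeries series : previewManager.getMetricsQueryHelper().getMetricStore().query(grouped)) {
    String dataset = series.getTagValues().get(Constants.Metrics.Tag.DATASET);
    long total = series.getTimeValues().isEmpty() ? 0L : series.getTimeValues().get(0).getValue();
    System.out.println(dataset + " -> " + total);
}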

Example 19 with MetricDataQuery

use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.

the class ServiceLifeCycleTestRun method testContentConsumerLifecycle.

@Test
public void testContentConsumerLifecycle() throws Exception {
    try {
        ApplicationManager appManager = deployWithArtifact(ServiceLifecycleApp.class, artifactJar);
        // Run the service with only one thread so that handler context capture and release can be tested
        serviceManager = appManager.getServiceManager("test").start(ImmutableMap.of(SystemArguments.SERVICE_THREADS, "1"));
        CountDownLatch uploadLatch = new CountDownLatch(1);
        // Start five concurrent uploads
        List<ListenableFuture<Integer>> completions = new ArrayList<>();
        for (int i = 0; i < 5; i++) {
            completions.add(slowUpload(serviceManager, "PUT", "upload", uploadLatch));
        }
        // Get the states, there should be six handler instances initialized.
        // Five for the in-progress uploads, one for the getStates call
        Tasks.waitFor(6, () -> getStates(serviceManager).size(), 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
        // Finish the uploads
        uploadLatch.countDown();
        Futures.successfulAsList(completions).get(10, TimeUnit.SECONDS);
        // Verify the result
        for (ListenableFuture<Integer> future : completions) {
            Assert.assertEquals(200, future.get().intValue());
        }
        // Get the states, there should still be six handler instances initialized.
        final Multimap<Integer, String> states = getStates(serviceManager);
        Assert.assertEquals(6, states.size());
        // Do another round of six concurrent uploads. It should reuse all six of the existing contexts
        completions.clear();
        uploadLatch = new CountDownLatch(1);
        for (int i = 0; i < 6; i++) {
            completions.add(slowUpload(serviceManager, "PUT", "upload", uploadLatch));
        }
        // Get the states, there should be seven handler instances initialized.
        // Six for the in-progress uploads, one for the getStates call
        // Out of the seven states, six should be the same as the old ones
        Tasks.waitFor(true, () -> {
            Multimap<Integer, String> newStates = getStates(serviceManager);
            if (newStates.size() != 7) {
                return false;
            }
            for (Map.Entry<Integer, String> entry : states.entries()) {
                if (!newStates.containsEntry(entry.getKey(), entry.getValue())) {
                    return false;
                }
            }
            return true;
        }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
        // Complete the uploads
        uploadLatch.countDown();
        Futures.successfulAsList(completions).get(10, TimeUnit.SECONDS);
        // Verify the result
        for (ListenableFuture<Integer> future : completions) {
            Assert.assertEquals(200, future.get().intValue());
        }
        // Query the queue size metric. Expect the maximum to be 6.
        // Only the six contexts from the concurrent uploads get captured and added back to the queue;
        // the one created for the getStates() call stays in the thread cache, not in the queue.
        Tasks.waitFor(6L, () -> {
            Map<String, String> context = ImmutableMap.of(
                Constants.Metrics.Tag.NAMESPACE, NamespaceId.DEFAULT.getNamespace(),
                Constants.Metrics.Tag.APP, ServiceLifecycleApp.class.getSimpleName(),
                Constants.Metrics.Tag.SERVICE, "test");
            MetricDataQuery metricQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE,
                "system.context.pool.size", AggregationFunction.MAX, context, Collections.emptyList());
            Iterator<MetricTimeSeries> result = getMetricsManager().query(metricQuery).iterator();
            return result.hasNext() ? result.next().getTimeValues().get(0).getValue() : 0L;
        }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    } finally {
        serviceManager.stop();
        serviceManager.waitForStopped(10, TimeUnit.SECONDS);
    }
}
Also used : ApplicationManager (io.cdap.cdap.test.ApplicationManager), ArrayList (java.util.ArrayList), MetricTimeSeries (io.cdap.cdap.api.metrics.MetricTimeSeries), ServiceLifecycleApp (io.cdap.cdap.test.app.ServiceLifecycleApp), CountDownLatch (java.util.concurrent.CountDownLatch), ListenableFuture (com.google.common.util.concurrent.ListenableFuture), MetricDataQuery (io.cdap.cdap.api.metrics.MetricDataQuery), Map (java.util.Map), ImmutableMap (com.google.common.collect.ImmutableMap), Test (org.junit.Test)
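
The hasNext() guard in this example matters because a query can return no series at all before the first metric value is persisted. A small defensive sketch of that extraction, factored into a helper; the name firstValueOrZero is hypothetical, and the Iterator, List, and TimeValue imports are assumed:

// Hypothetical helper: returns the first aggregated value of the first series, or 0 if none exists yet.
private long firstValueOrZero(MetricDataQuery query) {
    Iterator<MetricTimeSeries> result = getMetricsManager().query(query).iterator();
    if (!result.hasNext()) {
        return 0L;
    }
    List<TimeValue> values = result.next().getTimeValues();
    return values.isEmpty() ? 0L : values.get(0).getValue();
}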

Example 20 with MetricDataQuery

use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by caskdata.

the class TestFrameworkTestRun method verifyMapperJobOutput.

private void verifyMapperJobOutput(Class<?> appClass, DataSetManager<KeyValueTable> outTableManager) throws Exception {
    KeyValueTable outputTable = outTableManager.get();
    Assert.assertEquals("world", Bytes.toString(outputTable.read("hello")));
    // Verify dataset metrics
    String readCountName = "system." + Constants.Metrics.Name.Dataset.READ_COUNT;
    String writeCountName = "system." + Constants.Metrics.Name.Dataset.WRITE_COUNT;
    Collection<MetricTimeSeries> metrics = getMetricsManager().query(
        new MetricDataQuery(0, System.currentTimeMillis() / 1000, Integer.MAX_VALUE,
                            ImmutableMap.of(readCountName, AggregationFunction.SUM,
                                            writeCountName, AggregationFunction.SUM),
                            ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                                            Constants.Metrics.Tag.APP, appClass.getSimpleName(),
                                            Constants.Metrics.Tag.MAPREDUCE, DatasetWithMRApp.MAPREDUCE_PROGRAM),
                            ImmutableList.<String>of()));
    // Transform the collection of metrics into a map from metrics name to aggregated sum
    Map<String, Long> aggs = Maps.transformEntries(Maps.uniqueIndex(metrics, new Function<MetricTimeSeries, String>() {

        @Override
        public String apply(MetricTimeSeries input) {
            return input.getMetricName();
        }
    }), new Maps.EntryTransformer<String, MetricTimeSeries, Long>() {

        @Override
        public Long transformEntry(String key, MetricTimeSeries value) {
            Preconditions.checkArgument(value.getTimeValues().size() == 1, "Expected one value for aggregated sum for metrics %s", key);
            return value.getTimeValues().get(0).getValue();
        }
    });
    Assert.assertEquals(Long.valueOf(1), aggs.get(readCountName));
    Assert.assertEquals(Long.valueOf(1), aggs.get(writeCountName));
}
Also used : Function (com.google.common.base.Function), AggregationFunction (io.cdap.cdap.api.dataset.lib.cube.AggregationFunction), Maps (com.google.common.collect.Maps), KeyValueTable (io.cdap.cdap.api.dataset.lib.KeyValueTable), MetricTimeSeries (io.cdap.cdap.api.metrics.MetricTimeSeries), MetricDataQuery (io.cdap.cdap.api.metrics.MetricDataQuery)
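
The Guava Function and EntryTransformer pair in this example predates lambdas; here is a behavior-equivalent sketch using Java 8 streams, assuming java.util.stream.Collectors is imported and, as the Preconditions check above enforces, exactly one time value per series:

// Stream-based version of the metric-name -> aggregated-value mapping.
Map<String, Long> aggs = metrics.stream()
    .collect(Collectors.toMap(MetricTimeSeries::getMetricName,
                              series -> series.getTimeValues().get(0).getValue()));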

Aggregations

MetricDataQuery (io.cdap.cdap.api.metrics.MetricDataQuery): 32
MetricTimeSeries (io.cdap.cdap.api.metrics.MetricTimeSeries): 30
TimeValue (io.cdap.cdap.api.dataset.lib.cube.TimeValue): 18
Test (org.junit.Test): 12
KeyValueTable (io.cdap.cdap.api.dataset.lib.KeyValueTable): 6
ArrayList (java.util.ArrayList): 6
ImmutableMap (com.google.common.collect.ImmutableMap): 4
MetricStore (io.cdap.cdap.api.metrics.MetricStore): 4
ApplicationManager (io.cdap.cdap.test.ApplicationManager): 4
IOException (java.io.IOException): 4
Collection (java.util.Collection): 4
Map (java.util.Map): 3
LoggerContext (ch.qos.logback.classic.LoggerContext): 2
Function (com.google.common.base.Function): 2
Maps (com.google.common.collect.Maps): 2
ListenableFuture (com.google.common.util.concurrent.ListenableFuture): 2
Injector (com.google.inject.Injector): 2
AppWithWorker (io.cdap.cdap.AppWithWorker): 2
ObjectStore (io.cdap.cdap.api.dataset.lib.ObjectStore): 2
AggregationFunction (io.cdap.cdap.api.dataset.lib.cube.AggregationFunction): 2