
Example 11 with MetricDataQuery

use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by cdapio.

the class KafkaLogProcessorPipelineTest method testMetricsAppender.

@Test
public void testMetricsAppender() throws Exception {
    Injector injector = KAFKA_TESTER.getInjector();
    MetricsCollectionService collectionService = injector.getInstance(MetricsCollectionService.class);
    collectionService.startAndWait();
    LoggerContext loggerContext = new LocalAppenderContext(injector.getInstance(TransactionRunner.class), injector.getInstance(LocationFactory.class), injector.getInstance(MetricsCollectionService.class));
    final File logDir = TEMP_FOLDER.newFolder();
    loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
    LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
    configurator.setContext(loggerContext);
    URL configURL = getClass().getClassLoader().getResource("pipeline-metric-appender.xml");
    Assert.assertNotNull(configURL);
    configurator.doConfigure(configURL);
    String topic = "metricsPipeline";
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(CConfiguration.create(), "testMetricAppender", loggerContext, NO_OP_METRICS_CONTEXT, 0), checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Publish some log messages to Kafka
    long now = System.currentTimeMillis();
    WorkerLoggingContext loggingContext = new WorkerLoggingContext("default", "app1", "worker1", "run1", "instance1");
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 500),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100)), loggingContext);
    WorkflowProgramLoggingContext workflowProgramLoggingContext = new WorkflowProgramLoggingContext("default", "app1", "wflow1", "run1", ProgramType.MAPREDUCE, "mr1", "mrun1");
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "2", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.TRACE, "3", now - 500)), workflowProgramLoggingContext);
    ServiceLoggingContext serviceLoggingContext = new ServiceLoggingContext(NamespaceId.SYSTEM.getNamespace(), Constants.Logging.COMPONENT_NAME, Constants.Service.TRANSACTION);
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "2", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "3", now - 500),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900)), serviceLoggingContext);
    final MetricStore metricStore = injector.getInstance(MetricStore.class);
    try {
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.info", AggregationFunction.SUM,
                LoggingContextHelper.getMetricsTags(loggingContext), new ArrayList<>()), 5L);
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.debug", AggregationFunction.SUM,
                LoggingContextHelper.getMetricsTags(loggingContext), new ArrayList<>()), 1L);
        // mapreduce metrics context
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.warn", AggregationFunction.SUM,
                ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default", Constants.Metrics.Tag.APP, "app1",
                    Constants.Metrics.Tag.MAPREDUCE, "mr1", Constants.Metrics.Tag.RUN_ID, "mrun1"),
                new ArrayList<>()), 2L);
        // workflow metrics context
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.trace", AggregationFunction.SUM,
                ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default", Constants.Metrics.Tag.APP, "app1",
                    Constants.Metrics.Tag.WORKFLOW, "wflow1", Constants.Metrics.Tag.RUN_ID, "run1"),
                new ArrayList<>()), 1L);
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.services.log.error", AggregationFunction.SUM,
                LoggingContextHelper.getMetricsTags(serviceLoggingContext), new ArrayList<>()), 3L);
    } finally {
        pipeline.stopAndWait();
        loggerContext.stop();
        collectionService.stopAndWait();
    }
}
Also used : MetricStore(io.cdap.cdap.api.metrics.MetricStore) WorkerLoggingContext(io.cdap.cdap.logging.context.WorkerLoggingContext) LocalMetricsCollectionService(io.cdap.cdap.metrics.collect.LocalMetricsCollectionService) MetricsCollectionService(io.cdap.cdap.api.metrics.MetricsCollectionService) ArrayList(java.util.ArrayList) LogProcessorPipelineContext(io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext) ServiceLoggingContext(io.cdap.cdap.common.logging.ServiceLoggingContext) LoggerContext(ch.qos.logback.classic.LoggerContext) URL(java.net.URL) LocationFactory(org.apache.twill.filesystem.LocationFactory) LocalAppenderContext(io.cdap.cdap.logging.framework.LocalAppenderContext) WorkflowProgramLoggingContext(io.cdap.cdap.logging.context.WorkflowProgramLoggingContext) Injector(com.google.inject.Injector) TransactionRunner(io.cdap.cdap.spi.data.transaction.TransactionRunner) MetricDataQuery(io.cdap.cdap.api.metrics.MetricDataQuery) File(java.io.File) LogPipelineConfigurator(io.cdap.cdap.logging.pipeline.LogPipelineConfigurator) Test(org.junit.Test)
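
The verifyMetricsWithRetry helper used above is private to the test class; judging from the calls, it wraps a plain MetricStore query in a retry loop until the expected sum appears. A minimal sketch of that query shape, with parameter roles inferred from the calls above rather than taken from the API docs:

// Sketch only: a whole-time-range SUM query with resolution Integer.MAX_VALUE,
// so every emitted value collapses into a single aggregated data point.
MetricDataQuery query = new MetricDataQuery(
    0,                        // start timestamp (seconds)
    Integer.MAX_VALUE,        // end timestamp (seconds)
    Integer.MAX_VALUE,        // resolution: one bucket covering the whole range
    "system.app.log.info",    // metric emitted by the metrics appender
    AggregationFunction.SUM,
    LoggingContextHelper.getMetricsTags(loggingContext),  // slice-by tags
    new ArrayList<>());       // no group-by tags
Collection<MetricTimeSeries> series = metricStore.query(query);
long total = series.isEmpty()
    ? 0L
    : series.iterator().next().getTimeValues().get(0).getValue();  // 5 INFO events expected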

Example 12 with MetricDataQuery

use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by cdapio.

the class MetricsProcessorServiceTest method testMetricsProcessor.

@Test
public void testMetricsProcessor() throws Exception {
    injector.getInstance(TransactionManager.class).startAndWait();
    StoreDefinition.createAllTables(injector.getInstance(StructuredTableAdmin.class));
    injector.getInstance(DatasetOpExecutorService.class).startAndWait();
    injector.getInstance(DatasetService.class).startAndWait();
    final MetricStore metricStore = injector.getInstance(MetricStore.class);
    Set<Integer> partitions = new HashSet<>();
    for (int i = 0; i < cConf.getInt(Constants.Metrics.MESSAGING_TOPIC_NUM); i++) {
        partitions.add(i);
    }
    // Start MessagingMetricsProcessorManagerService to process the metrics published via the messaging service.
    // Intentionally set queue size to a small value, so that MessagingMetricsProcessorManagerService
    // internally can persist metrics when more messages are to be fetched
    MessagingMetricsProcessorManagerService messagingMetricsProcessorManagerService =
        new MessagingMetricsProcessorManagerService(cConf, injector.getInstance(MetricDatasetFactory.class),
            messagingService, injector.getInstance(SchemaGenerator.class),
            injector.getInstance(DatumReaderFactory.class), metricStore,
            injector.getInstance(MetricsWriterProvider.class), partitions, new NoopMetricsContext(), 50, 0);
    messagingMetricsProcessorManagerService.startAndWait();
    long startTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
    // Publish metrics with messaging service and record expected metrics
    for (int i = 10; i < 20; i++) {
        publishMessagingMetrics(i, startTime, METRICS_CONTEXT, expected, SYSTEM_METRIC_PREFIX, MetricType.COUNTER);
    }
    Thread.sleep(500);
    // Stop and restart messagingMetricsProcessorManagerService
    messagingMetricsProcessorManagerService.stopAndWait();
    // Intentionally set queue size to a large value, so that MessagingMetricsProcessorManagerService
    // internally only persists metrics during terminating.
    messagingMetricsProcessorManagerService =
        new MessagingMetricsProcessorManagerService(cConf, injector.getInstance(MetricDatasetFactory.class),
            messagingService, injector.getInstance(SchemaGenerator.class),
            injector.getInstance(DatumReaderFactory.class), metricStore,
            injector.getInstance(MetricsWriterProvider.class), partitions, new NoopMetricsContext(), 50, 0);
    messagingMetricsProcessorManagerService.startAndWait();
    // Publish metrics after MessagingMetricsProcessorManagerService restarts and record expected metrics
    for (int i = 20; i < 30; i++) {
        publishMessagingMetrics(i, startTime, METRICS_CONTEXT, expected, SYSTEM_METRIC_PREFIX, MetricType.GAUGE);
    }
    final List<String> missingMetricNames = new ArrayList<>();
    // Wait until all expected metrics can be queried from the metric store; if not all metrics
    // are retrieved when the timeout occurs, print out the missing metrics
    try {
        Tasks.waitFor(true, new Callable<Boolean>() {

            @Override
            public Boolean call() throws Exception {
                return canQueryAllMetrics(metricStore, METRICS_CONTEXT, expected, missingMetricNames);
            }
        }, 10000, TimeUnit.MILLISECONDS, "Failed to get all metrics");
    } catch (TimeoutException e) {
        Assert.fail(String.format("Metrics: [%s] cannot be found in the metrics store.", Joiner.on(", ").join(missingMetricNames)));
    }
    // Query metrics from the metricStore and compare them with the expected ones
    assertMetricsResult(metricStore, METRICS_CONTEXT, expected);
    // Query for the 5 counter metrics published with messaging between time 5 - 14
    Collection<MetricTimeSeries> queryResult = metricStore.query(new MetricDataQuery(
        5, 14, 1, Integer.MAX_VALUE,
        ImmutableMap.of(SYSTEM_METRIC_PREFIX + COUNTER_METRIC_NAME, AggregationFunction.SUM),
        METRICS_CONTEXT, ImmutableList.<String>of(), null));
    MetricTimeSeries timeSeries = Iterables.getOnlyElement(queryResult);
    Assert.assertEquals(5, timeSeries.getTimeValues().size());
    for (TimeValue timeValue : timeSeries.getTimeValues()) {
        Assert.assertEquals(1L, timeValue.getValue());
    }
    // Stop services and servers
    messagingMetricsProcessorManagerService.stopAndWait();
    // Delete all metrics
    metricStore.deleteAll();
}
Also used : MetricStore(io.cdap.cdap.api.metrics.MetricStore) DatumReaderFactory(io.cdap.cdap.internal.io.DatumReaderFactory) StructuredTableAdmin(io.cdap.cdap.spi.data.StructuredTableAdmin) ArrayList(java.util.ArrayList) MetricTimeSeries(io.cdap.cdap.api.metrics.MetricTimeSeries) DatasetService(io.cdap.cdap.data2.datafabric.dataset.service.DatasetService) NoopMetricsContext(io.cdap.cdap.api.metrics.NoopMetricsContext) MetricDatasetFactory(io.cdap.cdap.metrics.store.MetricDatasetFactory) MetricsWriterProvider(io.cdap.cdap.metrics.process.loader.MetricsWriterProvider) TimeValue(io.cdap.cdap.api.dataset.lib.cube.TimeValue) HashSet(java.util.HashSet) TimeoutException(java.util.concurrent.TimeoutException) SchemaGenerator(io.cdap.cdap.internal.io.SchemaGenerator) TimeoutException(java.util.concurrent.TimeoutException) TransactionManager(org.apache.tephra.TransactionManager) DatasetOpExecutorService(io.cdap.cdap.data2.datafabric.dataset.service.executor.DatasetOpExecutorService) MetricDataQuery(io.cdap.cdap.api.metrics.MetricDataQuery) Test(org.junit.Test)
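
The query at the end of this test uses the longer MetricDataQuery constructor. Based on that call (not on the API docs), the extra arguments are a per-series data point limit, a map from metric name to aggregation function, and a trailing interpolator, null here. A hedged sketch of the same 1-second-resolution query:

// Sketch only: parameter roles inferred from the call above.
MetricDataQuery query = new MetricDataQuery(
    5, 14,                        // start / end timestamps (seconds)
    1,                            // resolution: 1-second buckets
    Integer.MAX_VALUE,            // max data points per time series
    ImmutableMap.of(SYSTEM_METRIC_PREFIX + COUNTER_METRIC_NAME, AggregationFunction.SUM),
    METRICS_CONTEXT,              // slice-by tags
    ImmutableList.<String>of(),   // no group-by tags
    null);                        // no interpolator
MetricTimeSeries timeSeries = Iterables.getOnlyElement(metricStore.query(query));
// Per the comment in the test, 5 of the published counter metrics fall in [5, 14],
// so the series is expected to hold 5 time values of 1 each.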

Example 13 with MetricDataQuery

use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by cdapio.

the class Spark2Test method testSparkWithObjectStore.

@Test
public void testSparkWithObjectStore() throws Exception {
    ApplicationManager applicationManager = deploy(NamespaceId.DEFAULT, SparkAppUsingObjectStore.class);
    DataSetManager<ObjectStore<String>> keysManager = getDataset("keys");
    prepareInputData(keysManager);
    SparkManager sparkManager = applicationManager.getSparkManager(CharCountProgram.class.getSimpleName()).start();
    sparkManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
    sparkManager.waitForStopped(60, TimeUnit.SECONDS);
    DataSetManager<KeyValueTable> countManager = getDataset("count");
    checkOutputData(countManager);
    // validate that the table emitted metrics
    // one read + one write in beforeSubmit(), increment (= read + write) in main -> 4
    Tasks.waitFor(4L, new Callable<Long>() {

        @Override
        public Long call() throws Exception {
            Collection<MetricTimeSeries> metrics = getMetricsManager().query(new MetricDataQuery(
                0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                ImmutableMap.of(
                    Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                    Constants.Metrics.Tag.APP, SparkAppUsingObjectStore.class.getSimpleName(),
                    Constants.Metrics.Tag.SPARK, CharCountProgram.class.getSimpleName(),
                    Constants.Metrics.Tag.DATASET, "totals"),
                Collections.<String>emptyList()));
            if (metrics.isEmpty()) {
                return 0L;
            }
            Assert.assertEquals(1, metrics.size());
            MetricTimeSeries ts = metrics.iterator().next();
            Assert.assertEquals(1, ts.getTimeValues().size());
            return ts.getTimeValues().get(0).getValue();
        }
    }, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
Also used : ApplicationManager(io.cdap.cdap.test.ApplicationManager) ObjectStore(io.cdap.cdap.api.dataset.lib.ObjectStore) SparkAppUsingObjectStore(io.cdap.cdap.spark.app.SparkAppUsingObjectStore) SparkManager(io.cdap.cdap.test.SparkManager) MetricTimeSeries(io.cdap.cdap.api.metrics.MetricTimeSeries) Assume.assumeNoException(org.junit.Assume.assumeNoException) IOException(java.io.IOException) KeyValueTable(io.cdap.cdap.api.dataset.lib.KeyValueTable) Collection(java.util.Collection) MetricDataQuery(io.cdap.cdap.api.metrics.MetricDataQuery) Test(org.junit.Test)
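
The callable passed to Tasks.waitFor above boils down to "run a whole-range SUM query for the dataset op-count and return the single aggregated value, or 0 if nothing has been emitted yet". A hedged sketch of that body factored into a helper; the method name and the MetricStore parameter are illustrative (the test itself queries through getMetricsManager()):

// Illustrative helper, not part of the test above.
private long getDatasetOpCount(MetricStore store, Map<String, String> tags) {
    Collection<MetricTimeSeries> metrics = store.query(new MetricDataQuery(
        0, System.currentTimeMillis() / 1000L,   // from epoch to now (seconds)
        Integer.MAX_VALUE,                        // single aggregated bucket
        "system." + Constants.Metrics.Name.Dataset.OP_COUNT,
        AggregationFunction.SUM,
        tags,                                     // slice-by tags (namespace, app, spark, dataset)
        Collections.<String>emptyList()));        // no group-by
    return metrics.isEmpty()
        ? 0L
        : metrics.iterator().next().getTimeValues().get(0).getValue();
}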

Example 14 with MetricDataQuery

use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by cdapio.

the class MapReduceProgramRunnerTest method testMapreduceWithDynamicDatasets.

@Test
public void testMapreduceWithDynamicDatasets() throws Exception {
    DatasetId rtInput1 = DefaultId.NAMESPACE.dataset("rtInput1");
    DatasetId rtInput2 = DefaultId.NAMESPACE.dataset("rtInput2");
    DatasetId rtOutput1 = DefaultId.NAMESPACE.dataset("rtOutput1");
    // create the datasets here because they are not created by the app
    dsFramework.addInstance("fileSet", rtInput1, FileSetProperties.builder().setBasePath("rtInput1").setInputFormat(TextInputFormat.class).setOutputFormat(TextOutputFormat.class).setOutputProperty(TextOutputFormat.SEPERATOR, ":").build());
    dsFramework.addInstance("fileSet", rtOutput1, FileSetProperties.builder().setBasePath("rtOutput1").setInputFormat(TextInputFormat.class).setOutputFormat(TextOutputFormat.class).setOutputProperty(TextOutputFormat.SEPERATOR, ":").build());
    // build runtime args for app
    Map<String, String> runtimeArguments = Maps.newHashMap();
    // Make sure there is only one mapper running at a time since this test has the Mapper writing
    // to a dataset using increment and the in-memory table doesn't really support concurrent increment
    runtimeArguments.put("mr.job.conf.mapreduce.local.map.tasks.maximum", "1");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_NAME, "rtInput1");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_PATHS, "abc, xyz");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_NAME, "rtOutput1");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_PATH, "a001");
    // test reading and writing distinct datasets, reading more than one path
    testMapreduceWithFile("rtInput1", "abc, xyz", "rtOutput1", "a001", AppWithMapReduceUsingRuntimeDatasets.class, AppWithMapReduceUsingRuntimeDatasets.ComputeSum.class, runtimeArguments, AppWithMapReduceUsingRuntimeDatasets.COUNTERS, null);
    // validate that the table emitted metrics
    Collection<MetricTimeSeries> metrics = metricStore.query(new MetricDataQuery(
        0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
        "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
        ImmutableMap.of(
            Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
            Constants.Metrics.Tag.APP, AppWithMapReduceUsingRuntimeDatasets.APP_NAME,
            Constants.Metrics.Tag.MAPREDUCE, AppWithMapReduceUsingRuntimeDatasets.MR_NAME,
            Constants.Metrics.Tag.DATASET, "rtt"),
        Collections.<String>emptyList()));
    Assert.assertEquals(1, metrics.size());
    MetricTimeSeries ts = metrics.iterator().next();
    Assert.assertEquals(1, ts.getTimeValues().size());
    Assert.assertEquals(1, ts.getTimeValues().get(0).getValue());
    // test reading and writing same dataset
    dsFramework.addInstance("fileSet", rtInput2, FileSetProperties.builder().setBasePath("rtInput2").setInputFormat(TextInputFormat.class).setOutputFormat(TextOutputFormat.class).setOutputProperty(TextOutputFormat.SEPERATOR, ":").build());
    runtimeArguments = Maps.newHashMap();
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_NAME, "rtInput2");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_PATHS, "zzz");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_NAME, "rtInput2");
    runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_PATH, "f123");
    testMapreduceWithFile("rtInput2", "zzz", "rtInput2", "f123", AppWithMapReduceUsingRuntimeDatasets.class, AppWithMapReduceUsingRuntimeDatasets.ComputeSum.class, runtimeArguments, AppWithMapReduceUsingRuntimeDatasets.COUNTERS, null);
}
Also used : TextOutputFormat(org.apache.hadoop.mapreduce.lib.output.TextOutputFormat) MetricTimeSeries(io.cdap.cdap.api.metrics.MetricTimeSeries) MetricDataQuery(io.cdap.cdap.api.metrics.MetricDataQuery) DatasetId(io.cdap.cdap.proto.id.DatasetId) Test(org.junit.Test)

Example 15 with MetricDataQuery

use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by cdapio.

the class PreviewDataPipelineTest method getTotalMetric.

private long getTotalMetric(Map<String, String> tags, String metricName, PreviewManager previewManager) {
    MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM, tags, new ArrayList<>());
    Collection<MetricTimeSeries> result = previewManager.getMetricsQueryHelper().getMetricStore().query(query);
    if (result.isEmpty()) {
        return 0;
    }
    List<TimeValue> timeValues = result.iterator().next().getTimeValues();
    if (timeValues.isEmpty()) {
        return 0;
    }
    return timeValues.get(0).getValue();
}
Also used : MetricTimeSeries(io.cdap.cdap.api.metrics.MetricTimeSeries) MetricDataQuery(io.cdap.cdap.api.metrics.MetricDataQuery) TimeValue(io.cdap.cdap.api.dataset.lib.cube.TimeValue)
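
A hedged usage sketch of this helper; the tag values and the metric name below are placeholders for illustration, not values taken from PreviewDataPipelineTest:

// Illustrative call only: tag values and metric name are made up for the example.
Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "previewApp");
long processed = getTotalMetric(tags, "user.records.out", previewManager);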

Aggregations

MetricDataQuery (io.cdap.cdap.api.metrics.MetricDataQuery): 32
MetricTimeSeries (io.cdap.cdap.api.metrics.MetricTimeSeries): 30
TimeValue (io.cdap.cdap.api.dataset.lib.cube.TimeValue): 18
Test (org.junit.Test): 12
KeyValueTable (io.cdap.cdap.api.dataset.lib.KeyValueTable): 6
ArrayList (java.util.ArrayList): 6
ImmutableMap (com.google.common.collect.ImmutableMap): 4
MetricStore (io.cdap.cdap.api.metrics.MetricStore): 4
ApplicationManager (io.cdap.cdap.test.ApplicationManager): 4
IOException (java.io.IOException): 4
Collection (java.util.Collection): 4
Map (java.util.Map): 3
LoggerContext (ch.qos.logback.classic.LoggerContext): 2
Function (com.google.common.base.Function): 2
Maps (com.google.common.collect.Maps): 2
ListenableFuture (com.google.common.util.concurrent.ListenableFuture): 2
Injector (com.google.inject.Injector): 2
AppWithWorker (io.cdap.cdap.AppWithWorker): 2
ObjectStore (io.cdap.cdap.api.dataset.lib.ObjectStore): 2
AggregationFunction (io.cdap.cdap.api.dataset.lib.cube.AggregationFunction): 2