
Example 1 with MetricStore

Use of io.cdap.cdap.api.metrics.MetricStore in project cdap by caskdata.

The class MapReduceRunnerTestBase, method beforeClass.

@BeforeClass
public static void beforeClass() throws Exception {
    CConfiguration conf = CConfiguration.create();
    // allow subclasses to override the following two parameters via system properties
    Integer txTimeout = Integer.getInteger(TxConstants.Manager.CFG_TX_TIMEOUT);
    if (txTimeout != null) {
        conf.setInt(TxConstants.Manager.CFG_TX_TIMEOUT, txTimeout);
    }
    Integer txCleanupInterval = Integer.getInteger(TxConstants.Manager.CFG_TX_CLEANUP_INTERVAL);
    if (txCleanupInterval != null) {
        conf.setInt(TxConstants.Manager.CFG_TX_CLEANUP_INTERVAL, txCleanupInterval);
    }
    injector = AppFabricTestHelper.getInjector(conf);
    txService = injector.getInstance(TransactionManager.class);
    txExecutorFactory = injector.getInstance(TransactionExecutorFactory.class);
    dsFramework = injector.getInstance(DatasetFramework.class);
    datasetCache = new SingleThreadDatasetCache(
        new SystemDatasetInstantiator(dsFramework, MapReduceRunnerTestBase.class.getClassLoader(), null),
        injector.getInstance(TransactionSystemClient.class), NamespaceId.DEFAULT,
        DatasetDefinition.NO_ARGUMENTS, null, null);
    metricStore = injector.getInstance(MetricStore.class);
    txService.startAndWait();
    // Always create the default namespace
    injector.getInstance(NamespaceAdmin.class).create(NamespaceMeta.DEFAULT);
}
Also used : DatasetFramework(io.cdap.cdap.data2.dataset2.DatasetFramework) MetricStore(io.cdap.cdap.api.metrics.MetricStore) TransactionManager(org.apache.tephra.TransactionManager) SystemDatasetInstantiator(io.cdap.cdap.data.dataset.SystemDatasetInstantiator) NamespaceAdmin(io.cdap.cdap.common.namespace.NamespaceAdmin) SingleThreadDatasetCache(io.cdap.cdap.data2.dataset2.SingleThreadDatasetCache) CConfiguration(io.cdap.cdap.common.conf.CConfiguration) TransactionExecutorFactory(io.cdap.cdap.data2.transaction.TransactionExecutorFactory) BeforeClass(org.junit.BeforeClass)
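
A concrete test class built on this base can use the metricStore field to check metrics emitted by a MapReduce run. A minimal sketch of such a check, reusing only the query API and tag constants that appear in the later examples on this page; the metric name and tag values are placeholders, not taken from a real test:

    // Hypothetical check in a subclass test: sum a metric emitted by a MapReduce run.
    // "system.sample.metric" and the tag values below are placeholders.
    Collection<MetricTimeSeries> result = metricStore.query(new MetricDataQuery(
        0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.sample.metric", AggregationFunction.SUM,
        ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default",
                        Constants.Metrics.Tag.APP, "someApp",
                        Constants.Metrics.Tag.MAPREDUCE, "someMapReduce",
                        Constants.Metrics.Tag.RUN_ID, "someRunId"),
        new ArrayList<>()));
    long total = 0;
    for (MetricTimeSeries series : result) {
        for (TimeValue timeValue : series.getTimeValues()) {
            total += timeValue.getValue();
        }
    }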

Example 2 with MetricStore

Use of io.cdap.cdap.api.metrics.MetricStore in project cdap by caskdata.

The class ProfileMetricServiceTest, method testRoundingLogic.

@Test
public void testRoundingLogic() throws Exception {
    ProgramRunId runId = NamespaceId.DEFAULT.app("round").workflow("round").run(RunIds.generate());
    ProfileId profileId = NamespaceId.DEFAULT.profile("roundProfile");
    MetricsCollectionService collectionService = injector.getInstance(MetricsCollectionService.class);
    MetricStore metricStore = injector.getInstance(MetricStore.class);
    ProfileMetricService scheduledService = new ProfileMetricService(collectionService, runId, profileId, 1, 1);
    // start and stop the service; the metric should still go up by 1
    scheduledService.startUp();
    scheduledService.shutDown();
    Tasks.waitFor(1L, () -> getMetric(metricStore, runId, profileId, "system." + Constants.Metrics.Program.PROGRAM_NODE_MINUTES), 10, TimeUnit.SECONDS);
    scheduledService.startUp();
    // set the start up time to 90 seconds before the current time
    scheduledService.setStartUpTime(TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis()) - 90);
    // 90 seconds should round up to 2 mins, so a single emit should add 2 and exercise the rounding logic
    scheduledService.emitMetric();
    scheduledService.shutDown();
    // the metric should go up by 2
    Tasks.waitFor(3L, () -> getMetric(metricStore, runId, profileId, "system." + Constants.Metrics.Program.PROGRAM_NODE_MINUTES), 10, TimeUnit.SECONDS);
    scheduledService.startUp();
    // set the start up time to 65 seconds before the current time
    scheduledService.setStartUpTime(TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis()) - 65);
    // 65 seconds should round down to 1 min, so a single emit should add 1; the leftover seconds are ignored
    scheduledService.emitMetric();
    scheduledService.shutDown();
    // the metric should go up by 1
    Tasks.waitFor(4L, () -> getMetric(metricStore, runId, profileId, "system." + Constants.Metrics.Program.PROGRAM_NODE_MINUTES), 10, TimeUnit.SECONDS);
}
Also used : ProfileId(io.cdap.cdap.proto.id.ProfileId) MetricStore(io.cdap.cdap.api.metrics.MetricStore) MetricsCollectionService(io.cdap.cdap.api.metrics.MetricsCollectionService) ProgramRunId(io.cdap.cdap.proto.id.ProgramRunId) Test(org.junit.Test)
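
Both ProfileMetricServiceTest examples (this one and Example 3 below) call a getMetric helper that is not part of this excerpt. A plausible sketch of such a helper, built only from the MetricStore query API shown elsewhere on this page; the exact tag set and the ProfileId accessor are assumptions (in particular Constants.Metrics.Tag.PROFILE), and the real helper may tag the query differently:

// Sketch of a getMetric-style helper: sum the named metric for one workflow run.
// The profile tag below is an assumption; the actual helper may also tag the profile scope.
private static long getMetric(MetricStore metricStore, ProgramRunId runId, ProfileId profileId, String metricName) {
    Map<String, String> tags = ImmutableMap.of(
        Constants.Metrics.Tag.NAMESPACE, runId.getNamespace(),
        Constants.Metrics.Tag.PROFILE, profileId.getProfile(),
        Constants.Metrics.Tag.APP, runId.getApplication(),
        Constants.Metrics.Tag.WORKFLOW, runId.getProgram(),
        Constants.Metrics.Tag.RUN_ID, runId.getRun());
    Collection<MetricTimeSeries> result = metricStore.query(new MetricDataQuery(
        0, Integer.MAX_VALUE, Integer.MAX_VALUE, metricName, AggregationFunction.SUM, tags, new ArrayList<>()));
    long total = 0;
    for (MetricTimeSeries series : result) {
        for (TimeValue timeValue : series.getTimeValues()) {
            total += timeValue.getValue();
        }
    }
    return total;
}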

Example 3 with MetricStore

Use of io.cdap.cdap.api.metrics.MetricStore in project cdap by caskdata.

The class ProfileMetricServiceTest, method testProfileMetrics.

@Test
public void testProfileMetrics() throws Exception {
    ProgramRunId runId = NamespaceId.DEFAULT.app("myApp").workflow("myProgram").run(RunIds.generate());
    ProfileId profileId = NamespaceId.DEFAULT.profile("myProfile");
    MetricsCollectionService collectionService = injector.getInstance(MetricsCollectionService.class);
    MetricStore metricStore = injector.getInstance(MetricStore.class);
    // There are 5 nodes and the metrics are emitted every 2 mins, so each emit should increase the node minutes by 10
    ProfileMetricService scheduledService = new ProfileMetricService(collectionService, runId, profileId, 5, 2);
    // emit and verify the results
    scheduledService.emitMetric();
    Tasks.waitFor(10L, () -> getMetric(metricStore, runId, profileId, "system." + Constants.Metrics.Program.PROGRAM_NODE_MINUTES), 10, TimeUnit.SECONDS);
    scheduledService.emitMetric();
    Tasks.waitFor(20L, () -> getMetric(metricStore, runId, profileId, "system." + Constants.Metrics.Program.PROGRAM_NODE_MINUTES), 10, TimeUnit.SECONDS);
    scheduledService.emitMetric();
    Tasks.waitFor(30L, () -> getMetric(metricStore, runId, profileId, "system." + Constants.Metrics.Program.PROGRAM_NODE_MINUTES), 10, TimeUnit.SECONDS);
}
Also used : ProfileId(io.cdap.cdap.proto.id.ProfileId) MetricStore(io.cdap.cdap.api.metrics.MetricStore) MetricsCollectionService(io.cdap.cdap.api.metrics.MetricsCollectionService) ProgramRunId(io.cdap.cdap.proto.id.ProgramRunId) Test(org.junit.Test)

Example 4 with MetricStore

Use of io.cdap.cdap.api.metrics.MetricStore in project cdap by caskdata.

The class KafkaLogProcessorPipelineTest, method testMetricsAppender.

@Test
public void testMetricsAppender() throws Exception {
    Injector injector = KAFKA_TESTER.getInjector();
    MetricsCollectionService collectionService = injector.getInstance(MetricsCollectionService.class);
    collectionService.startAndWait();
    LoggerContext loggerContext = new LocalAppenderContext(injector.getInstance(TransactionRunner.class), injector.getInstance(LocationFactory.class), injector.getInstance(MetricsCollectionService.class));
    final File logDir = TEMP_FOLDER.newFolder();
    loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
    LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
    configurator.setContext(loggerContext);
    URL configURL = getClass().getClassLoader().getResource("pipeline-metric-appender.xml");
    Assert.assertNotNull(configURL);
    configurator.doConfigure(configURL);
    String topic = "metricsPipeline";
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(CConfiguration.create(), "testMetricAppender", loggerContext, NO_OP_METRICS_CONTEXT, 0), checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Publish some log messages to Kafka
    long now = System.currentTimeMillis();
    WorkerLoggingContext loggingContext = new WorkerLoggingContext("default", "app1", "worker1", "run1", "instance1");
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 500),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100)), loggingContext);
    WorkflowProgramLoggingContext workflowProgramLoggingContext =
        new WorkflowProgramLoggingContext("default", "app1", "wflow1", "run1", ProgramType.MAPREDUCE, "mr1", "mrun1");
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "2", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.TRACE, "3", now - 500)), workflowProgramLoggingContext);
    ServiceLoggingContext serviceLoggingContext = new ServiceLoggingContext(
        NamespaceId.SYSTEM.getNamespace(), Constants.Logging.COMPONENT_NAME, Constants.Service.TRANSACTION);
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "2", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "3", now - 500),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900)), serviceLoggingContext);
    final MetricStore metricStore = injector.getInstance(MetricStore.class);
    try {
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.info",
                AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(loggingContext), new ArrayList<>()), 5L);
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.debug",
                AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(loggingContext), new ArrayList<>()), 1L);
        // mapreduce metrics context
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.warn",
                AggregationFunction.SUM,
                ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default", Constants.Metrics.Tag.APP, "app1",
                    Constants.Metrics.Tag.MAPREDUCE, "mr1", Constants.Metrics.Tag.RUN_ID, "mrun1"),
                new ArrayList<>()), 2L);
        // workflow metrics context
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.trace",
                AggregationFunction.SUM,
                ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default", Constants.Metrics.Tag.APP, "app1",
                    Constants.Metrics.Tag.WORKFLOW, "wflow1", Constants.Metrics.Tag.RUN_ID, "run1"),
                new ArrayList<>()), 1L);
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.services.log.error",
                AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(serviceLoggingContext), new ArrayList<>()), 3L);
    } finally {
        pipeline.stopAndWait();
        loggerContext.stop();
        collectionService.stopAndWait();
    }
}
Also used : MetricStore(io.cdap.cdap.api.metrics.MetricStore) WorkerLoggingContext(io.cdap.cdap.logging.context.WorkerLoggingContext) LocalMetricsCollectionService(io.cdap.cdap.metrics.collect.LocalMetricsCollectionService) MetricsCollectionService(io.cdap.cdap.api.metrics.MetricsCollectionService) ArrayList(java.util.ArrayList) LogProcessorPipelineContext(io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext) ServiceLoggingContext(io.cdap.cdap.common.logging.ServiceLoggingContext) LoggerContext(ch.qos.logback.classic.LoggerContext) URL(java.net.URL) LocationFactory(org.apache.twill.filesystem.LocationFactory) LocalAppenderContext(io.cdap.cdap.logging.framework.LocalAppenderContext) WorkflowProgramLoggingContext(io.cdap.cdap.logging.context.WorkflowProgramLoggingContext) Injector(com.google.inject.Injector) TransactionRunner(io.cdap.cdap.spi.data.transaction.TransactionRunner) MetricDataQuery(io.cdap.cdap.api.metrics.MetricDataQuery) File(java.io.File) LogPipelineConfigurator(io.cdap.cdap.logging.pipeline.LogPipelineConfigurator) Test(org.junit.Test)
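
verifyMetricsWithRetry is a private helper of the test class and is not shown in this excerpt. A minimal sketch of what it might do, assuming it simply polls the store until the aggregated value matches the expected count (Tasks.waitFor and the query types are the same ones used in the other examples on this page):

// Sketch of a verifyMetricsWithRetry-style helper: poll the MetricStore until the
// summed result of the query equals the expected value, or fail after the timeout.
private void verifyMetricsWithRetry(MetricStore metricStore, MetricDataQuery query, long expected) throws Exception {
    Tasks.waitFor(expected, () -> {
        long total = 0;
        for (MetricTimeSeries series : metricStore.query(query)) {
            for (TimeValue timeValue : series.getTimeValues()) {
                total += timeValue.getValue();
            }
        }
        return total;
    }, 10, TimeUnit.SECONDS);
}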

Example 5 with MetricStore

Use of io.cdap.cdap.api.metrics.MetricStore in project cdap by caskdata.

The class MetricsProcessorServiceTest, method testMetricsProcessor.

@Test
public void testMetricsProcessor() throws Exception {
    injector.getInstance(TransactionManager.class).startAndWait();
    StoreDefinition.createAllTables(injector.getInstance(StructuredTableAdmin.class));
    injector.getInstance(DatasetOpExecutorService.class).startAndWait();
    injector.getInstance(DatasetService.class).startAndWait();
    final MetricStore metricStore = injector.getInstance(MetricStore.class);
    Set<Integer> partitions = new HashSet<>();
    for (int i = 0; i < cConf.getInt(Constants.Metrics.MESSAGING_TOPIC_NUM); i++) {
        partitions.add(i);
    }
    // Start MessagingMetricsProcessorManagerService before publishing metrics via the messaging service.
    // Intentionally set the queue size to a small value, so that MessagingMetricsProcessorManagerService
    // internally persists metrics while more messages are still to be fetched
    MessagingMetricsProcessorManagerService messagingMetricsProcessorManagerService = new MessagingMetricsProcessorManagerService(cConf, injector.getInstance(MetricDatasetFactory.class), messagingService, injector.getInstance(SchemaGenerator.class), injector.getInstance(DatumReaderFactory.class), metricStore, injector.getInstance(MetricsWriterProvider.class), partitions, new NoopMetricsContext(), 50, 0);
    messagingMetricsProcessorManagerService.startAndWait();
    long startTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
    // Publish metrics with messaging service and record expected metrics
    for (int i = 10; i < 20; i++) {
        publishMessagingMetrics(i, startTime, METRICS_CONTEXT, expected, SYSTEM_METRIC_PREFIX, MetricType.COUNTER);
    }
    Thread.sleep(500);
    // Stop and restart messagingMetricsProcessorManagerService
    messagingMetricsProcessorManagerService.stopAndWait();
    // Intentionally set the queue size to a large value, so that MessagingMetricsProcessorManagerService
    // internally only persists metrics when terminating.
    messagingMetricsProcessorManagerService = new MessagingMetricsProcessorManagerService(cConf, injector.getInstance(MetricDatasetFactory.class), messagingService, injector.getInstance(SchemaGenerator.class), injector.getInstance(DatumReaderFactory.class), metricStore, injector.getInstance(MetricsWriterProvider.class), partitions, new NoopMetricsContext(), 50, 0);
    messagingMetricsProcessorManagerService.startAndWait();
    // Publish metrics after MessagingMetricsProcessorManagerService restarts and record expected metrics
    for (int i = 20; i < 30; i++) {
        publishMessagingMetrics(i, startTime, METRICS_CONTEXT, expected, SYSTEM_METRIC_PREFIX, MetricType.GAUGE);
    }
    final List<String> missingMetricNames = new ArrayList<>();
    // Wait until all expected metrics are retrieved; if a timeout occurs, print out the missing metrics
    try {
        Tasks.waitFor(true, new Callable<Boolean>() {

            @Override
            public Boolean call() throws Exception {
                return canQueryAllMetrics(metricStore, METRICS_CONTEXT, expected, missingMetricNames);
            }
        }, 10000, TimeUnit.MILLISECONDS, "Failed to get all metrics");
    } catch (TimeoutException e) {
        Assert.fail(String.format("Metrics: [%s] cannot be found in the metrics store.", Joiner.on(", ").join(missingMetricNames)));
    }
    // Query metrics from the metricStore and compare them with the expected ones
    assertMetricsResult(metricStore, METRICS_CONTEXT, expected);
    // Query for the 5 counter metrics published with messaging between time 5 - 14
    Collection<MetricTimeSeries> queryResult = metricStore.query(new MetricDataQuery(5, 14, 1, Integer.MAX_VALUE, ImmutableMap.of(SYSTEM_METRIC_PREFIX + COUNTER_METRIC_NAME, AggregationFunction.SUM), METRICS_CONTEXT, ImmutableList.<String>of(), null));
    MetricTimeSeries timeSeries = Iterables.getOnlyElement(queryResult);
    Assert.assertEquals(5, timeSeries.getTimeValues().size());
    for (TimeValue timeValue : timeSeries.getTimeValues()) {
        Assert.assertEquals(1L, timeValue.getValue());
    }
    // Stop services and servers
    messagingMetricsProcessorManagerService.stopAndWait();
    // Delete all metrics
    metricStore.deleteAll();
}
Also used : MetricStore(io.cdap.cdap.api.metrics.MetricStore) DatumReaderFactory(io.cdap.cdap.internal.io.DatumReaderFactory) StructuredTableAdmin(io.cdap.cdap.spi.data.StructuredTableAdmin) ArrayList(java.util.ArrayList) MetricTimeSeries(io.cdap.cdap.api.metrics.MetricTimeSeries) DatasetService(io.cdap.cdap.data2.datafabric.dataset.service.DatasetService) NoopMetricsContext(io.cdap.cdap.api.metrics.NoopMetricsContext) MetricDatasetFactory(io.cdap.cdap.metrics.store.MetricDatasetFactory) MetricsWriterProvider(io.cdap.cdap.metrics.process.loader.MetricsWriterProvider) TimeValue(io.cdap.cdap.api.dataset.lib.cube.TimeValue) HashSet(java.util.HashSet) TimeoutException(java.util.concurrent.TimeoutException) SchemaGenerator(io.cdap.cdap.internal.io.SchemaGenerator) TimeoutException(java.util.concurrent.TimeoutException) TransactionManager(org.apache.tephra.TransactionManager) DatasetOpExecutorService(io.cdap.cdap.data2.datafabric.dataset.service.executor.DatasetOpExecutorService) MetricDataQuery(io.cdap.cdap.api.metrics.MetricDataQuery) Test(org.junit.Test)
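
assertMetricsResult and canQueryAllMetrics are helpers of MetricsProcessorServiceTest that are not shown above. Both revolve around the same idea: query each expected metric as a single aggregated value and compare it with what was recorded while publishing. A minimal sketch of that idea, assuming expected maps metric names to their aggregated values (the helper name and parameter shapes here are assumptions):

// Sketch: for each expected metric, query one aggregated value over all time using the
// same tags (METRICS_CONTEXT) the metrics were published with, and compare.
private void assertMetricsResult(MetricStore metricStore, Map<String, String> metricsContext, Map<String, Long> expected) {
    for (Map.Entry<String, Long> entry : expected.entrySet()) {
        Collection<MetricTimeSeries> result = metricStore.query(new MetricDataQuery(
            0, Integer.MAX_VALUE, Integer.MAX_VALUE, entry.getKey(), AggregationFunction.SUM,
            metricsContext, new ArrayList<>()));
        MetricTimeSeries timeSeries = Iterables.getOnlyElement(result);
        long actual = Iterables.getOnlyElement(timeSeries.getTimeValues()).getValue();
        Assert.assertEquals(entry.getValue().longValue(), actual);
    }
}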

Aggregations

MetricStore (io.cdap.cdap.api.metrics.MetricStore): 11
Test (org.junit.Test): 6
MetricsCollectionService (io.cdap.cdap.api.metrics.MetricsCollectionService): 5
ProfileId (io.cdap.cdap.proto.id.ProfileId): 4
ProgramRunId (io.cdap.cdap.proto.id.ProgramRunId): 4
TransactionManager (org.apache.tephra.TransactionManager): 4
BeforeClass (org.junit.BeforeClass): 4
Injector (com.google.inject.Injector): 3
CConfiguration (io.cdap.cdap.common.conf.CConfiguration): 3
LocationFactory (org.apache.twill.filesystem.LocationFactory): 3
MetricDataQuery (io.cdap.cdap.api.metrics.MetricDataQuery): 2
SystemDatasetInstantiator (io.cdap.cdap.data.dataset.SystemDatasetInstantiator): 2
DatasetService (io.cdap.cdap.data2.datafabric.dataset.service.DatasetService): 2
DatasetOpExecutorService (io.cdap.cdap.data2.datafabric.dataset.service.executor.DatasetOpExecutorService): 2
DatasetFramework (io.cdap.cdap.data2.dataset2.DatasetFramework): 2
SingleThreadDatasetCache (io.cdap.cdap.data2.dataset2.SingleThreadDatasetCache): 2
TransactionExecutorFactory (io.cdap.cdap.data2.transaction.TransactionExecutorFactory): 2
ArrayList (java.util.ArrayList): 2
LoggerContext (ch.qos.logback.classic.LoggerContext): 1
Service (com.google.common.util.concurrent.Service): 1