Search in sources:

Example 1 with LogProcessorPipelineContext

Use of co.cask.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by caskdata.

The class LocalLogAppender, method start.

@Override
public void start() {
    if (!started.compareAndSet(false, true)) {
        return;
    }
    // Load and start all configured log processing pipelines
    LogPipelineLoader pipelineLoader = new LogPipelineLoader(cConf);
    Map<String, LogPipelineSpecification<AppenderContext>> specs = pipelineLoader.load(new Provider<AppenderContext>() {

        @Override
        public AppenderContext get() {
            return new LocalAppenderContext(datasetFramework, txClient, locationFactory, metricsCollectionService);
        }
    });
    // Use the event delay as the sync interval
    long syncIntervalMillis = cConf.getLong(Constants.Logging.PIPELINE_EVENT_DELAY_MS);
    for (LogPipelineSpecification<AppenderContext> spec : specs.values()) {
        LogProcessorPipelineContext context = new LogProcessorPipelineContext(cConf, spec.getName(), spec.getContext(), spec.getContext().getMetricsContext(), spec.getContext().getInstanceId());
        LocalLogProcessorPipeline pipeline = new LocalLogProcessorPipeline(context, syncIntervalMillis);
        pipeline.startAndWait();
        pipelines.add(pipeline);
    }
    super.start();
}
Also used: LocalAppenderContext(co.cask.cdap.logging.framework.LocalAppenderContext) LogPipelineSpecification(co.cask.cdap.logging.framework.LogPipelineSpecification) AppenderContext(co.cask.cdap.api.logging.AppenderContext) LogPipelineLoader(co.cask.cdap.logging.framework.LogPipelineLoader) LogProcessorPipelineContext(co.cask.cdap.logging.pipeline.LogProcessorPipelineContext)
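
Note that Provider declares a single abstract method, get(), so on Java 8+ the anonymous class above collapses to a lambda. A minimal sketch, assuming the same fields are in scope:

// Sketch only (assumes Java 8+): the Provider<AppenderContext> written as a lambda.
Map<String, LogPipelineSpecification<AppenderContext>> specs = pipelineLoader.load(
    () -> new LocalAppenderContext(datasetFramework, txClient,
                                   locationFactory, metricsCollectionService));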

Example 2 with LogProcessorPipelineContext

Use of co.cask.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by caskdata.

The class KafkaLogProcessorPipelineTest, method testMultiAppenders.

@Test
public void testMultiAppenders() throws Exception {
    final File logDir = TEMP_FOLDER.newFolder();
    LoggerContext loggerContext = new LoggerContext();
    loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
    LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
    configurator.setContext(loggerContext);
    URL configURL = getClass().getClassLoader().getResource("pipeline-multi-appenders.xml");
    Assert.assertNotNull(configURL);
    configurator.doConfigure(configURL);
    String topic = "testMultiAppenders";
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(CConfiguration.create(), "testMultiAppenders", loggerContext, NO_OP_METRICS_CONTEXT, 0), checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Publish some log messages to Kafka using a non-specific logger
    long now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(createLoggingEvent("logger.trace", Level.TRACE, "TRACE", now - 1000), createLoggingEvent("logger.debug", Level.DEBUG, "DEBUG", now - 900), createLoggingEvent("logger.info", Level.INFO, "INFO", now - 800), createLoggingEvent("logger.warn", Level.WARN, "WARN", now - 700), createLoggingEvent("logger.error", Level.ERROR, "ERROR", now - 600)));
    // All logs should get logged to the default.log file
    Tasks.waitFor(true, new Callable<Boolean>() {

        @Override
        public Boolean call() throws Exception {
            File logFile = new File(logDir, "default.log");
            List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
            return Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR").equals(lines);
        }
    }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Publish some more log messages via the non-additive "test.info" logger.
    now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(createLoggingEvent("test.info.trace", Level.TRACE, "TRACE", now - 1000), createLoggingEvent("test.info.debug", Level.DEBUG, "DEBUG", now - 900), createLoggingEvent("test.info", Level.INFO, "INFO", now - 800), createLoggingEvent("test.info.warn", Level.WARN, "WARN", now - 700), createLoggingEvent("test.info.error", Level.ERROR, "ERROR", now - 600)));
    // Only logs at INFO level or above should be written to the info.log file
    Tasks.waitFor(true, new Callable<Boolean>() {

        @Override
        public Boolean call() throws Exception {
            File logFile = new File(logDir, "info.log");
            List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
            return Arrays.asList("INFO", "WARN", "ERROR").equals(lines);
        }
    }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // The default.log file shouldn't change, because the test.info logger is non-additive
    File defaultLogFile = new File(logDir, "default.log");
    List<String> lines = Files.readAllLines(defaultLogFile.toPath(), StandardCharsets.UTF_8);
    Assert.assertEquals(Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR"), lines);
    // Publish a log message via the additive "test.error" logger.
    now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(createLoggingEvent("test.error.1.2", Level.ERROR, "ERROR", now - 1000)));
    // Expect the log to be appended to both the error.log and the default.log files
    Tasks.waitFor(true, new Callable<Boolean>() {

        @Override
        public Boolean call() throws Exception {
            File logFile = new File(logDir, "error.log");
            List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
            if (!Collections.singletonList("ERROR").equals(lines)) {
                return false;
            }
            logFile = new File(logDir, "default.log");
            lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
            return Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR", "ERROR").equals(lines);
        }
    }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    pipeline.stopAndWait();
    loggerContext.stop();
}
Also used: LogProcessorPipelineContext(co.cask.cdap.logging.pipeline.LogProcessorPipelineContext) LoggerContext(ch.qos.logback.classic.LoggerContext) URL(java.net.URL) NoSuchElementException(java.util.NoSuchElementException) IOException(java.io.IOException) List(java.util.List) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) LinkedList(java.util.LinkedList) File(java.io.File) LogPipelineConfigurator(co.cask.cdap.logging.pipeline.LogPipelineConfigurator) Test(org.junit.Test)
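
The assertions above rely on plain logback additivity: the "test.info" logger is configured as non-additive in pipeline-multi-appenders.xml (the XML itself is not shown here), so events it accepts never propagate to the root logger's default.log appender. A hypothetical programmatic equivalent of that part of the configuration, for illustration only:

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.FileAppender;

// Hypothetical equivalent of the non-additive "test.info" logger in
// pipeline-multi-appenders.xml; the file path and pattern are illustrative.
LoggerContext ctx = new LoggerContext();

PatternLayoutEncoder encoder = new PatternLayoutEncoder();
encoder.setContext(ctx);
encoder.setPattern("%msg%n");
encoder.start();

FileAppender<ILoggingEvent> infoAppender = new FileAppender<>();
infoAppender.setContext(ctx);
infoAppender.setName("INFO_FILE");
infoAppender.setFile("/tmp/logs/info.log");
infoAppender.setEncoder(encoder);
infoAppender.start();

Logger testInfo = ctx.getLogger("test.info");
testInfo.setLevel(Level.INFO);
// Non-additive: events accepted here are not forwarded to the root logger,
// which is why default.log stays unchanged in the assertions above.
testInfo.setAdditive(false);
testInfo.addAppender(infoAppender);
ctx.start();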

Example 3 with LogProcessorPipelineContext

Use of co.cask.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by caskdata.

The class KafkaLogProcessorPipelineTest, method testMetricsAppender.

@Test
public void testMetricsAppender() throws Exception {
    Injector injector = KAFKA_TESTER.getInjector();
    MetricsCollectionService collectionService = injector.getInstance(MetricsCollectionService.class);
    collectionService.startAndWait();
    LoggerContext loggerContext = new LocalAppenderContext(injector.getInstance(DatasetFramework.class), injector.getInstance(TransactionSystemClient.class), injector.getInstance(LocationFactory.class), injector.getInstance(MetricsCollectionService.class));
    final File logDir = TEMP_FOLDER.newFolder();
    loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
    LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
    configurator.setContext(loggerContext);
    URL configURL = getClass().getClassLoader().getResource("pipeline-metric-appender.xml");
    Assert.assertNotNull(configURL);
    configurator.doConfigure(configURL);
    String topic = "metricsPipeline";
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(CConfiguration.create(), "testMetricAppender", loggerContext, NO_OP_METRICS_CONTEXT, 0), checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Publish some log messages to Kafka
    long now = System.currentTimeMillis();
    FlowletLoggingContext flowletLoggingContext = new FlowletLoggingContext("default", "app1", "flow1", "flowlet1", "run1", "instance1");
    publishLog(topic, ImmutableList.of(createLoggingEvent("test.logger", Level.INFO, "0", now - 1000), createLoggingEvent("test.logger", Level.INFO, "2", now - 700), createLoggingEvent("test.logger", Level.INFO, "3", now - 500), createLoggingEvent("test.logger", Level.INFO, "1", now - 900), createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600), createLoggingEvent("test.logger", Level.INFO, "4", now - 100)), flowletLoggingContext);
    WorkflowProgramLoggingContext workflowProgramLoggingContext = new WorkflowProgramLoggingContext("default", "app1", "wflow1", "run1", ProgramType.MAPREDUCE, "mr1", "mrun1");
    publishLog(topic, ImmutableList.of(createLoggingEvent("test.logger", Level.WARN, "0", now - 1000), createLoggingEvent("test.logger", Level.WARN, "2", now - 700), createLoggingEvent("test.logger", Level.TRACE, "3", now - 500)), workflowProgramLoggingContext);
    ServiceLoggingContext serviceLoggingContext = new ServiceLoggingContext(NamespaceId.SYSTEM.getNamespace(), Constants.Logging.COMPONENT_NAME, Constants.Service.TRANSACTION);
    publishLog(topic, ImmutableList.of(createLoggingEvent("test.logger", Level.ERROR, "0", now - 1000), createLoggingEvent("test.logger", Level.ERROR, "2", now - 700), createLoggingEvent("test.logger", Level.ERROR, "3", now - 500), createLoggingEvent("test.logger", Level.INFO, "1", now - 900)), serviceLoggingContext);
    final MetricStore metricStore = injector.getInstance(MetricStore.class);
    try {
        verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.info", AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(flowletLoggingContext), new ArrayList<String>()), 5L);
        verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.debug", AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(flowletLoggingContext), new ArrayList<String>()), 1L);
        verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.warn", AggregationFunction.SUM, // mapreduce metrics context
        ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default", Constants.Metrics.Tag.APP, "app1", Constants.Metrics.Tag.MAPREDUCE, "mr1", Constants.Metrics.Tag.RUN_ID, "mrun1"), new ArrayList<String>()), 2L);
        verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.trace", AggregationFunction.SUM, // workflow metrics context
        ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default", Constants.Metrics.Tag.APP, "app1", Constants.Metrics.Tag.WORKFLOW, "wflow1", Constants.Metrics.Tag.RUN_ID, "run1"), new ArrayList<String>()), 1L);
        verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.services.log.error", AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(serviceLoggingContext), new ArrayList<String>()), 3L);
    } finally {
        pipeline.stopAndWait();
        loggerContext.stop();
        collectionService.stopAndWait();
    }
}
Also used: MetricStore(co.cask.cdap.api.metrics.MetricStore) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) LocalMetricsCollectionService(co.cask.cdap.metrics.collect.LocalMetricsCollectionService) ArrayList(java.util.ArrayList) LogProcessorPipelineContext(co.cask.cdap.logging.pipeline.LogProcessorPipelineContext) ServiceLoggingContext(co.cask.cdap.common.logging.ServiceLoggingContext) LoggerContext(ch.qos.logback.classic.LoggerContext) URL(java.net.URL) LocationFactory(org.apache.twill.filesystem.LocationFactory) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) TransactionSystemClient(org.apache.tephra.TransactionSystemClient) LocalAppenderContext(co.cask.cdap.logging.framework.LocalAppenderContext) WorkflowProgramLoggingContext(co.cask.cdap.logging.context.WorkflowProgramLoggingContext) Injector(com.google.inject.Injector) FlowletLoggingContext(co.cask.cdap.logging.context.FlowletLoggingContext) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) File(java.io.File) LogPipelineConfigurator(co.cask.cdap.logging.pipeline.LogPipelineConfigurator) Test(org.junit.Test)
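
The verifyMetricsWithRetry helper is private to the test class and its body is not included in this snippet. A plausible sketch, assuming it polls the MetricStore until the single SUM-aggregated time series reaches the expected count (the helper's shape and timeouts are assumptions):

// Hypothetical sketch of verifyMetricsWithRetry: poll the MetricStore until
// the aggregated value matches the expected count, or time out.
private void verifyMetricsWithRetry(final MetricStore metricStore,
                                    final MetricDataQuery query,
                                    long expected) throws Exception {
    Tasks.waitFor(expected, new Callable<Long>() {

        @Override
        public Long call() throws Exception {
            Collection<MetricTimeSeries> result = metricStore.query(query);
            if (result.isEmpty()) {
                return -1L;
            }
            // SUM aggregation over the full time range yields a single time
            // series holding a single time value.
            List<TimeValue> timeValues = result.iterator().next().getTimeValues();
            return timeValues.isEmpty() ? -1L : timeValues.get(0).getValue();
        }
    }, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
}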

Example 4 with LogProcessorPipelineContext

Use of co.cask.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by caskdata.

The class KafkaLogProcessorPipelineTest, method testRegularFlush.

@Test
public void testRegularFlush() throws Exception {
    String topic = "testFlush";
    LoggerContext loggerContext = createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"), TestAppender.class.getName());
    final TestAppender appender = getAppender(loggerContext.getLogger(Logger.ROOT_LOGGER_NAME), "Test", TestAppender.class);
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    // Use a long checkpoint interval and a short event delay. Expect flush to be called
    // at least once per event delay.
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024, 100, 1048576, 2000);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0), checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Even when there are no events, flush should still get called.
    Tasks.waitFor(5, new Callable<Integer>() {

        @Override
        public Integer call() throws Exception {
            return appender.getFlushCount();
        }
    }, 3, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Publish some logs
    long now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(createLoggingEvent("test.logger", Level.INFO, "0", now - 500), createLoggingEvent("test.logger", Level.INFO, "1", now - 300), createLoggingEvent("test.logger", Level.INFO, "2", now + 100)));
    // Wait until all logs have been received.
    Tasks.waitFor(3, new Callable<Integer>() {

        @Override
        public Integer call() throws Exception {
            return appender.getEvents().size();
        }
    }, 3, TimeUnit.SECONDS, 200, TimeUnit.MILLISECONDS);
    pipeline.stopAndWait();
    // Should get at least 20 flush calls: flush happens on every event delay (100 ms),
    // far more often than the 2-second checkpoint interval
    Assert.assertTrue(appender.getFlushCount() >= 20);
}
Also used: AtomicInteger(java.util.concurrent.atomic.AtomicInteger) LogProcessorPipelineContext(co.cask.cdap.logging.pipeline.LogProcessorPipelineContext) LoggerContext(ch.qos.logback.classic.LoggerContext) NoSuchElementException(java.util.NoSuchElementException) IOException(java.io.IOException) Test(org.junit.Test)
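
TestAppender is a test-local appender whose source is not part of this snippet. A hypothetical sketch of such a class, assuming the pipeline flushes any attached appender that implements java.io.Flushable:

import java.io.Flushable;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.AppenderBase;

// Hypothetical sketch of the TestAppender used above: buffer events in memory
// and count how many times the pipeline calls flush().
public final class TestAppender extends AppenderBase<ILoggingEvent> implements Flushable {

    private final Queue<ILoggingEvent> events = new ConcurrentLinkedQueue<>();
    private final AtomicInteger flushCount = new AtomicInteger();

    @Override
    protected void append(ILoggingEvent event) {
        events.add(event);
    }

    @Override
    public void flush() {
        flushCount.incrementAndGet();
    }

    public int getFlushCount() {
        return flushCount.get();
    }

    public Queue<ILoggingEvent> getEvents() {
        return events;
    }
}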

Example 5 with LogProcessorPipelineContext

Use of co.cask.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by caskdata.

The class DistributedLogFramework, method createService.

@Override
protected Service createService(Set<Integer> partitions) {
    Map<String, LogPipelineSpecification<AppenderContext>> specs = new LogPipelineLoader(cConf).load(contextProvider);
    int pipelineCount = specs.size();
    // Create one KafkaLogProcessorPipeline per spec
    final List<Service> pipelines = new ArrayList<>();
    for (final LogPipelineSpecification<AppenderContext> pipelineSpec : specs.values()) {
        final CConfiguration cConf = pipelineSpec.getConf();
        final AppenderContext context = pipelineSpec.getContext();
        long bufferSize = getBufferSize(pipelineCount, cConf, partitions.size());
        final String topic = cConf.get(Constants.Logging.KAFKA_TOPIC);
        final KafkaPipelineConfig config = new KafkaPipelineConfig(topic, partitions, bufferSize, cConf.getLong(Constants.Logging.PIPELINE_EVENT_DELAY_MS), cConf.getInt(Constants.Logging.PIPELINE_KAFKA_FETCH_SIZE), cConf.getLong(Constants.Logging.PIPELINE_CHECKPOINT_INTERVAL_MS));
        RetryStrategy retryStrategy = RetryStrategies.fromConfiguration(cConf, "system.log.process.");
        pipelines.add(new RetryOnStartFailureService(new Supplier<Service>() {

            @Override
            public Service get() {
                return new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(cConf, context.getName(), context, context.getMetricsContext(), context.getInstanceId()), checkpointManagerFactory.create(topic, pipelineSpec.getCheckpointPrefix()), brokerService, config);
            }
        }, retryStrategy));
    }
    // Returns a Service that starts/stops all pipelines.
    return new AbstractIdleService() {

        @Override
        protected void startUp() throws Exception {
            // Start all pipelines
            validateAllFutures(Iterables.transform(pipelines, new Function<Service, ListenableFuture<State>>() {

                @Override
                public ListenableFuture<State> apply(Service service) {
                    return service.start();
                }
            }));
        }

        @Override
        protected void shutDown() throws Exception {
            // Stop all pipelines
            validateAllFutures(Iterables.transform(pipelines, new Function<Service, ListenableFuture<State>>() {

                @Override
                public ListenableFuture<State> apply(Service service) {
                    return service.stop();
                }
            }));
        }
    };
}
Also used: LogPipelineSpecification(co.cask.cdap.logging.framework.LogPipelineSpecification) ArrayList(java.util.ArrayList) KafkaPipelineConfig(co.cask.cdap.logging.pipeline.kafka.KafkaPipelineConfig) ResourceBalancerService(co.cask.cdap.common.resource.ResourceBalancerService) AbstractIdleService(com.google.common.util.concurrent.AbstractIdleService) RetryOnStartFailureService(co.cask.cdap.common.service.RetryOnStartFailureService) DiscoveryService(org.apache.twill.discovery.DiscoveryService) Service(com.google.common.util.concurrent.Service) BrokerService(org.apache.twill.kafka.client.BrokerService) LogPipelineLoader(co.cask.cdap.logging.framework.LogPipelineLoader) LogProcessorPipelineContext(co.cask.cdap.logging.pipeline.LogProcessorPipelineContext) CConfiguration(co.cask.cdap.common.conf.CConfiguration) Function(com.google.common.base.Function) KafkaLogProcessorPipeline(co.cask.cdap.logging.pipeline.kafka.KafkaLogProcessorPipeline) AppenderContext(co.cask.cdap.api.logging.AppenderContext) Supplier(com.google.common.base.Supplier) RetryStrategy(co.cask.cdap.common.service.RetryStrategy)
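
The getBufferSize helper belongs to DistributedLogFramework and its body is not shown here. A hypothetical sketch of the kind of computation it performs, dividing a total buffer budget across pipelines and partitions (the configuration key and default below are assumptions, not actual CDAP settings):

// Hypothetical sketch of getBufferSize: split a total buffer budget evenly
// across all pipelines and Kafka partitions, with a floor of one byte.
private long getBufferSize(int pipelineCount, CConfiguration cConf, int partitionCount) {
    // The key and the 8 MB default are illustrative only.
    long totalBytes = cConf.getLong("log.pipeline.buffer.total.bytes", 8L * 1024 * 1024);
    return Math.max(1L, totalBytes / ((long) pipelineCount * partitionCount));
}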

Aggregations

LogProcessorPipelineContext (co.cask.cdap.logging.pipeline.LogProcessorPipelineContext): 6
LoggerContext (ch.qos.logback.classic.LoggerContext): 4
ArrayList (java.util.ArrayList): 4
Test (org.junit.Test): 4
IOException (java.io.IOException): 3
NoSuchElementException (java.util.NoSuchElementException): 3
AppenderContext (co.cask.cdap.api.logging.AppenderContext): 2
LocalAppenderContext (co.cask.cdap.logging.framework.LocalAppenderContext): 2
LogPipelineLoader (co.cask.cdap.logging.framework.LogPipelineLoader): 2
LogPipelineSpecification (co.cask.cdap.logging.framework.LogPipelineSpecification): 2
LogPipelineConfigurator (co.cask.cdap.logging.pipeline.LogPipelineConfigurator): 2
File (java.io.File): 2
URL (java.net.URL): 2
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 2
ILoggingEvent (ch.qos.logback.classic.spi.ILoggingEvent): 1
MetricDataQuery (co.cask.cdap.api.metrics.MetricDataQuery): 1
MetricStore (co.cask.cdap.api.metrics.MetricStore): 1
MetricsCollectionService (co.cask.cdap.api.metrics.MetricsCollectionService): 1
CConfiguration (co.cask.cdap.common.conf.CConfiguration): 1
ServiceLoggingContext (co.cask.cdap.common.logging.ServiceLoggingContext): 1