Example 6 with LogProcessorPipelineContext

Use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by cdapio.

From the class KafkaLogProcessorPipelineTest, method testMetricsAppender:

@Test
public void testMetricsAppender() throws Exception {
    Injector injector = KAFKA_TESTER.getInjector();
    MetricsCollectionService collectionService = injector.getInstance(MetricsCollectionService.class);
    collectionService.startAndWait();
    LoggerContext loggerContext = new LocalAppenderContext(
        injector.getInstance(TransactionRunner.class),
        injector.getInstance(LocationFactory.class),
        injector.getInstance(MetricsCollectionService.class));
    final File logDir = TEMP_FOLDER.newFolder();
    loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
    LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
    configurator.setContext(loggerContext);
    URL configURL = getClass().getClassLoader().getResource("pipeline-metric-appender.xml");
    Assert.assertNotNull(configURL);
    configurator.doConfigure(configURL);
    String topic = "metricsPipeline";
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(
        new LogProcessorPipelineContext(CConfiguration.create(), "testMetricAppender", loggerContext,
                                        NO_OP_METRICS_CONTEXT, 0),
        checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Publish some log messages to Kafka
    long now = System.currentTimeMillis();
    WorkerLoggingContext loggingContext = new WorkerLoggingContext("default", "app1", "worker1", "run1", "instance1");
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 500),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100)),
        loggingContext);
    WorkflowProgramLoggingContext workflowProgramLoggingContext = new WorkflowProgramLoggingContext("default", "app1", "wflow1", "run1", ProgramType.MAPREDUCE, "mr1", "mrun1");
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "2", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.TRACE, "3", now - 500)),
        workflowProgramLoggingContext);
    ServiceLoggingContext serviceLoggingContext = new ServiceLoggingContext(NamespaceId.SYSTEM.getNamespace(), Constants.Logging.COMPONENT_NAME, Constants.Service.TRANSACTION);
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "2", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "3", now - 500),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900)),
        serviceLoggingContext);
    final MetricStore metricStore = injector.getInstance(MetricStore.class);
    try {
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.info",
                AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(loggingContext),
                new ArrayList<>()), 5L);
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.debug",
                AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(loggingContext),
                new ArrayList<>()), 1L);
        // mapreduce metrics context
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.warn",
                AggregationFunction.SUM,
                ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default",
                                Constants.Metrics.Tag.APP, "app1",
                                Constants.Metrics.Tag.MAPREDUCE, "mr1",
                                Constants.Metrics.Tag.RUN_ID, "mrun1"),
                new ArrayList<>()), 2L);
        // workflow metrics context
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.trace",
                AggregationFunction.SUM,
                ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default",
                                Constants.Metrics.Tag.APP, "app1",
                                Constants.Metrics.Tag.WORKFLOW, "wflow1",
                                Constants.Metrics.Tag.RUN_ID, "run1"),
                new ArrayList<>()), 1L);
        verifyMetricsWithRetry(metricStore,
            new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.services.log.error",
                AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(serviceLoggingContext),
                new ArrayList<>()), 3L);
    } finally {
        pipeline.stopAndWait();
        loggerContext.stop();
        collectionService.stopAndWait();
    }
}
Also used : MetricStore(io.cdap.cdap.api.metrics.MetricStore) WorkerLoggingContext(io.cdap.cdap.logging.context.WorkerLoggingContext) LocalMetricsCollectionService(io.cdap.cdap.metrics.collect.LocalMetricsCollectionService) MetricsCollectionService(io.cdap.cdap.api.metrics.MetricsCollectionService) ArrayList(java.util.ArrayList) LogProcessorPipelineContext(io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext) ServiceLoggingContext(io.cdap.cdap.common.logging.ServiceLoggingContext) LoggerContext(ch.qos.logback.classic.LoggerContext) URL(java.net.URL) LocationFactory(org.apache.twill.filesystem.LocationFactory) LocalAppenderContext(io.cdap.cdap.logging.framework.LocalAppenderContext) WorkflowProgramLoggingContext(io.cdap.cdap.logging.context.WorkflowProgramLoggingContext) Injector(com.google.inject.Injector) TransactionRunner(io.cdap.cdap.spi.data.transaction.TransactionRunner) MetricDataQuery(io.cdap.cdap.api.metrics.MetricDataQuery) File(java.io.File) LogPipelineConfigurator(io.cdap.cdap.logging.pipeline.LogPipelineConfigurator) Test(org.junit.Test)
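
The verifyMetricsWithRetry helper used above is defined elsewhere in KafkaLogProcessorPipelineTest and is not part of this excerpt. A minimal sketch of what such a retry-based check could look like, assuming MetricStore.query returns a collection of MetricTimeSeries and reusing the same Tasks.waitFor utility seen in these tests (illustrative only, not the actual cdap helper):

// Illustrative sketch; the real verifyMetricsWithRetry may differ.
// Assumed imports: java.util.Collection, io.cdap.cdap.api.metrics.MetricTimeSeries,
// io.cdap.cdap.api.dataset.lib.cube.TimeValue.
private void verifyMetricsWithRetry(MetricStore metricStore, MetricDataQuery query,
                                    long expectedCount) throws Exception {
    // Metrics are emitted asynchronously by the pipeline, so poll until the
    // aggregated sum for the queried metric matches the expected count.
    Tasks.waitFor(expectedCount, () -> {
        long sum = 0L;
        for (MetricTimeSeries series : metricStore.query(query)) {
            for (TimeValue timeValue : series.getTimeValues()) {
                sum += timeValue.getValue();
            }
        }
        return sum;
    }, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
}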

Example 7 with LogProcessorPipelineContext

Use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by cdapio.

From the class KafkaLogProcessorPipelineTest, method testRegularFlush:

@Test
public void testRegularFlush() throws Exception {
    String topic = "testFlush";
    LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
    final MockAppender appender = LogPipelineTestUtil.getAppender(loggerContext.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    // Use a longer checkpoint interval and a short event delay, so flush should
    // be called at least once per event delay.
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024, 100, 1048576, 2000);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(
        new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
        checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Even when there is no event, the flush should still get called.
    Tasks.waitFor(5, appender::getFlushCount, 3, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Publish some logs
    long now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 500),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 300),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now + 100)));
    // Wait until getting all logs.
    Tasks.waitFor(3, () -> appender.getEvents().size(), 3, TimeUnit.SECONDS, 200, TimeUnit.MILLISECONDS);
    pipeline.stopAndWait();
    // Should get at least 20 flush calls, since flush happens roughly once per
    // event delay (100 ms) over the several seconds the pipeline has been running.
    Assert.assertTrue(appender.getFlushCount() >= 20);
}
Also used : MockAppender(io.cdap.cdap.logging.pipeline.MockAppender) LogProcessorPipelineContext(io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext) LoggerContext(ch.qos.logback.classic.LoggerContext) Test(org.junit.Test)
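
For readers decoding the positional arguments passed to KafkaPipelineConfig throughout these tests, an annotated version of the constructor call follows. The roles are inferred from the surrounding comments and assertions, not from documented parameter names:

// Annotated version of the call above; the comments are inferred roles.
KafkaPipelineConfig config = new KafkaPipelineConfig(
    topic,                     // Kafka topic to consume log events from
    Collections.singleton(0),  // partitions of that topic to process
    1024,                      // max buffer size in bytes; exceeding it forces a write
    100,                       // event delay in ms; how long events are held for sorting
    1048576,                   // Kafka fetch buffer size in bytes
    2000);                     // checkpoint interval in ms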

Example 8 with LogProcessorPipelineContext

Use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by cdapio.

From the class KafkaLogProcessorPipelineTest, method testMultiAppenders:

@Test
public void testMultiAppenders() throws Exception {
    final File logDir = TEMP_FOLDER.newFolder();
    LoggerContext loggerContext = new LoggerContext();
    loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
    LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
    configurator.setContext(loggerContext);
    URL configURL = getClass().getClassLoader().getResource("pipeline-multi-appenders.xml");
    Assert.assertNotNull(configURL);
    configurator.doConfigure(configURL);
    String topic = "testMultiAppenders";
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(
        new LogProcessorPipelineContext(CConfiguration.create(), "testMultiAppenders", loggerContext,
                                        NO_OP_METRICS_CONTEXT, 0),
        checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Publish some log messages to Kafka using a non-specific logger
    long now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("logger.trace", Level.TRACE, "TRACE", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("logger.debug", Level.DEBUG, "DEBUG", now - 900),
        LogPipelineTestUtil.createLoggingEvent("logger.info", Level.INFO, "INFO", now - 800),
        LogPipelineTestUtil.createLoggingEvent("logger.warn", Level.WARN, "WARN", now - 700),
        LogPipelineTestUtil.createLoggingEvent("logger.error", Level.ERROR, "ERROR", now - 600)));
    // All logs should get logged to the default.log file
    Tasks.waitFor(true, () -> {
        File logFile = new File(logDir, "default.log");
        List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
        return Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR").equals(lines);
    }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Publish some more log messages via the non-additive "test.info" logger.
    now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.info.trace", Level.TRACE, "TRACE", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.info.debug", Level.DEBUG, "DEBUG", now - 900),
        LogPipelineTestUtil.createLoggingEvent("test.info", Level.INFO, "INFO", now - 800),
        LogPipelineTestUtil.createLoggingEvent("test.info.warn", Level.WARN, "WARN", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.info.error", Level.ERROR, "ERROR", now - 600)));
    // Only logs with INFO or above level should get written to the info.log file
    Tasks.waitFor(true, () -> {
        File logFile = new File(logDir, "info.log");
        List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
        return Arrays.asList("INFO", "WARN", "ERROR").equals(lines);
    }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // The default.log file shouldn't be changed, because the test.info logger is non-additive
    File defaultLogFile = new File(logDir, "default.log");
    List<String> lines = Files.readAllLines(defaultLogFile.toPath(), StandardCharsets.UTF_8);
    Assert.assertEquals(Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR"), lines);
    // Publish a log message via the additive "test.error" logger.
    now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(LogPipelineTestUtil.createLoggingEvent("test.error.1.2", Level.ERROR, "ERROR", now - 1000)));
    // Expect the log to get appended to both the error.log and the default.log files
    Tasks.waitFor(true, () -> {
        File logFile = new File(logDir, "error.log");
        List<String> lines1 = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
        if (!Collections.singletonList("ERROR").equals(lines1)) {
            return false;
        }
        logFile = new File(logDir, "default.log");
        lines1 = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
        return Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR", "ERROR").equals(lines1);
    }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    pipeline.stopAndWait();
    loggerContext.stop();
}
Also used : LogProcessorPipelineContext(io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext) File(java.io.File) LoggerContext(ch.qos.logback.classic.LoggerContext) LogPipelineConfigurator(io.cdap.cdap.logging.pipeline.LogPipelineConfigurator) URL(java.net.URL) Test(org.junit.Test)
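
The pipeline-multi-appenders.xml configuration is not reproduced in this excerpt. A rough programmatic equivalent of the additivity setup the test relies on, written against plain logback APIs; the logger name, level, and file name come from the assertions above, while the rest is an assumption about what the XML contains:

// Sketch, assuming pipeline-multi-appenders.xml wires one FileAppender per log file.
// Assumed imports: ch.qos.logback.classic.{Level, Logger, LoggerContext},
// ch.qos.logback.classic.encoder.PatternLayoutEncoder,
// ch.qos.logback.classic.spi.ILoggingEvent, ch.qos.logback.core.FileAppender.
LoggerContext context = new LoggerContext();

PatternLayoutEncoder encoder = new PatternLayoutEncoder();
encoder.setContext(context);
// Each line in the file is just the message, matching the test assertions.
encoder.setPattern("%msg%n");
encoder.start();

FileAppender<ILoggingEvent> infoAppender = new FileAppender<>();
infoAppender.setContext(context);
infoAppender.setName("info");
infoAppender.setFile(context.getProperty("logDirectory") + "/info.log");
infoAppender.setEncoder(encoder);
infoAppender.start();

// Non-additive logger: events it handles never propagate to the root logger's
// appender, which is why default.log stays unchanged in the test above.
Logger testInfo = context.getLogger("test.info");
testInfo.setLevel(Level.INFO);
testInfo.setAdditive(false);
testInfo.addAppender(infoAppender);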

Example 9 with LogProcessorPipelineContext

Use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by cdapio.

From the class KafkaLogProcessorPipelineTest, method testBasicSort:

@Test
public void testBasicSort() throws Exception {
    String topic = "testPipeline";
    LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
    final MockAppender appender = LogPipelineTestUtil.getAppender(loggerContext.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 300L, 1048576, 500L);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(
        new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
        checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Publish some log messages to Kafka
    long now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 700),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 500),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100)));
    // Since the messages are published in one batch, the processor should be able to fetch all of them,
    // hence the sorting order should be deterministic.
    // The DEBUG message should get filtered out
    Tasks.waitFor(5, () -> appender.getEvents().size(), 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    for (int i = 0; i < 5; i++) {
        Assert.assertEquals(Integer.toString(i), appender.getEvents().poll().getMessage());
    }
    // Now publish messages that together exceed the maximum buffer size (1024 bytes).
    // This should trigger writing regardless of the event timestamps.
    List<ILoggingEvent> events = new ArrayList<>(500);
    now = System.currentTimeMillis();
    for (int i = 0; i < 500; i++) {
        // The event timestamp is 10 seconds in the future.
        events.add(LogPipelineTestUtil.createLoggingEvent("test.large.logger", Level.WARN, "Large logger " + i, now + 10000));
    }
    publishLog(topic, events);
    Tasks.waitFor(true, () -> !appender.getEvents().isEmpty(), 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    events.clear();
    events.addAll(appender.getEvents());
    for (int i = 0; i < events.size(); i++) {
        Assert.assertEquals("Large logger " + i, events.get(i).getMessage());
    }
    pipeline.stopAndWait();
    loggerContext.stop();
    Assert.assertNull(appender.getEvents());
}
Also used : MockAppender(io.cdap.cdap.logging.pipeline.MockAppender) ArrayList(java.util.ArrayList) LogProcessorPipelineContext(io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext) ILoggingEvent(ch.qos.logback.classic.spi.ILoggingEvent) LoggerContext(ch.qos.logback.classic.LoggerContext) Checkpoint(io.cdap.cdap.logging.meta.Checkpoint) Test(org.junit.Test)
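
The behavior this test pins down, events sorted by timestamp, held back for the configured event delay, and force-flushed once the buffer size limit is hit, can be modeled with a priority queue. The sketch below is an illustrative model of that policy, not the actual KafkaLogProcessorPipeline internals; append and estimatedSize are hypothetical helpers:

// Illustrative model of time-ordered buffering with a size-based escape hatch.
// Assumed imports: java.util.PriorityQueue, java.util.Comparator,
// ch.qos.logback.classic.spi.ILoggingEvent.
private final PriorityQueue<ILoggingEvent> buffer =
    new PriorityQueue<>(Comparator.comparingLong(ILoggingEvent::getTimeStamp));

void onEvent(ILoggingEvent event, long eventDelayMillis, long maxBufferSize) {
    buffer.add(event);
    // Emit events old enough that no earlier-timestamped event should still arrive,
    // which gives the deterministic "0".."4" ordering asserted above.
    long cutoff = System.currentTimeMillis() - eventDelayMillis;
    while (!buffer.isEmpty() && buffer.peek().getTimeStamp() <= cutoff) {
        append(buffer.poll());  // hypothetical: hand the event to the appenders
    }
    // If the buffered bytes exceed the limit, write everything out regardless of
    // timestamps; this is what the 500 future-dated events above exercise.
    if (estimatedSize(buffer) > maxBufferSize) {  // hypothetical size accounting
        while (!buffer.isEmpty()) {
            append(buffer.poll());
        }
    }
}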

Example 10 with LogProcessorPipelineContext

Use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by cdapio.

From the class LogBufferProcessorPipelineTest, method testSingleAppender:

@Test
public void testSingleAppender() throws Exception {
    LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
    final MockAppender appender = LogPipelineTestUtil.getAppender(loggerContext.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
    MockCheckpointManager checkpointManager = new MockCheckpointManager();
    LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
    loggerContext.start();
    LogBufferProcessorPipeline pipeline = new LogBufferProcessorPipeline(
        new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
        config, checkpointManager, 0);
    // start the pipeline
    pipeline.startAndWait();
    // start thread to write to incomingEventQueue
    List<ILoggingEvent> events = getLoggingEvents();
    AtomicInteger i = new AtomicInteger(0);
    List<LogBufferEvent> bufferEvents = events.stream().map(event -> {
        LogBufferEvent lbe = new LogBufferEvent(event, serializer.toBytes(event).length, new LogBufferFileOffset(0, i.get()));
        i.incrementAndGet();
        return lbe;
    }).collect(Collectors.toList());
    // start a thread to send log buffer events to pipeline
    ExecutorService executorService = Executors.newSingleThreadExecutor();
    executorService.execute(() -> {
        for (int count = 0; count < 40; count++) {
            pipeline.processLogEvents(bufferEvents.iterator());
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                // should not happen
            }
        }
    });
    // Wait for the pipeline to append all the logs to the appender. The DEBUG message should get filtered out.
    Tasks.waitFor(200, () -> appender.getEvents().size(), 60, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    executorService.shutdown();
    pipeline.stopAndWait();
    loggerContext.stop();
}
Also used : ILoggingEvent(ch.qos.logback.classic.spi.ILoggingEvent) LogBufferFileOffset(io.cdap.cdap.logging.logbuffer.LogBufferFileOffset) LoggerContext(ch.qos.logback.classic.LoggerContext) CheckpointManager(io.cdap.cdap.logging.meta.CheckpointManager) MockAppender(io.cdap.cdap.logging.pipeline.MockAppender) LoggingEventSerializer(io.cdap.cdap.logging.serialize.LoggingEventSerializer) ImmutableList(com.google.common.collect.ImmutableList) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) LogProcessorPipelineContext(io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext) Map(java.util.Map) ExecutorService(java.util.concurrent.ExecutorService) NoopMetricsContext(io.cdap.cdap.api.metrics.NoopMetricsContext) Tasks(io.cdap.cdap.common.utils.Tasks) ImmutableMap(com.google.common.collect.ImmutableMap) Checkpoint(io.cdap.cdap.logging.meta.Checkpoint) Set(java.util.Set) Test(org.junit.Test) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) Executors(java.util.concurrent.Executors) TimeUnit(java.util.concurrent.TimeUnit) Level(ch.qos.logback.classic.Level) MetricsContext(io.cdap.cdap.api.metrics.MetricsContext) List(java.util.List) CConfiguration(io.cdap.cdap.common.conf.CConfiguration) Logger(ch.qos.logback.classic.Logger) LogBufferEvent(io.cdap.cdap.logging.logbuffer.LogBufferEvent) Collections(java.util.Collections) LogPipelineTestUtil(io.cdap.cdap.logging.pipeline.LogPipelineTestUtil)
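
The getLoggingEvents helper is not shown in this excerpt. Given that 40 iterations are expected to yield 200 appended events (5 per batch) and that the DEBUG message is filtered out, a plausible body would return five INFO-or-above events plus one DEBUG event, in the style of the other tests (a hypothetical reconstruction, not the actual helper):

// Hypothetical reconstruction; the actual helper in LogBufferProcessorPipelineTest may differ.
private List<ILoggingEvent> getLoggingEvents() {
    long now = System.currentTimeMillis();
    return ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 700),
        // Filtered out by the "test.logger" -> INFO threshold configured above.
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 500),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100));
}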

Aggregations

LogProcessorPipelineContext (io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext): 26 uses
LoggerContext (ch.qos.logback.classic.LoggerContext): 18 uses
Test (org.junit.Test): 18 uses
MockAppender (io.cdap.cdap.logging.pipeline.MockAppender): 14 uses
ILoggingEvent (ch.qos.logback.classic.spi.ILoggingEvent): 10 uses
CConfiguration (io.cdap.cdap.common.conf.CConfiguration): 10 uses
LogBufferPipelineConfig (io.cdap.cdap.logging.pipeline.logbuffer.LogBufferPipelineConfig): 10 uses
LogBufferProcessorPipeline (io.cdap.cdap.logging.pipeline.logbuffer.LogBufferProcessorPipeline): 10 uses
ArrayList (java.util.ArrayList): 10 uses
AppenderContext (io.cdap.cdap.api.logging.AppenderContext): 6 uses
Checkpoint (io.cdap.cdap.logging.meta.Checkpoint): 6 uses
LogPipelineLoader (io.cdap.cdap.logging.framework.LogPipelineLoader): 5 uses
LogPipelineSpecification (io.cdap.cdap.logging.framework.LogPipelineSpecification): 5 uses
ImmutableMap (com.google.common.collect.ImmutableMap): 4 uses
RetryOnStartFailureService (io.cdap.cdap.common.service.RetryOnStartFailureService): 4 uses
LocalAppenderContext (io.cdap.cdap.logging.framework.LocalAppenderContext): 4 uses
LogPipelineConfigurator (io.cdap.cdap.logging.pipeline.LogPipelineConfigurator): 4 uses
DataInputStream (java.io.DataInputStream): 4 uses
File (java.io.File): 4 uses
FileInputStream (java.io.FileInputStream): 4 uses