Use of io.cdap.cdap.logging.pipeline.MockAppender in project cdap by caskdata:
class KafkaLogProcessorPipelineTest, method testRegularFlush.
@Test
public void testRegularFlush() throws Exception {
  String topic = "testFlush";
  LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext(
      "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
  final MockAppender appender = LogPipelineTestUtil.getAppender(
      loggerContext.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
  TestCheckpointManager checkpointManager = new TestCheckpointManager();

  // Use a long checkpoint interval (2000ms) and a short event delay (100ms).
  // Flush should be called at least once per event delay, independent of checkpointing.
  KafkaPipelineConfig config =
      new KafkaPipelineConfig(topic, Collections.singleton(0), 1024, 100, 1048576, 2000);
  KAFKA_TESTER.createTopic(topic, 1);
  loggerContext.start();

  KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(
      new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext,
                                      NO_OP_METRICS_CONTEXT, 0),
      checkpointManager, KAFKA_TESTER.getBrokerService(), config);
  pipeline.startAndWait();
  try {
    // Even when there is no event, the flush should still get called.
    Tasks.waitFor(5, appender::getFlushCount, 3, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

    // Publish some logs with timestamps around "now".
    long now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 500),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 300),
        LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now + 100)));

    // Wait until all published logs have been appended.
    Tasks.waitFor(3, () -> appender.getEvents().size(),
                  3, TimeUnit.SECONDS, 200, TimeUnit.MILLISECONDS);
  } finally {
    // Always release resources, even if a waitFor above times out. Stopping the logger
    // context as well keeps this test consistent with the other pipeline tests.
    pipeline.stopAndWait();
    loggerContext.stop();
  }

  // Expect at least 20 flush calls: flushes are driven by the 100ms event delay (not only
  // by the 2-second checkpoint), and the pipeline ran for well over 2 seconds.
  Assert.assertTrue(appender.getFlushCount() >= 20);
}
Use of io.cdap.cdap.logging.pipeline.MockAppender in project cdap by caskdata:
class LogBufferProcessorPipelineTest, method testSingleAppender.
@Test
public void testSingleAppender() throws Exception {
  LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext(
      "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
  final MockAppender appender = LogPipelineTestUtil.getAppender(
      loggerContext.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
  MockCheckpointManager checkpointManager = new MockCheckpointManager();
  LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  loggerContext.start();

  LogBufferProcessorPipeline pipeline = new LogBufferProcessorPipeline(
      new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext,
                                      NO_OP_METRICS_CONTEXT, 0),
      config, checkpointManager, 0);
  // start the pipeline
  pipeline.startAndWait();

  // Wrap each logging event into a LogBufferEvent with a monotonically increasing file offset.
  List<ILoggingEvent> events = getLoggingEvents();
  AtomicInteger offset = new AtomicInteger(0);
  List<LogBufferEvent> bufferEvents = events.stream()
      .map(event -> new LogBufferEvent(event, serializer.toBytes(event).length,
                                       new LogBufferFileOffset(0, offset.getAndIncrement())))
      .collect(Collectors.toList());

  // Start a thread that repeatedly sends the log buffer events to the pipeline.
  ExecutorService executorService = Executors.newSingleThreadExecutor();
  try {
    executorService.execute(() -> {
      for (int count = 0; count < 40; count++) {
        pipeline.processLogEvents(bufferEvents.iterator());
        try {
          Thread.sleep(100);
        } catch (InterruptedException e) {
          // Interrupted by shutdownNow() during cleanup: restore the interrupt
          // status and stop publishing so we never feed a stopped pipeline.
          Thread.currentThread().interrupt();
          return;
        }
      }
    });

    // Wait for the pipeline to append all the logs. The DEBUG message should get filtered out.
    Tasks.waitFor(200, () -> appender.getEvents().size(),
                  60, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  } finally {
    // Interrupt the writer and wait for it to finish BEFORE stopping the pipeline,
    // otherwise processLogEvents could race with (or run after) pipeline shutdown.
    executorService.shutdownNow();
    executorService.awaitTermination(10, TimeUnit.SECONDS);
    pipeline.stopAndWait();
    loggerContext.stop();
  }
}
Aggregations