Use of io.cdap.cdap.logging.pipeline.MockAppender in project cdap by caskdata.
From the class KafkaLogProcessorPipelineTest, method testBasicSort:
@Test
public void testBasicSort() throws Exception {
  String topic = "testPipeline";
  LoggerContext loggerContext =
    LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"),
                                            MockAppender.class.getName());
  final MockAppender appender =
    LogPipelineTestUtil.getAppender(loggerContext.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
  TestCheckpointManager checkpointManager = new TestCheckpointManager();
  KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 300L, 1048576, 500L);
  KAFKA_TESTER.createTopic(topic, 1);
  loggerContext.start();
  KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
    checkpointManager, KAFKA_TESTER.getBrokerService(), config);
  pipeline.startAndWait();

  // Publish some log messages to Kafka.
  long now = System.currentTimeMillis();
  publishLog(topic, ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 700),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 500),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100)));

  // Since the messages are published in one batch, the processor should be able to fetch all of them,
  // hence the sorted order should be deterministic. The DEBUG message should get filtered out.
  Tasks.waitFor(5, () -> appender.getEvents().size(), 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  for (int i = 0; i < 5; i++) {
    Assert.assertEquals(Integer.toString(i), appender.getEvents().poll().getMessage());
  }

  // Now publish large messages that exceed the maximum queue size (1024). This should trigger writing
  // regardless of the event timestamp.
  List<ILoggingEvent> events = new ArrayList<>(500);
  now = System.currentTimeMillis();
  for (int i = 0; i < 500; i++) {
    // The event timestamp is 10 seconds in the future.
    events.add(LogPipelineTestUtil.createLoggingEvent("test.large.logger", Level.WARN, "Large logger " + i,
                                                      now + 10000));
  }
  publishLog(topic, events);

  Tasks.waitFor(true, () -> !appender.getEvents().isEmpty(), 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  events.clear();
  events.addAll(appender.getEvents());
  for (int i = 0; i < events.size(); i++) {
    Assert.assertEquals("Large logger " + i, events.get(i).getMessage());
  }

  pipeline.stopAndWait();
  loggerContext.stop();
  // After the logger context stops, the appender's event queue is expected to be null.
  Assert.assertNull(appender.getEvents());
}
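The publishLog helper is elided from this listing. Below is a minimal sketch of what it plausibly does, assuming the KAFKA_TESTER utility exposes an Apache Twill KafkaClient and that events are encoded with the same LoggingEventSerializer used elsewhere in these tests; the partition key and batching details are illustrative, not the verbatim CDAP implementation.

// Hedged sketch of the elided publishLog helper. KAFKA_TESTER.getKafkaClient()
// and the single-partition key (0) are assumptions based on the surrounding test.
private void publishLog(String topic, Iterable<ILoggingEvent> events) {
  KafkaPublisher.Preparer preparer = KAFKA_TESTER.getKafkaClient()
    .getPublisher(KafkaPublisher.Ack.LEADER_RECEIVED, Compression.NONE)
    .prepare(topic);
  LoggingEventSerializer serializer = new LoggingEventSerializer();
  for (ILoggingEvent event : events) {
    // Serialize each event and add it to the batch; sending everything in one
    // batch is what makes the sorted order in the test deterministic.
    preparer.add(ByteBuffer.wrap(serializer.toBytes(event)), 0);
  }
  preparer.send();
}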
Use of io.cdap.cdap.logging.pipeline.MockAppender in project cdap by caskdata.
From the class LogBufferHandlerTest, method testHandler:
@Test
public void testHandler() throws Exception {
  CConfiguration cConf = CConfiguration.create();
  String absolutePath = TMP_FOLDER.newFolder().getAbsolutePath();
  cConf.set(Constants.LogBuffer.LOG_BUFFER_BASE_DIR, absolutePath);
  cConf.setLong(Constants.LogBuffer.LOG_BUFFER_MAX_FILE_SIZE_BYTES, 100000);

  LoggerContext loggerContext =
    LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"),
                                            MockAppender.class.getName());
  final MockAppender appender =
    LogPipelineTestUtil.getAppender(loggerContext.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME),
                                    "Test", MockAppender.class);

  LogBufferProcessorPipeline pipeline = getLogPipeline(loggerContext);
  pipeline.startAndWait();

  ConcurrentLogBufferWriter writer = new ConcurrentLogBufferWriter(cConf, ImmutableList.of(pipeline), () -> { });
  NettyHttpService httpService = NettyHttpService.builder("RemoteAppenderTest")
    .setHttpHandlers(new LogBufferHandler(writer))
    .setExceptionHandler(new HttpExceptionHandler())
    .build();
  httpService.start();

  RemoteLogAppender remoteLogAppender = getRemoteAppender(cConf, httpService);
  remoteLogAppender.start();

  List<ILoggingEvent> events = getLoggingEvents();
  WorkerLoggingContext loggingContext =
    new WorkerLoggingContext("default", "app1", "worker1", "run1", "instance1");
  for (int i = 0; i < 1000; i++) {
    remoteLogAppender.append(new LogMessage(events.get(i % events.size()), loggingContext));
  }

  Tasks.waitFor(1000, () -> appender.getEvents().size(), 120, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  remoteLogAppender.stop();
  httpService.stop();
  pipeline.stopAndWait();
  loggerContext.stop();
}
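The getLoggingEvents helper used above is not shown in the listing. A rough sketch of the idea, assuming it builds a small fixed list via the same LogPipelineTestUtil factory seen in the other tests (the event count and messages here are illustrative; the test cycles through the list with i % events.size(), so any small list works):

// Assumed sketch of the elided getLoggingEvents helper.
private List<ILoggingEvent> getLoggingEvents() {
  long now = System.currentTimeMillis();
  return ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "2", now - 700),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "3", now - 500),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100));
}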
Use of io.cdap.cdap.logging.pipeline.MockAppender in project cdap by caskdata.
From the class ConcurrentLogBufferWriterTest, method testWrites:
@Test
public void testWrites() throws Exception {
  CConfiguration cConf = CConfiguration.create();
  String absolutePath = TMP_FOLDER.newFolder().getAbsolutePath();
  cConf.set(Constants.LogBuffer.LOG_BUFFER_BASE_DIR, absolutePath);
  cConf.setLong(Constants.LogBuffer.LOG_BUFFER_MAX_FILE_SIZE_BYTES, 100000);

  LoggerContext loggerContext =
    LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"),
                                            MockAppender.class.getName());
  final MockAppender appender =
    LogPipelineTestUtil.getAppender(loggerContext.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME),
                                    "Test", MockAppender.class);

  MockCheckpointManager checkpointManager = new MockCheckpointManager();
  LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  loggerContext.start();
  LogBufferProcessorPipeline pipeline = new LogBufferProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
    config, checkpointManager, 0);
  // Start the pipeline.
  pipeline.startAndWait();

  ConcurrentLogBufferWriter writer = new ConcurrentLogBufferWriter(cConf, ImmutableList.of(pipeline), () -> { });
  ImmutableList<byte[]> events = getLoggingEvents();
  writer.process(new LogBufferRequest(0, events));

  // Verify that the events were written to the log buffer.
  try (DataInputStream dis = new DataInputStream(new FileInputStream(absolutePath + "/0.buf"))) {
    for (byte[] eventBytes : events) {
      ILoggingEvent event = serializer.fromBytes(ByteBuffer.wrap(eventBytes));
      Assert.assertEquals(event.getMessage(), getEvent(dis, serializer.toBytes(event).length).getMessage());
    }
  }

  // Verify that the pipeline has processed the messages.
  Tasks.waitFor(5, () -> appender.getEvents().size(), 60, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  pipeline.stopAndWait();
  loggerContext.stop();
}
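The getEvent helper reads one serialized event back from the buffer file. Since the test passes serializer.toBytes(event).length as the expected length, the on-disk format appears to be length-prefixed records; a sketch under that assumption (the 4-byte int prefix is an assumption, not confirmed by this listing):

// Hedged sketch: assumes each record in the .buf file is a 4-byte length
// followed by the serialized event bytes, and reuses the test's serializer field.
private ILoggingEvent getEvent(DataInputStream dis, int expectedLen) throws IOException {
  int len = dis.readInt();
  Assert.assertEquals(expectedLen, len);
  byte[] eventBytes = new byte[len];
  dis.readFully(eventBytes);
  return serializer.fromBytes(ByteBuffer.wrap(eventBytes));
}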
Use of io.cdap.cdap.logging.pipeline.MockAppender in project cdap by caskdata.
From the class ConcurrentLogBufferWriterTest, method testConcurrentWrites:
@Test
public void testConcurrentWrites() throws Exception {
  int threadCount = 20;
  CConfiguration cConf = CConfiguration.create();
  String absolutePath = TMP_FOLDER.newFolder().getAbsolutePath();
  cConf.set(Constants.LogBuffer.LOG_BUFFER_BASE_DIR, absolutePath);
  cConf.setLong(Constants.LogBuffer.LOG_BUFFER_MAX_FILE_SIZE_BYTES, 100000);

  LoggerContext loggerContext =
    LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"),
                                            MockAppender.class.getName());
  final MockAppender appender =
    LogPipelineTestUtil.getAppender(loggerContext.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME),
                                    "Test", MockAppender.class);

  MockCheckpointManager checkpointManager = new MockCheckpointManager();
  LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  loggerContext.start();
  LogBufferProcessorPipeline pipeline = new LogBufferProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
    config, checkpointManager, 0);
  // Start the pipeline.
  pipeline.startAndWait();

  ConcurrentLogBufferWriter writer = new ConcurrentLogBufferWriter(cConf, ImmutableList.of(pipeline), () -> { });
  ImmutableList<byte[]> events = getLoggingEvents();

  // Release all writer threads at once via the barrier to maximize contention.
  ExecutorService executor = Executors.newFixedThreadPool(threadCount);
  final CyclicBarrier barrier = new CyclicBarrier(threadCount + 1);
  for (int i = 0; i < threadCount; i++) {
    executor.submit(() -> {
      try {
        barrier.await();
        writer.process(new LogBufferRequest(0, events));
      } catch (Exception e) {
        LOG.error("Exception raised when processing log events.", e);
      }
    });
  }
  barrier.await();
  executor.shutdown();
  Assert.assertTrue(executor.awaitTermination(1, TimeUnit.MINUTES));

  // Verify that the events were written to the log buffer.
  try (DataInputStream dis = new DataInputStream(new FileInputStream(absolutePath + "/0.buf"))) {
    for (int i = 0; i < threadCount; i++) {
      for (byte[] eventBytes : events) {
        ILoggingEvent event = serializer.fromBytes(ByteBuffer.wrap(eventBytes));
        Assert.assertEquals(event.getMessage(), getEvent(dis, serializer.toBytes(event).length).getMessage());
      }
    }
  }

  // Verify that the pipeline has processed all 100 messages (20 threads x 5 events each).
  Tasks.waitFor(100, () -> appender.getEvents().size(), 60, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  pipeline.stopAndWait();
  loggerContext.stop();
}
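In this test class, getLoggingEvents evidently returns pre-serialized byte[] payloads rather than ILoggingEvent objects, and the expected total of 100 implies five events per thread. A sketch under those assumptions, reusing the test's serializer field:

// Assumed sketch: serializes five sample events up front so every writer
// thread submits an identical batch.
private ImmutableList<byte[]> getLoggingEvents() {
  long now = System.currentTimeMillis();
  ImmutableList.Builder<byte[]> builder = ImmutableList.builder();
  for (int i = 0; i < 5; i++) {
    builder.add(serializer.toBytes(
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, Integer.toString(i), now - 100 * i)));
  }
  return builder.build();
}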
Use of io.cdap.cdap.logging.pipeline.MockAppender in project cdap by caskdata.
From the class LogBufferRecoveryServiceTest, method testLogBufferRecoveryService:
@Test
public void testLogBufferRecoveryService() throws Exception {
  String absolutePath = TMP_FOLDER.newFolder().getAbsolutePath();

  // Create and start the pipeline.
  LoggerContext loggerContext =
    LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"),
                                            MockAppender.class.getName());
  final MockAppender appender =
    LogPipelineTestUtil.getAppender(loggerContext.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
  MockCheckpointManager checkpointManager = new MockCheckpointManager();
  LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  loggerContext.start();
  LogBufferProcessorPipeline pipeline = new LogBufferProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
    config, checkpointManager, 0);
  pipeline.startAndWait();

  // Write directly to the log buffer.
  LogBufferWriter writer = new LogBufferWriter(absolutePath, 250, () -> { });
  ImmutableList<byte[]> events = getLoggingEvents();
  writer.write(events.iterator()).iterator();
  writer.close();

  // Start the log buffer recovery service to read the log events back from the files. Keep the
  // batch size at 2 so that recovery takes more than one iteration.
  LogBufferRecoveryService service = new LogBufferRecoveryService(
    ImmutableList.of(pipeline), ImmutableList.of(checkpointManager), absolutePath, 2, new AtomicBoolean(true));
  service.startAndWait();

  Tasks.waitFor(5, () -> appender.getEvents().size(), 120, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  service.stopAndWait();
  pipeline.stopAndWait();
  loggerContext.stop();
}
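MockCheckpointManager is a test double from the CDAP test sources whose definition is not shown here. The recovery service needs it only to remember the last persisted position per partition so it knows where to resume; a hypothetical in-memory illustration of that contract (class and method names are invented for illustration and do not match the real CheckpointManager interface):

// Hypothetical in-memory checkpoint store, illustrating what the mock provides:
// the last persisted offset per partition, with -1 meaning "no checkpoint yet",
// in which case recovery replays the buffer files from the beginning.
final class InMemoryCheckpointStore {
  private final Map<Integer, Long> offsets = new HashMap<>();

  synchronized void saveOffset(int partition, long offset) {
    offsets.put(partition, offset);
  }

  synchronized long getOffset(int partition) {
    return offsets.getOrDefault(partition, -1L);
  }
}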