Use of io.cdap.cdap.logging.pipeline.logbuffer.LogBufferProcessorPipeline in project cdap by caskdata.
From the class LogBufferHandlerTest, method getLogPipeline.
private LogBufferProcessorPipeline getLogPipeline(LoggerContext loggerContext) {
  MockCheckpointManager checkpointManager = new MockCheckpointManager();
  LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  loggerContext.start();
  return new LogBufferProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
    config, checkpointManager, 0);
}
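The four numeric arguments to LogBufferPipelineConfig tune the pipeline's buffering and batching; consult the constructor in the cdap source for their exact meanings. A minimal sketch of driving a pipeline built with this helper (assumed usage, mirroring the tests below rather than taken verbatim from the source):

LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext(
  "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
LogBufferProcessorPipeline pipeline = getLogPipeline(loggerContext);
pipeline.startAndWait();
try {
  // feed events into the pipeline, e.g. through a ConcurrentLogBufferWriter or LogBufferHandler
} finally {
  pipeline.stopAndWait();
  loggerContext.stop();
}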
Use of io.cdap.cdap.logging.pipeline.logbuffer.LogBufferProcessorPipeline in project cdap by caskdata.
From the class LogBufferHandlerTest, method testHandler.
@Test
public void testHandler() throws Exception {
  CConfiguration cConf = CConfiguration.create();
  String absolutePath = TMP_FOLDER.newFolder().getAbsolutePath();
  cConf.set(Constants.LogBuffer.LOG_BUFFER_BASE_DIR, absolutePath);
  cConf.setLong(Constants.LogBuffer.LOG_BUFFER_MAX_FILE_SIZE_BYTES, 100000);

  LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext(
    "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
  final MockAppender appender = LogPipelineTestUtil.getAppender(
    loggerContext.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);

  // start the log buffer pipeline and expose it through the LogBufferHandler HTTP endpoint
  LogBufferProcessorPipeline pipeline = getLogPipeline(loggerContext);
  pipeline.startAndWait();
  ConcurrentLogBufferWriter writer = new ConcurrentLogBufferWriter(cConf, ImmutableList.of(pipeline), () -> { });
  NettyHttpService httpService = NettyHttpService.builder("RemoteAppenderTest")
    .setHttpHandlers(new LogBufferHandler(writer))
    .setExceptionHandler(new HttpExceptionHandler())
    .build();
  httpService.start();

  // send 1000 log events through the remote appender
  RemoteLogAppender remoteLogAppender = getRemoteAppender(cConf, httpService);
  remoteLogAppender.start();
  List<ILoggingEvent> events = getLoggingEvents();
  WorkerLoggingContext loggingContext = new WorkerLoggingContext("default", "app1", "worker1", "run1", "instance1");
  for (int i = 0; i < 1000; i++) {
    remoteLogAppender.append(new LogMessage(events.get(i % events.size()), loggingContext));
  }

  // wait until all 1000 events have been processed by the pipeline and reached the mock appender
  Tasks.waitFor(1000, () -> appender.getEvents().size(), 120, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  remoteLogAppender.stop();
  httpService.stop();
  pipeline.stopAndWait();
  loggerContext.stop();
}
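The Tasks.waitFor call above polls the mock appender until all 1000 events arrive. If that utility is not available, a plain polling loop (a sketch, not from the source) serves the same purpose:

long deadline = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(120);
while (appender.getEvents().size() < 1000) {
  if (System.currentTimeMillis() > deadline) {
    Assert.fail("Timed out waiting for 1000 events; got " + appender.getEvents().size());
  }
  TimeUnit.MILLISECONDS.sleep(100);
}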
Use of io.cdap.cdap.logging.pipeline.logbuffer.LogBufferProcessorPipeline in project cdap by caskdata.
From the class ConcurrentLogBufferWriterTest, method testWrites.
@Test
public void testWrites() throws Exception {
  CConfiguration cConf = CConfiguration.create();
  String absolutePath = TMP_FOLDER.newFolder().getAbsolutePath();
  cConf.set(Constants.LogBuffer.LOG_BUFFER_BASE_DIR, absolutePath);
  cConf.setLong(Constants.LogBuffer.LOG_BUFFER_MAX_FILE_SIZE_BYTES, 100000);

  LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext(
    "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
  final MockAppender appender = LogPipelineTestUtil.getAppender(
    loggerContext.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
  MockCheckpointManager checkpointManager = new MockCheckpointManager();
  LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  loggerContext.start();
  LogBufferProcessorPipeline pipeline = new LogBufferProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
    config, checkpointManager, 0);
  // start the pipeline
  pipeline.startAndWait();

  ConcurrentLogBufferWriter writer = new ConcurrentLogBufferWriter(cConf, ImmutableList.of(pipeline), () -> { });
  ImmutableList<byte[]> events = getLoggingEvents();
  writer.process(new LogBufferRequest(0, events));

  // verify that the events were written to the log buffer file
  try (DataInputStream dis = new DataInputStream(new FileInputStream(absolutePath + "/0.buf"))) {
    for (byte[] eventBytes : events) {
      ILoggingEvent event = serializer.fromBytes(ByteBuffer.wrap(eventBytes));
      Assert.assertEquals(event.getMessage(), getEvent(dis, serializer.toBytes(event).length).getMessage());
    }
  }

  // verify that the pipeline has processed the messages
  Tasks.waitFor(5, () -> appender.getEvents().size(), 60, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  pipeline.stopAndWait();
  loggerContext.stop();
}
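The getEvent helper used in the verification loop is defined in the test class and is not shown here. One plausible shape, assuming events are laid out back to back in the buffer file and the passed-in length matches each serialized payload (both assumptions, not confirmed by the source), would be:

private ILoggingEvent getEvent(DataInputStream dis, int length) throws IOException {
  // read exactly one serialized event of the given length and decode it (hypothetical sketch)
  byte[] eventBytes = new byte[length];
  dis.readFully(eventBytes);
  return serializer.fromBytes(ByteBuffer.wrap(eventBytes));
}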
Use of io.cdap.cdap.logging.pipeline.logbuffer.LogBufferProcessorPipeline in project cdap by caskdata.
From the class ConcurrentLogBufferWriterTest, method testConcurrentWrites.
@Test
public void testConcurrentWrites() throws Exception {
  int threadCount = 20;
  CConfiguration cConf = CConfiguration.create();
  String absolutePath = TMP_FOLDER.newFolder().getAbsolutePath();
  cConf.set(Constants.LogBuffer.LOG_BUFFER_BASE_DIR, absolutePath);
  cConf.setLong(Constants.LogBuffer.LOG_BUFFER_MAX_FILE_SIZE_BYTES, 100000);

  LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext(
    "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
  final MockAppender appender = LogPipelineTestUtil.getAppender(
    loggerContext.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
  MockCheckpointManager checkpointManager = new MockCheckpointManager();
  LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  loggerContext.start();
  LogBufferProcessorPipeline pipeline = new LogBufferProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
    config, checkpointManager, 0);
  // start the pipeline
  pipeline.startAndWait();

  ConcurrentLogBufferWriter writer = new ConcurrentLogBufferWriter(cConf, ImmutableList.of(pipeline), () -> { });
  ImmutableList<byte[]> events = getLoggingEvents();

  // release all writer threads at the same time via the barrier, then wait for them to finish
  ExecutorService executor = Executors.newFixedThreadPool(threadCount);
  final CyclicBarrier barrier = new CyclicBarrier(threadCount + 1);
  for (int i = 0; i < threadCount; i++) {
    executor.submit(() -> {
      try {
        barrier.await();
        writer.process(new LogBufferRequest(0, events));
      } catch (Exception e) {
        LOG.error("Exception raised when processing log events.", e);
      }
    });
  }
  barrier.await();
  executor.shutdown();
  Assert.assertTrue(executor.awaitTermination(1, TimeUnit.MINUTES));

  // verify that the events were written to the log buffer file
  try (DataInputStream dis = new DataInputStream(new FileInputStream(absolutePath + "/0.buf"))) {
    for (int i = 0; i < threadCount; i++) {
      for (byte[] eventBytes : events) {
        ILoggingEvent event = serializer.fromBytes(ByteBuffer.wrap(eventBytes));
        Assert.assertEquals(event.getMessage(), getEvent(dis, serializer.toBytes(event).length).getMessage());
      }
    }
  }

  // verify that the pipeline has processed the messages
  Tasks.waitFor(100, () -> appender.getEvents().size(), 60, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  pipeline.stopAndWait();
  loggerContext.stop();
}
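The barrier is created with threadCount + 1 parties so that the main thread's own barrier.await() releases all writer threads at once, maximizing contention on the writer. A CountDownLatch achieves the same coordination (a sketch, not from the source):

CountDownLatch startLatch = new CountDownLatch(1);
for (int i = 0; i < threadCount; i++) {
  executor.submit(() -> {
    try {
      startLatch.await();  // block until the main thread releases all workers together
      writer.process(new LogBufferRequest(0, events));
    } catch (Exception e) {
      LOG.error("Exception raised when processing log events.", e);
    }
  });
}
startLatch.countDown();  // release all writer threads simultaneously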
Use of io.cdap.cdap.logging.pipeline.logbuffer.LogBufferProcessorPipeline in project cdap by caskdata.
From the class LogBufferService, method startUp.
@Override
protected void startUp() throws Exception {
  // load log pipelines
  List<LogBufferProcessorPipeline> bufferPipelines = loadLogPipelines();
  // start all the log pipelines
  validateAllFutures(Iterables.transform(pipelines, Service::start));

  // The recovery service and the http handler both send log events to the log pipelines. To avoid deleting files
  // while the recovery service is still reading them, we pass in an atomic boolean that the recovery service sets
  // to true once it is done recovering data. While the recovery service is running, the cleanup task is a no-op.
  AtomicBoolean startCleanup = new AtomicBoolean(false);

  // start the log recovery service to recover all pending logs
  recoveryService = new LogBufferRecoveryService(cConf, bufferPipelines, checkpointManagers, startCleanup);
  recoveryService.startAndWait();

  // create concurrent writer
  ConcurrentLogBufferWriter concurrentWriter = new ConcurrentLogBufferWriter(
    cConf, bufferPipelines, new LogBufferCleaner(cConf, checkpointManagers, startCleanup));

  // create and start http service
  NettyHttpService.Builder builder = new CommonNettyHttpServiceBuilder(cConf, Constants.Service.LOG_BUFFER_SERVICE)
    .setHttpHandlers(new LogBufferHandler(concurrentWriter))
    .setExceptionHandler(new HttpExceptionHandler())
    .setHost(cConf.get(Constants.LogBuffer.LOG_BUFFER_SERVER_BIND_ADDRESS))
    .setPort(cConf.getInt(Constants.LogBuffer.LOG_BUFFER_SERVER_BIND_PORT));
  if (cConf.getBoolean(Constants.Security.SSL.INTERNAL_ENABLED)) {
    new HttpsEnabler().configureKeyStore(cConf, sConf).enable(builder);
  }
  httpService = builder.build();
  httpService.start();

  cancellable = discoveryService.register(
    ResolvingDiscoverable.of(URIScheme.createDiscoverable(Constants.Service.LOG_BUFFER_SERVICE, httpService)));
}
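The startCleanup flag is the only coordination between the recovery service and the file cleanup performed by LogBufferCleaner. A simplified, hypothetical illustration of how a cleanup task can honor that flag (the real logic lives in LogBufferCleaner):

Runnable cleanupTask = () -> {
  if (!startCleanup.get()) {
    // recovery service is still reading buffer files; skip deletion for now
    return;
  }
  // safe to delete buffer files that all pipelines have fully processed
};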