use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by cdapio.
the class TimeEventQueueProcessorTest method test.
@Test
public void test() throws Exception {
  LoggerContext loggerContext =
    LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"),
                                            MockAppender.class.getName());
  LogProcessorPipelineContext context =
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext,
                                    NO_OP_METRICS_CONTEXT, 0);
  context.start();

  TimeEventQueueProcessor<TestOffset> processor =
    new TimeEventQueueProcessor<>(context, 50, 1, ImmutableList.of(0));

  long now = System.currentTimeMillis();
  List<ILoggingEvent> events = ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 1000),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 700),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "5", now - 500),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 900),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "4", now - 600),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "6", now - 100));

  ProcessedEventMetadata<TestOffset> metadata =
    processor.process(0, new TransformingIterator(events.iterator()));

  // All 6 events should be processed: when the buffer fills after 5 events, the time event
  // queue processor appends the buffered events and then enqueues the 6th event.
  Assert.assertEquals(6, metadata.getTotalEventsProcessed());
  for (Map.Entry<Integer, Checkpoint<TestOffset>> entry : metadata.getCheckpoints().entrySet()) {
    Checkpoint<TestOffset> value = entry.getValue();
    // The checkpointed offset should be the maximum offset processed so far.
    Assert.assertEquals(6, value.getOffset().getOffset());
  }
}
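TestOffset and TransformingIterator are helpers local to TimeEventQueueProcessorTest and are not shown on this page; TransformingIterator adapts the ILoggingEvent iterator into the processor's input events. A minimal sketch of what the offset type could look like, assuming it simply wraps a long and orders by it (illustrative only, not the actual CDAP test code):

// Hypothetical sketch of the test-local offset type: a plain wrapper around a long,
// ordered by the wrapped value so the processor can track the maximum offset seen.
private static final class TestOffset implements Comparable<TestOffset> {
  private final long offset;

  TestOffset(long offset) {
    this.offset = offset;
  }

  long getOffset() {
    return offset;
  }

  @Override
  public int compareTo(TestOffset other) {
    return Long.compare(offset, other.offset);
  }
}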
use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by cdapio.
the class ConcurrentLogBufferWriterTest method testWrites.
@Test
public void testWrites() throws Exception {
  CConfiguration cConf = CConfiguration.create();
  String absolutePath = TMP_FOLDER.newFolder().getAbsolutePath();
  cConf.set(Constants.LogBuffer.LOG_BUFFER_BASE_DIR, absolutePath);
  cConf.setLong(Constants.LogBuffer.LOG_BUFFER_MAX_FILE_SIZE_BYTES, 100000);

  LoggerContext loggerContext =
    LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"),
                                            MockAppender.class.getName());
  final MockAppender appender =
    LogPipelineTestUtil.getAppender(loggerContext.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME),
                                    "Test", MockAppender.class);
  MockCheckpointManager checkpointManager = new MockCheckpointManager();
  LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  loggerContext.start();

  LogBufferProcessorPipeline pipeline = new LogBufferProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext,
                                    NO_OP_METRICS_CONTEXT, 0),
    config, checkpointManager, 0);
  // Start the pipeline.
  pipeline.startAndWait();

  ConcurrentLogBufferWriter writer =
    new ConcurrentLogBufferWriter(cConf, ImmutableList.of(pipeline), () -> { });
  ImmutableList<byte[]> events = getLoggingEvents();
  writer.process(new LogBufferRequest(0, events));

  // Verify that the events were written to the log buffer.
  try (DataInputStream dis = new DataInputStream(new FileInputStream(absolutePath + "/0.buf"))) {
    for (byte[] eventBytes : events) {
      ILoggingEvent event = serializer.fromBytes(ByteBuffer.wrap(eventBytes));
      Assert.assertEquals(event.getMessage(), getEvent(dis, serializer.toBytes(event).length).getMessage());
    }
  }

  // Verify that the pipeline has processed the messages.
  Tasks.waitFor(5, () -> appender.getEvents().size(), 60, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  pipeline.stopAndWait();
  loggerContext.stop();
}
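The getEvent helper used in the read-back loop above is also test-local and not shown. A plausible sketch, assuming each record in the 0.buf file is a length-prefixed serialized event (the framing is an assumption; only serializer.fromBytes and the length argument are taken from the test above):

// Hypothetical helper: read one length-prefixed record from the buffer file and
// deserialize it back into an ILoggingEvent. Assumes an int length prefix.
private ILoggingEvent getEvent(DataInputStream dis, int expectedLen) throws IOException {
  Assert.assertEquals(expectedLen, dis.readInt());
  byte[] eventBytes = new byte[expectedLen];
  dis.readFully(eventBytes);
  return serializer.fromBytes(ByteBuffer.wrap(eventBytes));
}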
use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by cdapio.
the class ConcurrentLogBufferWriterTest method testConcurrentWrites.
@Test
public void testConcurrentWrites() throws Exception {
  int threadCount = 20;
  CConfiguration cConf = CConfiguration.create();
  String absolutePath = TMP_FOLDER.newFolder().getAbsolutePath();
  cConf.set(Constants.LogBuffer.LOG_BUFFER_BASE_DIR, absolutePath);
  cConf.setLong(Constants.LogBuffer.LOG_BUFFER_MAX_FILE_SIZE_BYTES, 100000);

  LoggerContext loggerContext =
    LogPipelineTestUtil.createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"),
                                            MockAppender.class.getName());
  final MockAppender appender =
    LogPipelineTestUtil.getAppender(loggerContext.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME),
                                    "Test", MockAppender.class);
  MockCheckpointManager checkpointManager = new MockCheckpointManager();
  LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  loggerContext.start();

  LogBufferProcessorPipeline pipeline = new LogBufferProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext,
                                    NO_OP_METRICS_CONTEXT, 0),
    config, checkpointManager, 0);
  // Start the pipeline.
  pipeline.startAndWait();

  ConcurrentLogBufferWriter writer =
    new ConcurrentLogBufferWriter(cConf, ImmutableList.of(pipeline), () -> { });
  ImmutableList<byte[]> events = getLoggingEvents();

  // Fire the same batch of events from 20 threads at once, released by a common barrier.
  ExecutorService executor = Executors.newFixedThreadPool(threadCount);
  final CyclicBarrier barrier = new CyclicBarrier(threadCount + 1);
  for (int i = 0; i < threadCount; i++) {
    executor.submit(() -> {
      try {
        barrier.await();
        writer.process(new LogBufferRequest(0, events));
      } catch (Exception e) {
        LOG.error("Exception raised when processing log events.", e);
      }
    });
  }
  barrier.await();
  executor.shutdown();
  Assert.assertTrue(executor.awaitTermination(1, TimeUnit.MINUTES));

  // Verify that the events were written to the log buffer.
  try (DataInputStream dis = new DataInputStream(new FileInputStream(absolutePath + "/0.buf"))) {
    for (int i = 0; i < threadCount; i++) {
      for (byte[] eventBytes : events) {
        ILoggingEvent event = serializer.fromBytes(ByteBuffer.wrap(eventBytes));
        Assert.assertEquals(event.getMessage(), getEvent(dis, serializer.toBytes(event).length).getMessage());
      }
    }
  }

  // Verify that the pipeline has processed the messages.
  Tasks.waitFor(100, () -> appender.getEvents().size(), 60, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  pipeline.stopAndWait();
  loggerContext.stop();
}
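getLoggingEvents() is another test-local helper. The Tasks.waitFor(5, ...) assertion in testWrites and the Tasks.waitFor(100, ...) assertion here (20 threads x 5 events) imply it returns five serialized events. A minimal sketch, assuming the same LogPipelineTestUtil and serializer used elsewhere in these tests:

// Hypothetical helper: build five INFO events for "test.logger" and serialize each
// one, matching the five-events-per-request count the assertions wait for.
private ImmutableList<byte[]> getLoggingEvents() {
  long now = System.currentTimeMillis();
  ImmutableList.Builder<byte[]> builder = ImmutableList.builder();
  for (int i = 0; i < 5; i++) {
    builder.add(serializer.toBytes(
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "msg-" + i, now - i)));
  }
  return builder.build();
}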
use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by cdapio.
the class DistributedLogFramework method createService.
@Override
@SuppressWarnings("unchecked")
protected Service createService(Set<Integer> partitions) {
  Map<String, LogPipelineSpecification<AppenderContext>> specs =
    new LogPipelineLoader(cConf).load(contextProvider);
  int pipelineCount = specs.size();

  // Create one KafkaLogProcessorPipeline per spec.
  final List<Service> pipelines = new ArrayList<>();
  for (final LogPipelineSpecification<AppenderContext> pipelineSpec : specs.values()) {
    final CConfiguration cConf = pipelineSpec.getConf();
    final AppenderContext context = pipelineSpec.getContext();
    long bufferSize = getBufferSize(pipelineCount, cConf, partitions.size());
    final String topic = cConf.get(Constants.Logging.KAFKA_TOPIC);
    final KafkaPipelineConfig config = new KafkaPipelineConfig(
      topic, partitions, bufferSize,
      cConf.getLong(Constants.Logging.PIPELINE_EVENT_DELAY_MS),
      cConf.getInt(Constants.Logging.PIPELINE_KAFKA_FETCH_SIZE),
      cConf.getLong(Constants.Logging.PIPELINE_CHECKPOINT_INTERVAL_MS));

    RetryStrategy retryStrategy = RetryStrategies.fromConfiguration(cConf, "system.log.process.");
    pipelines.add(new RetryOnStartFailureService(
      () -> new KafkaLogProcessorPipeline(
        new LogProcessorPipelineContext(cConf, context.getName(), context,
                                        context.getMetricsContext(), context.getInstanceId()),
        checkpointManagerFactory.create(pipelineSpec.getCheckpointPrefix() + topic,
                                        CheckpointManagerFactory.Type.KAFKA),
        brokerService, config),
      retryStrategy));
  }

  // Return a Service that starts/stops all pipelines.
  return new AbstractIdleService() {
    @Override
    protected void startUp() throws Exception {
      // Start all pipelines.
      validateAllFutures(Iterables.transform(pipelines, Service::start));
    }

    @Override
    protected void shutDown() throws Exception {
      // Stop all pipelines.
      validateAllFutures(Iterables.transform(pipelines, Service::stop));
    }
  };
}
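validateAllFutures and getBufferSize are private helpers of DistributedLogFramework not shown in this snippet. One plausible shape for the former, assuming Guava futures produced by Service::start / Service::stop and that the goal is to wait for all of them and surface any failure (the body below is an assumption, not the actual CDAP implementation):

// Hypothetical sketch: wait for every pipeline future to complete; get() rethrows
// the first failure so a single bad pipeline fails the whole start/stop sequence.
private void validateAllFutures(Iterable<? extends ListenableFuture<?>> futures) throws Exception {
  Futures.allAsList(ImmutableList.copyOf(futures)).get();
}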