Usage example of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext from the cdapio/cdap project.
Taken from the class KafkaLogProcessorPipelineTest, method testMetricsAppender.
// Verifies that log events flowing through the Kafka log processor pipeline are
// counted as system metrics (system.app.log.* / system.services.log.*), with the
// correct per-level totals for each kind of logging context (worker, workflow
// program, and system service).
@Test
public void testMetricsAppender() throws Exception {
Injector injector = KAFKA_TESTER.getInjector();
MetricsCollectionService collectionService = injector.getInstance(MetricsCollectionService.class);
collectionService.startAndWait();
// Logger context configured from pipeline-metric-appender.xml, writing into a temp dir.
LoggerContext loggerContext = new LocalAppenderContext(injector.getInstance(TransactionRunner.class), injector.getInstance(LocationFactory.class), injector.getInstance(MetricsCollectionService.class));
final File logDir = TEMP_FOLDER.newFolder();
loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
configurator.setContext(loggerContext);
URL configURL = getClass().getClassLoader().getResource("pipeline-metric-appender.xml");
Assert.assertNotNull(configURL);
configurator.doConfigure(configURL);
String topic = "metricsPipeline";
TestCheckpointManager checkpointManager = new TestCheckpointManager();
// Single-partition topic; small buffer (1024 bytes) and short delays keep the test fast.
KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
KAFKA_TESTER.createTopic(topic, 1);
loggerContext.start();
KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(CConfiguration.create(), "testMetricAppender", loggerContext, NO_OP_METRICS_CONTEXT, 0), checkpointManager, KAFKA_TESTER.getBrokerService(), config);
pipeline.startAndWait();
// Publish some log messages to Kafka
long now = System.currentTimeMillis();
// Worker context: 5 INFO events and 1 DEBUG event.
WorkerLoggingContext loggingContext = new WorkerLoggingContext("default", "app1", "worker1", "run1", "instance1");
publishLog(topic, ImmutableList.of(LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000), LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 700), LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 500), LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900), LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600), LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100)), loggingContext);
// Workflow-launched MapReduce context: 2 WARN events and 1 TRACE event.
WorkflowProgramLoggingContext workflowProgramLoggingContext = new WorkflowProgramLoggingContext("default", "app1", "wflow1", "run1", ProgramType.MAPREDUCE, "mr1", "mrun1");
publishLog(topic, ImmutableList.of(LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "0", now - 1000), LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "2", now - 700), LogPipelineTestUtil.createLoggingEvent("test.logger", Level.TRACE, "3", now - 500)), workflowProgramLoggingContext);
// System service context: 3 ERROR events and 1 INFO event.
ServiceLoggingContext serviceLoggingContext = new ServiceLoggingContext(NamespaceId.SYSTEM.getNamespace(), Constants.Logging.COMPONENT_NAME, Constants.Service.TRANSACTION);
publishLog(topic, ImmutableList.of(LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "0", now - 1000), LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "2", now - 700), LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "3", now - 500), LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900)), serviceLoggingContext);
final MetricStore metricStore = injector.getInstance(MetricStore.class);
try {
// Each query retries until the expected aggregated count appears in the metric store.
// Worker context: 5 INFO, 1 DEBUG.
verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.info", AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(loggingContext), new ArrayList<>()), 5L);
verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.debug", AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(loggingContext), new ArrayList<>()), 1L);
// WARN events are tagged with the MapReduce program's metrics context (2 events).
verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.warn", AggregationFunction.SUM, // mapreduce metrics context
ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default", Constants.Metrics.Tag.APP, "app1", Constants.Metrics.Tag.MAPREDUCE, "mr1", Constants.Metrics.Tag.RUN_ID, "mrun1"), new ArrayList<>()), 2L);
// TRACE events are tagged with the enclosing workflow's metrics context (1 event).
verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.trace", AggregationFunction.SUM, // workflow metrics context
ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default", Constants.Metrics.Tag.APP, "app1", Constants.Metrics.Tag.WORKFLOW, "wflow1", Constants.Metrics.Tag.RUN_ID, "run1"), new ArrayList<>()), 1L);
// System service context: 3 ERROR events under the system.services metric prefix.
verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.services.log.error", AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(serviceLoggingContext), new ArrayList<>()), 3L);
} finally {
// Tear down in reverse start order so no component logs into a stopped collaborator.
pipeline.stopAndWait();
loggerContext.stop();
collectionService.stopAndWait();
}
}
Usage example of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext from the cdapio/cdap project.
Taken from the class KafkaLogProcessorPipelineTest, method testRegularFlush.
// Verifies that the pipeline flushes the appender on a regular cadence, even while
// no log events are arriving, and keeps flushing once events do arrive.
@Test
public void testRegularFlush() throws Exception {
  String kafkaTopic = "testFlush";
  LoggerContext logCtx = LogPipelineTestUtil.createLoggerContext(
      "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
  final MockAppender mockAppender = LogPipelineTestUtil.getAppender(
      logCtx.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
  TestCheckpointManager checkpoints = new TestCheckpointManager();
  // Long checkpoint interval (2000ms) paired with a short event delay (100ms):
  // flush is expected at least once per event delay.
  KafkaPipelineConfig pipelineConfig =
      new KafkaPipelineConfig(kafkaTopic, Collections.singleton(0), 1024, 100, 1048576, 2000);
  KAFKA_TESTER.createTopic(kafkaTopic, 1);
  logCtx.start();
  LogProcessorPipelineContext pipelineContext =
      new LogProcessorPipelineContext(CConfiguration.create(), "test", logCtx, NO_OP_METRICS_CONTEXT, 0);
  KafkaLogProcessorPipeline processorPipeline = new KafkaLogProcessorPipeline(
      pipelineContext, checkpoints, KAFKA_TESTER.getBrokerService(), pipelineConfig);
  processorPipeline.startAndWait();

  // Flush count must grow even before any event is published.
  Tasks.waitFor(5, () -> mockAppender.getFlushCount(), 3, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  // Now publish a small batch of INFO events.
  long publishTime = System.currentTimeMillis();
  publishLog(kafkaTopic, ImmutableList.of(
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", publishTime - 500),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", publishTime - 300),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", publishTime + 100)));

  // Block until every published event has reached the appender.
  Tasks.waitFor(3, () -> mockAppender.getEvents().size(), 3, TimeUnit.SECONDS, 200, TimeUnit.MILLISECONDS);
  processorPipeline.stopAndWait();

  // By shutdown time at least 20 flush calls should have accumulated
  // (the checkpoint interval is every 2 seconds).
  Assert.assertTrue(mockAppender.getFlushCount() >= 20);
}
Usage example of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext from the cdapio/cdap project.
Taken from the class KafkaLogProcessorPipelineTest, method testMultiAppenders.
// Verifies routing of log events to multiple file appenders configured via
// pipeline-multi-appenders.xml: a default appender, a non-additive "test.info"
// logger writing info.log, and an additive "test.error" logger writing error.log.
@Test
public void testMultiAppenders() throws Exception {
final File logDir = TEMP_FOLDER.newFolder();
LoggerContext loggerContext = new LoggerContext();
// The XML config references ${logDirectory} for the appender file paths.
loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
configurator.setContext(loggerContext);
URL configURL = getClass().getClassLoader().getResource("pipeline-multi-appenders.xml");
Assert.assertNotNull(configURL);
configurator.doConfigure(configURL);
String topic = "testMultiAppenders";
TestCheckpointManager checkpointManager = new TestCheckpointManager();
KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
KAFKA_TESTER.createTopic(topic, 1);
loggerContext.start();
KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(CConfiguration.create(), "testMultiAppenders", loggerContext, NO_OP_METRICS_CONTEXT, 0), checkpointManager, KAFKA_TESTER.getBrokerService(), config);
pipeline.startAndWait();
// Publish some log messages to Kafka using a non-specific logger
long now = System.currentTimeMillis();
publishLog(topic, ImmutableList.of(LogPipelineTestUtil.createLoggingEvent("logger.trace", Level.TRACE, "TRACE", now - 1000), LogPipelineTestUtil.createLoggingEvent("logger.debug", Level.DEBUG, "DEBUG", now - 900), LogPipelineTestUtil.createLoggingEvent("logger.info", Level.INFO, "INFO", now - 800), LogPipelineTestUtil.createLoggingEvent("logger.warn", Level.WARN, "WARN", now - 700), LogPipelineTestUtil.createLoggingEvent("logger.error", Level.ERROR, "ERROR", now - 600)));
// All logs should get logged to the default.log file
Tasks.waitFor(true, () -> {
File logFile = new File(logDir, "default.log");
List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
return Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR").equals(lines);
}, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Publish some more log messages via the non-additive "test.info" logger.
now = System.currentTimeMillis();
publishLog(topic, ImmutableList.of(LogPipelineTestUtil.createLoggingEvent("test.info.trace", Level.TRACE, "TRACE", now - 1000), LogPipelineTestUtil.createLoggingEvent("test.info.debug", Level.DEBUG, "DEBUG", now - 900), LogPipelineTestUtil.createLoggingEvent("test.info", Level.INFO, "INFO", now - 800), LogPipelineTestUtil.createLoggingEvent("test.info.warn", Level.WARN, "WARN", now - 700), LogPipelineTestUtil.createLoggingEvent("test.info.error", Level.ERROR, "ERROR", now - 600)));
// Only logs with INFO or above level should get written to the info.log file
Tasks.waitFor(true, () -> {
File logFile = new File(logDir, "info.log");
List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
return Arrays.asList("INFO", "WARN", "ERROR").equals(lines);
}, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// The default.log file shouldn't be changed, because the test.info logger is non additive
File defaultLogFile = new File(logDir, "default.log");
List<String> lines = Files.readAllLines(defaultLogFile.toPath(), StandardCharsets.UTF_8);
Assert.assertEquals(Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR"), lines);
// Publish a log messages via the additive "test.error" logger.
now = System.currentTimeMillis();
publishLog(topic, ImmutableList.of(LogPipelineTestUtil.createLoggingEvent("test.error.1.2", Level.ERROR, "ERROR", now - 1000)));
// Expect the log get appended to both the error.log file as well as the default.log file
Tasks.waitFor(true, () -> {
File logFile = new File(logDir, "error.log");
List<String> lines1 = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
if (!Collections.singletonList("ERROR").equals(lines1)) {
return false;
}
// Additivity: the same event must also have reached the parent (default) appender.
logFile = new File(logDir, "default.log");
lines1 = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
return Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR", "ERROR").equals(lines1);
}, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
pipeline.stopAndWait();
loggerContext.stop();
}
Usage example of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext from the cdapio/cdap project.
Taken from the class KafkaLogProcessorPipelineTest, method testBasicSort.
// Verifies that events published out of order within one batch are appended in
// timestamp order, that events below the configured level are filtered out, and
// that exceeding the buffer size forces a write even for future-dated events.
@Test
public void testBasicSort() throws Exception {
  String kafkaTopic = "testPipeline";
  LoggerContext logCtx = LogPipelineTestUtil.createLoggerContext(
      "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
  final MockAppender mockAppender = LogPipelineTestUtil.getAppender(
      logCtx.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
  TestCheckpointManager checkpoints = new TestCheckpointManager();
  KafkaPipelineConfig pipelineConfig =
      new KafkaPipelineConfig(kafkaTopic, Collections.singleton(0), 1024L, 300L, 1048576, 500L);
  KAFKA_TESTER.createTopic(kafkaTopic, 1);
  logCtx.start();
  LogProcessorPipelineContext pipelineContext =
      new LogProcessorPipelineContext(CConfiguration.create(), "test", logCtx, NO_OP_METRICS_CONTEXT, 0);
  KafkaLogProcessorPipeline processorPipeline = new KafkaLogProcessorPipeline(
      pipelineContext, checkpoints, KAFKA_TESTER.getBrokerService(), pipelineConfig);
  processorPipeline.startAndWait();

  // Publish six events out of timestamp order; the DEBUG one sits below the
  // "test.logger" INFO threshold and should be dropped.
  long publishTime = System.currentTimeMillis();
  publishLog(kafkaTopic, ImmutableList.of(
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", publishTime - 1000),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", publishTime - 700),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", publishTime - 500),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", publishTime - 900),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", publishTime - 600),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", publishTime - 100)));

  // The whole batch is published at once, so the processor fetches everything and
  // the sorted output order is deterministic; five events remain after filtering.
  Tasks.waitFor(5, () -> mockAppender.getEvents().size(), 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  int expected = 0;
  while (expected < 5) {
    Assert.assertEquals(Integer.toString(expected), mockAppender.getEvents().poll().getMessage());
    expected++;
  }

  // Publish enough large events to blow past the 1024-byte queue limit; that must
  // trigger a write regardless of the (future) event timestamps.
  List<ILoggingEvent> published = new ArrayList<>(500);
  publishTime = System.currentTimeMillis();
  int index = 0;
  while (index < 500) {
    // Each event is time-stamped 10 seconds in the future.
    published.add(LogPipelineTestUtil.createLoggingEvent(
        "test.large.logger", Level.WARN, "Large logger " + index, publishTime + 10000));
    index++;
  }
  publishLog(kafkaTopic, published);
  Tasks.waitFor(true, () -> !mockAppender.getEvents().isEmpty(),
                5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  // Snapshot whatever has been appended so far and check it arrived in order.
  published.clear();
  published.addAll(mockAppender.getEvents());
  int position = 0;
  for (ILoggingEvent appended : published) {
    Assert.assertEquals("Large logger " + position, appended.getMessage());
    position++;
  }

  processorPipeline.stopAndWait();
  logCtx.stop();
  // Once the logger context is stopped, the mock appender reports no events (null).
  Assert.assertNull(mockAppender.getEvents());
}
Usage example of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext from the cdapio/cdap project.
Taken from the class LogBufferProcessorPipelineTest, method testSingleAppender.
// Verifies that the log buffer pipeline delivers buffered events to a single
// appender, filtering out events below the configured level.
@Test
public void testSingleAppender() throws Exception {
  LoggerContext logCtx = LogPipelineTestUtil.createLoggerContext(
      "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
  final MockAppender mockAppender = LogPipelineTestUtil.getAppender(
      logCtx.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
  MockCheckpointManager checkpoints = new MockCheckpointManager();
  LogBufferPipelineConfig pipelineConfig = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  logCtx.start();
  LogBufferProcessorPipeline bufferPipeline = new LogBufferProcessorPipeline(
      new LogProcessorPipelineContext(CConfiguration.create(), "test", logCtx, NO_OP_METRICS_CONTEXT, 0),
      pipelineConfig, checkpoints, 0);
  bufferPipeline.startAndWait();

  // Wrap every logging event in a LogBufferEvent whose file offset is its index in the list.
  List<ILoggingEvent> sourceEvents = getLoggingEvents();
  List<LogBufferEvent> bufferEvents = new ArrayList<>();
  int offset = 0;
  for (ILoggingEvent event : sourceEvents) {
    bufferEvents.add(new LogBufferEvent(
        event, serializer.toBytes(event).length, new LogBufferFileOffset(0, offset)));
    offset++;
  }

  // Feed the same batch to the pipeline 40 times from a background thread,
  // pausing 100ms between rounds.
  ExecutorService executor = Executors.newSingleThreadExecutor();
  executor.execute(() -> {
    for (int round = 0; round < 40; round++) {
      bufferPipeline.processLogEvents(bufferEvents.iterator());
      try {
        Thread.sleep(100);
      } catch (InterruptedException e) {
        // should not happen
      }
    }
  });

  // Wait for the pipeline to append all expected events; DEBUG events are filtered out.
  Tasks.waitFor(200, () -> mockAppender.getEvents().size(), 60, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  executor.shutdown();
  bufferPipeline.stopAndWait();
  logCtx.stop();
}
End of aggregated usage examples.