Use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by caskdata.
From the class KafkaLogProcessorPipelineTest, method testMetricsAppender.
@Test
public void testMetricsAppender() throws Exception {
  Injector injector = KAFKA_TESTER.getInjector();
  MetricsCollectionService collectionService = injector.getInstance(MetricsCollectionService.class);
  collectionService.startAndWait();
  LoggerContext loggerContext = new LocalAppenderContext(injector.getInstance(TransactionRunner.class),
                                                         injector.getInstance(LocationFactory.class),
                                                         injector.getInstance(MetricsCollectionService.class));
  final File logDir = TEMP_FOLDER.newFolder();
  loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());

  LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
  configurator.setContext(loggerContext);
  URL configURL = getClass().getClassLoader().getResource("pipeline-metric-appender.xml");
  Assert.assertNotNull(configURL);
  configurator.doConfigure(configURL);

  String topic = "metricsPipeline";
  TestCheckpointManager checkpointManager = new TestCheckpointManager();
  KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
  KAFKA_TESTER.createTopic(topic, 1);
  loggerContext.start();
  KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "testMetricAppender", loggerContext,
                                    NO_OP_METRICS_CONTEXT, 0),
    checkpointManager, KAFKA_TESTER.getBrokerService(), config);
  pipeline.startAndWait();

  // Publish some log messages to Kafka
  long now = System.currentTimeMillis();
  WorkerLoggingContext loggingContext = new WorkerLoggingContext("default", "app1", "worker1", "run1", "instance1");
  publishLog(topic, ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 700),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 500),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100)
  ), loggingContext);

  WorkflowProgramLoggingContext workflowProgramLoggingContext =
    new WorkflowProgramLoggingContext("default", "app1", "wflow1", "run1", ProgramType.MAPREDUCE, "mr1", "mrun1");
  publishLog(topic, ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "0", now - 1000),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "2", now - 700),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.TRACE, "3", now - 500)
  ), workflowProgramLoggingContext);

  ServiceLoggingContext serviceLoggingContext = new ServiceLoggingContext(
    NamespaceId.SYSTEM.getNamespace(), Constants.Logging.COMPONENT_NAME, Constants.Service.TRANSACTION);
  publishLog(topic, ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "0", now - 1000),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "2", now - 700),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "3", now - 500),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900)
  ), serviceLoggingContext);

  final MetricStore metricStore = injector.getInstance(MetricStore.class);
  try {
    verifyMetricsWithRetry(metricStore,
      new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.info", AggregationFunction.SUM,
                          LoggingContextHelper.getMetricsTags(loggingContext), new ArrayList<>()), 5L);
    verifyMetricsWithRetry(metricStore,
      new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.debug", AggregationFunction.SUM,
                          LoggingContextHelper.getMetricsTags(loggingContext), new ArrayList<>()), 1L);
    // mapreduce metrics context
    verifyMetricsWithRetry(metricStore,
      new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.warn", AggregationFunction.SUM,
                          ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default",
                                          Constants.Metrics.Tag.APP, "app1",
                                          Constants.Metrics.Tag.MAPREDUCE, "mr1",
                                          Constants.Metrics.Tag.RUN_ID, "mrun1"), new ArrayList<>()), 2L);
    // workflow metrics context
    verifyMetricsWithRetry(metricStore,
      new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.trace", AggregationFunction.SUM,
                          ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default",
                                          Constants.Metrics.Tag.APP, "app1",
                                          Constants.Metrics.Tag.WORKFLOW, "wflow1",
                                          Constants.Metrics.Tag.RUN_ID, "run1"), new ArrayList<>()), 1L);
    verifyMetricsWithRetry(metricStore,
      new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.services.log.error", AggregationFunction.SUM,
                          LoggingContextHelper.getMetricsTags(serviceLoggingContext), new ArrayList<>()), 3L);
  } finally {
    pipeline.stopAndWait();
    loggerContext.stop();
    collectionService.stopAndWait();
  }
}
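The verifyMetricsWithRetry helper is not shown in this snippet. Below is a minimal sketch of what it could look like, assuming MetricStore.query returns a collection of MetricTimeSeries and reusing the Tasks.waitFor retry utility seen elsewhere in these tests; the helper body and the timeout values are this sketch's assumptions, not the project's actual implementation.

private void verifyMetricsWithRetry(MetricStore metricStore, MetricDataQuery query, long expectedCount)
  throws Exception {
  // Metrics are emitted asynchronously by the pipeline, so poll the metric store
  // until the aggregated value for the query reaches the expected count.
  Tasks.waitFor(expectedCount, () -> {
    Collection<MetricTimeSeries> result = metricStore.query(query);
    if (result.isEmpty()) {
      return -1L; // nothing emitted yet
    }
    // With AggregationFunction.SUM over the full time range, a single
    // aggregated time value is expected per series.
    return result.iterator().next().getTimeValues().get(0).getValue();
  }, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
}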
Use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by caskdata.
From the class KafkaLogProcessorPipelineTest, method testMultiAppenders.
@Test
public void testMultiAppenders() throws Exception {
  final File logDir = TEMP_FOLDER.newFolder();
  LoggerContext loggerContext = new LoggerContext();
  loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());

  LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
  configurator.setContext(loggerContext);
  URL configURL = getClass().getClassLoader().getResource("pipeline-multi-appenders.xml");
  Assert.assertNotNull(configURL);
  configurator.doConfigure(configURL);

  String topic = "testMultiAppenders";
  TestCheckpointManager checkpointManager = new TestCheckpointManager();
  KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
  KAFKA_TESTER.createTopic(topic, 1);
  loggerContext.start();
  KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "testMultiAppenders", loggerContext,
                                    NO_OP_METRICS_CONTEXT, 0),
    checkpointManager, KAFKA_TESTER.getBrokerService(), config);
  pipeline.startAndWait();

  // Publish some log messages to Kafka using a non-specific logger
  long now = System.currentTimeMillis();
  publishLog(topic, ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("logger.trace", Level.TRACE, "TRACE", now - 1000),
    LogPipelineTestUtil.createLoggingEvent("logger.debug", Level.DEBUG, "DEBUG", now - 900),
    LogPipelineTestUtil.createLoggingEvent("logger.info", Level.INFO, "INFO", now - 800),
    LogPipelineTestUtil.createLoggingEvent("logger.warn", Level.WARN, "WARN", now - 700),
    LogPipelineTestUtil.createLoggingEvent("logger.error", Level.ERROR, "ERROR", now - 600)
  ));

  // All logs should get written to the default.log file
  Tasks.waitFor(true, () -> {
    File logFile = new File(logDir, "default.log");
    List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
    return Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR").equals(lines);
  }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  // Publish some more log messages via the non-additive "test.info" logger.
  now = System.currentTimeMillis();
  publishLog(topic, ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("test.info.trace", Level.TRACE, "TRACE", now - 1000),
    LogPipelineTestUtil.createLoggingEvent("test.info.debug", Level.DEBUG, "DEBUG", now - 900),
    LogPipelineTestUtil.createLoggingEvent("test.info", Level.INFO, "INFO", now - 800),
    LogPipelineTestUtil.createLoggingEvent("test.info.warn", Level.WARN, "WARN", now - 700),
    LogPipelineTestUtil.createLoggingEvent("test.info.error", Level.ERROR, "ERROR", now - 600)
  ));

  // Only logs at INFO level or above should get written to the info.log file
  Tasks.waitFor(true, () -> {
    File logFile = new File(logDir, "info.log");
    List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
    return Arrays.asList("INFO", "WARN", "ERROR").equals(lines);
  }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  // The default.log file shouldn't have changed, because the test.info logger is non-additive
  File defaultLogFile = new File(logDir, "default.log");
  List<String> lines = Files.readAllLines(defaultLogFile.toPath(), StandardCharsets.UTF_8);
  Assert.assertEquals(Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR"), lines);

  // Publish a log message via the additive "test.error" logger.
  now = System.currentTimeMillis();
  publishLog(topic, ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("test.error.1.2", Level.ERROR, "ERROR", now - 1000)
  ));

  // Expect the log to get appended to both the error.log file and the default.log file
  Tasks.waitFor(true, () -> {
    File logFile = new File(logDir, "error.log");
    List<String> lines1 = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
    if (!Collections.singletonList("ERROR").equals(lines1)) {
      return false;
    }
    logFile = new File(logDir, "default.log");
    lines1 = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
    return Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR", "ERROR").equals(lines1);
  }, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  pipeline.stopAndWait();
  loggerContext.stop();
}
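The pipeline-multi-appenders.xml resource itself is not reproduced on this page, but the behavior asserted above implies a standard logback configuration roughly along these lines: a root logger writing everything to default.log, a non-additive "test.info" logger at INFO level writing to info.log, and an additive "test.error" logger writing to error.log. The appender names and the message-only pattern below are assumptions of this sketch, not the project's actual file.

<configuration>
  <appender name="DEFAULT" class="ch.qos.logback.core.FileAppender">
    <file>${logDirectory}/default.log</file>
    <encoder><pattern>%msg%n</pattern></encoder>
  </appender>
  <appender name="INFO_FILE" class="ch.qos.logback.core.FileAppender">
    <file>${logDirectory}/info.log</file>
    <encoder><pattern>%msg%n</pattern></encoder>
  </appender>
  <appender name="ERROR_FILE" class="ch.qos.logback.core.FileAppender">
    <file>${logDirectory}/error.log</file>
    <encoder><pattern>%msg%n</pattern></encoder>
  </appender>

  <!-- Non-additive: events stop here and never reach the root's default.log -->
  <logger name="test.info" level="INFO" additivity="false">
    <appender-ref ref="INFO_FILE"/>
  </logger>

  <!-- Additive (the logback default): events go to error.log and also propagate to the root -->
  <logger name="test.error" level="ERROR">
    <appender-ref ref="ERROR_FILE"/>
  </logger>

  <root level="TRACE">
    <appender-ref ref="DEFAULT"/>
  </root>
</configuration>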
Use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by caskdata.
From the class KafkaLogProcessorPipelineTest, method testBasicSort.
@Test
public void testBasicSort() throws Exception {
  String topic = "testPipeline";
  LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext(
    "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
  final MockAppender appender = LogPipelineTestUtil.getAppender(
    loggerContext.getLogger(Logger.ROOT_LOGGER_NAME), "Test", MockAppender.class);
  TestCheckpointManager checkpointManager = new TestCheckpointManager();
  KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 300L, 1048576, 500L);
  KAFKA_TESTER.createTopic(topic, 1);
  loggerContext.start();
  KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
    checkpointManager, KAFKA_TESTER.getBrokerService(), config);
  pipeline.startAndWait();

  // Publish some log messages to Kafka
  long now = System.currentTimeMillis();
  publishLog(topic, ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 700),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 500),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100)
  ));

  // Since the messages are published in one batch, the processor should be able to fetch all of them,
  // hence the sorting order should be deterministic. The DEBUG message should get filtered out.
  Tasks.waitFor(5, () -> appender.getEvents().size(), 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  for (int i = 0; i < 5; i++) {
    Assert.assertEquals(Integer.toString(i), appender.getEvents().poll().getMessage());
  }

  // Now publish large messages that exceed the maximum queue size (1024). This should trigger writing
  // regardless of the event timestamp.
  List<ILoggingEvent> events = new ArrayList<>(500);
  now = System.currentTimeMillis();
  for (int i = 0; i < 500; i++) {
    // The event timestamp is 10 seconds in the future.
    events.add(LogPipelineTestUtil.createLoggingEvent("test.large.logger", Level.WARN, "Large logger " + i, now + 10000));
  }
  publishLog(topic, events);
  Tasks.waitFor(true, () -> !appender.getEvents().isEmpty(), 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  events.clear();
  events.addAll(appender.getEvents());
  for (int i = 0; i < events.size(); i++) {
    Assert.assertEquals("Large logger " + i, events.get(i).getMessage());
  }

  pipeline.stopAndWait();
  loggerContext.stop();
  Assert.assertNull(appender.getEvents());
}
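The LogPipelineTestUtil.createLoggingEvent helper is used throughout these tests but not shown. Assuming it simply builds a logback LoggingEvent carrying the logger name, level, message, and timestamp (the only fields the assertions above rely on), a minimal sketch could be:

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.LoggingEvent;

// A plausible sketch of the createLoggingEvent helper; the real utility may set
// additional fields (thread name, MDC), which these tests don't inspect.
static ILoggingEvent createLoggingEvent(String loggerName, Level level, String message, long timestamp) {
  LoggingEvent event = new LoggingEvent();
  event.setLoggerName(loggerName);
  event.setLevel(level);
  event.setMessage(message);
  event.setTimeStamp(timestamp);
  return event;
}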
Use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by caskdata.
From the class TimeEventQueueProcessorTest, method test.
@Test
public void test() throws Exception {
  LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext(
    "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
  LogProcessorPipelineContext context = new LogProcessorPipelineContext(
    CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0);
  context.start();

  TimeEventQueueProcessor<TestOffset> processor = new TimeEventQueueProcessor<>(context, 50, 1, ImmutableList.of(0));
  long now = System.currentTimeMillis();
  List<ILoggingEvent> events = ImmutableList.of(
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 1000),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 700),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "5", now - 500),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 900),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "4", now - 600),
    LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "6", now - 100)
  );
  ProcessedEventMetadata<TestOffset> metadata = processor.process(0, new TransformingIterator(events.iterator()));

  // All 6 events should be processed. This is because when the buffer becomes full after 5 events,
  // the time event queue processor appends the already-buffered events and then enqueues the 6th event.
  Assert.assertEquals(6, metadata.getTotalEventsProcessed());
  for (Map.Entry<Integer, Checkpoint<TestOffset>> entry : metadata.getCheckpoints().entrySet()) {
    Checkpoint<TestOffset> value = entry.getValue();
    // The checkpointed offset should be the max offset processed so far
    Assert.assertEquals(6, value.getOffset().getOffset());
  }
}
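TestOffset and TransformingIterator are test helpers that are not part of this snippet. The checkpoint assertion above only relies on TestOffset exposing a single long value, so under that assumption it could be as simple as:

// A minimal sketch of the TestOffset helper: a plain wrapper around a long
// offset, which is all that value.getOffset().getOffset() above requires.
private static final class TestOffset {
  private final long offset;

  TestOffset(long offset) {
    this.offset = offset;
  }

  long getOffset() {
    return offset;
  }
}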
Use of io.cdap.cdap.logging.pipeline.LogProcessorPipelineContext in project cdap by caskdata.
From the class LogBufferHandlerTest, method getLogPipeline.
private LogBufferProcessorPipeline getLogPipeline(LoggerContext loggerContext) {
  MockCheckpointManager checkpointManager = new MockCheckpointManager();
  LogBufferPipelineConfig config = new LogBufferPipelineConfig(1024L, 300L, 500L, 4);
  loggerContext.start();
  return new LogBufferProcessorPipeline(
    new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0),
    config, checkpointManager, 0);
}
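A hedged usage sketch: assuming LogBufferProcessorPipeline follows the same Guava Service lifecycle as the Kafka-based pipeline driven in the tests above (startAndWait/stopAndWait), a caller would typically use the helper like this; the omitted middle section stands in for the log-buffer handler wiring of LogBufferHandlerTest, which is not shown on this page.

// Illustrative only; mirrors how KafkaLogProcessorPipeline is driven above.
LoggerContext loggerContext = LogPipelineTestUtil.createLoggerContext(
  "WARN", ImmutableMap.of("test.logger", "INFO"), MockAppender.class.getName());
LogBufferProcessorPipeline pipeline = getLogPipeline(loggerContext);
pipeline.startAndWait();
try {
  // ... feed log events through the log buffer and assert on the appender ...
} finally {
  pipeline.stopAndWait();
  loggerContext.stop();
}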