Use of ch.qos.logback.classic.LoggerContext in project cdap-ingest by caskdata:
the class FileTailerIT, method getTimeLogger.
/**
 * Builds a logger that writes raw messages ({@code %msg%n}) to {@code file} through a
 * time-based rolling appender; the file rolls every minute (pattern suffix
 * {@code yyyy-MM-dd_HH-mm}).
 *
 * @param file path of the active log file; rolled files get the timestamp suffix appended
 * @return a DEBUG-level logback Logger dedicated to this test, with the appender attached
 */
private Logger getTimeLogger(String file) {
    LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
    // Typed generics instead of raw types: logback appenders are generic in the event type.
    RollingFileAppender<ILoggingEvent> fileAppender = new RollingFileAppender<ILoggingEvent>();
    fileAppender.setName("File Tailer IT");
    fileAppender.setContext(loggerContext);
    fileAppender.setFile(file);
    fileAppender.setAppend(true);
    TimeBasedRollingPolicy<ILoggingEvent> rollingPolicy = new TimeBasedRollingPolicy<ILoggingEvent>();
    rollingPolicy.setContext(loggerContext);
    // The policy must know its parent appender before start() so it can derive file names.
    rollingPolicy.setParent(fileAppender);
    rollingPolicy.setFileNamePattern(file + "%d{yyyy-MM-dd_HH-mm}");
    rollingPolicy.start();
    fileAppender.setRollingPolicy(rollingPolicy);
    PatternLayoutEncoder layoutEncoder = new PatternLayoutEncoder();
    layoutEncoder.setContext(loggerContext);
    // Bare message only — the test asserts on exact file contents.
    layoutEncoder.setPattern("%msg%n");
    layoutEncoder.start();
    fileAppender.setEncoder(layoutEncoder);
    fileAppender.start();
    // configures the logger
    Logger logger = loggerContext.getLogger(FileTailerIT.class.getName() + "time");
    logger.setLevel(Level.DEBUG);
    logger.addAppender(fileAppender);
    return logger;
}
Use of ch.qos.logback.classic.LoggerContext in project cdap-ingest by caskdata:
the class TailerLogUtils, method getSizeLogger.
/**
 * Builds a logger that writes to {@code file} through a size-based rolling appender:
 * the file rolls once it exceeds {@code fileSize}, keeping up to 100 indexed backups
 * ({@code file1 .. file100}).
 *
 * @param file     path of the active log file; rolled files get an index suffix
 * @param fileSize maximum size of the active file before a roll (e.g. "1MB")
 * @return a DEBUG-level logback Logger with the size-rolling appender attached
 */
public static Logger getSizeLogger(String file, String fileSize) {
    LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
    // Typed generics instead of raw types: logback appenders are generic in the event type.
    RollingFileAppender<ILoggingEvent> fileAppender = new RollingFileAppender<ILoggingEvent>();
    fileAppender.setContext(loggerContext);
    fileAppender.setFile(file);
    fileAppender.setAppend(true);
    FixedWindowRollingPolicy rollingPolicy = new FixedWindowRollingPolicy();
    rollingPolicy.setContext(loggerContext);
    rollingPolicy.setFileNamePattern(file + "%i");
    rollingPolicy.setParent(fileAppender);
    // BUG FIX: the max index must be configured BEFORE start(); the original called
    // setMaxIndex(100) after start(), so the policy started with the default window.
    rollingPolicy.setMaxIndex(100);
    rollingPolicy.start();
    fileAppender.setRollingPolicy(rollingPolicy);
    SizeBasedTriggeringPolicy triggeringPolicy = new SizeBasedTriggeringPolicy();
    triggeringPolicy.setContext(loggerContext);
    triggeringPolicy.setMaxFileSize(fileSize);
    triggeringPolicy.start();
    fileAppender.setTriggeringPolicy(triggeringPolicy);
    PatternLayoutEncoder layoutEncoder = new PatternLayoutEncoder();
    layoutEncoder.setContext(loggerContext);
    layoutEncoder.setPattern("[%d %-5p %c{1}] %msg%n");
    layoutEncoder.start();
    fileAppender.setEncoder(layoutEncoder);
    fileAppender.start();
    // configures logger
    Logger rootLogger = loggerContext.getLogger(BaseTailerTest.class.getName() + "size");
    rootLogger.setLevel(Level.DEBUG);
    rootLogger.addAppender(fileAppender);
    return rootLogger;
}
Use of ch.qos.logback.classic.LoggerContext in project SmartApplianceEnabler by camueller:
the class Application, method configureLogging.
/**
 * Programmatically configures logback for the "de.avanux" logger tree and, when a file
 * is given, attaches a file appender to both that tree and the root logger.
 *
 * @param level    log level to apply to the "de.avanux" logger
 * @param file     log file path, or {@code null} to skip file logging entirely
 * @param additive whether "de.avanux" events also propagate to ancestor appenders;
 *                 NOTE(review): with {@code additive=true} and a file appender attached
 *                 to both loggers, events may be written twice — confirm intended
 */
public static void configureLogging(ch.qos.logback.classic.Level level, String file, boolean additive) {
    LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
    // Exclude the ApplianceLogger wrapper package so %file:%line reports the real caller.
    lc.getFrameworkPackages().add(ApplianceLogger.class.getPackage().getName());
    PatternLayoutEncoder ple = new PatternLayoutEncoder();
    ple.setPattern("%date %level [%thread] %logger{10} [%file:%line] %msg%n");
    ple.setContext(lc);
    ple.start();
    FileAppender<ILoggingEvent> fileAppender = null;
    if (file != null) {
        fileAppender = new FileAppender<ILoggingEvent>();
        fileAppender.setFile(file);
        fileAppender.setEncoder(ple);
        fileAppender.setContext(lc);
        fileAppender.start();
    }
    ch.qos.logback.classic.Logger logger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("de.avanux");
    logger.setLevel(level);
    logger.setAdditive(additive);
    if (fileAppender != null) {
        // Single null-check (the original repeated it): attach to both the application
        // logger and root so framework output lands in the same file.
        logger.addAppender(fileAppender);
        ch.qos.logback.classic.Logger rootLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME);
        rootLogger.addAppender(fileAppender);
    }
    // SLF4J idiom: parameterized message instead of eager string concatenation.
    logger.info("Logging configured with log level {}", level);
    // external classes
    ch.qos.logback.classic.Logger loggerRetrieveRemoteDescriptors = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.fourthline.cling.protocol.RetrieveRemoteDescriptors");
    loggerRetrieveRemoteDescriptors.setLevel(Level.ERROR);
}
Use of ch.qos.logback.classic.LoggerContext in project cdap by caskdata:
the class LogStageInjector, method start.
/**
* Hijacks the appenders for the root logger and replaces them with a {@link LogStageAppender} that will insert
* the ETL stage name at the start of each message if the stage name is set. Uses {@link org.slf4j.MDC} to look up
* the current stage name.
*/
public static void start() {
    // First caller wins; every subsequent call is a no-op.
    if (!initialized.compareAndSet(false, true)) {
        return;
    }
    ILoggerFactory factory = LoggerFactory.getILoggerFactory();
    if (!(factory instanceof LoggerContext)) {
        LOG.warn("LoggerFactory is not a logback LoggerContext. Stage names will not be injected into log messages.");
        return;
    }
    LoggerContext context = (LoggerContext) factory;
    ch.qos.logback.classic.Logger root = context.getLogger(Logger.ROOT_LOGGER_NAME);
    // Snapshot the current root appenders; bail out entirely if injection already happened.
    List<Appender<ILoggingEvent>> originals = new ArrayList<>();
    for (Iterator<Appender<ILoggingEvent>> it = root.iteratorForAppenders(); it.hasNext(); ) {
        Appender<ILoggingEvent> candidate = it.next();
        // Compare by class *name*: the installed appender may come from another classloader.
        if (LogStageAppender.class.getName().equals(candidate.getClass().getName())) {
            return;
        }
        originals.add(candidate);
    }
    // Wrap the originals so each message is prefixed with the current ETL stage name.
    Appender<ILoggingEvent> wrapper = new LogStageAppender(originals);
    wrapper.setContext(context);
    wrapper.start();
    root.addAppender(wrapper);
    // Detach the originals afterwards so no event is logged twice.
    for (Appender<ILoggingEvent> original : originals) {
        root.detachAppender(original);
    }
}
Use of ch.qos.logback.classic.LoggerContext in project cdap by caskdata:
the class KafkaLogProcessorPipelineTest, method testMultiAppenders.
@Test
public void testMultiAppenders() throws Exception {
// Each appender in pipeline-multi-appenders.xml writes into this directory
// (exposed to the config via the "logDirectory" property below).
final File logDir = TEMP_FOLDER.newFolder();
LoggerContext loggerContext = new LoggerContext();
loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
configurator.setContext(loggerContext);
URL configURL = getClass().getClassLoader().getResource("pipeline-multi-appenders.xml");
Assert.assertNotNull(configURL);
configurator.doConfigure(configURL);
String topic = "testMultiAppenders";
TestCheckpointManager checkpointManager = new TestCheckpointManager();
// Pipeline tuning: partition 0 only, small buffers and short sync intervals
// so the test observes log files quickly.
KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
KAFKA_TESTER.createTopic(topic, 1);
loggerContext.start();
KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(CConfiguration.create(), "testMultiAppenders", loggerContext, NO_OP_METRICS_CONTEXT, 0), checkpointManager, KAFKA_TESTER.getBrokerService(), config);
pipeline.startAndWait();
// Publish some log messages to Kafka using a non-specific logger
long now = System.currentTimeMillis();
publishLog(topic, ImmutableList.of(createLoggingEvent("logger.trace", Level.TRACE, "TRACE", now - 1000), createLoggingEvent("logger.debug", Level.DEBUG, "DEBUG", now - 900), createLoggingEvent("logger.info", Level.INFO, "INFO", now - 800), createLoggingEvent("logger.warn", Level.WARN, "WARN", now - 700), createLoggingEvent("logger.error", Level.ERROR, "ERROR", now - 600)));
// All logs should get logged to the default.log file
Tasks.waitFor(true, new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
File logFile = new File(logDir, "default.log");
List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
// Message pattern is the bare level string, so file lines equal the messages.
return Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR").equals(lines);
}
}, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Publish some more log messages via the non-additive "test.info" logger.
now = System.currentTimeMillis();
publishLog(topic, ImmutableList.of(createLoggingEvent("test.info.trace", Level.TRACE, "TRACE", now - 1000), createLoggingEvent("test.info.debug", Level.DEBUG, "DEBUG", now - 900), createLoggingEvent("test.info", Level.INFO, "INFO", now - 800), createLoggingEvent("test.info.warn", Level.WARN, "WARN", now - 700), createLoggingEvent("test.info.error", Level.ERROR, "ERROR", now - 600)));
// Only logs with INFO or above level should get written to the info.log file
Tasks.waitFor(true, new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
File logFile = new File(logDir, "info.log");
List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
return Arrays.asList("INFO", "WARN", "ERROR").equals(lines);
}
}, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// The default.log file shouldn't be changed, because the test.info logger is non additive
File defaultLogFile = new File(logDir, "default.log");
List<String> lines = Files.readAllLines(defaultLogFile.toPath(), StandardCharsets.UTF_8);
Assert.assertEquals(Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR"), lines);
// Publish a log messages via the additive "test.error" logger.
now = System.currentTimeMillis();
publishLog(topic, ImmutableList.of(createLoggingEvent("test.error.1.2", Level.ERROR, "ERROR", now - 1000)));
// Expect the log get appended to both the error.log file as well as the default.log file
Tasks.waitFor(true, new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
File logFile = new File(logDir, "error.log");
List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
if (!Collections.singletonList("ERROR").equals(lines)) {
return false;
}
// Additivity means the same event also reaches the root's default.log appender.
logFile = new File(logDir, "default.log");
lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
return Arrays.asList("TRACE", "DEBUG", "INFO", "WARN", "ERROR", "ERROR").equals(lines);
}
}, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Tear down in reverse order of startup.
pipeline.stopAndWait();
loggerContext.stop();
}
Aggregations