use of org.apache.log4j.Appender in project xtext-core by eclipse.
the class LoggingTester method captureLogging.
public static LoggingTester.LogCapture captureLogging(final Level level, final Class<?> source, final Runnable action) {
    final Logger logger = Logger.getLogger(source);
    final LoggingTester.QueueAppender appender = new LoggingTester.QueueAppender();
    final Level oldLevel = logger.getLevel();
    final ArrayList<Appender> allAppenders = LoggingTester.appenderHierarchy(logger);
    final LoggingTester.SourceFilter filter = new LoggingTester.SourceFilter(logger);
    try {
        final Consumer<Appender> _function = (Appender it) -> {
            it.addFilter(filter);
        };
        allAppenders.forEach(_function);
        logger.addAppender(appender);
        logger.setLevel(level);
        action.run();
        final List<LoggingTester.LogEntry> events = IterableExtensions.<LoggingTester.LogEntry>sortWith(
                IterableExtensions.<LoggingTester.LogEntry>toList(appender.events), LoggingTester.TEMPORAL_ORDER);
        return new LoggingTester.LogCapture(events);
    } finally {
        logger.removeAppender(appender);
        final Consumer<Appender> _function_1 = (Appender it) -> {
            LoggingTester.removeFilter(it, filter);
        };
        allAppenders.forEach(_function_1);
        logger.setLevel(oldLevel);
    }
}
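A minimal usage sketch for this helper, assuming a placeholder MyService class as the logging source and an assertLogEntry(String) check on LogCapture (both are assumptions made for illustration, not confirmed by the snippet above):

import org.apache.log4j.Level;
import org.apache.log4j.Logger;

public class CaptureLoggingSketch {

    // placeholder class that only serves as the logging source for the example
    static class MyService {
        void run() {
            Logger.getLogger(MyService.class).warn("disk almost full");
        }
    }

    public static void main(String[] args) {
        // capture everything at WARN and above that MyService logs while the runnable executes
        LoggingTester.LogCapture capture =
                LoggingTester.captureLogging(Level.WARN, MyService.class, () -> new MyService().run());
        // assertLogEntry(...) is assumed here for illustration; the capture object holds the
        // recorded entries sorted in temporal order, as built in the method above
        capture.assertLogEntry("disk almost full");
    }
}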
use of org.apache.log4j.Appender in project knime-core by knime.
the class NodeLogger method addWorkflowDirAppender.
/**
 * Adds a new workflow directory logger for the given workflow directory if it doesn't exist yet.
 * @param workflowDir the directory of the workflow that should be logged to
 */
private void addWorkflowDirAppender(final File workflowDir) {
    if (workflowDir == null) {
        // if the workflowDir is null we do not need to append an extra log appender
        return;
    }
    // in this method we have to use the logger directly to prevent a deadlock!!!
    final Logger logger = m_logger;
    final String workflowDirPath = workflowDir.getPath();
    if (workflowDirPath == null) {
        return;
    }
    Appender wfAppender = WF_APPENDER.get(workflowDirPath);
    if (wfAppender != null) {
        logger.addAppender(wfAppender);
    } else {
        // we call getAppender twice to avoid entering the synchronized block on subsequent calls!!!
        synchronized (WF_APPENDER) {
            // we need a synchronized block, otherwise we might create a second appender that opens a file handle
            // which never gets closed, and thus the copying of a full log file to the zip file fails
            wfAppender = WF_APPENDER.get(workflowDirPath);
            if (wfAppender == null) {
                // use the KNIME-specific LogfileAppender that moves larger log files into a separate zip file
                // and that implements equals and hashCode so that two LogfileAppenders with the same name
                // are considered equal, preventing duplicate appender registration
                final FileAppender fileAppender = new LogfileAppender(workflowDir);
                fileAppender.setLayout(WF_DIR_LOG_FILE_LAYOUT);
                fileAppender.setName(workflowDirPath);
                final Filter mainFilter = LOG_FILE_APPENDER.getFilter();
                fileAppender.addFilter(new Filter() {
                    @Override
                    public int decide(final LoggingEvent event) {
                        final Object msg = event.getMessage();
                        if (msg instanceof KNIMELogMessage) {
                            final KNIMELogMessage kmsg = (KNIMELogMessage) msg;
                            // can be null
                            final File msgDir = kmsg.getWorkflowDir();
                            if ((LOG_GLOBAL_IN_WF_DIR && msgDir == null) || (LOG_IN_WF_DIR && workflowDir.equals(msgDir))) {
                                // return only NEUTRAL to let the log-level-based filters decide whether we log this event
                                if (mainFilter != null) {
                                    return mainFilter.decide(event);
                                }
                                return Filter.NEUTRAL;
                            }
                        }
                        return Filter.DENY;
                    }
                });
                // we have to call this method to activate the writer!!!
                fileAppender.activateOptions();
                logger.addAppender(fileAppender);
                WF_APPENDER.put(workflowDirPath, fileAppender);
                if (m_listener == null) {
                    m_listener = new MyWorkflowListener();
                    WorkflowManager.ROOT.addListener(m_listener);
                }
            }
        }
    }
}
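The core idea, lazily registering one appender per directory behind a double check on a shared map, can be sketched without the KNIME-specific classes as follows (the APPENDERS map, the appenderFor method, and the file name are invented for illustration; only the org.apache.log4j types are real):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.log4j.Appender;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

public class PerDirectoryAppenders {

    // one appender per directory path, shared by all callers
    private static final Map<String, Appender> APPENDERS = new ConcurrentHashMap<>();

    static Appender appenderFor(String dirPath) {
        Appender existing = APPENDERS.get(dirPath);
        if (existing != null) {
            return existing;
        }
        synchronized (APPENDERS) {
            // re-check inside the lock so only one file handle is ever opened per directory
            existing = APPENDERS.get(dirPath);
            if (existing == null) {
                FileAppender fa = new FileAppender();
                fa.setLayout(new PatternLayout("%d %-5p %m%n"));
                fa.setFile(dirPath + "/workflow.log");
                fa.setName(dirPath);
                fa.activateOptions(); // opens the file handle; without this the appender never writes
                Logger.getRootLogger().addAppender(fa);
                APPENDERS.put(dirPath, fa);
                existing = fa;
            }
        }
        return existing;
    }
}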
use of org.apache.log4j.Appender in project knime-core by knime.
the class NodeLogger method setAppenderLevelRange.
/**
* Sets a level range filter on the given appender.
*
* @param appenderName the name of the appender
* @param min the minimum logging level
* @param max the maximum logging level
* @throws NoSuchElementException if the given appender does not exist
* @since 2.8
*/
public static void setAppenderLevelRange(final String appenderName, final LEVEL min, final LEVEL max)
        throws NoSuchElementException {
    Logger root = Logger.getRootLogger();
    Appender appender = root.getAppender(appenderName);
    if (appender == null) {
        throw new NoSuchElementException("Appender '" + appenderName + "' does not exist");
    }
    Filter filter = appender.getFilter();
    while ((filter != null) && !(filter instanceof LevelRangeFilter)) {
        filter = filter.getNext();
    }
    if (filter == null) {
        // add a new level range filter
        LevelRangeFilter levelFilter = new LevelRangeFilter();
        levelFilter.setLevelMin(transLEVEL(min));
        levelFilter.setLevelMax(transLEVEL(max));
        appender.addFilter(levelFilter);
    } else {
        // modify existing level range filter
        ((LevelRangeFilter) filter).setLevelMin(transLEVEL(min));
        ((LevelRangeFilter) filter).setLevelMax(transLEVEL(max));
    }
}
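For reference, a LevelRangeFilter attached to a plain log4j appender behaves like the sketch below; the console appender and its "stdout" name are placeholders, and the sketch only mirrors the effect the method above has on an already registered appender:

import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.varia.LevelRangeFilter;

public class LevelRangeSketch {
    public static void main(String[] args) {
        // a console appender restricted to the INFO..ERROR range
        ConsoleAppender console = new ConsoleAppender(new PatternLayout("%-5p %m%n"));
        console.setName("stdout");
        LevelRangeFilter range = new LevelRangeFilter();
        range.setLevelMin(Level.INFO);
        range.setLevelMax(Level.ERROR);
        console.addFilter(range);
        console.activateOptions();
        Logger.getRootLogger().addAppender(console);

        Logger.getRootLogger().debug("dropped by the range filter");
        Logger.getRootLogger().warn("passes the range filter");
    }
}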
use of org.apache.log4j.Appender in project traccar by tananaev.
the class Log method setupLogger.
public static void setupLogger(Config config) throws IOException {
    Layout layout = new PatternLayout("%d{" + DATE_FORMAT + "} %5p: %m%n");
    Appender appender = new DailyRollingFileAppender(layout, config.getString("logger.file"), "'.'yyyyMMdd");
    LogManager.resetConfiguration();
    LogManager.getRootLogger().addAppender(new NullAppender());
    logger = Logger.getLogger(LOGGER_NAME);
    logger.addAppender(appender);
    logger.setLevel(Level.toLevel(config.getString("logger.level"), Level.ALL));
    // Workaround for "Bug 745866 - (EDG-45) Possible netty logging config problem"
    InternalLoggerFactory.setDefaultFactory(new InternalLoggerFactory() {
        @Override
        public InternalLogger newInstance(String string) {
            return new NettyInternalLogger();
        }
    });
    Log.logSystemInfo();
    Log.info("Version: " + getAppVersion());
}
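Stripped of the traccar Config object and the Netty workaround, the programmatic setup reduces to roughly the following sketch (file path, pattern, and logger name are placeholders chosen for illustration):

import org.apache.log4j.DailyRollingFileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.varia.NullAppender;

public class RollingLogSketch {
    public static void main(String[] args) throws Exception {
        // discard whatever log4j picked up from log4j.properties/log4j.xml
        LogManager.resetConfiguration();
        // silence the root logger so only the named logger below writes anywhere
        LogManager.getRootLogger().addAppender(new NullAppender());

        PatternLayout layout = new PatternLayout("%d{yyyy-MM-dd HH:mm:ss} %5p: %m%n");
        // rolls the file over daily, suffixing the old file with ".yyyyMMdd"
        DailyRollingFileAppender appender =
                new DailyRollingFileAppender(layout, "target/app.log", "'.'yyyyMMdd");

        Logger logger = Logger.getLogger("app");
        logger.addAppender(appender);
        logger.setLevel(Level.ALL);
        logger.info("logger configured");
    }
}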
use of org.apache.log4j.Appender in project scheduling by ow2-proactive.
the class EnabledListenJobLogsSupport method listenJobLogs.
@Override
public synchronized void listenJobLogs(JobId jobId, AppenderProvider appenderProvider) throws UnknownJobException {
    jlogger.info(jobId, "listening logs");
    // create the appender to the remote listener
    Appender clientAppender = null;
    try {
        clientAppender = appenderProvider.getAppender();
    } catch (LogForwardingException e) {
        jlogger.error(jobId, "cannot create an appender", e);
        throw new InternalException("Cannot create an appender for job " + jobId, e);
    }
    boolean logIsAlreadyInitialized = jobsToBeLogged.contains(jobId);
    initJobLogging(jobId, clientAppender);
    JobResult result = dbManager.loadJobResult(jobId);
    if (result == null) {
        throw new UnknownJobException(jobId);
    }
    // for finished tasks, add log events "manually"
    Collection<TaskResult> allRes = result.getAllResults().values();
    for (TaskResult tr : allRes) {
        this.flushTaskLogs(tr, clientAppender, jobId);
    }
    for (RunningTaskData taskData : liveJobs.getRunningTasks(jobId)) {
        jlogger.debug(jobId, "Handling log initialization for task " + taskData.getTask().getName());
        try {
            TaskLauncher taskLauncher = taskData.getLauncher();
            if (logIsAlreadyInitialized) {
                jlogger.debug(jobId, "Call getStoredLogs");
                taskLauncher.getStoredLogs(appenderProvider);
            } else {
                jlogger.debug(jobId, "Call activateLogs");
                taskLauncher.activateLogs(lfs.getAppenderProvider());
            }
        } catch (Exception e) {
            tlogger.error(taskData.getTask().getId(), "cannot create an appender provider", e);
        }
    }
    if (!result.getJobInfo().getStatus().isJobAlive()) {
        jlogger.info(jobId, "cleaning loggers for already finished job");
        cleanLoggers(jobId);
    }
}
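The "add log events manually" step for finished tasks boils down to replaying stored LoggingEvents into the freshly attached client appender. A minimal sketch of that idea, where the storedEvents list and the replay method are hypothetical stand-ins for what flushTaskLogs does internally, could be:

import java.util.List;
import org.apache.log4j.Appender;
import org.apache.log4j.spi.LoggingEvent;

public class LogReplaySketch {
    // pushes previously recorded events into a freshly attached appender so that a
    // listener which connects late still receives the full log history
    static void replay(List<LoggingEvent> storedEvents, Appender clientAppender) {
        for (LoggingEvent event : storedEvents) {
            clientAppender.doAppend(event);
        }
    }
}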