
Example 91 with LoggerContext

Use of ch.qos.logback.classic.LoggerContext in project cdap by caskdata.

In class KafkaLogProcessorPipelineTest, method testMetricsAppender:

@Test
public void testMetricsAppender() throws Exception {
    Injector injector = KAFKA_TESTER.getInjector();
    MetricsCollectionService collectionService = injector.getInstance(MetricsCollectionService.class);
    collectionService.startAndWait();
    LoggerContext loggerContext = new LocalAppenderContext(injector.getInstance(DatasetFramework.class), injector.getInstance(TransactionSystemClient.class), injector.getInstance(LocationFactory.class), injector.getInstance(MetricsCollectionService.class));
    final File logDir = TEMP_FOLDER.newFolder();
    loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
    LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
    configurator.setContext(loggerContext);
    URL configURL = getClass().getClassLoader().getResource("pipeline-metric-appender.xml");
    Assert.assertNotNull(configURL);
    configurator.doConfigure(configURL);
    String topic = "metricsPipeline";
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(CConfiguration.create(), "testMetricAppender", loggerContext, NO_OP_METRICS_CONTEXT, 0), checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Publish some log messages to Kafka
    long now = System.currentTimeMillis();
    FlowletLoggingContext flowletLoggingContext = new FlowletLoggingContext("default", "app1", "flow1", "flowlet1", "run1", "instance1");
    publishLog(topic, ImmutableList.of(createLoggingEvent("test.logger", Level.INFO, "0", now - 1000), createLoggingEvent("test.logger", Level.INFO, "2", now - 700), createLoggingEvent("test.logger", Level.INFO, "3", now - 500), createLoggingEvent("test.logger", Level.INFO, "1", now - 900), createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600), createLoggingEvent("test.logger", Level.INFO, "4", now - 100)), flowletLoggingContext);
    WorkflowProgramLoggingContext workflowProgramLoggingContext = new WorkflowProgramLoggingContext("default", "app1", "wflow1", "run1", ProgramType.MAPREDUCE, "mr1", "mrun1");
    publishLog(topic, ImmutableList.of(createLoggingEvent("test.logger", Level.WARN, "0", now - 1000), createLoggingEvent("test.logger", Level.WARN, "2", now - 700), createLoggingEvent("test.logger", Level.TRACE, "3", now - 500)), workflowProgramLoggingContext);
    ServiceLoggingContext serviceLoggingContext = new ServiceLoggingContext(NamespaceId.SYSTEM.getNamespace(), Constants.Logging.COMPONENT_NAME, Constants.Service.TRANSACTION);
    publishLog(topic, ImmutableList.of(createLoggingEvent("test.logger", Level.ERROR, "0", now - 1000), createLoggingEvent("test.logger", Level.ERROR, "2", now - 700), createLoggingEvent("test.logger", Level.ERROR, "3", now - 500), createLoggingEvent("test.logger", Level.INFO, "1", now - 900)), serviceLoggingContext);
    final MetricStore metricStore = injector.getInstance(MetricStore.class);
    try {
        verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.info", AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(flowletLoggingContext), new ArrayList<String>()), 5L);
        verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.debug", AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(flowletLoggingContext), new ArrayList<String>()), 1L);
        // mapreduce metrics context
        verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.warn", AggregationFunction.SUM, ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default", Constants.Metrics.Tag.APP, "app1", Constants.Metrics.Tag.MAPREDUCE, "mr1", Constants.Metrics.Tag.RUN_ID, "mrun1"), new ArrayList<String>()), 2L);
        // workflow metrics context
        verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.trace", AggregationFunction.SUM, ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default", Constants.Metrics.Tag.APP, "app1", Constants.Metrics.Tag.WORKFLOW, "wflow1", Constants.Metrics.Tag.RUN_ID, "run1"), new ArrayList<String>()), 1L);
        verifyMetricsWithRetry(metricStore, new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.services.log.error", AggregationFunction.SUM, LoggingContextHelper.getMetricsTags(serviceLoggingContext), new ArrayList<String>()), 3L);
    } finally {
        pipeline.stopAndWait();
        loggerContext.stop();
        collectionService.stopAndWait();
    }
}
Also used : MetricStore(co.cask.cdap.api.metrics.MetricStore) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) LocalMetricsCollectionService(co.cask.cdap.metrics.collect.LocalMetricsCollectionService) ArrayList(java.util.ArrayList) LogProcessorPipelineContext(co.cask.cdap.logging.pipeline.LogProcessorPipelineContext) ServiceLoggingContext(co.cask.cdap.common.logging.ServiceLoggingContext) LoggerContext(ch.qos.logback.classic.LoggerContext) URL(java.net.URL) LocationFactory(org.apache.twill.filesystem.LocationFactory) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) TransactionSystemClient(org.apache.tephra.TransactionSystemClient) LocalAppenderContext(co.cask.cdap.logging.framework.LocalAppenderContext) WorkflowProgramLoggingContext(co.cask.cdap.logging.context.WorkflowProgramLoggingContext) Injector(com.google.inject.Injector) FlowletLoggingContext(co.cask.cdap.logging.context.FlowletLoggingContext) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) File(java.io.File) LogPipelineConfigurator(co.cask.cdap.logging.pipeline.LogPipelineConfigurator) Test(org.junit.Test)
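
The helper createLoggingEvent is called repeatedly above but is not part of this snippet. A minimal sketch of such a helper, assuming it only builds a plain logback LoggingEvent (the real CDAP helper may also set MDC properties, thread name, and other fields), could look like this:

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.LoggingEvent;

// Hypothetical sketch of the createLoggingEvent helper referenced above.
final class LoggingEventFactory {

    static ILoggingEvent createLoggingEvent(String loggerName, Level level, String message, long timestamp) {
        LoggingEvent event = new LoggingEvent();
        event.setLoggerName(loggerName);
        event.setLevel(level);
        event.setMessage(message);
        event.setTimeStamp(timestamp);
        return event;
    }
}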

Example 92 with LoggerContext

Use of ch.qos.logback.classic.LoggerContext in project cdap by caskdata.

In class KafkaLogProcessorPipelineTest, method testRegularFlush:

@Test
public void testRegularFlush() throws Exception {
    String topic = "testFlush";
    LoggerContext loggerContext = createLoggerContext("WARN", ImmutableMap.of("test.logger", "INFO"), TestAppender.class.getName());
    final TestAppender appender = getAppender(loggerContext.getLogger(Logger.ROOT_LOGGER_NAME), "Test", TestAppender.class);
    TestCheckpointManager checkpointManager = new TestCheckpointManager();
    // Use a longer checkpoint time and a short event delay. Should expect flush called at least once
    // per event delay.
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024, 100, 1048576, 2000);
    KAFKA_TESTER.createTopic(topic, 1);
    loggerContext.start();
    KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(new LogProcessorPipelineContext(CConfiguration.create(), "test", loggerContext, NO_OP_METRICS_CONTEXT, 0), checkpointManager, KAFKA_TESTER.getBrokerService(), config);
    pipeline.startAndWait();
    // Even when there is no event, the flush should still get called.
    Tasks.waitFor(5, new Callable<Integer>() {

        @Override
        public Integer call() throws Exception {
            return appender.getFlushCount();
        }
    }, 3, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // Publish some logs
    long now = System.currentTimeMillis();
    publishLog(topic, ImmutableList.of(createLoggingEvent("test.logger", Level.INFO, "0", now - 500), createLoggingEvent("test.logger", Level.INFO, "1", now - 300), createLoggingEvent("test.logger", Level.INFO, "2", now + 100)));
    // Wait until getting all logs.
    Tasks.waitFor(3, new Callable<Integer>() {

        @Override
        public Integer call() throws Exception {
            return appender.getEvents().size();
        }
    }, 3, TimeUnit.SECONDS, 200, TimeUnit.MILLISECONDS);
    pipeline.stopAndWait();
    // Should get at least 20 flush calls, since the checkpoint is every 2 seconds
    Assert.assertTrue(appender.getFlushCount() >= 20);
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) LogProcessorPipelineContext(co.cask.cdap.logging.pipeline.LogProcessorPipelineContext) LoggerContext(ch.qos.logback.classic.LoggerContext) NoSuchElementException(java.util.NoSuchElementException) IOException(java.io.IOException) Test(org.junit.Test)
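
The TestAppender this test relies on is not shown. A minimal sketch of a comparable appender, assuming the pipeline flushes appenders that implement java.io.Flushable (the actual CDAP TestAppender may differ), could look like this:

import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.AppenderBase;
import java.io.Flushable;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical appender that records every event and counts flush calls.
public class CountingTestAppender extends AppenderBase<ILoggingEvent> implements Flushable {

    private final Queue<ILoggingEvent> events = new ConcurrentLinkedQueue<>();
    private final AtomicInteger flushCount = new AtomicInteger();

    @Override
    protected void append(ILoggingEvent event) {
        events.add(event);
    }

    @Override
    public void flush() {
        flushCount.incrementAndGet();
    }

    public Queue<ILoggingEvent> getEvents() {
        return events;
    }

    public int getFlushCount() {
        return flushCount.get();
    }
}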

Example 93 with LoggerContext

Use of ch.qos.logback.classic.LoggerContext in project sakuli by ConSol.

In class LoggerInitializerTest, method setUp:

@BeforeMethod
public void setUp() throws Exception {
    MockitoAnnotations.initMocks(this);
    LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
    context.reset();
    when(sakuliProperties.getLogFolder()).thenReturn(Paths.get(logFolder));
    when(sakuliProperties.getLogPattern()).thenReturn("%-5level [%d{YYYY-MM-dd HH:mm:ss}] - %msg%n");
}
Also used : LoggerContext(ch.qos.logback.classic.LoggerContext) BeforeMethod(org.testng.annotations.BeforeMethod)
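
The test above only resets the context and mocks the log folder and pattern; the initializer under test is not shown. A minimal sketch of programmatic configuration against such properties (hypothetical, not Sakuli's actual LoggerInitializer, and the file name is assumed) might look like this:

import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.FileAppender;
import org.slf4j.LoggerFactory;

// Hypothetical initializer: wires a log folder and pattern into a FileAppender on the root logger.
final class SimpleLoggerInitializer {

    static void initLogging(String logFolder, String logPattern) {
        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();

        PatternLayoutEncoder encoder = new PatternLayoutEncoder();
        encoder.setContext(context);
        encoder.setPattern(logPattern);
        encoder.start();

        FileAppender<ILoggingEvent> appender = new FileAppender<>();
        appender.setContext(context);
        // "sakuli.log" is an assumed file name, used here for illustration only.
        appender.setFile(logFolder + "/sakuli.log");
        appender.setEncoder(encoder);
        appender.start();

        context.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).addAppender(appender);
    }
}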

Example 94 with LoggerContext

Use of ch.qos.logback.classic.LoggerContext in project midpoint by Evolveum.

In class StartupConfiguration, method setupInitialLogging:

private void setupInitialLogging(File midpointHome) {
    File logbackConfigFile = new File(midpointHome, LOGBACK_CONFIG_FILENAME);
    boolean clear = false;
    if (logbackConfigFile.exists()) {
        clear = true;
    } else {
        logbackConfigFile = new File(midpointHome, LOGBACK_EXTRA_CONFIG_FILENAME);
        if (!logbackConfigFile.exists()) {
            return;
        }
    }
    LOGGER.info("Loading logging configuration from {} ({})", logbackConfigFile, clear ? "clearing default configuration" : "extending defalt configuration");
    LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
    if (clear) {
        context.reset();
    }
    try {
        JoranConfigurator configurator = new JoranConfigurator();
        configurator.setContext(context);
        configurator.doConfigure(logbackConfigFile);
    } catch (Exception e) {
        // This will be logged by the default logging configuration
        LOGGER.error("Error loading additional logging configuration: {}", e.getMessage(), e);
        // If normal logging fails, make sure it is logged by the web container
        e.printStackTrace();
    }
    StatusPrinter.printInCaseOfErrorsOrWarnings(context);
}
Also used : JoranConfigurator(ch.qos.logback.classic.joran.JoranConfigurator) File(java.io.File) LoggerContext(ch.qos.logback.classic.LoggerContext) SystemException(com.evolveum.midpoint.util.exception.SystemException) JoranException(ch.qos.logback.core.joran.spi.JoranException)
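
The same reload pattern, reduced to a standalone sketch that catches JoranException explicitly (a hypothetical helper, not midPoint's actual error handling, which is shown above), could look like this:

import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.joran.JoranConfigurator;
import ch.qos.logback.core.joran.spi.JoranException;
import ch.qos.logback.core.util.StatusPrinter;
import java.io.File;
import org.slf4j.LoggerFactory;

// Hypothetical helper: reset the running logback context and reload it from a config file.
final class LogbackReloader {

    static void reloadLogbackConfig(File configFile) {
        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
        context.reset();
        JoranConfigurator configurator = new JoranConfigurator();
        configurator.setContext(context);
        try {
            configurator.doConfigure(configFile);
        } catch (JoranException e) {
            // Configuration errors are also recorded in the context's status manager, printed below.
            e.printStackTrace();
        }
        StatusPrinter.printInCaseOfErrorsOrWarnings(context);
    }
}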

Example 95 with LoggerContext

Use of ch.qos.logback.classic.LoggerContext in project ha-bridge by bwssytems.

In class LoggingUtil, method getLoggers:

/**
	 * Retrieve all configured logback loggers. 
	 * 
	 * @param showAll If set return ALL loggers, not only the configured ones.
	 * @return List of Loggers
	 */
public static List<ch.qos.logback.classic.Logger> getLoggers(final boolean showAll) {
    final LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
    final List<ch.qos.logback.classic.Logger> loggers = new ArrayList<ch.qos.logback.classic.Logger>();
    for (ch.qos.logback.classic.Logger log : lc.getLoggerList()) {
        if (!showAll) {
            if (log.getLevel() != null || LoggingUtil.hasAppenders(log)) {
                loggers.add(log);
            }
        } else {
            loggers.add(log);
        }
    }
    return loggers;
}
Also used : Logger(ch.qos.logback.classic.Logger) ArrayList(java.util.ArrayList) Logger(ch.qos.logback.classic.Logger) LoggerContext(ch.qos.logback.classic.LoggerContext)
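
LoggingUtil.hasAppenders(log) is called above but not shown here. A plausible sketch, assuming it simply checks whether the logger has at least one attached appender, is:

import ch.qos.logback.classic.Logger;

// Hypothetical implementation: a logger counts as "configured" if it has an attached appender.
final class LoggingUtilSketch {

    static boolean hasAppenders(Logger logger) {
        return logger.iteratorForAppenders().hasNext();
    }
}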

Aggregations

LoggerContext (ch.qos.logback.classic.LoggerContext): 216 usages
Test (org.junit.Test): 134 usages
ILoggingEvent (ch.qos.logback.classic.spi.ILoggingEvent): 43 usages
Logger (ch.qos.logback.classic.Logger): 41 usages
PatternLayoutEncoder (ch.qos.logback.classic.encoder.PatternLayoutEncoder): 26 usages
FileAppender (ch.qos.logback.core.FileAppender): 22 usages
File (java.io.File): 21 usages
JoranConfigurator (ch.qos.logback.classic.joran.JoranConfigurator): 19 usages
RollingFileAppender (ch.qos.logback.core.rolling.RollingFileAppender): 16 usages
Appender (ch.qos.logback.core.Appender): 14 usages
ConsoleAppender (ch.qos.logback.core.ConsoleAppender): 14 usages
JoranException (ch.qos.logback.core.joran.spi.JoranException): 14 usages
NullLevelFilterFactory (io.dropwizard.logging.filter.NullLevelFilterFactory): 13 usages
AsyncLoggingEventAppenderFactory (io.dropwizard.logging.async.AsyncLoggingEventAppenderFactory): 12 usages
AsyncAppender (ch.qos.logback.classic.AsyncAppender): 11 usages
DropwizardLayoutFactory (io.dropwizard.logging.layout.DropwizardLayoutFactory): 11 usages
Logger (org.slf4j.Logger): 10 usages
IOException (java.io.IOException): 8 usages
TimeBasedRollingPolicy (ch.qos.logback.core.rolling.TimeBasedRollingPolicy): 7 usages
URL (java.net.URL): 7 usages