
Example 36 with ILoggingEvent

Use of ch.qos.logback.classic.spi.ILoggingEvent in project cdap by caskdata.

From class KafkaLogProcessorPipelineTest, method publishLog:

private void publishLog(String topic, Iterable<ILoggingEvent> events, LoggingContext context) {
    KafkaPublisher.Preparer preparer = KAFKA_TESTER.getKafkaClient()
        .getPublisher(KafkaPublisher.Ack.LEADER_RECEIVED, Compression.NONE)
        .prepare(topic);
    LoggingEventSerializer serializer = new LoggingEventSerializer();
    // Wrap each event in a LogMessage so the LoggingContext travels with it,
    // then publish the serialized bytes keyed by the context's log partition
    for (ILoggingEvent event : events) {
        preparer.add(ByteBuffer.wrap(serializer.toBytes(new LogMessage(event, context))), context.getLogPartition());
    }
    preparer.send();
}
Also used: LogMessage (co.cask.cdap.logging.appender.LogMessage), LoggingEventSerializer (co.cask.cdap.logging.serialize.LoggingEventSerializer), ILoggingEvent (ch.qos.logback.classic.spi.ILoggingEvent), KafkaPublisher (org.apache.twill.kafka.client.KafkaPublisher)

Example 37 with ILoggingEvent

Use of ch.qos.logback.classic.spi.ILoggingEvent in project cdap by caskdata.

From class DistributedLogFrameworkTest, method createLoggingEvent:

/**
 * Creates an {@link ILoggingEvent}.
 */
private ILoggingEvent createLoggingEvent(String loggerName, Level level, String message, long timestamp) {
    LoggingEvent event = new LoggingEvent();
    event.setLevel(level);
    event.setLoggerName(loggerName);
    event.setMessage(message);
    event.setTimeStamp(timestamp);
    return event;
}
Also used: ILoggingEvent (ch.qos.logback.classic.spi.ILoggingEvent), LoggingEvent (ch.qos.logback.classic.spi.LoggingEvent)

Example 38 with ILoggingEvent

Use of ch.qos.logback.classic.spi.ILoggingEvent in project cdap by caskdata.

From class KafkaOffsetResolverTest, method testOutOfOrderEvents:

@Test
public void testOutOfOrderEvents() throws Exception {
    String topic = "testOutOfOrderEvents";
    KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, EVENT_DELAY_MILLIS, 1048576, 200L);
    KAFKA_TESTER.createTopic(topic, 1);
    // Publish log messages to Kafka and wait for all messages to be published
    long baseTime = System.currentTimeMillis() - 2 * EVENT_DELAY_MILLIS;
    List<ILoggingEvent> outOfOrderEvents = ImmutableList.of(
        createLoggingEvent("test.logger", Level.INFO, "0", baseTime - 20 * 1000 - EVENT_DELAY_MILLIS),
        createLoggingEvent("test.logger", Level.INFO, "0", baseTime - 20 * 1000 - EVENT_DELAY_MILLIS),
        createLoggingEvent("test.logger", Level.INFO, "1", baseTime - 7 * 1000 - EVENT_DELAY_MILLIS),
        createLoggingEvent("test.logger", Level.INFO, "2", baseTime - 9 * 100),
        createLoggingEvent("test.logger", Level.INFO, "3", baseTime - 500),
        createLoggingEvent("test.logger", Level.INFO, "1", baseTime - 9 * 1000),
        createLoggingEvent("test.logger", Level.INFO, "1", baseTime - 9 * 1000 + EVENT_DELAY_MILLIS / 2),
        createLoggingEvent("test.logger", Level.INFO, "1", baseTime - 9 * 1000),
        createLoggingEvent("test.logger", Level.INFO, "1", baseTime - 9 * 1000 - EVENT_DELAY_MILLIS / 2),
        createLoggingEvent("test.logger", Level.INFO, "1", baseTime - 10 * 1000),
        createLoggingEvent("test.logger", Level.INFO, "1", baseTime - 600),
        createLoggingEvent("test.logger", Level.INFO, "5", baseTime - 20 * 1000),
        createLoggingEvent("test.logger", Level.INFO, "5", baseTime - 20 * 1000 + EVENT_DELAY_MILLIS / 2),
        createLoggingEvent("test.logger", Level.INFO, "6", baseTime - 600),
        createLoggingEvent("test.logger", Level.INFO, "6", baseTime - 10 * 1000),
        createLoggingEvent("test.logger", Level.INFO, "7", baseTime - 2 * 1000 + EVENT_DELAY_MILLIS),
        createLoggingEvent("test.logger", Level.INFO, "8", baseTime - 7 * 1000 + EVENT_DELAY_MILLIS),
        createLoggingEvent("test.logger", Level.INFO, "4", baseTime - 100 + EVENT_DELAY_MILLIS));
    publishLog(topic, outOfOrderEvents);
    waitForAllLogsPublished(topic, outOfOrderEvents.size());
    KafkaOffsetResolver offsetResolver = new KafkaOffsetResolver(KAFKA_TESTER.getBrokerService(), config);
    // Resolve the offset for each event's own timestamp and verify that it matches the expected offset
    for (ILoggingEvent event : outOfOrderEvents) {
        assertOffsetResolverResult(offsetResolver, outOfOrderEvents, event.getTimeStamp(), baseTime);
    }
    // Try to resolve the offset with a target timestamp earlier than all event timestamps in Kafka
    assertOffsetResolverResult(offsetResolver, outOfOrderEvents, baseTime - 10 * EVENT_DELAY_MILLIS, baseTime);
    // Try to resolve the offset with a target timestamp later than all event timestamps in Kafka
    assertOffsetResolverResult(offsetResolver, outOfOrderEvents, baseTime + 10 * EVENT_DELAY_MILLIS, baseTime);
    // Resolve offsets for random target timestamps around each event time and assert that the found offset matches the expected offset
    for (int i = 0; i < 10; i++) {
        for (ILoggingEvent event : outOfOrderEvents) {
            assertOffsetResolverResult(offsetResolver, outOfOrderEvents, event.getTimeStamp() + RANDOM.nextInt() % EVENT_DELAY_MILLIS, baseTime);
        }
    }
}
Also used: ILoggingEvent (ch.qos.logback.classic.spi.ILoggingEvent), Checkpoint (co.cask.cdap.logging.meta.Checkpoint), Test (org.junit.Test)

Example 39 with ILoggingEvent

Use of ch.qos.logback.classic.spi.ILoggingEvent in project cdap by caskdata.

From class KafkaOffsetResolverTest, method createLoggingEvent:

/**
 * Creates a new {@link ILoggingEvent} with the given information.
 */
private ILoggingEvent createLoggingEvent(String loggerName, Level level, String message, long timestamp) {
    LoggingEvent event = new LoggingEvent();
    event.setLevel(level);
    event.setLoggerName(loggerName);
    event.setMessage(message);
    event.setTimeStamp(timestamp);
    return event;
}
Also used: ILoggingEvent (ch.qos.logback.classic.spi.ILoggingEvent), LoggingEvent (ch.qos.logback.classic.spi.LoggingEvent)
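
The same construction pattern extends to events that need an exception attached. A minimal sketch, assuming the test also wants a stack trace carried with the event; the method name and the ThrowableProxy wiring are illustrative additions, not part of KafkaOffsetResolverTest:

private ILoggingEvent createLoggingEventWithThrowable(String loggerName, Level level, String message, long timestamp, Throwable cause) {
    LoggingEvent event = new LoggingEvent();
    event.setLevel(level);
    event.setLoggerName(loggerName);
    event.setMessage(message);
    event.setTimeStamp(timestamp);
    // ch.qos.logback.classic.spi.ThrowableProxy captures the stack trace so it stays with the event
    event.setThrowableProxy(new ThrowableProxy(cause));
    return event;
}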

Example 40 with ILoggingEvent

Use of ch.qos.logback.classic.spi.ILoggingEvent in project cdap by caskdata.

From class KafkaOffsetResolverTest, method publishLog:

private void publishLog(String topic, Iterable<ILoggingEvent> events, LoggingContext context) {
    KafkaPublisher.Preparer preparer = KAFKA_TESTER.getKafkaClient()
        .getPublisher(KafkaPublisher.Ack.LEADER_RECEIVED, Compression.NONE)
        .prepare(topic);
    LoggingEventSerializer serializer = new LoggingEventSerializer();
    // Unlike the variant in Example 36, the event is serialized directly (no LogMessage wrapper);
    // the LoggingContext is only used to pick the log partition
    for (ILoggingEvent event : events) {
        preparer.add(ByteBuffer.wrap(serializer.toBytes(event)), context.getLogPartition());
    }
    preparer.send();
}
Also used: LoggingEventSerializer (co.cask.cdap.logging.serialize.LoggingEventSerializer), ILoggingEvent (ch.qos.logback.classic.spi.ILoggingEvent), KafkaPublisher (org.apache.twill.kafka.client.KafkaPublisher)

Aggregations

ILoggingEvent (ch.qos.logback.classic.spi.ILoggingEvent): 300 usages
Test (org.junit.Test): 111 usages
Logger (ch.qos.logback.classic.Logger): 90 usages
LoggerContext (ch.qos.logback.classic.LoggerContext): 85 usages
Appender (ch.qos.logback.core.Appender): 48 usages
Test (org.junit.jupiter.api.Test): 47 usages
PatternLayoutEncoder (ch.qos.logback.classic.encoder.PatternLayoutEncoder): 44 usages
FileAppender (ch.qos.logback.core.FileAppender): 40 usages
ConsoleAppender (ch.qos.logback.core.ConsoleAppender): 35 usages
RollingFileAppender (ch.qos.logback.core.rolling.RollingFileAppender): 30 usages
File (java.io.File): 26 usages
AsyncLoggingEventAppenderFactory (io.dropwizard.logging.async.AsyncLoggingEventAppenderFactory): 22 usages
DropwizardLayoutFactory (io.dropwizard.logging.layout.DropwizardLayoutFactory): 21 usages
Logger (org.slf4j.Logger): 20 usages
TimeBasedRollingPolicy (ch.qos.logback.core.rolling.TimeBasedRollingPolicy): 17 usages
AsyncAppender (ch.qos.logback.classic.AsyncAppender): 16 usages
ArrayList (java.util.ArrayList): 16 usages
ListAppender (ch.qos.logback.core.read.ListAppender): 15 usages
OnMarkerEvaluator (ch.qos.logback.classic.boolex.OnMarkerEvaluator): 13 usages
EvaluatorFilter (ch.qos.logback.core.filter.EvaluatorFilter): 13 usages
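
The aggregation shows ILoggingEvent most often paired in tests with Logger, LoggerContext, and in-memory appenders such as ListAppender. A rough, self-contained sketch of that common pattern (illustrative only, not taken from any of the projects above):

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.read.ListAppender;

public class CaptureLogsExample {
    public static void main(String[] args) {
        // Stand-alone LoggerContext so the example does not depend on any logback.xml
        LoggerContext context = new LoggerContext();
        Logger logger = context.getLogger("test.logger");
        logger.setLevel(Level.INFO);

        // ListAppender keeps every ILoggingEvent in memory for later assertions
        ListAppender<ILoggingEvent> appender = new ListAppender<>();
        appender.setContext(context);
        appender.start();
        logger.addAppender(appender);

        logger.info("hello");
        logger.debug("filtered out");  // below the INFO threshold, not captured

        for (ILoggingEvent event : appender.list) {
            System.out.println(event.getLevel() + " " + event.getFormattedMessage());
        }
    }
}

Because ListAppender exposes captured events as a plain list, tests can assert directly on levels, logger names, and formatted messages without parsing any console or file output.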