Search in sources :

Example 11 with Journal

use of org.graylog2.shared.journal.Journal in project graylog2-server by Graylog2.

In the class MessageFilterChainProcessorTest, the method testMessagesCanBeDropped:

@Test
public void testMessagesCanBeDropped() {
    // A chain whose last filter removes the message must produce an empty result.
    final MessageFilter passthroughFilter = new DummyFilter(10);
    final MessageFilter removingFilter = new RemovingMessageFilter();
    final MessageFilterChainProcessor processor = new MessageFilterChainProcessor(
            new MetricRegistry(),
            ImmutableSet.of(passthroughFilter, removingFilter),
            journal,
            serverStatus);

    final DateTime timestamp = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC);
    final Message message = new Message("message", "source", timestamp);

    final Messages result = processor.process(message);

    assertThat(result).isEmpty();
}
Also used : Messages(org.graylog2.plugin.Messages) Message(org.graylog2.plugin.Message) MetricRegistry(com.codahale.metrics.MetricRegistry) MessageFilter(org.graylog2.plugin.filters.MessageFilter) DateTime(org.joda.time.DateTime) Test(org.junit.Test)

Example 12 with Journal

use of org.graylog2.shared.journal.Journal in project graylog2-server by Graylog2.

In the class JournalDecode, the method runCommand:

@Override
protected void runCommand() {
    // Parse the offset range argument. Accepted forms: "N" (single offset),
    // "N..M" (closed range), "N.." (at least N), "..M" (at most M).
    Range<Long> range;
    try {
        final List<String> offsets = Splitter.on("..").limit(2).splitToList(rangeArg);
        if (offsets.size() == 1) {
            range = Range.singleton(Long.valueOf(offsets.get(0)));
        } else if (offsets.size() == 2) {
            final String first = offsets.get(0);
            final String second = offsets.get(1);
            if (first.isEmpty()) {
                range = Range.atMost(Long.valueOf(second));
            } else if (second.isEmpty()) {
                range = Range.atLeast(Long.valueOf(first));
            } else {
                range = Range.closed(Long.valueOf(first), Long.valueOf(second));
            }
        } else {
            throw new RuntimeException();
        }
    } catch (Exception e) {
        System.err.println("Malformed offset range: " + rangeArg);
        return;
    }
    // Guava throws IllegalStateException from lowerEndpoint()/upperEndpoint()
    // on an unbounded side, which would escape the parse try/catch above and
    // crash the command. Reject half-open ranges with a clear message instead.
    if (!range.hasLowerBound() || !range.hasUpperBound()) {
        System.err.println("Offset range must be bounded on both ends: " + rangeArg);
        return;
    }
    final Map<String, Codec.Factory<? extends Codec>> codecFactory = injector.getInstance(Key.get(new TypeLiteral<Map<String, Codec.Factory<? extends Codec>>>() {
    }));
    final long readOffset = range.lowerEndpoint();
    final long count = range.upperEndpoint() - range.lowerEndpoint() + 1;
    final List<Journal.JournalReadEntry> entries = journal.read(readOffset, count);
    for (final Journal.JournalReadEntry entry : entries) {
        final RawMessage raw = RawMessage.decode(entry.getPayload(), entry.getOffset());
        if (raw == null) {
            // MessageFormatter.format returns a FormattingTuple; getMessage()
            // yields the formatted string (printing the tuple directly would
            // emit its default Object toString).
            System.err.println(MessageFormatter.format("Journal entry at offset {} failed to decode", entry.getOffset()).getMessage());
            continue;
        }
        // A codec name recorded in the journal may no longer have a registered
        // factory (e.g. plugin removed); guard against the null map lookup.
        final Codec.Factory<? extends Codec> factory = codecFactory.get(raw.getCodecName());
        if (factory == null) {
            System.err.println(MessageFormatter.arrayFormat("Unknown codec {} for raw message id {} at offset {}", new Object[] { raw.getCodecName(), raw.getId(), entry.getOffset() }).getMessage());
            continue;
        }
        final Codec codec = factory.create(raw.getCodecConfig());
        final Message message = codec.decode(raw);
        if (message == null) {
            System.err.println(MessageFormatter.arrayFormat("Could not use codec {} to decode raw message id {} at offset {}", new Object[] { raw.getCodecName(), raw.getId(), entry.getOffset() }).getMessage());
        } else {
            message.setJournalOffset(raw.getJournalOffset());
        }
        final ResolvableInetSocketAddress remoteAddress = raw.getRemoteAddress();
        final String remote = remoteAddress == null ? "unknown address" : remoteAddress.getInetSocketAddress().toString();
        // StringBuilder: this buffer is method-local, so the synchronized
        // StringBuffer is unnecessary.
        final StringBuilder sb = new StringBuilder();
        sb.append("Message ").append(raw.getId()).append('\n')
                .append(" at ").append(raw.getTimestamp()).append('\n')
                .append(" in format ").append(raw.getCodecName()).append('\n')
                .append(" at offset ").append(raw.getJournalOffset()).append('\n')
                .append(" received from remote address ").append(remote).append('\n')
                .append(" (source field: ").append(message == null ? "unparsed" : message.getSource()).append(')').append('\n');
        if (message != null) {
            sb.append(" contains ").append(message.getFieldNames().size()).append(" fields.");
        } else {
            sb.append("The message could not be parsed by the given codec.");
        }
        System.out.println(sb);
    }
}
Also used : RawMessage(org.graylog2.plugin.journal.RawMessage) Message(org.graylog2.plugin.Message) Journal(org.graylog2.shared.journal.Journal) Codec(org.graylog2.plugin.inputs.codecs.Codec) ResolvableInetSocketAddress(org.graylog2.plugin.ResolvableInetSocketAddress) TypeLiteral(com.google.inject.TypeLiteral) RawMessage(org.graylog2.plugin.journal.RawMessage)

Example 13 with Journal

use of org.graylog2.shared.journal.Journal in project graylog2-server by Graylog2.

In the class JournalResource, the method show:

@GET
@Timed
@ApiOperation(value = "Get current state of the journal on this node.")
@RequiresPermissions(RestPermissions.JOURNAL_READ)
public JournalSummaryResponse show() {
    // The journal can be disabled via configuration; report that directly.
    if (!journalEnabled) {
        return JournalSummaryResponse.createDisabled();
    }
    if (journal instanceof KafkaJournal) {
        final KafkaJournal kafkaJournal = (KafkaJournal) journal;
        final ThrottleState throttleState = kafkaJournal.getThrottleState();
        // Find the creation time of the oldest on-disk segment.
        long oldestSegment = Long.MAX_VALUE;
        for (final LogSegment segment : kafkaJournal.getSegments()) {
            oldestSegment = Math.min(oldestSegment, segment.created());
        }
        // With no segments at all, oldestSegment stays Long.MAX_VALUE and
        // new DateTime(Long.MAX_VALUE, UTC) throws (outside Joda's instant
        // range); fall back to "now" in that case.
        if (oldestSegment == Long.MAX_VALUE) {
            oldestSegment = System.currentTimeMillis();
        }
        return JournalSummaryResponse.createEnabled(throttleState.appendEventsPerSec, throttleState.readEventsPerSec, throttleState.uncommittedJournalEntries, Size.bytes(throttleState.journalSize), Size.bytes(throttleState.journalSizeLimit), kafkaJournal.numberOfSegments(), new DateTime(oldestSegment, DateTimeZone.UTC), kafkaJournalConfiguration);
    }
    // Any other Journal implementation is opaque to this endpoint.
    log.warn("Unknown Journal implementation {} in use, cannot get information about it. Pretending journal is disabled.", journal.getClass());
    return JournalSummaryResponse.createDisabled();
}
Also used : LogSegment(kafka.log.LogSegment) ThrottleState(org.graylog2.plugin.ThrottleState) KafkaJournal(org.graylog2.shared.journal.KafkaJournal) DateTime(org.joda.time.DateTime) RequiresPermissions(org.apache.shiro.authz.annotation.RequiresPermissions) Timed(com.codahale.metrics.annotation.Timed) GET(javax.ws.rs.GET) ApiOperation(io.swagger.annotations.ApiOperation)

Example 14 with Journal

use of org.graylog2.shared.journal.Journal in project graylog2-server by Graylog2.

In the class KafkaJournalTest, the method segmentAgeCleanup:

@Test
public void segmentAgeCleanup() throws Exception {
    // Pin Joda's clock to a controllable provider so segment ages are deterministic.
    final InstantMillisProvider clock = new InstantMillisProvider(DateTime.now(DateTimeZone.UTC));
    DateTimeUtils.setCurrentMillisProvider(clock);
    try {
        final Size segmentSize = Size.kilobytes(1L);
        final KafkaJournal journal = new KafkaJournal(journalDirectory, scheduler, segmentSize, Duration.standardHours(1), Size.kilobytes(10L), Duration.standardMinutes(1), 1_000_000, Duration.standardMinutes(1), 100, new MetricRegistry(), serverStatus);
        final File messageJournalDir = new File(journalDirectory, "messagejournal-0");
        assertTrue(messageJournalDir.exists());
        // we need to fix up the last modified times of the actual files.
        long[] lastModifiedTs = new long[2];
        // create two chunks, 30 seconds apart
        createBulkChunks(journal, segmentSize, 1);
        journal.flushDirtyLogs();
        lastModifiedTs[0] = clock.getMillis();
        clock.tick(Period.seconds(30));
        createBulkChunks(journal, segmentSize, 1);
        journal.flushDirtyLogs();
        lastModifiedTs[1] = clock.getMillis();
        int i = 0;
        for (final LogSegment segment : journal.getSegments()) {
            assertTrue(i < 2);
            segment.lastModified_$eq(lastModifiedTs[i]);
            i++;
        }
        // JUnit's assertEquals contract is (message, expected, actual); the
        // expected value goes second so failure messages read correctly.
        int cleanedLogs = journal.cleanupLogs();
        assertEquals("no segments should've been cleaned", 0, cleanedLogs);
        assertEquals("two segments should remain", 2, countSegmentsInDir(messageJournalDir));
        // move clock beyond the retention period and clean again
        clock.tick(Period.seconds(120));
        cleanedLogs = journal.cleanupLogs();
        assertEquals("two segments should've been cleaned (only one will actually be removed...)", 2, cleanedLogs);
        assertEquals("one segment should remain", 1, countSegmentsInDir(messageJournalDir));
    } finally {
        // Always restore the real system clock, even if an assertion failed.
        DateTimeUtils.setCurrentMillisSystem();
    }
}
Also used : LogSegment(kafka.log.LogSegment) Size(com.github.joschi.jadconfig.util.Size) InstantMillisProvider(org.graylog2.plugin.InstantMillisProvider) MetricRegistry(com.codahale.metrics.MetricRegistry) File(java.io.File) Test(org.junit.Test)

Aggregations

MetricRegistry (com.codahale.metrics.MetricRegistry)5 Test (org.junit.Test)5 Message (org.graylog2.plugin.Message)4 MessageFilter (org.graylog2.plugin.filters.MessageFilter)4 DateTime (org.joda.time.DateTime)3 Timer (com.codahale.metrics.Timer)2 Timed (com.codahale.metrics.annotation.Timed)2 ApiOperation (io.swagger.annotations.ApiOperation)2 IOException (java.io.IOException)2 ArrayList (java.util.ArrayList)2 GET (javax.ws.rs.GET)2 LogSegment (kafka.log.LogSegment)2 RequiresPermissions (org.apache.shiro.authz.annotation.RequiresPermissions)2 Notification (org.graylog2.notifications.Notification)2 Messages (org.graylog2.plugin.Messages)2 ThrottleState (org.graylog2.plugin.ThrottleState)2 RawMessage (org.graylog2.plugin.journal.RawMessage)2 HdrTimer (org.graylog2.shared.metrics.HdrTimer)2 JsonNode (com.fasterxml.jackson.databind.JsonNode)1 Size (com.github.joschi.jadconfig.util.Size)1