Use of org.graylog2.indexer.messages.Messages in project graylog2-server by Graylog2.
From class PipelineInterpreterTest, method testCreateMessage.
@Test
public void testCreateMessage() {
    final RuleService ruleService = mock(MongoDbRuleService.class);
    when(ruleService.loadAll()).thenReturn(Collections.singleton(RuleDao.create("abc", "title", "description",
            "rule \"creates message\"\n" +
            "when to_string($message.message) == \"original message\"\n" +
            "then\n" +
            " create_message(\"derived message\");\n" +
            "end",
            Tools.nowUTC(), null)));
    final PipelineService pipelineService = mock(MongoDbPipelineService.class);
    when(pipelineService.loadAll()).thenReturn(Collections.singleton(PipelineDao.create("p1", "title", "description",
            "pipeline \"pipeline\"\n" +
            "stage 0 match all\n" +
            " rule \"creates message\";\n" +
            "end\n",
            Tools.nowUTC(), null)));
    final Map<String, Function<?>> functions = ImmutableMap.of(
            CreateMessage.NAME, new CreateMessage(),
            StringConversion.NAME, new StringConversion());
    final PipelineInterpreter interpreter = createPipelineInterpreter(ruleService, pipelineService, functions);

    Message msg = messageInDefaultStream("original message", "test");
    final Messages processed = interpreter.process(msg);
    final Message[] messages = Iterables.toArray(processed, Message.class);

    assertEquals(2, messages.length);
}
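The test above only asserts the number of resulting messages. A hedged follow-up check, sketched below, would additionally verify that exactly one of the results carries the text set by create_message(...). It is not part of the original test; it assumes the org.graylog2.plugin.Message#getMessage() accessor and uses java.util.Arrays.

// Hypothetical extra assertion for testCreateMessage() (not from the original test):
final long derived = Arrays.stream(messages)
        .filter(m -> "derived message".equals(m.getMessage()))  // assumes Message#getMessage()
        .count();
assertEquals(1, derived);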
Use of org.graylog2.indexer.messages.Messages in project graylog2-server by Graylog2.
From class V20190304102700_MigrateMessageListStructureTest, method testMigratingViewStructure.
@Test
@MongoDBFixtures("V20190304102700_MigrateMessageListStructureTest.json")
public void testMigratingViewStructure() {
    final BasicDBObject dbQuery1 = new BasicDBObject();
    dbQuery1.put("_id", new ObjectId("58458e442f857c314491344e"));
    final MongoCollection<Document> collection = mongodb.mongoConnection().getMongoDatabase().getCollection("views");

    migration.upgrade();

    final FindIterable<Document> views = collection.find(dbQuery1);
    final Document view1 = views.first();
    @SuppressWarnings("unchecked")
    final List<Document> widgets1 = (List) view1.get("state", Document.class)
            .get("a2a804b7-27cf-4cac-8015-58d9a9640d33", Document.class)
            .get("widgets");
    assertThat(widgets1.size()).isEqualTo(2);
    assertThat(widgets1.stream().filter(widget -> widget.getString("type").equals("messages")).count()).isEqualTo(1);
    assertThat(widgets1.stream()
            .filter(widget -> widget.getString("type").equals("messages"))
            .allMatch(widget -> {
                final Document config = widget.get("config", Document.class);
                @SuppressWarnings("unchecked")
                final List<String> fields = (List) config.get("fields");
                final boolean startWithTimestamp = fields.get(0).contains("timestamp");
                final boolean showMessageRow = config.getBoolean("show_message_row");
                return startWithTimestamp && showMessageRow;
            })).isTrue();

    final BasicDBObject dbQuery2 = new BasicDBObject();
    dbQuery2.put("_id", new ObjectId("58458e442f857c314491344f"));
    final FindIterable<Document> views2 = collection.find(dbQuery2);
    final Document view2 = views2.first();
    final Document states = view2.get("state", Document.class);

    assertThat(states.values().size()).isEqualTo(13);
    assertThat(states.keySet()).containsExactly(
            "7c042319-530a-41b9-9dbb-9676fb1da1a4",
            "9e5144be-a445-4289-a4cc-0f55142524bc",
            "c13b2482-60e7-4b1e-98c9-0df8d6da8230",
            "5adc9297-dfc8-4fd9-b422-cbb097715a62",
            "ade8c853-503c-407f-b125-efbe2d368973",
            "cc2bf983-b398-4295-bf01-1c10ed1a97e1",
            "64feccae-9447-40ef-a401-79a7972078a2",
            "7c7e04c6-f9f0-495c-91cc-865f60687f8c",
            "eeaa8838-616f-40c0-88c0-1059ac64f37e",
            "91c6f8c9-024c-48ec-a869-90548fad218a",
            "955a71f2-673a-4e1c-a99f-ef97b1b4ae71",
            "343ff7b6-4554-49d4-bc0b-1339fdc5dac0",
            "7a84d053-e40a-48c1-a433-97521f7ce7ef");
    states.values().forEach(state -> {
        @SuppressWarnings("unchecked")
        final List<Document> widgets2 = (List) ((Document) state).get("widgets");
        assertThat(widgets2.stream().filter(widget -> widget.getString("type").equals("messages")).count()).isGreaterThan(0);
        widgets2.stream()
                .filter(widget -> widget.getString("type").equals("messages"))
                .forEach(widget -> {
                    final Document config = widget.get("config", Document.class);
                    @SuppressWarnings("unchecked")
                    final List<String> fields = (List) config.get("fields");
                    final boolean startWithTimestamp = fields.get(0).contains("timestamp");
                    final boolean showMessageRow = config.getBoolean("show_message_row");
                    assertThat(startWithTimestamp).isTrue();
                    assertThat(showMessageRow).isTrue();
                });
    });
}
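The two widget checks above duplicate the same filter and config lookups. A small private helper along the following lines could factor that out; the helper name is ours and does not appear in the original test class, but it relies only on the org.bson.Document API already shown.

// Hypothetical helper (not in the original test): true if every "messages" widget
// starts its field list with "timestamp" and has the message row enabled.
private static boolean allMessageWidgetsMigrated(List<Document> widgets) {
    return widgets.stream()
            .filter(widget -> widget.getString("type").equals("messages"))
            .allMatch(widget -> {
                final Document config = widget.get("config", Document.class);
                @SuppressWarnings("unchecked")
                final List<String> fields = (List) config.get("fields");
                return fields.get(0).contains("timestamp") && config.getBoolean("show_message_row");
            });
}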
Use of org.graylog2.indexer.messages.Messages in project graylog2-server by Graylog2.
From class NetFlowCodec, method decodeMessages.
@Nullable
@Override
public Collection<Message> decodeMessages(@Nonnull RawMessage rawMessage) {
    try {
        final ResolvableInetSocketAddress remoteAddress = rawMessage.getRemoteAddress();
        final InetSocketAddress sender = remoteAddress != null ? remoteAddress.getInetSocketAddress() : null;
        final byte[] payload = rawMessage.getPayload();
        if (payload.length < 3) {
            LOG.debug("NetFlow message (source: {}) doesn't even fit the NetFlow version (size: {} bytes)", sender, payload.length);
            return null;
        }
        final ByteBuf buffer = Unpooled.wrappedBuffer(payload);
        switch (buffer.readByte()) {
            case PASSTHROUGH_MARKER:
                final NetFlowV5Packet netFlowV5Packet = NetFlowV5Parser.parsePacket(buffer);
                return netFlowV5Packet.records().stream()
                        .map(record -> NetFlowFormatter.toMessage(netFlowV5Packet.header(), record, sender))
                        .collect(Collectors.toList());
            case ORDERED_V9_MARKER:
                // our "custom" netflow v9 that has all the templates in the same packet
                return decodeV9(sender, buffer);
            default:
                final List<RawMessage.SourceNode> sourceNodes = rawMessage.getSourceNodes();
                final RawMessage.SourceNode sourceNode = sourceNodes.isEmpty() ? null : sourceNodes.get(sourceNodes.size() - 1);
                final String inputId = sourceNode == null ? "<unknown>" : sourceNode.inputId;
                LOG.warn("Unsupported NetFlow packet on input {} (source: {})", inputId, sender);
                return null;
        }
    } catch (FlowException e) {
        LOG.error("Error parsing NetFlow packet <{}> received from <{}>", rawMessage.getId(), rawMessage.getRemoteAddress(), e);
        if (LOG.isDebugEnabled()) {
            LOG.debug("NetFlow packet hexdump:\n{}", ByteBufUtil.prettyHexDump(Unpooled.wrappedBuffer(rawMessage.getPayload())));
        }
        return null;
    } catch (InvalidProtocolBufferException e) {
        LOG.error("Invalid NetFlowV9 entry found, cannot parse the messages", ExceptionUtils.getRootCause(e));
        return null;
    }
}
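Because every failure path returns null rather than throwing, the caller-facing contract is easy to probe. The sketch below is hypothetical: it assumes the single-argument RawMessage(byte[]) constructor seen elsewhere in Graylog tests, and the 0x7f marker byte is chosen purely for illustration on the assumption that it matches neither PASSTHROUGH_MARKER nor ORDERED_V9_MARKER.

// Hypothetical caller sketch (codec already constructed elsewhere):
final byte[] tooShort = {0x00, 0x05};  // 2 bytes, below the 3-byte minimum -> null
assertNull(codec.decodeMessages(new RawMessage(tooShort)));

final byte[] unknown = {0x7f, 0x00, 0x00, 0x00};  // assumed to match no known marker -> null, plus a warning log
assertNull(codec.decodeMessages(new RawMessage(unknown)));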
Use of org.graylog2.indexer.messages.Messages in project graylog2-server by Graylog2.
From class PipelineInterpreter, method process.
/**
 * Evaluates all pipelines that apply to the given messages, based on the current stream routing
 * of the messages.
 *
 * The processing loops on each single message (passed in or created by pipelines) until the set
 * of streams does not change anymore. No cycle detection is performed.
 *
 * @param messages            the messages to process through the pipelines
 * @param interpreterListener a listener which gets called for each processing stage (e.g. to
 *                            trace execution)
 * @param state               the pipeline/stage/rule/stream connection state to use during
 *                            processing
 * @return the processed messages
 */
public Messages process(Messages messages, InterpreterListener interpreterListener, State state) {
    interpreterListener.startProcessing();
    // message id + stream id
    final Set<Tuple2<String, String>> processingBlacklist = Sets.newHashSet();
    final List<Message> toProcess = Lists.newArrayList(messages);
    final List<Message> fullyProcessed = Lists.newArrayListWithExpectedSize(toProcess.size());

    while (!toProcess.isEmpty()) {
        final MessageCollection currentSet = new MessageCollection(toProcess);
        // we'll add them back below
        toProcess.clear();

        for (Message message : currentSet) {
            final String msgId = message.getId();

            // this makes a copy of the stream ID set, which is mutated later in updateStreamBlacklist;
            // it serves as a worklist, to keep track of which <msg, stream> tuples need to be re-run again
            final Set<String> initialStreamIds = message.getStreams().stream().map(Stream::getId).collect(Collectors.toSet());

            final ImmutableSet<Pipeline> pipelinesToRun = selectPipelines(interpreterListener, processingBlacklist, message, initialStreamIds, state.getStreamPipelineConnections());

            toProcess.addAll(processForResolvedPipelines(message, msgId, pipelinesToRun, interpreterListener, state));

            // add each processed message-stream combination to the blacklist set and figure out if the processing
            // has added a stream to the message, in which case we need to cycle and determine whether to process
            // its pipeline connections, too
            boolean addedStreams = updateStreamBlacklist(processingBlacklist, message, initialStreamIds);
            potentiallyDropFilteredMessage(message);

            // iterate over all messages again until no more streams are being assigned
            if (!addedStreams || message.getFilterOut()) {
                log.debug("[{}] no new streams matches or dropped message, not running again", msgId);
                fullyProcessed.add(message);
            } else {
                // process again, we've added a stream
                log.debug("[{}] new streams assigned, running again for those streams", msgId);
                toProcess.add(message);
            }
        }
    }
    interpreterListener.finishProcessing();
    // return the fully processed messages
    return new MessageCollection(fullyProcessed);
}
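Stripped of the pipeline machinery, the loop above is a fixpoint worklist: a message is re-queued only while a pass over it keeps adding streams, and the <message id, stream id> blacklist keeps the same pipeline connections from running twice. Below is a minimal standalone sketch of that pattern, with our own names and plain string tags standing in for Graylog's messages and streams; like the original, it performs no cycle detection, so a pass that always adds a fresh tag would loop forever.

// Generic fixpoint worklist (hypothetical, java.util + java.util.function only):
// re-run an item only while one pass keeps adding tags to it.
static <T> List<T> runToFixpoint(List<T> input, Consumer<T> pass, Function<T, Set<String>> tagsOf) {
    final List<T> toProcess = new ArrayList<>(input);
    final List<T> done = new ArrayList<>();
    while (!toProcess.isEmpty()) {
        final List<T> current = new ArrayList<>(toProcess);
        toProcess.clear();  // changed items are re-added below
        for (T item : current) {
            final Set<String> before = new HashSet<>(tagsOf.apply(item));  // snapshot, like initialStreamIds
            pass.accept(item);  // may add tags, like pipelines routing a message into new streams
            final boolean addedTags = !before.containsAll(tagsOf.apply(item));
            (addedTags ? toProcess : done).add(item);  // re-run only if the tag set grew
        }
    }
    return done;
}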
Use of org.graylog2.indexer.messages.Messages in project graylog2-server by Graylog2.
From class LoggersResource, method messages.
@GET
@Timed
@ApiOperation(value = "Get recent internal log messages")
@ApiResponses(value = {
        @ApiResponse(code = 404, message = "Memory appender is disabled."),
        @ApiResponse(code = 500, message = "Memory appender is broken.")
})
@Path("/messages/recent")
@Produces(MediaType.APPLICATION_JSON)
@RequiresPermissions(RestPermissions.LOGGERSMESSAGES_READ)
public LogMessagesSummary messages(
        @ApiParam(name = "limit", value = "How many log messages should be returned", defaultValue = "500", allowableValues = "range[0, infinity]")
        @QueryParam("limit") @DefaultValue("500") @Min(0L) int limit,
        @ApiParam(name = "level", value = "Which log level (or higher) should the messages have", defaultValue = "ALL", allowableValues = "[OFF, FATAL, ERROR, WARN, INFO, DEBUG, TRACE, ALL]")
        @QueryParam("level") @DefaultValue("ALL") @NotEmpty String level) {
    final Appender appender = getAppender(MEMORY_APPENDER_NAME);
    if (appender == null) {
        throw new NotFoundException("Memory appender is disabled. Please refer to the example log4j.xml file.");
    }
    if (!(appender instanceof MemoryAppender)) {
        throw new InternalServerErrorException("Memory appender is not an instance of MemoryAppender. Please refer to the example log4j.xml file.");
    }

    final Level logLevel = Level.toLevel(level, Level.ALL);
    final MemoryAppender memoryAppender = (MemoryAppender) appender;
    final List<InternalLogMessage> messages = new ArrayList<>(limit);
    for (LogEvent event : memoryAppender.getLogMessages(limit)) {
        final Level eventLevel = event.getLevel();
        if (!eventLevel.isMoreSpecificThan(logLevel)) {
            continue;
        }

        final ThrowableProxy thrownProxy = event.getThrownProxy();
        final String throwable;
        if (thrownProxy == null) {
            throwable = null;
        } else {
            throwable = thrownProxy.getExtendedStackTraceAsString("");
        }

        final Marker marker = event.getMarker();
        messages.add(InternalLogMessage.create(
                event.getMessage().getFormattedMessage(),
                event.getLoggerName(),
                eventLevel.toString(),
                marker == null ? null : marker.toString(),
                new DateTime(event.getTimeMillis(), DateTimeZone.UTC),
                throwable,
                event.getThreadName(),
                event.getContextData().toMap()));
    }
    return LogMessagesSummary.create(messages);
}
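The level filter in the loop relies on Log4j2's level ordering: isMoreSpecificThan is an inclusive "at least as severe" comparison, so a threshold of WARN keeps WARN, ERROR and FATAL events and skips INFO and below. A tiny standalone illustration of that semantics, using only the org.apache.logging.log4j.Level API:

// Level.toLevel falls back to its second argument on unparseable input,
// matching the resource's default of ALL.
final Level threshold = Level.toLevel("WARN", Level.ALL);
System.out.println(Level.ERROR.isMoreSpecificThan(threshold));  // true  -> kept
System.out.println(Level.WARN.isMoreSpecificThan(threshold));   // true  -> kept (inclusive)
System.out.println(Level.INFO.isMoreSpecificThan(threshold));   // false -> skipped by the continue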