Use of org.graylog2.plugin.inputs.annotations.Codec in the project graylog-plugin-integrations by Graylog2.
From the class KinesisService, method detectAndParseMessage:
/**
 * Detects the type of the supplied log message and fully parses it with the matching codec.
 *
 * @param logMessage        The raw log message text.
 * @param timestamp         The message timestamp.
 * @param kinesisStreamName The Kinesis stream name.
 * @param logGroupName      The CloudWatch log group name.
 * @param logStreamName     The CloudWatch log stream name.
 * @param compressed        Indicates if the payload is compressed and probably from CloudWatch.
 * @return A {@code KinesisHealthCheckResponse} with the fully parsed message and type.
 */
private KinesisHealthCheckResponse detectAndParseMessage(String logMessage, DateTime timestamp, String kinesisStreamName, String logGroupName, String logStreamName, boolean compressed) {
    LOG.debug("Attempting to detect the type of log message. message [{}] stream [{}] log group [{}].", logMessage, kinesisStreamName, logGroupName);

    // Sniff the message type from its content (and the compression hint).
    final AWSMessageType messageType = new AWSLogMessage(logMessage).detectLogMessageType(compressed);
    LOG.debug("The message is type [{}]", messageType);

    // Look up the codec registered for the detected type.
    final Codec.Factory<? extends Codec> factory = this.availableCodecs.get(messageType.getCodecName());
    if (factory == null) {
        throw new BadRequestException(String.format("A codec with name [%s] could not be found.", messageType.getCodecName()));
    }
    // TODO: Do we need to provide a valid configuration here?
    final Codec messageCodec = factory.create(Configuration.EMPTY_CONFIGURATION);

    // Codecs consume the entry as a JSON-serialized KinesisLogEntry payload.
    final KinesisLogEntry logEntry = KinesisLogEntry.create(kinesisStreamName, logGroupName, logStreamName, timestamp, logMessage);
    final byte[] encodedEntry;
    try {
        encodedEntry = objectMapper.writeValueAsBytes(logEntry);
    } catch (JsonProcessingException e) {
        throw new BadRequestException("Encoding the message to bytes failed.", e);
    }

    final Message parsedMessage = messageCodec.decode(new RawMessage(encodedEntry));
    if (parsedMessage == null) {
        throw new BadRequestException(String.format("Message decoding failed. More information might be available by enabling Debug logging. message [%s]", logMessage));
    }
    LOG.debug("Successfully parsed message type [{}] with codec [{}].", messageType, messageType.getCodecName());

    final String responseMessage = String.format("Success. The message is a %s message.", messageType.getLabel());
    return KinesisHealthCheckResponse.create(messageType, responseMessage, parsedMessage.getFields());
}
Use of org.graylog2.plugin.inputs.annotations.Codec in the project graylog-plugin-integrations by Graylog2.
From the class AWSCodec, method decode:
@Nullable
@Override
public Message decode(@Nonnull RawMessage rawMessage) {
    // Resolve the concrete codec for the message type named in this input's configuration.
    final AWSMessageType messageType = AWSMessageType.valueOf(configuration.getString(CK_AWS_MESSAGE_TYPE));
    final String codecName = messageType.getCodecName();
    final Codec.Factory<? extends Codec> factory = this.availableCodecs.get(codecName);
    if (factory == null) {
        LOG.error("A codec with name [{}] could not be found.", codecName);
        return null;
    }

    // Delegate the actual parsing to the resolved codec.
    final Codec delegate = factory.create(configuration);
    final Message decoded = delegate.decode(new RawMessage(rawMessage.getPayload()));
    if (decoded == null) {
        LOG.error("Failed to decode message for codec [{}].", delegate.getName());
        return null;
    }
    return decoded;
}
Use of org.graylog2.plugin.inputs.annotations.Codec in the project graylog-plugin-integrations by Graylog2.
From the class AWSCodecTest, method testKinesisRawCodec:
@Test
public void testKinesisRawCodec() throws JsonProcessingException {
    // Configure the AWSCodec to treat payloads as raw Kinesis messages.
    final Map<String, Object> config = new HashMap<>();
    config.put(AWSCodec.CK_AWS_MESSAGE_TYPE, AWSMessageType.KINESIS_RAW.toString());
    final AWSCodec codec = new AWSCodec(new Configuration(config), AWSTestingUtils.buildTestCodecs());

    // Build a Kinesis log entry and round-trip it through JSON into the codec.
    final DateTime now = DateTime.now(DateTimeZone.UTC);
    final KinesisLogEntry entry = KinesisLogEntry.create("a-stream", "log-group", "log-stream", now, "This a raw message");
    final Message decoded = codec.decode(new RawMessage(objectMapper.writeValueAsBytes(entry)));

    // The raw codec should preserve all Kinesis/CloudWatch identifiers and the message verbatim.
    Assert.assertEquals("log-group", decoded.getField(AbstractKinesisCodec.FIELD_LOG_GROUP));
    Assert.assertEquals("log-stream", decoded.getField(AbstractKinesisCodec.FIELD_LOG_STREAM));
    Assert.assertEquals("a-stream", decoded.getField(AbstractKinesisCodec.FIELD_KINESIS_STREAM));
    Assert.assertEquals(KinesisRawLogCodec.SOURCE, decoded.getField("source"));
    Assert.assertEquals("This a raw message", decoded.getField("message"));
    Assert.assertEquals(now, decoded.getTimestamp());
}
Use of org.graylog2.plugin.inputs.annotations.Codec in the project graylog-plugin-integrations by Graylog2.
From the class CloudWatchFlowLogCodecTest, method testFlowLogCodecValues:
/**
 * Verify that the correct values are parsed by the Flow Log codec.
 */
@Test
public void testFlowLogCodecValues() {
    // VPC Flow Log v2 record: version account interface src dst srcport dstport protocol packets bytes start end action status
    final String flowLogMessage = "2 423432432432 eni-3244234 172.1.1.2 172.1.1.2 80 2264 6 1 52 1559738144 1559738204 ACCEPT OK";
    final DateTime now = DateTime.now(DateTimeZone.UTC);
    final KinesisLogEntry entry = KinesisLogEntry.create("a-stream", "log-group", "log-stream", now, flowLogMessage);

    final Message decoded = codec.decodeLogData(entry);

    // Kinesis/CloudWatch identifiers pass through unchanged.
    Assert.assertEquals("log-group", decoded.getField(AbstractKinesisCodec.FIELD_LOG_GROUP));
    Assert.assertEquals("log-stream", decoded.getField(AbstractKinesisCodec.FIELD_LOG_STREAM));
    Assert.assertEquals("a-stream", decoded.getField(AbstractKinesisCodec.FIELD_KINESIS_STREAM));
    Assert.assertEquals(KinesisCloudWatchFlowLogCodec.SOURCE, decoded.getField("source"));
    Assert.assertEquals(now, decoded.getTimestamp());

    // Flow-record fields parsed out of the space-separated message.
    Assert.assertEquals("eni-3244234 ACCEPT TCP 172.1.1.2:80 -> 172.1.1.2:2264", decoded.getField("message"));
    Assert.assertEquals("423432432432", decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_ACCOUNT_ID));
    Assert.assertEquals("eni-3244234", decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_INTERFACE_ID));
    Assert.assertEquals("172.1.1.2", decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_SRC_ADDR));
    Assert.assertEquals("172.1.1.2", decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_DST_ADDR));
    Assert.assertEquals(80, decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_SRC_PORT));
    Assert.assertEquals(2264, decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_DST_PORT));
    Assert.assertEquals(6, decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_PROTOCOL_NUMBER));
    Assert.assertEquals("TCP", decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_PROTOCOL));
    Assert.assertEquals(1L, decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_PACKETS));
    Assert.assertEquals(52L, decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_BYTES));
    Assert.assertEquals(60, decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_CAPTURE_WINDOW_DURATION));
    Assert.assertEquals("ACCEPT", decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_ACTION));
    Assert.assertEquals("OK", decoded.getField(KinesisCloudWatchFlowLogCodec.FIELD_LOG_STATUS));
    Assert.assertEquals(true, decoded.getField(KinesisCloudWatchFlowLogCodec.SOURCE_GROUP_IDENTIFIER));
}
Use of org.graylog2.plugin.inputs.annotations.Codec in the project graylog-plugin-integrations by Graylog2.
From the class IpfixAggregatorTest, method ixFlowTest:
@SuppressWarnings("unchecked")
@Test
public void ixFlowTest() throws IOException {
    final IpfixAggregator aggregator = new IpfixAggregator();
    final IpfixCodec codec = new IpfixCodec(new Configuration(getIxiaConfigmap()), aggregator);
    final List<Message> decodedMessages = Lists.newArrayList();

    // followed by three data sets. two sets have subtemplateList data, the third has only empty lists for domain information
    try (InputStream stream = Resources.getResource("ixflow.pcap").openStream()) {
        Pcap.openStream(stream).loop(packet -> {
            if (packet.hasProtocol(Protocol.UDP)) {
                final UDPPacket udp = (UDPPacket) packet.getPacket(Protocol.UDP);
                final InetSocketAddress sender = new InetSocketAddress(udp.getParentPacket().getSourceIP(), udp.getSourcePort());

                // Copy the UDP payload and feed it to the aggregator; a complete IPFIX
                // message is only emitted once all of its chunks have arrived.
                final byte[] udpPayload = new byte[udp.getPayload().getReadableBytes()];
                udp.getPayload().getBytes(udpPayload);
                final CodecAggregator.Result result = aggregator.addChunk(Unpooled.wrappedBuffer(udpPayload), sender);

                final ByteBuf aggregated = result.getMessage();
                if (aggregated != null) {
                    final byte[] rawIpfix = new byte[aggregated.readableBytes()];
                    aggregated.getBytes(0, rawIpfix);
                    decodedMessages.addAll(Objects.requireNonNull(codec.decodeMessages(new RawMessage(rawIpfix))));
                }
            }
            return true;
        });
    } catch (IOException e) {
        fail("Cannot process PCAP stream");
    }

    assertThat(decodedMessages).hasSize(3);
    assertThat(decodedMessages.get(0).getFields()).doesNotContainKey("httpSession").containsEntry("dnsRecord_0_dnsIpv4Address", "1.2.0.2").containsEntry("dnsRecord_0_dnsIpv6Address", "0:0:0:0:0:0:0:0").containsEntry("dnsRecord_0_dnsName", "server-1020002.example.int.");
    assertThat(decodedMessages.get(1).getFields()).doesNotContainKey("httpSession").containsEntry("dnsRecord_0_dnsIpv4Address", "1.2.14.73").containsEntry("dnsRecord_0_dnsIpv6Address", "0:0:0:0:0:0:0:0").containsEntry("dnsRecord_0_dnsName", "server-1020e49.example.int.");
    assertThat(decodedMessages.get(2).getFields()).doesNotContainKey("httpSession").doesNotContainKey("dnsRecord");
}
Aggregations