Usage of loghub.Event in the LogHub project (by fbacchella):
class TestNsca, method test.
@Test
public void test() throws ConfigException, IOException {
    // Inline configuration wiring a single NSCA sender to the main pipeline,
    // with an explicit field mapping for level/service/message/host.
    String configuration = "pipeline[main] {} output $main | { loghub.senders.Nsca { password: \"password\", encryption: \"RIJNDAEL192\", nagiosServer: \"localhost\", largeMessageSupport: true, mapping: { \"level\": \"level\", \"service\": \"service\", \"message\": \"message\", \"host\": \"host\", } } }";
    Properties props = Configuration.parse(new StringReader(configuration));
    Nsca nscaSender = (Nsca) props.senders.stream().findAny().get();
    Assert.assertTrue(nscaSender.configure(props));
    // Populate an event carrying every field the mapping above expects.
    Event event = Event.emptyEvent(ConnectionContext.EMPTY);
    event.put("level", "CRITICAL");
    event.put("service", "aservice");
    event.put("message", "message");
    event.put("host", "host");
    nscaSender.send(event);
}
Usage of loghub.Event in the LogHub project (by fbacchella):
class Kafka, method run.
/**
 * Consumer loop: polls the configured Kafka topic, decodes each record into an
 * Event and forwards it, committing offsets as it goes. Exits cleanly when the
 * thread is interrupted, committing the position of the last processed record.
 */
@Override
public void run() {
    consumer.subscribe(Collections.singletonList(topic));
    boolean broke = false;
    while (!isInterrupted()) {
        ConsumerRecords<Long, byte[]> consumerRecords = consumer.poll(100);
        if (consumerRecords.count() == 0) {
            continue;
        }
        for (ConsumerRecord<Long, byte[]> record : consumerRecords) {
            ConnectionContext ctxt = new KafkaContext(record.topic());
            Event event = emptyEvent(ctxt);
            // Only trust the broker timestamp when it is the producer's creation time.
            if (record.timestampType() == TimestampType.CREATE_TIME) {
                event.setTimestamp(new Date(record.timestamp()));
            }
            Header[] headers = record.headers().toArray();
            if (headers.length > 0) {
                Map<String, byte[]> headersMap = new HashMap<>(headers.length);
                Arrays.stream(headers).forEach(i -> headersMap.put(i.key(), i.value()));
                event.put("headers", headersMap);
            }
            byte[] content = record.value();
            try {
                event.putAll(decoder.decode(ctxt, content, 0, content.length));
                send(event);
            } catch (DecodeException e) {
                logger.error(e.getMessage());
                logger.catching(e);
            }
            if (isInterrupted()) {
                // The committed offset is the NEXT offset to consume; committing
                // record.offset() itself would replay this record on restart.
                consumer.commitSync(Collections.singletonMap(new TopicPartition(record.topic(), record.partition()), new OffsetAndMetadata(record.offset() + 1)));
                broke = true;
                break;
            }
        }
        if (!broke) {
            consumer.commitAsync();
        } else {
            break;
        }
    }
    consumer.close();
}
Usage of loghub.Event in the LogHub project (by fbacchella):
class ElasticSearch, method putContent.
/**
 * Serializes a batch of events into an Elasticsearch bulk-API payload
 * (alternating action and document NDJSON lines).
 * <p>
 * Events missing the type field, failing JSON serialization, or triggering
 * infinite recursion are reported as failed and excluded from the payload.
 *
 * @param documents the batch of events to serialize
 * @return the UTF-8 encoded bulk body, or null when no event was serializable
 */
private byte[] putContent(Batch documents) {
    StringBuilder builder = new StringBuilder();
    Map<String, String> settings = new HashMap<>(2);
    // The action map aliases `settings`, which is rewritten for each event.
    Map<String, Object> action = Collections.singletonMap("index", settings);
    Map<String, Object> esjson = new HashMap<>();
    ObjectMapper jsonmapper = json.get();
    int validEvents = 0;
    for (Event e : documents) {
        try {
            if (!e.containsKey(type)) {
                processStatus(e, CompletableFuture.completedFuture(false));
                continue;
            }
            esjson.clear();
            esjson.putAll(e);
            esjson.put("@timestamp", ISO8601.get().format(e.getTimestamp()));
            esjson.put("__index", esIndexFormat.get().format(e.getTimestamp()));
            settings.put("_type", esjson.remove(type).toString());
            settings.put("_index", esjson.remove("__index").toString());
            try {
                // Serialize both lines before appending so a failure never
                // leaves a dangling action line in the payload.
                String actionLine = jsonmapper.writeValueAsString(action);
                String documentLine = jsonmapper.writeValueAsString(esjson);
                builder.append(actionLine).append("\n");
                builder.append(documentLine).append("\n");
                // Count and acknowledge the event only once it is actually in
                // the payload; the previous code reported success (and counted
                // the event) even when serialization had failed.
                validEvents++;
                processStatus(e, CompletableFuture.completedFuture(true));
            } catch (JsonProcessingException ex) {
                logger.error("Failed to serialize {}: {}", e, ex.getMessage());
                logger.catching(Level.DEBUG, ex);
                processStatus(e, CompletableFuture.completedFuture(false));
            }
        } catch (java.lang.StackOverflowError ex) {
            processStatus(e, CompletableFuture.completedFuture(false));
            logger.error("Failed to serialize {}, infinite recursion", e);
        }
    }
    if (validEvents == 0) {
        return null;
    } else {
        return builder.toString().getBytes(CharsetUtil.UTF_8);
    }
}
Usage of loghub.Event in the LogHub project (by fbacchella):
class Gelf, method encode.
/**
 * Encodes an event as a GELF 1.1 payload, optionally gzip-compressed or
 * terminated with a trailing null byte for stream (TCP) transport.
 */
@Override
public byte[] encode(Event event) {
    try {
        Map<String, Object> payload = new HashMap<>(event.size() + 5);
        payload.put("version", "1.1");
        payload.put("host", hostname);
        // remove() deliberately strips the message fields from the event so
        // they are not duplicated as "_"-prefixed additional fields below.
        if (event.containsKey(shortmessagefield)) {
            payload.put("short_message", event.remove(shortmessagefield));
        }
        if (fullmessagefield != null && event.containsKey(fullmessagefield)) {
            payload.put("full_message", event.remove(fullmessagefield));
        }
        // GELF expects seconds with fractional millis.
        payload.put("timestamp", event.getTimestamp().getTime() / 1000.0);
        event.entrySet().stream()
             .filter(i -> !"id".equals(i.getKey()))
             .filter(i -> fieldpredicate.test(i.getKey()))
             .forEach(i -> payload.put("_" + i.getKey(), i.getValue()));
        byte[] jsonBytes = json.get().writeValueAsBytes(payload);
        if (compressed) {
            try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
                 GZIPOutputStream gzip = new GZIPOutputStream(bos)) {
                gzip.write(jsonBytes);
                gzip.finish();
                return bos.toByteArray();
            }
        } else if (stream) {
            // Append a single trailing 0x00 — the GELF TCP message delimiter.
            return Arrays.copyOf(jsonBytes, jsonBytes.length + 1);
        } else {
            return jsonBytes;
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Usage of loghub.Event in the LogHub project (by fbacchella):
class Grok, method processMessage.
/**
 * Runs the configured grok pattern against the string value of {@code field}
 * and stores the captured groups in the event.
 *
 * @param event the event to read from and write captures into
 * @param field the field whose value is matched
 * @param destination unused; capture names decide the destination fields
 * @return true when the pattern matched and captures were applied
 */
@Override
public boolean processMessage(Event event, String field, String destination) {
    if (!event.containsKey(field)) {
        return false;
    }
    Object rawValue = event.get(field);
    if (rawValue == null) {
        // A present-but-null field cannot be matched; the previous code
        // threw a NullPointerException on toString() here.
        return false;
    }
    Match gm = grok.match(rawValue.toString());
    gm.captures();
    if (gm.isNull()) {
        return false;
    }
    // Results from grok need to be cleaned before insertion.
    for (Map.Entry<String, Object> e : gm.toMap().entrySet()) {
        String key = e.getKey();
        // "." is a special name: the value goes back into the original field.
        String destinationField = ".".equals(key) ? field : key;
        // Needed until https://github.com/thekrakken/java-grok/issues/61 is
        // fixed: all-uppercase keys are raw pattern names, not captures.
        if (key.equals(key.toUpperCase()) && !".".equals(key)) {
            continue;
        }
        Object value = e.getValue();
        if (value == null) {
            continue;
        }
        if (value instanceof List) {
            // Flatten: drop nulls, stringify, and unwrap single-element lists.
            List<String> cleaned = new ArrayList<>();
            for (Object item : (List<?>) value) {
                if (item != null) {
                    cleaned.add(item.toString());
                }
            }
            if (cleaned.isEmpty()) {
                continue;
            }
            if (cleaned.size() == 1) {
                event.put(destinationField, cleaned.get(0));
            } else {
                event.put(destinationField, cleaned);
            }
        } else {
            event.put(destinationField, value);
        }
    }
    return true;
}
Aggregations