Use of loghub.ConnectionContext in the project LogHub by fbacchella.
From the class SnmpTrap, the method processPdu:
@Override
public void processPdu(CommandResponderEvent trap) {
    try {
        PDU pdu = trap.getPDU();
        Address localaddr = trap.getTransportMapping().getListenAddress();
        Address remoteaddr = trap.getPeerAddress();
        // Carry the local/remote socket pair in the event's context when both
        // endpoints are IP transports; otherwise fall back to the empty context.
        ConnectionContext ctx = ConnectionContext.EMPTY;
        if (localaddr instanceof TransportIpAddress && remoteaddr instanceof TransportIpAddress) {
            InetSocketAddress localinetaddr = getSA((TransportIpAddress) localaddr);
            InetSocketAddress remoteinetaddr = getSA((TransportIpAddress) remoteaddr);
            ctx = new IpConnectionContext(localinetaddr, remoteinetaddr, null);
        }
        Event event = emptyEvent(ctx);
        if (pdu instanceof PDUv1) {
            // SNMPv1 traps carry header fields absent from v2c/v3 PDUs.
            PDUv1 pduv1 = (PDUv1) pdu;
            String enterprise = (String) convertVar(pduv1.getEnterprise());
            event.put("enterprise", enterprise);
            event.put("agent_addr", pduv1.getAgentAddress().getInetAddress());
            if (pduv1.getGenericTrap() != PDUv1.ENTERPRISE_SPECIFIC) {
                // Standard trap: record the symbolic generic-trap name.
                event.put("generic_trap", GENERICTRAP.values()[pduv1.getGenericTrap()].toString());
            } else {
                // Enterprise-specific trap: resolve enterprise OID + specific code.
                String resolved = formatter.format(pduv1.getEnterprise(), new Integer32(pduv1.getSpecificTrap()), true);
                event.put("specific_trap", resolved);
            }
            // sysUpTime is in hundredths of a second; expose it as seconds.
            event.put("time_stamp", 1.0 * pduv1.getTimestamp() / 100.0);
        }
        // getVariableBindings() returns a Vector here; the cast of its
        // Enumeration is unavoidable but safe for this SNMP4J API.
        @SuppressWarnings("unchecked") Enumeration<VariableBinding> vbenum = (Enumeration<VariableBinding>) pdu.getVariableBindings().elements();
        for (VariableBinding i : Collections.list(vbenum)) {
            OID vbOID = i.getOid();
            Object value = convertVar(i.getVariable());
            smartPut(event, vbOID, value);
        }
        send(event);
    } catch (Exception e) {
        // Boundary catch: a malformed trap must never kill the responder
        // thread. Log with context instead of a bare (possibly null) message.
        logger.error("Failed to process SNMP trap from {}: {}", trap.getPeerAddress(), e.getMessage());
        logger.catching(e);
    } finally {
        // Tell SNMP4J the trap was handled so it is not dispatched further.
        trap.setProcessed(true);
    }
}
Use of loghub.ConnectionContext in the project LogHub by fbacchella.
From the class Kafka, the method run:
@Override
public void run() {
    consumer.subscribe(Collections.singletonList(topic));
    boolean broke = false;
    while (!isInterrupted()) {
        // NOTE(review): poll(long) is deprecated in newer Kafka clients in
        // favor of poll(Duration); kept as-is to avoid a new import here.
        ConsumerRecords<Long, byte[]> consumerRecords = consumer.poll(100);
        if (consumerRecords.count() == 0) {
            continue;
        }
        for (ConsumerRecord<Long, byte[]> record : consumerRecords) {
            // One event per record, tagged with the originating topic.
            ConnectionContext ctxt = new KafkaContext(record.topic());
            Event event = emptyEvent(ctxt);
            if (record.timestampType() == TimestampType.CREATE_TIME) {
                // Only producer-assigned timestamps are meaningful event times.
                event.setTimestamp(new Date(record.timestamp()));
            }
            Header[] headers = record.headers().toArray();
            if (headers.length > 0) {
                Map<String, byte[]> headersMap = new HashMap<>(headers.length);
                Arrays.stream(headers).forEach(i -> headersMap.put(i.key(), i.value()));
                event.put("headers", headersMap);
            }
            byte[] content = record.value();
            try {
                event.putAll(decoder.decode(ctxt, content, 0, content.length));
                send(event);
            } catch (DecodeException e) {
                // Skip the undecodable record but keep consuming; log with
                // context instead of a bare (possibly null) message.
                logger.error("Failed to decode Kafka record from topic {}: {}", record.topic(), e.getMessage());
                logger.catching(e);
            }
            if (isInterrupted()) {
                // The committed offset must be the NEXT offset to consume
                // (lastProcessedOffset + 1, per KafkaConsumer.commitSync);
                // committing record.offset() would replay this record on restart.
                consumer.commitSync(Collections.singletonMap(new TopicPartition(record.topic(), record.partition()), new OffsetAndMetadata(record.offset() + 1)));
                broke = true;
                break;
            }
        }
        if (!broke) {
            // Normal path: commit the whole batch asynchronously.
            consumer.commitAsync();
        } else {
            break;
        }
    }
    consumer.close();
}
Aggregations