Use of com.fluenda.parcefone.event.CEFHandlingException in project nifi by apache.
The ParseCEF class, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
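    // Create a parser that uses the processor's validator instance to validate parsed events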
    final CEFParser parser = new CEFParser(validator);
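    // Read the entire FlowFile content into a byte array sized to the FlowFile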
    final byte[] buffer = new byte[(int) flowFile.getSize()];
    session.read(flowFile, new InputStreamCallback() {
        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, buffer);
        }
    });
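    // Parse the raw bytes into a CommonEvent using the configured date/time locale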
    CommonEvent event;
    try {
        // parcefoneLocale defaults to en_US, so this should not fail, but we force failure in case the custom
        // validator failed to identify an invalid Locale
        final Locale parcefoneLocale = Locale.forLanguageTag(context.getProperty(DATETIME_REPRESENTATION).getValue());
        event = parser.parse(buffer, true, parcefoneLocale);
    } catch (Exception e) {
        // This should never trigger, but it is kept here as a fencing mechanism against possible ParCEFone bugs.
        getLogger().error("Parser returned unexpected Exception {} while processing {}; routing to failure", new Object[] { e, flowFile });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    // The parser returns null when the message cannot be parsed, so test for that
    if (event == null) {
        getLogger().error("Failed to parse {} as a CEF message: it does not conform to the CEF standard; routing to failure", new Object[] { flowFile });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    try {
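        // FIELDS_DESTINATION selects whether parsed fields become FlowFile attributes
        // or replace the FlowFile content as JSON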
        final String destination = context.getProperty(FIELDS_DESTINATION).getValue();
        switch (destination) {
            case DESTINATION_ATTRIBUTES:
                final Map<String, String> attributes = new HashMap<>();
                // Process KVs of the Header field
                for (Map.Entry<String, Object> entry : event.getHeader().entrySet()) {
                    attributes.put("cef.header." + entry.getKey(), prettyResult(entry.getValue(), tzId));
                }
                // Process KVs composing the Extension field
                for (Map.Entry<String, Object> entry : event.getExtension(true).entrySet()) {
                    attributes.put("cef.extension." + entry.getKey(), prettyResult(entry.getValue(), tzId));
                }
                // Apply all collected attributes to the FlowFile in a single update
                flowFile = session.putAllAttributes(flowFile, attributes);
                break;
            case DESTINATION_CONTENT:
                final ObjectNode results = mapper.createObjectNode();
                // Add two JSON objects containing one CEF field each
                results.set("header", mapper.valueToTree(event.getHeader()));
                results.set("extension", mapper.valueToTree(event.getExtension(true)));
                // If requested, append the original raw message to the resulting JSON
                if (context.getProperty(APPEND_RAW_MESSAGE_TO_JSON).asBoolean()) {
                    results.set("_raw", mapper.valueToTree(new String(buffer)));
                }
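                // Replace the FlowFile content with the serialized JSON document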
                flowFile = session.write(flowFile, new OutputStreamCallback() {
                    @Override
                    public void process(OutputStream out) throws IOException {
                        try (OutputStream outputStream = new BufferedOutputStream(out)) {
                            outputStream.write(mapper.writeValueAsBytes(results));
                        }
                    }
                });
                // Adjust the FlowFile mime.type attribute
                flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/json");
                // Update the provenance for good measure
                session.getProvenanceReporter().modifyContent(flowFile, "Replaced content with parsed CEF fields and values");
                break;
        }
        // Whatever the destination, the FlowFile is ready to transfer to success and commit
        session.transfer(flowFile, REL_SUCCESS);
        session.commit();
    } catch (CEFHandlingException e) {
        // The FlowFile failed parsing and validation; route to failure and commit
        getLogger().error("Failed to parse {} as a CEF message due to {}; routing to failure", new Object[] { flowFile, e });
        // Create a provenance event recording the routing to failure
        session.getProvenanceReporter().route(flowFile, REL_FAILURE);
        session.transfer(flowFile, REL_FAILURE);
        session.commit();
    }
}
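For reference, here is a minimal standalone sketch of the same parse-and-handle pattern outside NiFi. The class name, the sample CEF message, and the no-argument CEFParser constructor are illustrative assumptions; the parse(byte[], boolean, Locale) call, the null check, and the CEFHandlingException handling mirror the processor code above.

import com.fluenda.parcefone.event.CEFHandlingException;
import com.fluenda.parcefone.event.CommonEvent;
import com.fluenda.parcefone.parser.CEFParser;

import java.util.Locale;
import java.util.Map;

public class ParcefoneSketch {
    public static void main(String[] args) {
        // Illustrative CEF message: a pipe-delimited header followed by key=value extension pairs
        final byte[] raw = ("CEF:0|security|threatmanager|1.0|100|worm successfully stopped|10|"
                + "src=10.0.0.1 dst=2.1.2.2 spt=1232").getBytes();

        // Assumed no-arg constructor; the NiFi processor above passes an explicit validator instead
        final CEFParser parser = new CEFParser();

        // Same call shape as the processor: raw bytes, a validation flag, and a date/time locale
        final CommonEvent event = parser.parse(raw, true, Locale.forLanguageTag("en-US"));
        if (event == null) {
            // As in the processor, null means the message does not conform to the CEF standard
            System.err.println("Not a valid CEF message");
            return;
        }

        try {
            // Reading parsed fields is guarded by CEFHandlingException, mirroring the processor's catch block
            for (Map.Entry<String, Object> entry : event.getHeader().entrySet()) {
                System.out.println("cef.header." + entry.getKey() + " = " + entry.getValue());
            }
            for (Map.Entry<String, Object> entry : event.getExtension(true).entrySet()) {
                System.out.println("cef.extension." + entry.getKey() + " = " + entry.getValue());
            }
        } catch (CEFHandlingException e) {
            System.err.println("Failed to read CEF fields: " + e.getMessage());
        }
    }
}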