use of org.apache.nifi.processor.ProcessSession in project nifi by apache.
the class ScrollElasticsearchHttp method getPage.
private void getPage(final Response getResponse, final URL url, final ProcessContext context,
        final ProcessSession session, FlowFile flowFile, final ComponentLog logger, final long startNanos)
        throws IOException {
    final int statusCode = getResponse.code();
    if (isSuccess(statusCode)) {
        ResponseBody body = getResponse.body();
        final byte[] bodyBytes = body.bytes();
        JsonNode responseJson = parseJsonResponse(new ByteArrayInputStream(bodyBytes));
        String scrollId = responseJson.get("_scroll_id").asText();
        StringBuilder builder = new StringBuilder();
        builder.append("{ \"hits\" : [");
        JsonNode hits = responseJson.get("hits").get("hits");
        // An empty hits array means the scroll is exhausted: clear the stored
        // state and drop the flow file.
        if (hits.size() == 0) {
            finishQuery(context.getStateManager());
            session.remove(flowFile);
            return;
        }
        for (int i = 0; i < hits.size(); i++) {
            JsonNode hit = hits.get(i);
            String retrievedIndex = hit.get("_index").asText();
            String retrievedType = hit.get("_type").asText();
            JsonNode source = hit.get("_source");
            flowFile = session.putAttribute(flowFile, "es.index", retrievedIndex);
            flowFile = session.putAttribute(flowFile, "es.type", retrievedType);
            flowFile = session.putAttribute(flowFile, "mime.type", "application/json");
            builder.append(source.toString());
            if (i < hits.size() - 1) {
                builder.append(", ");
            }
        }
        builder.append("] }");
        // Log the number of retrieved documents (hits), not the number of
        // top-level fields in the response JSON.
        logger.debug("Elasticsearch retrieved " + hits.size() + " documents, routing to success");
        flowFile = session.write(flowFile, out -> {
            out.write(builder.toString().getBytes());
        });
        session.transfer(flowFile, REL_SUCCESS);
        saveScrollId(context.getStateManager(), scrollId);
        // emit provenance event
        final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
        session.getProvenanceReporter().receive(flowFile, url.toExternalForm(), millis);
    } else {
        // A 5xx status is likely a transient server-side problem, so remove the
        // flow file and yield; the scroll can resume later from the saved state.
        if (statusCode / 100 == 5) {
            logger.warn("Elasticsearch returned code {} with message {}, removing the flow file. This is likely a server problem, yielding...",
                    new Object[] { statusCode, getResponse.message() });
            session.remove(flowFile);
            context.yield();
        } else {
            logger.warn("Elasticsearch returned code {} with message {}", new Object[] { statusCode, getResponse.message() });
            session.remove(flowFile);
        }
    }
}
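The snippet above relies on two state helpers, finishQuery and saveScrollId, that are not shown. The following is a minimal sketch of what they might look like, assuming the scroll ID and a "finished" flag are kept in the processor's StateManager; the key names and the Scope are illustrative assumptions, not taken from the source:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateManager;

// Illustrative state helpers; key names and Scope are assumptions.
private void saveScrollId(StateManager stateManager, String scrollId) throws IOException {
    Map<String, String> state = new HashMap<>(stateManager.getState(Scope.LOCAL).toMap());
    state.put("scrollId", scrollId); // remember where the scroll left off
    stateManager.setState(state, Scope.LOCAL);
}

private void finishQuery(StateManager stateManager) throws IOException {
    Map<String, String> state = new HashMap<>(stateManager.getState(Scope.LOCAL).toMap());
    state.put("finishedQuery", "true"); // mark the scroll as exhausted
    stateManager.setState(state, Scope.LOCAL);
}

Keeping this in the StateManager rather than in memory lets the scroll survive processor restarts, which is why the success path persists the scroll ID on every page.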
use of org.apache.nifi.processor.ProcessSession in project nifi by apache.
the class AbstractEmailProcessor method transfer.
/**
 * Disposes of the message by converting it to a {@link FlowFile} and
 * transferring it to the REL_SUCCESS relationship.
 */
private void transfer(Message emailMessage, ProcessContext context, ProcessSession processSession) {
    long start = System.nanoTime();
    FlowFile flowFile = processSession.create();
    flowFile = processSession.append(flowFile, out -> {
        try {
            emailMessage.writeTo(out);
        } catch (MessagingException e) {
            // The callback only permits IOException, so wrap the checked
            // MessagingException before rethrowing.
            throw new IOException(e);
        }
    });
    long executionDuration = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
    String fromAddressesString = "";
    try {
        Address[] fromAddresses = emailMessage.getFrom();
        if (fromAddresses != null) {
            fromAddressesString = Arrays.asList(fromAddresses).toString();
        }
    } catch (MessagingException e) {
        this.logger.warn("Failed to retrieve 'From' attribute from Message.");
    }
    processSession.getProvenanceReporter().receive(flowFile, this.displayUrl,
            "Received message from " + fromAddressesString, executionDuration);
    this.getLogger().info("Successfully received {} from {} in {} millis",
            new Object[] { flowFile, fromAddressesString, executionDuration });
    processSession.transfer(flowFile, REL_SUCCESS);
}
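For context, a caller would obtain a message from the mail server and hand it to this helper. The sketch below is hypothetical; receiveMessage() is an assumed helper, not part of the code above:

// Hypothetical caller: receiveMessage() is an assumed helper that polls the
// mail server and returns null when no message is waiting.
@Override
public void onTrigger(ProcessContext context, ProcessSession processSession) throws ProcessException {
    Message emailMessage = receiveMessage();
    if (emailMessage == null) {
        context.yield(); // nothing to pick up; avoid busy-looping
        return;
    }
    transfer(emailMessage, context, processSession);
}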
use of org.apache.nifi.processor.ProcessSession in project nifi by apache.
the class ExtractEmailHeaders method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final ComponentLog logger = getLogger();
    final List<FlowFile> invalidFlowFilesList = new ArrayList<>();
    final List<FlowFile> processedFlowFilesList = new ArrayList<>();
    final FlowFile originalFlowFile = session.get();
    if (originalFlowFile == null) {
        return;
    }
    final String requireStrictAddresses = context.getProperty(STRICT_PARSING).getValue();
    final List<String> capturedHeadersList = Arrays.asList(context.getProperty(CAPTURED_HEADERS).getValue().toLowerCase().split(":"));
    final Map<String, String> attributes = new HashMap<>();
    session.read(originalFlowFile, new InputStreamCallback() {
        @Override
        public void process(final InputStream rawIn) throws IOException {
            try (final InputStream in = new BufferedInputStream(rawIn)) {
                Properties props = new Properties();
                props.put("mail.mime.address.strict", requireStrictAddresses);
                Session mailSession = Session.getInstance(props);
                MimeMessage originalMessage = new MimeMessage(mailSession, in);
                MimeMessageParser parser = new MimeMessageParser(originalMessage).parse();
                // RFC 2822 requires a message to have a "From:" header;
                // if the field is missing, the message is flagged as invalid.
                Address[] from = originalMessage.getFrom();
                if (from == null) {
                    throw new MessagingException("Message failed RFC-2822 validation: No Sender");
                }
                Date sentDate = originalMessage.getSentDate();
                if (sentDate == null) {
                    // Throw a MessagingException because the minimum required headers are missing
                    throw new MessagingException("Message failed RFC-2822 validation: No Sent Date");
                } else if (capturedHeadersList.size() > 0) {
                    Enumeration headers = originalMessage.getAllHeaders();
                    while (headers.hasMoreElements()) {
                        Header header = (Header) headers.nextElement();
                        if (StringUtils.isNotEmpty(header.getValue()) && capturedHeadersList.contains(header.getName().toLowerCase())) {
                            attributes.put("email.headers." + header.getName().toLowerCase(), header.getValue());
                        }
                    }
                }
                putAddressListInAttributes(attributes, EMAIL_HEADER_TO, originalMessage.getRecipients(Message.RecipientType.TO));
                putAddressListInAttributes(attributes, EMAIL_HEADER_CC, originalMessage.getRecipients(Message.RecipientType.CC));
                putAddressListInAttributes(attributes, EMAIL_HEADER_BCC, originalMessage.getRecipients(Message.RecipientType.BCC));
                // RFC 2822 specifies "From" as a mailbox-list
                putAddressListInAttributes(attributes, EMAIL_HEADER_FROM, originalMessage.getFrom());
                if (StringUtils.isNotEmpty(originalMessage.getMessageID())) {
                    attributes.put(EMAIL_HEADER_MESSAGE_ID, originalMessage.getMessageID());
                }
                if (originalMessage.getReceivedDate() != null) {
                    attributes.put(EMAIL_HEADER_RECV_DATE, originalMessage.getReceivedDate().toString());
                }
                if (originalMessage.getSentDate() != null) {
                    attributes.put(EMAIL_HEADER_SENT_DATE, originalMessage.getSentDate().toString());
                }
                if (StringUtils.isNotEmpty(originalMessage.getSubject())) {
                    attributes.put(EMAIL_HEADER_SUBJECT, originalMessage.getSubject());
                }
                // Default EMAIL_ATTACHMENT_COUNT to zero...
                attributes.put(EMAIL_ATTACHMENT_COUNT, "0");
                // ...then overwrite it if attachments are present
                if (parser.hasAttachments()) {
                    attributes.put(EMAIL_ATTACHMENT_COUNT, String.valueOf(parser.getAttachmentList().size()));
                }
            } catch (Exception e) {
                // The message is invalid or triggered an error during parsing;
                // clear any partially collected attributes and route to failure.
                attributes.clear();
                logger.error("Could not parse the flowfile {} as an email, treating as failure", new Object[] { originalFlowFile, e });
                invalidFlowFilesList.add(originalFlowFile);
            }
        }
    });
    if (attributes.size() > 0) {
        FlowFile updatedFlowFile = session.putAllAttributes(originalFlowFile, attributes);
        logger.info("Extracted {} headers into {} file", new Object[] { attributes.size(), updatedFlowFile });
        processedFlowFilesList.add(updatedFlowFile);
    }
    session.transfer(processedFlowFilesList, REL_SUCCESS);
    session.transfer(invalidFlowFilesList, REL_FAILURE);
}
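putAddressListInAttributes is not shown above. A plausible sketch, assuming each address is written to its own indexed attribute (the exact attribute layout is an assumption, not confirmed by the source):

// Illustrative sketch; the real helper's attribute naming may differ.
private static void putAddressListInAttributes(Map<String, String> attributes,
        String attributePrefix, Address[] addresses) {
    if (addresses != null) {
        for (int count = 0; count < addresses.length; count++) {
            // e.g. "email.headers.to.0", "email.headers.to.1", ...
            attributes.put(attributePrefix + "." + count, addresses[count].toString());
        }
    }
}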
use of org.apache.nifi.processor.ProcessSession in project nifi by apache.
the class SmtpConsumer method data.
@Override
public void data(final InputStream data) throws RejectException, TooMuchDataException, IOException {
    final ProcessSession processSession = sessionFactory.createSession();
    final StopWatch watch = new StopWatch();
    watch.start();
    try {
        FlowFile flowFile = processSession.create();
        final AtomicBoolean limitExceeded = new AtomicBoolean(false);
        flowFile = processSession.write(flowFile, (OutputStream out) -> {
            final LimitingInputStream lis = new LimitingInputStream(data, maxMessageSize);
            IOUtils.copy(lis, out);
            if (lis.hasReachedLimit()) {
                limitExceeded.set(true);
            }
        });
        if (limitExceeded.get()) {
            throw new TooMuchDataException("Maximum message size limit reached - client must send smaller messages");
        }
        flowFile = processSession.putAllAttributes(flowFile, extractMessageAttributes());
        watch.stop();
        processSession.getProvenanceReporter().receive(flowFile, "smtp://" + host + ":" + port + "/", watch.getDuration(TimeUnit.MILLISECONDS));
        processSession.transfer(flowFile, ListenSMTP.REL_SUCCESS);
        processSession.commit();
    } catch (FlowFileAccessException | IllegalStateException | RejectException | IOException ex) {
        log.error("Unable to fully process input due to " + ex.getMessage(), ex);
        throw ex;
    } finally {
        // Always roll back: this is safe because rollback() is a no-op after a
        // successful commit(), and it discards partial work when commit() was
        // never reached.
        processSession.rollback();
    }
}
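The commit-then-rollback arrangement is worth calling out: because rollback() on an already-committed session is a no-op, placing it in a finally block discards work only when an exception prevented commit() from running. The pattern in isolation:

final ProcessSession session = sessionFactory.createSession();
try {
    // ... create, write, and transfer flow files ...
    session.commit(); // publish the session's work
} finally {
    // No-op if commit() succeeded; otherwise undoes any partial work so the
    // session is left clean for the next invocation.
    session.rollback();
}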
use of org.apache.nifi.processor.ProcessSession in project nifi by apache.
the class ParseEvtx method processChunkGranularity.
protected void processChunkGranularity(ProcessSession session, ComponentLog componentLog, FlowFile flowFile,
        String basename, InputStream in) throws IOException {
    FileHeader fileHeader = fileHeaderFactory.create(in, componentLog);
    while (fileHeader.hasNext()) {
        try {
            ChunkHeader chunkHeader = fileHeader.next();
            FlowFile updated = session.create(flowFile);
            AtomicReference<Exception> exceptionReference = new AtomicReference<>(null);
            updated = session.write(updated, out -> {
                try (RootNodeHandler rootNodeHandler = rootNodeHandlerFactory.create(out)) {
                    while (chunkHeader.hasNext()) {
                        try {
                            rootNodeHandler.handle(chunkHeader.next().getRootNode());
                        } catch (IOException e) {
                            exceptionReference.set(e);
                            break;
                        }
                    }
                } catch (IOException e) {
                    exceptionReference.set(e);
                }
            });
            Exception exception = exceptionReference.get();
            resultProcessor.process(session, componentLog, updated, exception, getName(basename, chunkHeader.getChunkNumber(), null, XML_EXTENSION));
            if (exception != null) {
                malformedChunkHandler.handle(flowFile, session, getName(basename, chunkHeader.getChunkNumber(), null, EVTX_EXTENSION), chunkHeader.getBinaryReader().getBytes());
            }
        } catch (MalformedChunkException e) {
            malformedChunkHandler.handle(flowFile, session, getName(basename, e.getChunkNum(), null, EVTX_EXTENSION), e.getBadChunk());
        }
    }
}
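resultProcessor.process is the routing step: it receives the written flow file, any exception captured through the AtomicReference, and the output name. A plausible sketch of such a processor, routing on whether an exception was captured; the relationship names and attribute handling here are assumptions, not the project's confirmed implementation:

// Illustrative result processor; relationships and attribute keys are assumptions.
public class ResultProcessor {
    public void process(ProcessSession session, ComponentLog logger, FlowFile flowFile,
            Exception exception, String name) {
        flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), name);
        if (exception == null) {
            session.transfer(flowFile, ParseEvtx.REL_SUCCESS);
        } else {
            logger.error("Failed to process {}", new Object[] { name }, exception);
            session.transfer(flowFile, ParseEvtx.REL_FAILURE);
        }
    }
}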