Use of org.apache.nifi.stream.io.LimitingInputStream in project nifi (apache/nifi): class SmtpConsumer, method data().
The SMTP handler streams the incoming message into a FlowFile through a LimitingInputStream capped at maxMessageSize, then rejects the message if the limit was reached.
@Override
public void data(final InputStream data) throws RejectException, TooMuchDataException, IOException {
    final ProcessSession processSession = sessionFactory.createSession();
    final StopWatch watch = new StopWatch();
    watch.start();
    try {
        FlowFile flowFile = processSession.create();
        final AtomicBoolean limitExceeded = new AtomicBoolean(false);
        flowFile = processSession.write(flowFile, (OutputStream out) -> {
            final LimitingInputStream lis = new LimitingInputStream(data, maxMessageSize);
            IOUtils.copy(lis, out);
            if (lis.hasReachedLimit()) {
                limitExceeded.set(true);
            }
        });
        if (limitExceeded.get()) {
            throw new TooMuchDataException("Maximum message size limit reached - client must send smaller messages");
        }
        flowFile = processSession.putAllAttributes(flowFile, extractMessageAttributes());
        watch.stop();
        processSession.getProvenanceReporter().receive(flowFile, "smtp://" + host + ":" + port + "/", watch.getDuration(TimeUnit.MILLISECONDS));
        processSession.transfer(flowFile, ListenSMTP.REL_SUCCESS);
        processSession.commit();
    } catch (FlowFileAccessException | IllegalStateException | RejectException | IOException ex) {
        log.error("Unable to fully process input due to " + ex.getMessage(), ex);
        throw ex;
    } finally {
        // make sure this happens no matter what - is safe
        processSession.rollback();
    }
}
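For context, here is a minimal standalone sketch of the capping behavior SmtpConsumer relies on. It is not NiFi code; it assumes only the LimitingInputStream constructor and hasReachedLimit() method seen in the snippet above, and uses a made-up in-memory payload in place of an SMTP message.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.nifi.stream.io.LimitingInputStream;

public class LimitingInputStreamSketch {
    public static void main(final String[] args) throws IOException {
        final byte[] message = new byte[2048];      // stand-in for an SMTP payload
        final long maxMessageSize = 1024L;          // cap copies at 1 KiB

        final LimitingInputStream lis = new LimitingInputStream(new ByteArrayInputStream(message), maxMessageSize);
        final ByteArrayOutputStream out = new ByteArrayOutputStream();

        final byte[] buffer = new byte[256];
        int read;
        while ((read = lis.read(buffer)) != -1) {   // the wrapper reports EOF once the limit is consumed
            out.write(buffer, 0, read);
        }

        System.out.println("bytes copied: " + out.size());              // 1024, not 2048
        System.out.println("limit reached: " + lis.hasReachedLimit());  // true, so the caller can reject the message
    }
}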
Use of org.apache.nifi.stream.io.LimitingInputStream in project nifi (apache/nifi): class ByteArraySchemaRecordReader, method nextRecord().
Each provenance record is length-prefixed; wrapping the stream in a LimitingInputStream keeps the schema reader from consuming bytes that belong to the next record.
@Override
protected StandardProvenanceEventRecord nextRecord(final DataInputStream in, final int serializationVersion) throws IOException {
    verifySerializationVersion(serializationVersion);
    final long byteOffset = getBytesConsumed();
    final int recordLength = in.readInt();
    final InputStream limitedIn = new LimitingInputStream(in, recordLength);
    final Record eventRecord = recordReader.readRecord(limitedIn);
    if (eventRecord == null) {
        return null;
    }
    return EventRecord.getEvent(eventRecord, getFilename(), byteOffset, getMaxAttributeLength());
}
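A hedged sketch of the same length-prefixed framing with made-up string payloads follows. The record contents and class name are hypothetical, and the sketch stands in for the NiFi schema reader with a plain readFully(); only DataInputStream/DataOutputStream and the LimitingInputStream constructor used above are assumed.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.nifi.stream.io.LimitingInputStream;

public class LengthPrefixedRecordsSketch {
    public static void main(final String[] args) throws IOException {
        // Write two records, each framed as an int length followed by that many payload bytes.
        final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream dos = new DataOutputStream(bytes)) {
            for (final String record : new String[] {"first record", "second record"}) {
                final byte[] payload = record.getBytes(StandardCharsets.UTF_8);
                dos.writeInt(payload.length);
                dos.write(payload);
            }
        }

        // Read them back; the LimitingInputStream keeps each per-record read from
        // spilling into the bytes of the record that follows.
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            for (int i = 0; i < 2; i++) {
                final int recordLength = in.readInt();
                final InputStream limitedIn = new LimitingInputStream(in, recordLength);
                final byte[] payload = new byte[recordLength];
                new DataInputStream(limitedIn).readFully(payload);
                System.out.println(new String(payload, StandardCharsets.UTF_8));
            }
        }
    }
}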
Use of org.apache.nifi.stream.io.LimitingInputStream in project nifi (apache/nifi): class EventIdFirstSchemaRecordReader, method readRecord().
Given a record's start offset and length, the reader bounds deserialization to exactly recordLength bytes before mapping the generic Record to a StandardProvenanceEventRecord.
private StandardProvenanceEventRecord readRecord(final DataInputStream in, final long eventId, final long startOffset, final int recordLength) throws IOException {
    final InputStream limitedIn = new LimitingInputStream(in, recordLength);
    final Record eventRecord = recordReader.readRecord(limitedIn);
    if (eventRecord == null) {
        return null;
    }
    final StandardProvenanceEventRecord deserializedEvent = LookupTableEventRecord.getEvent(eventRecord, getFilename(), startOffset, getMaxAttributeLength(),
        firstEventId, systemTimeOffset, componentIds, componentTypes, queueIds, eventTypes);
    deserializedEvent.setEventId(eventId);
    return deserializedEvent;
}
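A hedged sketch of the same offset-plus-length pattern against an ordinary file: the file layout and helper name are hypothetical, nothing here is the NiFi event file format, and a plain readFully() stands in for the schema-based deserialization.

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.nifi.stream.io.LimitingInputStream;

public class ReadRecordAtOffsetSketch {
    // Reads exactly recordLength bytes starting at startOffset, mirroring the way
    // readRecord() above bounds deserialization of a single event.
    static byte[] readRecordAt(final File file, final long startOffset, final int recordLength) throws IOException {
        try (InputStream fis = new FileInputStream(file)) {
            long remaining = startOffset;
            while (remaining > 0) {                       // skip() may skip fewer bytes than requested
                final long skipped = fis.skip(remaining);
                if (skipped <= 0) {
                    throw new EOFException("Reached end of file before the record's start offset");
                }
                remaining -= skipped;
            }

            final InputStream limitedIn = new LimitingInputStream(fis, recordLength);
            final byte[] record = new byte[recordLength];
            new DataInputStream(limitedIn).readFully(record);  // cannot read past the record boundary
            return record;
        }
    }
}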