use of org.apache.nifi.processor.exception.FlowFileAccessException in project nifi by apache.
the class AbstractKudu method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    try {
        if (flowFile == null)
            return;
        final Map<String, String> attributes = new HashMap<String, String>();
        final AtomicReference<Throwable> exceptionHolder = new AtomicReference<>(null);
        final RecordReaderFactory recordReaderFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
        final KuduSession kuduSession = this.getKuduSession(kuduClient);
        session.read(flowFile, (final InputStream rawIn) -> {
            RecordReader recordReader = null;
            try (final BufferedInputStream in = new BufferedInputStream(rawIn)) {
                try {
                    recordReader = recordReaderFactory.createRecordReader(flowFile, in, getLogger());
                } catch (Exception ex) {
                    final RecordReaderFactoryException rrfe = new RecordReaderFactoryException("Unable to create RecordReader", ex);
                    exceptionHolder.set(rrfe);
                    return;
                }
                List<String> fieldNames = recordReader.getSchema().getFieldNames();
                final RecordSet recordSet = recordReader.createRecordSet();
                if (skipHeadLine)
                    recordSet.next();
                int numOfAddedRecord = 0;
                Record record = recordSet.next();
                while (record != null) {
                    org.apache.kudu.client.Operation oper = null;
                    if (operationType == OperationType.UPSERT) {
                        oper = upsertRecordToKudu(kuduTable, record, fieldNames);
                    } else {
                        oper = insertRecordToKudu(kuduTable, record, fieldNames);
                    }
                    kuduSession.apply(oper);
                    numOfAddedRecord++;
                    record = recordSet.next();
                }
                getLogger().info("KUDU: number of inserted records: " + numOfAddedRecord);
                attributes.put(RECORD_COUNT_ATTR, String.valueOf(numOfAddedRecord));
            } catch (KuduException ex) {
                getLogger().error("Exception occurred while interacting with Kudu due to " + ex.getMessage(), ex);
                exceptionHolder.set(ex);
            } catch (Exception e) {
                exceptionHolder.set(e);
            } finally {
                IOUtils.closeQuietly(recordReader);
            }
        });
        kuduSession.close();
        // Rethrow any exception captured inside the read callback so the flow file is routed to failure below
        if (exceptionHolder.get() != null) {
            throw exceptionHolder.get();
        }
        // Update flow file's attributes after the ingestion
        session.putAllAttributes(flowFile, attributes);
        session.transfer(flowFile, REL_SUCCESS);
        session.getProvenanceReporter().send(flowFile, "Successfully added flowfile to kudu");
    } catch (IOException | FlowFileAccessException e) {
        getLogger().error("Failed to write due to {}", new Object[] { e });
        session.transfer(flowFile, REL_FAILURE);
    } catch (Throwable t) {
        getLogger().error("Failed to write due to {}", new Object[] { t });
        session.transfer(flowFile, REL_FAILURE);
    }
}
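The method above delegates row construction to upsertRecordToKudu and insertRecordToKudu, which are not shown here. A minimal sketch of what such a helper could look like, assuming string-typed Kudu columns whose names match the record's field names (the typing assumption and loop body are illustrative, not the processor's actual implementation):

private Operation upsertRecordToKudu(final KuduTable kuduTable, final Record record, final List<String> fieldNames) {
    // Create an UPSERT operation and copy each record field into the matching Kudu column.
    final Upsert upsert = kuduTable.newUpsert();
    final PartialRow row = upsert.getRow();
    for (final String fieldName : fieldNames) {
        // Assumes every column is a STRING column; a real implementation would switch on the column type.
        row.addString(fieldName, record.getAsString(fieldName));
    }
    return upsert;
}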
use of org.apache.nifi.processor.exception.FlowFileAccessException in project nifi by apache.
the class TestListenSyslog method testErrorQueue.
@Test
public void testErrorQueue() throws IOException {
    final List<ListenSyslog.RawSyslogEvent> msgs = new ArrayList<>();
    msgs.add(new ListenSyslog.RawSyslogEvent(VALID_MESSAGE.getBytes(), "sender-01"));
    msgs.add(new ListenSyslog.RawSyslogEvent(VALID_MESSAGE.getBytes(), "sender-01"));
    // Add message that will throw a FlowFileAccessException the first time that we attempt to read
    // the contents but will succeed the second time.
    final AtomicInteger getMessageAttempts = new AtomicInteger(0);
    msgs.add(new ListenSyslog.RawSyslogEvent(VALID_MESSAGE.getBytes(), "sender-01") {

        @Override
        public byte[] getData() {
            final int attempts = getMessageAttempts.incrementAndGet();
            if (attempts == 1) {
                throw new FlowFileAccessException("Unit test failure");
            } else {
                return VALID_MESSAGE.getBytes();
            }
        }
    });
    final CannedMessageProcessor proc = new CannedMessageProcessor(msgs);
    final TestRunner runner = TestRunners.newTestRunner(proc);
    runner.setProperty(ListenSyslog.MAX_BATCH_SIZE, "5");
    runner.setProperty(ListenSyslog.PROTOCOL, ListenSyslog.UDP_VALUE.getValue());
    runner.setProperty(ListenSyslog.PORT, "0");
    runner.setProperty(ListenSyslog.PARSE_MESSAGES, "false");
    runner.run();
    assertEquals(1, proc.getErrorQueueSize());
    runner.assertAllFlowFilesTransferred(ListenSyslog.REL_SUCCESS, 1);
    runner.getFlowFilesForRelationship(ListenSyslog.REL_SUCCESS).get(0).assertContentEquals(VALID_MESSAGE + "\n" + VALID_MESSAGE);
    // running again should pull from the error queue
    runner.clearTransferState();
    runner.run();
    runner.assertAllFlowFilesTransferred(ListenSyslog.REL_SUCCESS, 1);
    runner.getFlowFilesForRelationship(ListenSyslog.REL_SUCCESS).get(0).assertContentEquals(VALID_MESSAGE);
}
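The behavior under test is a retry pattern: an event whose content cannot be read is pushed onto an error queue and polled before fresh events on the next run. A generic sketch of that pattern, with hypothetical field and method names that are not ListenSyslog's actual code:

// Illustrative only: previously failed events are retried before new ones are consumed.
private RawSyslogEvent nextEvent() {
    final RawSyslogEvent retry = errorQueue.poll();
    return (retry != null) ? retry : incomingQueue.poll();
}

private void handleReadFailure(final RawSyslogEvent event) {
    // Requeue the event so a subsequent onTrigger invocation can attempt to read it again.
    errorQueue.offer(event);
}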
use of org.apache.nifi.processor.exception.FlowFileAccessException in project nifi by apache.
the class MockProcessSession method exportTo.
@Override
public void exportTo(FlowFile flowFile, final Path path, final boolean append) {
    flowFile = validateState(flowFile);
    if (flowFile == null || path == null) {
        throw new IllegalArgumentException("argument cannot be null");
    }
    if (!(flowFile instanceof MockFlowFile)) {
        throw new IllegalArgumentException("Cannot export a flow file that I did not create");
    }
    final MockFlowFile mock = (MockFlowFile) flowFile;
    final OpenOption mode = append ? StandardOpenOption.APPEND : StandardOpenOption.CREATE;
    try (final OutputStream out = Files.newOutputStream(path, mode)) {
        out.write(mock.getData());
    } catch (final IOException e) {
        throw new FlowFileAccessException(e.toString(), e);
    }
}
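Processor code exercises this contract through the ProcessSession interface, so the mock's FlowFileAccessException surfaces exactly where the real session's would. A minimal, hypothetical usage sketch from a processor's onTrigger (the output path and relationship names are placeholders):

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    try {
        // Write the flow file's content to a local file, creating rather than appending.
        session.exportTo(flowFile, Paths.get("/tmp/exported-content"), false);
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final FlowFileAccessException e) {
        // Thrown by the session (as in the mock above) when the underlying I/O fails.
        getLogger().error("Export failed", e);
        session.transfer(flowFile, REL_FAILURE);
    }
}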
use of org.apache.nifi.processor.exception.FlowFileAccessException in project nifi by apache.
the class MockProcessSession method importFrom.
@Override
public MockFlowFile importFrom(final Path path, final boolean keepSourceFile, FlowFile flowFile) {
    flowFile = validateState(flowFile);
    if (path == null || flowFile == null) {
        throw new IllegalArgumentException("argument cannot be null");
    }
    if (!(flowFile instanceof MockFlowFile)) {
        throw new IllegalArgumentException("Cannot export a flow file that I did not create");
    }
    final MockFlowFile mock = (MockFlowFile) flowFile;
    MockFlowFile newFlowFile = new MockFlowFile(mock.getId(), flowFile);
    currentVersions.put(newFlowFile.getId(), newFlowFile);
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        Files.copy(path, baos);
    } catch (final IOException e) {
        throw new FlowFileAccessException(e.toString(), e);
    }
    newFlowFile.setData(baos.toByteArray());
    newFlowFile = putAttribute(newFlowFile, CoreAttributes.FILENAME.key(), path.getFileName().toString());
    return newFlowFile;
}
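A hypothetical test fragment exercising this method, assuming a session obtained from the test runner's process session factory (the temp file and its contents are placeholders):

// Stage a file on disk, import it, and verify the resulting flow file.
final Path input = Files.write(Files.createTempFile("import", ".txt"), "hello".getBytes(StandardCharsets.UTF_8));
FlowFile flowFile = session.create();
flowFile = session.importFrom(input, true, flowFile);
// The filename attribute is set from the source path, as shown in the method above.
assertEquals(input.getFileName().toString(), flowFile.getAttribute(CoreAttributes.FILENAME.key()));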
use of org.apache.nifi.processor.exception.FlowFileAccessException in project nifi by apache.
the class MockProcessSession method closeStreams.
private void closeStreams(final Map<FlowFile, ? extends Closeable> streamMap, final boolean enforceClosed) {
    // avoid ConcurrentModificationException by creating a copy of the List
    final Map<FlowFile, ? extends Closeable> openStreamCopy = new HashMap<>(streamMap);
    for (final Map.Entry<FlowFile, ? extends Closeable> entry : openStreamCopy.entrySet()) {
        final FlowFile flowFile = entry.getKey();
        final Closeable openStream = entry.getValue();
        try {
            openStream.close();
        } catch (IOException e) {
            throw new FlowFileAccessException("Failed to close stream for " + flowFile, e);
        }
        if (enforceClosed) {
            throw new FlowFileHandlingException("Cannot commit session because the following streams were created via "
                + "calls to ProcessSession.read(FlowFile) or ProcessSession.write(FlowFile) and never closed: " + streamMap);
        }
    }
}
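closeStreams runs when the mock session is committed: any stream obtained from ProcessSession.read(FlowFile) or write(FlowFile) and left open fails the commit. A hypothetical processor fragment that would trip this check in a unit test:

// Inside onTrigger: obtaining an InputStream without closing it leaves an open stream in the session.
try {
    final InputStream in = session.read(flowFile);
    in.read(); // content is consumed, but the stream is intentionally never closed
} catch (final IOException e) {
    throw new ProcessException(e);
}
session.transfer(flowFile, REL_SUCCESS);
// When the test framework commits the mock session, closeStreams(..., true) finds the open
// stream and throws FlowFileHandlingException, failing the test.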