Example usage of org.apache.nifi.serialization.RecordReaderFactory in the Apache NiFi project: class PutParquetTest, method testIOExceptionFromReaderShouldRouteToRetry.
@Test
public void testIOExceptionFromReaderShouldRouteToRetry() throws InitializationException, IOException, MalformedRecordException, SchemaNotFoundException {
    configure(proc, 10);
    // RecordSet whose first next() call fails with an IOException, simulating a
    // transient read error mid-stream; such failures should route the FlowFile to retry.
    final RecordSet recordSet = Mockito.mock(RecordSet.class);
    when(recordSet.next()).thenThrow(new IOException("ERROR"));
    final RecordReader recordReader = Mockito.mock(RecordReader.class);
    when(recordReader.createRecordSet()).thenReturn(recordSet);
    when(recordReader.getSchema()).thenReturn(AvroTypeUtil.createSchema(schema));
    final RecordReaderFactory readerFactory = Mockito.mock(RecordReaderFactory.class);
    when(readerFactory.getIdentifier()).thenReturn("mock-reader-factory");
    when(readerFactory.createRecordReader(any(FlowFile.class), any(InputStream.class), any(ComponentLog.class))).thenReturn(recordReader);
    testRunner.addControllerService("mock-reader-factory", readerFactory);
    testRunner.enableControllerService(readerFactory);
    testRunner.setProperty(PutParquet.RECORD_READER, "mock-reader-factory");
    // Fixed: the filename prefix previously read "testMalformedRecordExceptionShouldRouteToFailure"
    // (copy-paste from a sibling test); it now matches this test's name.
    final String filename = "testIOExceptionFromReaderShouldRouteToRetry-" + System.currentTimeMillis();
    final Map<String, String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put(CoreAttributes.FILENAME.key(), filename);
    testRunner.enqueue("trigger", flowFileAttributes);
    testRunner.run();
    testRunner.assertAllFlowFilesTransferred(PutParquet.REL_RETRY, 1);
}
Example usage of org.apache.nifi.serialization.RecordReaderFactory in the Apache NiFi project: class PutParquetTest, method testMalformedRecordExceptionFromReaderShouldRouteToFailure.
@Test
public void testMalformedRecordExceptionFromReaderShouldRouteToFailure() throws InitializationException, IOException, MalformedRecordException, SchemaNotFoundException {
    configure(proc, 10);
    // Reader that fails with a MalformedRecordException on the first record;
    // unparseable data should route the FlowFile to failure (not retry).
    final RecordReader recordReader = Mockito.mock(RecordReader.class);
    when(recordReader.nextRecord()).thenThrow(new MalformedRecordException("ERROR"));
    final RecordReaderFactory readerFactory = Mockito.mock(RecordReaderFactory.class);
    when(readerFactory.getIdentifier()).thenReturn("mock-reader-factory");
    when(readerFactory.createRecordReader(any(FlowFile.class), any(InputStream.class), any(ComponentLog.class))).thenReturn(recordReader);
    testRunner.addControllerService("mock-reader-factory", readerFactory);
    testRunner.enableControllerService(readerFactory);
    testRunner.setProperty(PutParquet.RECORD_READER, "mock-reader-factory");
    // Fixed: filename prefix now matches the full method name (it previously omitted "FromReader").
    final String filename = "testMalformedRecordExceptionFromReaderShouldRouteToFailure-" + System.currentTimeMillis();
    final Map<String, String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put(CoreAttributes.FILENAME.key(), filename);
    testRunner.enqueue("trigger", flowFileAttributes);
    testRunner.run();
    testRunner.assertAllFlowFilesTransferred(PutParquet.REL_FAILURE, 1);
}
Example usage of org.apache.nifi.serialization.RecordReaderFactory in the Apache NiFi project: class PutParquetTest, method testInvalidAvroShouldRouteToFailure.
@Test
public void testInvalidAvroShouldRouteToFailure() throws InitializationException, SchemaNotFoundException, MalformedRecordException, IOException {
    configure(proc, 0);
    // The Avro reader factory throws an IOException at reader-creation time when handed
    // invalid Avro, so have the mocked factory fail from createRecordReader itself.
    final RecordReaderFactory mockFactory = Mockito.mock(RecordReaderFactory.class);
    when(mockFactory.getIdentifier()).thenReturn("mock-reader-factory");
    when(mockFactory.createRecordReader(any(FlowFile.class), any(InputStream.class), any(ComponentLog.class))).thenThrow(new IOException("NOT AVRO"));
    testRunner.addControllerService("mock-reader-factory", mockFactory);
    testRunner.enableControllerService(mockFactory);
    testRunner.setProperty(PutParquet.RECORD_READER, "mock-reader-factory");
    final Map<String, String> attrs = new HashMap<>();
    attrs.put(CoreAttributes.FILENAME.key(), "testInvalidAvroShouldRouteToFailure-" + System.currentTimeMillis());
    testRunner.enqueue("trigger", attrs);
    testRunner.run();
    testRunner.assertAllFlowFilesTransferred(PutParquet.REL_FAILURE, 1);
}
Example usage of org.apache.nifi.serialization.RecordReaderFactory in the Apache NiFi project: class AbstractKudu, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // Reads records from the incoming FlowFile via the configured RecordReader and
    // applies each one to Kudu as an insert or upsert. On success the FlowFile is
    // routed to REL_SUCCESS with a record-count attribute; any error routes to REL_FAILURE.
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    try {
        final Map<String, String> attributes = new HashMap<>();
        // Captures any exception raised inside the read callback so it can be
        // rethrown on this thread after session.read() returns.
        final AtomicReference<Throwable> exceptionHolder = new AtomicReference<>(null);
        final RecordReaderFactory recordReaderFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
        final KuduSession kuduSession = this.getKuduSession(kuduClient);
        try {
            session.read(flowFile, (final InputStream rawIn) -> {
                RecordReader recordReader = null;
                try (final BufferedInputStream in = new BufferedInputStream(rawIn)) {
                    try {
                        recordReader = recordReaderFactory.createRecordReader(flowFile, in, getLogger());
                    } catch (Exception ex) {
                        exceptionHolder.set(new RecordReaderFactoryException("Unable to create RecordReader", ex));
                        return;
                    }
                    final List<String> fieldNames = recordReader.getSchema().getFieldNames();
                    final RecordSet recordSet = recordReader.createRecordSet();
                    // Optionally discard the first record (e.g. a CSV header line).
                    if (skipHeadLine) {
                        recordSet.next();
                    }
                    int numOfAddedRecord = 0;
                    Record record = recordSet.next();
                    while (record != null) {
                        final org.apache.kudu.client.Operation oper = (operationType == OperationType.UPSERT)
                                ? upsertRecordToKudu(kuduTable, record, fieldNames)
                                : insertRecordToKudu(kuduTable, record, fieldNames);
                        kuduSession.apply(oper);
                        numOfAddedRecord++;
                        record = recordSet.next();
                    }
                    getLogger().info("KUDU: number of inserted records: " + numOfAddedRecord);
                    attributes.put(RECORD_COUNT_ATTR, String.valueOf(numOfAddedRecord));
                } catch (KuduException ex) {
                    getLogger().error("Exception occurred while interacting with Kudu due to " + ex.getMessage(), ex);
                    exceptionHolder.set(ex);
                } catch (Exception e) {
                    exceptionHolder.set(e);
                } finally {
                    IOUtils.closeQuietly(recordReader);
                }
            });
        } finally {
            // Fixed: previously the session was closed only on the success path, leaking
            // it if session.read() itself threw (e.g. a FlowFileAccessException). A close
            // failure is a KuduException (an IOException) and is handled by the catch below.
            kuduSession.close();
        }
        if (exceptionHolder.get() != null) {
            throw exceptionHolder.get();
        }
        // Update the FlowFile's attributes after the ingestion.
        session.putAllAttributes(flowFile, attributes);
        session.transfer(flowFile, REL_SUCCESS);
        session.getProvenanceReporter().send(flowFile, "Successfully added flowfile to kudu");
    } catch (IOException | FlowFileAccessException e) {
        getLogger().error("Failed to write due to {}", new Object[] { e });
        session.transfer(flowFile, REL_FAILURE);
    } catch (Throwable t) {
        getLogger().error("Failed to write due to {}", new Object[] { t });
        session.transfer(flowFile, REL_FAILURE);
    }
}
Example usage of org.apache.nifi.serialization.RecordReaderFactory in the Apache NiFi project: class TestPutKudu, method testMalformedRecordExceptionFromReaderShouldRouteToFailure.
@Test
public void testMalformedRecordExceptionFromReaderShouldRouteToFailure() throws InitializationException, IOException, MalformedRecordException, SchemaNotFoundException {
    createRecordReader(10);
    // Reader that blows up with a MalformedRecordException on the first record;
    // PutKudu should route the triggering FlowFile to failure.
    final RecordReader failingReader = Mockito.mock(RecordReader.class);
    when(failingReader.nextRecord()).thenThrow(new MalformedRecordException("ERROR"));
    final RecordReaderFactory mockFactory = Mockito.mock(RecordReaderFactory.class);
    when(mockFactory.getIdentifier()).thenReturn("mock-reader-factory");
    when(mockFactory.createRecordReader(any(FlowFile.class), any(InputStream.class), any(ComponentLog.class))).thenReturn(failingReader);
    testRunner.addControllerService("mock-reader-factory", mockFactory);
    testRunner.enableControllerService(mockFactory);
    testRunner.setProperty(PutKudu.RECORD_READER, "mock-reader-factory");
    final Map<String, String> attrs = new HashMap<>();
    attrs.put(CoreAttributes.FILENAME.key(), "testMalformedRecordExceptionShouldRouteToFailure-" + System.currentTimeMillis());
    testRunner.enqueue("trigger", attrs);
    testRunner.run();
    testRunner.assertAllFlowFilesTransferred(PutKudu.REL_FAILURE, 1);
}
Aggregations