Use of org.apache.nifi.processor.io.StreamCallback in project nifi by apache.
The class ConvertCSVToAvro, method onTrigger.
@Override
public void onTrigger(ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile incomingCSV = session.get();
    if (incomingCSV == null) {
        return;
    }
    CSVProperties props = new CSVProperties.Builder()
            .charset(context.getProperty(CHARSET).evaluateAttributeExpressions(incomingCSV).getValue())
            .delimiter(context.getProperty(DELIMITER).evaluateAttributeExpressions(incomingCSV).getValue())
            .quote(context.getProperty(QUOTE).evaluateAttributeExpressions(incomingCSV).getValue())
            .escape(context.getProperty(ESCAPE).evaluateAttributeExpressions(incomingCSV).getValue())
            .hasHeader(context.getProperty(HAS_HEADER).evaluateAttributeExpressions(incomingCSV).asBoolean())
            .linesToSkip(context.getProperty(LINES_TO_SKIP).evaluateAttributeExpressions(incomingCSV).asInteger())
            .build();
    String schemaProperty = context.getProperty(SCHEMA).evaluateAttributeExpressions(incomingCSV).getValue();
    final Schema schema;
    try {
        schema = getSchema(schemaProperty, DefaultConfiguration.get());
    } catch (SchemaNotFoundException e) {
        getLogger().error("Cannot find schema: " + schemaProperty);
        session.transfer(incomingCSV, FAILURE);
        return;
    }
    try (final DataFileWriter<Record> writer = new DataFileWriter<>(AvroUtil.newDatumWriter(schema, Record.class))) {
        writer.setCodec(getCodecFactory(context.getProperty(COMPRESSION_TYPE).getValue()));
        try {
            final AtomicLong written = new AtomicLong(0L);
            final FailureTracker failures = new FailureTracker();
            FlowFile badRecords = session.clone(incomingCSV);
            FlowFile outgoingAvro = session.write(incomingCSV, new StreamCallback() {

                @Override
                public void process(InputStream in, OutputStream out) throws IOException {
                    try (CSVFileReader<Record> reader = new CSVFileReader<>(in, props, schema, Record.class)) {
                        reader.initialize();
                        try (DataFileWriter<Record> w = writer.create(schema, out)) {
                            while (reader.hasNext()) {
                                try {
                                    Record record = reader.next();
                                    w.append(record);
                                    written.incrementAndGet();
                                } catch (DatasetRecordException e) {
                                    failures.add(e);
                                }
                            }
                        }
                    }
                }
            });
            long errors = failures.count();
            session.adjustCounter("Converted records", written.get(), false);
            session.adjustCounter("Conversion errors", errors, false);
            if (written.get() > 0L) {
                session.transfer(outgoingAvro, SUCCESS);
                if (errors > 0L) {
                    getLogger().warn("Failed to convert {}/{} records from CSV to Avro", new Object[] { errors, errors + written.get() });
                    badRecords = session.putAttribute(badRecords, "errors", failures.summary());
                    session.transfer(badRecords, INCOMPATIBLE);
                } else {
                    session.remove(badRecords);
                }
            } else {
                session.remove(outgoingAvro);
                if (errors > 0L) {
                    getLogger().warn("Failed to convert {}/{} records from CSV to Avro", new Object[] { errors, errors });
                    badRecords = session.putAttribute(badRecords, "errors", failures.summary());
                } else {
                    badRecords = session.putAttribute(badRecords, "errors", "No incoming records");
                }
                session.transfer(badRecords, FAILURE);
            }
        } catch (ProcessException | DatasetIOException e) {
            getLogger().error("Failed reading or writing", e);
            session.transfer(incomingCSV, FAILURE);
        } catch (DatasetException e) {
            getLogger().error("Failed to read FlowFile", e);
            session.transfer(incomingCSV, FAILURE);
        }
    } catch (final IOException ioe) {
        throw new RuntimeException("Unable to close Avro Writer", ioe);
    }
}
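Every example on this page follows the same core pattern: session.write(flowFile, new StreamCallback() { ... }) hands the callback the FlowFile's current content as an InputStream and a replacement OutputStream, and the framework installs the new content once process() returns. A minimal sketch of that pattern in isolation (this is not the Kite converter; it assumes the usual processor boilerplate, a REL_SUCCESS relationship, and the nifi-utils StreamUtils helper):

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // Sketch only: assumes import org.apache.nifi.stream.io.StreamUtils and a REL_SUCCESS relationship.
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    flowFile = session.write(flowFile, new StreamCallback() {

        @Override
        public void process(final InputStream in, final OutputStream out) throws IOException {
            // A real processor transforms the content here; this sketch just copies it through unchanged.
            StreamUtils.copy(in, out);
        }
    });
    session.transfer(flowFile, REL_SUCCESS);
}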
Use of org.apache.nifi.processor.io.StreamCallback in project nifi by apache.
The class ConvertJSONToAvro, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile incomingJSON = session.get();
    if (incomingJSON == null) {
        return;
    }
    String schemaProperty = context.getProperty(SCHEMA).evaluateAttributeExpressions(incomingJSON).getValue();
    final Schema schema;
    try {
        schema = getSchema(schemaProperty, DefaultConfiguration.get());
    } catch (SchemaNotFoundException e) {
        getLogger().error("Cannot find schema: " + schemaProperty);
        session.transfer(incomingJSON, FAILURE);
        return;
    }
    final DataFileWriter<Record> writer = new DataFileWriter<>(AvroUtil.newDatumWriter(schema, Record.class));
    writer.setCodec(getCodecFactory(context.getProperty(COMPRESSION_TYPE).getValue()));
    try {
        final AtomicLong written = new AtomicLong(0L);
        final FailureTracker failures = new FailureTracker();
        FlowFile badRecords = session.clone(incomingJSON);
        FlowFile outgoingAvro = session.write(incomingJSON, new StreamCallback() {

            @Override
            public void process(InputStream in, OutputStream out) throws IOException {
                try (JSONFileReader<Record> reader = new JSONFileReader<>(in, schema, Record.class)) {
                    reader.initialize();
                    try (DataFileWriter<Record> w = writer.create(schema, out)) {
                        while (reader.hasNext()) {
                            try {
                                Record record = reader.next();
                                w.append(record);
                                written.incrementAndGet();
                            } catch (final DatasetRecordException e) {
                                failures.add(e);
                            }
                        }
                    }
                }
            }
        });
        long errors = failures.count();
        session.adjustCounter("Converted records", written.get(), false);
        session.adjustCounter("Conversion errors", errors, false);
        if (written.get() > 0L) {
            session.transfer(outgoingAvro, SUCCESS);
            if (errors > 0L) {
                getLogger().warn("Failed to convert {}/{} records from JSON to Avro", new Object[] { errors, errors + written.get() });
                badRecords = session.putAttribute(badRecords, "errors", failures.summary());
                session.transfer(badRecords, INCOMPATIBLE);
            } else {
                session.remove(badRecords);
            }
        } else {
            session.remove(outgoingAvro);
            if (errors > 0L) {
                getLogger().warn("Failed to convert {}/{} records from JSON to Avro", new Object[] { errors, errors });
                badRecords = session.putAttribute(badRecords, "errors", failures.summary());
            } else {
                badRecords = session.putAttribute(badRecords, "errors", "No incoming records");
            }
            session.transfer(badRecords, FAILURE);
        }
    } catch (ProcessException | DatasetIOException e) {
        getLogger().error("Failed reading or writing", e);
        session.transfer(incomingJSON, FAILURE);
    } catch (DatasetException e) {
        getLogger().error("Failed to read FlowFile", e);
        session.transfer(incomingJSON, FAILURE);
    } finally {
        try {
            writer.close();
        } catch (IOException e) {
            getLogger().warn("Unable to close writer resource", e);
        }
    }
}
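The JSON converter mirrors the CSV one almost line for line; the main structural difference is how the outer DataFileWriter is released: the CSV version holds it in a try-with-resources block, while the JSON version closes it in a finally block, and both guarantee the writer is closed even if the callback fails. Also note how results escape the anonymous callback: written is an AtomicLong and failures is a FailureTracker because local variables referenced from the anonymous StreamCallback must be effectively final, so plain local counters could not be updated from inside process(). A stripped-down sketch of that capture pattern, with the record handling reduced to a newline counter (the line-by-line copy below is illustrative only, not what the Kite readers do, and REL_SUCCESS plus the surrounding processor class are assumed as in the previous sketch):

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    // The callback captures this counter; it must be effectively final, hence AtomicLong.
    final AtomicLong written = new AtomicLong(0L);
    flowFile = session.write(flowFile, new StreamCallback() {

        @Override
        public void process(final InputStream in, final OutputStream out) throws IOException {
            final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
            final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));
            String line;
            while ((line = reader.readLine()) != null) {
                writer.write(line);
                writer.newLine();
                written.incrementAndGet();
            }
            writer.flush();
        }
    });
    session.adjustCounter("Converted records", written.get(), false);
    session.transfer(flowFile, REL_SUCCESS);
}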
Use of org.apache.nifi.processor.io.StreamCallback in project nifi by apache.
The class ResizeImage, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final int width, height;
    try {
        width = context.getProperty(IMAGE_WIDTH).evaluateAttributeExpressions(flowFile).asInteger();
        height = context.getProperty(IMAGE_HEIGHT).evaluateAttributeExpressions(flowFile).asInteger();
    } catch (final NumberFormatException nfe) {
        getLogger().error("Failed to resize {} due to {}", new Object[] { flowFile, nfe });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final String algorithm = context.getProperty(SCALING_ALGORITHM).getValue();
    final int hints;
    if (algorithm.equalsIgnoreCase(RESIZE_DEFAULT.getValue())) {
        hints = Image.SCALE_DEFAULT;
    } else if (algorithm.equalsIgnoreCase(RESIZE_FAST.getValue())) {
        hints = Image.SCALE_FAST;
    } else if (algorithm.equalsIgnoreCase(RESIZE_SMOOTH.getValue())) {
        hints = Image.SCALE_SMOOTH;
    } else if (algorithm.equalsIgnoreCase(RESIZE_REPLICATE.getValue())) {
        hints = Image.SCALE_REPLICATE;
    } else if (algorithm.equalsIgnoreCase(RESIZE_AREA_AVERAGING.getValue())) {
        hints = Image.SCALE_AREA_AVERAGING;
    } else {
        throw new AssertionError("Invalid Scaling Algorithm: " + algorithm);
    }
    final StopWatch stopWatch = new StopWatch(true);
    try {
        flowFile = session.write(flowFile, new StreamCallback() {

            @Override
            public void process(final InputStream rawIn, final OutputStream out) throws IOException {
                try (final BufferedInputStream in = new BufferedInputStream(rawIn)) {
                    final ImageInputStream iis = ImageIO.createImageInputStream(in);
                    if (iis == null) {
                        throw new ProcessException("FlowFile is not in a valid format");
                    }
                    final Iterator<ImageReader> readers = ImageIO.getImageReaders(iis);
                    if (!readers.hasNext()) {
                        throw new ProcessException("FlowFile is not in a valid format");
                    }
                    final ImageReader reader = readers.next();
                    final String formatName = reader.getFormatName();
                    reader.setInput(iis, true);
                    final BufferedImage image = reader.read(0);
                    final Image scaledImage = image.getScaledInstance(width, height, hints);
                    final BufferedImage scaledBufferedImg;
                    if (scaledImage instanceof BufferedImage) {
                        scaledBufferedImg = (BufferedImage) scaledImage;
                    } else {
                        scaledBufferedImg = new BufferedImage(scaledImage.getWidth(null), scaledImage.getHeight(null), image.getType());
                        final Graphics2D graphics = scaledBufferedImg.createGraphics();
                        try {
                            graphics.drawImage(scaledImage, 0, 0, null);
                        } finally {
                            graphics.dispose();
                        }
                    }
                    ImageIO.write(scaledBufferedImg, formatName, out);
                }
            }
        });
        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final ProcessException pe) {
        getLogger().error("Failed to resize {} due to {}", new Object[] { flowFile, pe });
        session.transfer(flowFile, REL_FAILURE);
    }
}
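The callback body here is plain javax.imageio and java.awt code, so the scale-then-redraw step can be exercised outside NiFi as well. A standalone sketch for experimenting with the scaling hints; the file names and the fixed 200x200 target are placeholders, and TYPE_INT_RGB is substituted for the source image type used by the processor:

// Standalone sketch, not part of the processor.
import javax.imageio.ImageIO;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;

public class ScaleImageSketch {

    public static void main(final String[] args) throws IOException {
        final BufferedImage source = ImageIO.read(new File("input.png"));
        final Image scaled = source.getScaledInstance(200, 200, Image.SCALE_SMOOTH);
        // getScaledInstance rarely returns a BufferedImage, so redraw it onto one,
        // just as the processor does before calling ImageIO.write.
        final BufferedImage canvas = new BufferedImage(200, 200, BufferedImage.TYPE_INT_RGB);
        final Graphics2D graphics = canvas.createGraphics();
        try {
            graphics.drawImage(scaled, 0, 0, null);
        } finally {
            graphics.dispose();
        }
        ImageIO.write(canvas, "png", new File("output.png"));
    }
}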
Use of org.apache.nifi.processor.io.StreamCallback in project nifi by apache.
The class OpenPGPKeyBasedEncryptorTest, method testShouldEncryptAndDecrypt.
@Test
public void testShouldEncryptAndDecrypt() throws Exception {
    // Arrange
    final String PLAINTEXT = "This is a plaintext message.";
    logger.info("Plaintext: {}", PLAINTEXT);
    InputStream plainStream = new ByteArrayInputStream(PLAINTEXT.getBytes("UTF-8"));
    OutputStream cipherStream = new ByteArrayOutputStream();
    OutputStream recoveredStream = new ByteArrayOutputStream();
    // No file, just streams
    String filename = "tempFile.txt";
    // Encryptor does not require password
    OpenPGPKeyBasedEncryptor encryptor = new OpenPGPKeyBasedEncryptor(EncryptionMethod.PGP.getAlgorithm(), EncryptionMethod.PGP.getProvider(), PUBLIC_KEYRING_PATH, USER_ID, new char[0], filename);
    StreamCallback encryptionCallback = encryptor.getEncryptionCallback();
    OpenPGPKeyBasedEncryptor decryptor = new OpenPGPKeyBasedEncryptor(EncryptionMethod.PGP.getAlgorithm(), EncryptionMethod.PGP.getProvider(), SECRET_KEYRING_PATH, USER_ID, PASSWORD.toCharArray(), filename);
    StreamCallback decryptionCallback = decryptor.getDecryptionCallback();
    // Act
    encryptionCallback.process(plainStream, cipherStream);
    final byte[] cipherBytes = ((ByteArrayOutputStream) cipherStream).toByteArray();
    logger.info("Encrypted: {}", Hex.encodeHexString(cipherBytes));
    InputStream cipherInputStream = new ByteArrayInputStream(cipherBytes);
    decryptionCallback.process(cipherInputStream, recoveredStream);
    // Assert
    byte[] recoveredBytes = ((ByteArrayOutputStream) recoveredStream).toByteArray();
    String recovered = new String(recoveredBytes, "UTF-8");
    logger.info("Recovered: {}", recovered);
    Assert.assertEquals(PLAINTEXT, recovered);
}
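Because getEncryptionCallback() and getDecryptionCallback() return ordinary StreamCallback instances, the test can drive them directly with in-memory streams; inside a processor the same objects would simply be passed to session.write(). A sketch of that wiring, assuming an already-constructed encryptor like the one in the test and a REL_SUCCESS relationship:

// Sketch only: 'encryptor' is assumed to be a configured OpenPGPKeyBasedEncryptor, as in the test above.
FlowFile flowFile = session.get();
if (flowFile == null) {
    return;
}
final StreamCallback encryptionCallback = encryptor.getEncryptionCallback();
flowFile = session.write(flowFile, encryptionCallback);
session.transfer(flowFile, REL_SUCCESS);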
Use of org.apache.nifi.processor.io.StreamCallback in project nifi by apache.
The class OpenPGPKeyBasedEncryptorTest, method testShouldDecryptExternalFile.
@Test
public void testShouldDecryptExternalFile() throws Exception {
    // Arrange
    byte[] plainBytes = Files.readAllBytes(Paths.get(plainFile.getPath()));
    final String PLAINTEXT = new String(plainBytes, "UTF-8");
    InputStream cipherStream = new FileInputStream(unsignedFile);
    OutputStream recoveredStream = new ByteArrayOutputStream();
    // No file, just streams
    String filename = unsignedFile.getName();
    OpenPGPKeyBasedEncryptor encryptor = new OpenPGPKeyBasedEncryptor(EncryptionMethod.PGP.getAlgorithm(), EncryptionMethod.PGP.getProvider(), SECRET_KEYRING_PATH, USER_ID, PASSWORD.toCharArray(), filename);
    StreamCallback decryptionCallback = encryptor.getDecryptionCallback();
    // Act
    decryptionCallback.process(cipherStream, recoveredStream);
    // Assert
    byte[] recoveredBytes = ((ByteArrayOutputStream) recoveredStream).toByteArray();
    String recovered = new String(recoveredBytes, "UTF-8");
    logger.info("Recovered: {}", recovered);
    Assert.assertEquals("Recovered text", PLAINTEXT, recovered);
}