
Example 11 with InputStreamCallback

Use of org.apache.nifi.processor.io.InputStreamCallback in project nifi by apache: the PublishKafkaRecord_1_0 class, onTrigger method.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final List<FlowFile> flowFiles = session.get(FlowFileFilters.newSizeBasedFilter(1, DataUnit.MB, 500));
    if (flowFiles.isEmpty()) {
        return;
    }
    final PublisherPool pool = getPublisherPool(context);
    if (pool == null) {
        context.yield();
        return;
    }
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final boolean useTransactions = context.getProperty(USE_TRANSACTIONS).asBoolean();
    final long startTime = System.nanoTime();
    try (final PublisherLease lease = pool.obtainPublisher()) {
        if (useTransactions) {
            lease.beginTransaction();
        }
        // Send each FlowFile to Kafka asynchronously.
        final Iterator<FlowFile> itr = flowFiles.iterator();
        while (itr.hasNext()) {
            final FlowFile flowFile = itr.next();
            if (!isScheduled()) {
                // If stopped, re-queue FlowFile instead of sending it
                if (useTransactions) {
                    session.rollback();
                    lease.rollback();
                    return;
                }
                session.transfer(flowFile);
                itr.remove();
                continue;
            }
            final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(flowFile).getValue();
            final String messageKeyField = context.getProperty(MESSAGE_KEY_FIELD).evaluateAttributeExpressions(flowFile).getValue();
            try {
                session.read(flowFile, new InputStreamCallback() {

                    @Override
                    public void process(final InputStream rawIn) throws IOException {
                        try (final InputStream in = new BufferedInputStream(rawIn)) {
                            final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger());
                            final RecordSet recordSet = reader.createRecordSet();
                            final RecordSchema schema = writerFactory.getSchema(flowFile.getAttributes(), recordSet.getSchema());
                            lease.publish(flowFile, recordSet, writerFactory, schema, messageKeyField, topic);
                        } catch (final SchemaNotFoundException | MalformedRecordException e) {
                            throw new ProcessException(e);
                        }
                    }
                });
            } catch (final Exception e) {
                // The error is surfaced below, after lease.complete() reports the failure.
                lease.fail(flowFile, e);
                continue;
            }
        }
        // Complete the send
        final PublishResult publishResult = lease.complete();
        if (publishResult.isFailure()) {
            getLogger().info("Failed to send FlowFile to kafka; transferring to failure");
            session.transfer(flowFiles, REL_FAILURE);
            return;
        }
        // Transfer any successful FlowFiles.
        final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
        for (FlowFile success : flowFiles) {
            final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(success).getValue();
            final int msgCount = publishResult.getSuccessfulMessageCount(success);
            success = session.putAttribute(success, MSG_COUNT, String.valueOf(msgCount));
            session.adjustCounter("Messages Sent", msgCount, true);
            final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, topic);
            session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
            session.transfer(success, REL_SUCCESS);
        }
    }
}
Also used: FlowFile (org.apache.nifi.flowfile.FlowFile), BufferedInputStream (java.io.BufferedInputStream), InputStream (java.io.InputStream), RecordReader (org.apache.nifi.serialization.RecordReader), IOException (java.io.IOException), SchemaNotFoundException (org.apache.nifi.schema.access.SchemaNotFoundException), ProcessException (org.apache.nifi.processor.exception.ProcessException), MalformedRecordException (org.apache.nifi.serialization.MalformedRecordException), RecordReaderFactory (org.apache.nifi.serialization.RecordReaderFactory), RecordSetWriterFactory (org.apache.nifi.serialization.RecordSetWriterFactory), InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback), RecordSet (org.apache.nifi.serialization.record.RecordSet), RecordSchema (org.apache.nifi.serialization.record.RecordSchema)
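
Since InputStreamCallback declares a single process(InputStream) method, the anonymous class in the example above can be collapsed to a lambda on Java 8 and later. A minimal sketch of the same read shape; the RecordPublisher interface below is a hypothetical stand-in for lease.publish, not part of the NiFi API.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;

public class ReadLambdaSketch {

    // Hypothetical hook standing in for lease.publish(...) in the example above.
    interface RecordPublisher {
        void publish(InputStream in) throws IOException;
    }

    static void readAndPublish(final ProcessSession session, final FlowFile flowFile,
                               final RecordPublisher publisher) {
        // ProcessSession.read takes an InputStreamCallback; a lambda satisfies it.
        session.read(flowFile, rawIn -> {
            // Buffer the raw content-repository stream before parsing records from it.
            try (final InputStream in = new BufferedInputStream(rawIn)) {
                publisher.publish(in);
            }
        });
    }
}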

Example 12 with InputStreamCallback

Use of org.apache.nifi.processor.io.InputStreamCallback in project nifi by apache: the PublishKafka_1_0 class, onTrigger method.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final boolean useDemarcator = context.getProperty(MESSAGE_DEMARCATOR).isSet();
    final List<FlowFile> flowFiles = session.get(FlowFileFilters.newSizeBasedFilter(250, DataUnit.KB, 500));
    if (flowFiles.isEmpty()) {
        return;
    }
    final PublisherPool pool = getPublisherPool(context);
    if (pool == null) {
        context.yield();
        return;
    }
    final String securityProtocol = context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL).getValue();
    final String bootstrapServers = context.getProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();
    final boolean useTransactions = context.getProperty(USE_TRANSACTIONS).asBoolean();
    final long startTime = System.nanoTime();
    try (final PublisherLease lease = pool.obtainPublisher()) {
        if (useTransactions) {
            lease.beginTransaction();
        }
        // Send each FlowFile to Kafka asynchronously.
        for (final FlowFile flowFile : flowFiles) {
            if (!isScheduled()) {
                // If stopped, re-queue FlowFile instead of sending it
                if (useTransactions) {
                    session.rollback();
                    lease.rollback();
                    return;
                }
                session.transfer(flowFile);
                continue;
            }
            final byte[] messageKey = getMessageKey(flowFile, context);
            final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(flowFile).getValue();
            final byte[] demarcatorBytes;
            if (useDemarcator) {
                demarcatorBytes = context.getProperty(MESSAGE_DEMARCATOR).evaluateAttributeExpressions(flowFile).getValue().getBytes(StandardCharsets.UTF_8);
            } else {
                demarcatorBytes = null;
            }
            session.read(flowFile, new InputStreamCallback() {

                @Override
                public void process(final InputStream rawIn) throws IOException {
                    try (final InputStream in = new BufferedInputStream(rawIn)) {
                        lease.publish(flowFile, in, messageKey, demarcatorBytes, topic);
                    }
                }
            });
        }
        // Complete the send
        final PublishResult publishResult = lease.complete();
        if (publishResult.isFailure()) {
            getLogger().info("Failed to send FlowFile to kafka; transferring to failure");
            session.transfer(flowFiles, REL_FAILURE);
            return;
        }
        // Transfer any successful FlowFiles.
        final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
        for (FlowFile success : flowFiles) {
            final String topic = context.getProperty(TOPIC).evaluateAttributeExpressions(success).getValue();
            final int msgCount = publishResult.getSuccessfulMessageCount(success);
            success = session.putAttribute(success, MSG_COUNT, String.valueOf(msgCount));
            session.adjustCounter("Messages Sent", msgCount, true);
            final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, topic);
            session.getProvenanceReporter().send(success, transitUri, "Sent " + msgCount + " messages", transmissionMillis);
            session.transfer(success, REL_SUCCESS);
        }
    }
}
Also used: FlowFile (org.apache.nifi.flowfile.FlowFile), BufferedInputStream (java.io.BufferedInputStream), InputStream (java.io.InputStream), IOException (java.io.IOException), InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback)
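
In this example the demarcator handling lives inside lease.publish, which splits the buffered stream into individual Kafka messages. As a rough illustration of those semantics only: the sketch below buffers the whole stream in memory, which the real NiFi code deliberately avoids by streaming, and the helper names are assumptions.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DemarcatorSketch {

    // Splits stream content on a demarcator byte sequence. Buffering everything
    // only illustrates the semantics; PublisherLease streams instead.
    static List<byte[]> splitOnDemarcator(final InputStream in, final byte[] demarcator) throws IOException {
        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        final byte[] chunk = new byte[8192];
        int read;
        while ((read = in.read(chunk)) != -1) {
            buffer.write(chunk, 0, read);
        }
        final byte[] all = buffer.toByteArray();
        final List<byte[]> messages = new ArrayList<>();
        int start = 0;
        for (int i = 0; i + demarcator.length <= all.length; i++) {
            if (matches(all, i, demarcator)) {
                messages.add(Arrays.copyOfRange(all, start, i));
                start = i + demarcator.length;
                i = start - 1; // resume scanning just past the demarcator
            }
        }
        messages.add(Arrays.copyOfRange(all, start, all.length));
        return messages;
    }

    private static boolean matches(final byte[] data, final int offset, final byte[] demarcator) {
        for (int i = 0; i < demarcator.length; i++) {
            if (data[offset + i] != demarcator[i]) {
                return false;
            }
        }
        return true;
    }
}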

Example 13 with InputStreamCallback

Use of org.apache.nifi.processor.io.InputStreamCallback in project nifi by apache: the ConvertExcelToCSVProcessor class, onTrigger method.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String desiredSheetsDelimited = context.getProperty(DESIRED_SHEETS).evaluateAttributeExpressions().getValue();
    final boolean formatValues = context.getProperty(FORMAT_VALUES).asBoolean();
    final CSVFormat csvFormat = CSVUtils.createCSVFormat(context);
    // Switch to 0 based index
    final int firstRow = context.getProperty(ROWS_TO_SKIP).asInteger() - 1;
    final String[] sColumnsToSkip = StringUtils.split(context.getProperty(COLUMNS_TO_SKIP).getValue(), ",");
    final List<Integer> columnsToSkip = new ArrayList<>();
    if (sColumnsToSkip != null && sColumnsToSkip.length > 0) {
        for (String c : sColumnsToSkip) {
            try {
                // Switch to 0 based index
                columnsToSkip.add(Integer.parseInt(c) - 1);
            } catch (NumberFormatException e) {
                throw new ProcessException("Invalid column in Columns to Skip list.", e);
            }
        }
    }
    try {
        session.read(flowFile, new InputStreamCallback() {

            @Override
            public void process(InputStream inputStream) throws IOException {
                try {
                    OPCPackage pkg = OPCPackage.open(inputStream);
                    XSSFReader r = new XSSFReader(pkg);
                    ReadOnlySharedStringsTable sst = new ReadOnlySharedStringsTable(pkg);
                    StylesTable styles = r.getStylesTable();
                    XSSFReader.SheetIterator iter = (XSSFReader.SheetIterator) r.getSheetsData();
                    if (desiredSheetsDelimited != null) {
                        String[] desiredSheets = StringUtils.split(desiredSheetsDelimited, DESIRED_SHEETS_DELIMITER);
                        if (desiredSheets != null) {
                            while (iter.hasNext()) {
                                InputStream sheet = iter.next();
                                String sheetName = iter.getSheetName();
                                for (int i = 0; i < desiredSheets.length; i++) {
                                    // If the sheetName is a desired one parse it
                                    if (sheetName.equalsIgnoreCase(desiredSheets[i])) {
                                        ExcelSheetReadConfig readConfig = new ExcelSheetReadConfig(columnsToSkip, firstRow, sheetName, formatValues, sst, styles);
                                        handleExcelSheet(session, flowFile, sheet, readConfig, csvFormat);
                                        break;
                                    }
                                }
                            }
                        } else {
                            getLogger().debug("Excel document was parsed but no sheets with the specified desired names were found.");
                        }
                    } else {
                        // Get all of the sheets in the document.
                        while (iter.hasNext()) {
                            InputStream sheet = iter.next();
                            String sheetName = iter.getSheetName();
                            ExcelSheetReadConfig readConfig = new ExcelSheetReadConfig(columnsToSkip, firstRow, sheetName, formatValues, sst, styles);
                            handleExcelSheet(session, flowFile, sheet, readConfig, csvFormat);
                        }
                    }
                } catch (InvalidFormatException ife) {
                    getLogger().error("Only .xlsx Excel 2007 OOXML files are supported", ife);
                    throw new UnsupportedOperationException("Only .xlsx Excel 2007 OOXML files are supported", ife);
                } catch (OpenXML4JException | SAXException e) {
                    getLogger().error("Error occurred while processing Excel document metadata", e);
                }
            }
        });
        session.transfer(flowFile, ORIGINAL);
    } catch (RuntimeException ex) {
        getLogger().error("Failed to process incoming Excel document. " + ex.getMessage(), ex);
        FlowFile failedFlowFile = session.putAttribute(flowFile, ConvertExcelToCSVProcessor.class.getName() + ".error", ex.getMessage());
        session.transfer(failedFlowFile, FAILURE);
    }
}
Also used: ReadOnlySharedStringsTable (org.apache.poi.xssf.eventusermodel.ReadOnlySharedStringsTable), ArrayList (java.util.ArrayList), StylesTable (org.apache.poi.xssf.model.StylesTable), InvalidFormatException (org.apache.poi.openxml4j.exceptions.InvalidFormatException), FlowFile (org.apache.nifi.flowfile.FlowFile), InputStream (java.io.InputStream), IOException (java.io.IOException), ProcessException (org.apache.nifi.processor.exception.ProcessException), InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback), CSVFormat (org.apache.commons.csv.CSVFormat), OPCPackage (org.apache.poi.openxml4j.opc.OPCPackage), XSSFReader (org.apache.poi.xssf.eventusermodel.XSSFReader)
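
Worth noting in the failure branch above: the processor records the error message as a FlowFile attribute before routing to FAILURE, so the reason travels with the data rather than only appearing in logs. A minimal sketch of that pattern in isolation; the relationship and the attribute key below are placeholders, not the processor's actual names.

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;

public class FailureRoutingSketch {

    // Hypothetical failure relationship; real processors declare their own set.
    static final Relationship REL_FAILURE = new Relationship.Builder()
            .name("failure")
            .description("FlowFiles that could not be processed")
            .build();

    static void routeToFailure(final ProcessSession session, final FlowFile flowFile, final Exception cause) {
        // Attach the reason as an attribute so it is visible in provenance and
        // to downstream processors, then route to the failure relationship.
        final String message = cause.getMessage() == null ? cause.getClass().getName() : cause.getMessage();
        final FlowFile failed = session.putAttribute(flowFile, "example.error", message);
        session.transfer(failed, REL_FAILURE);
    }
}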

Example 14 with InputStreamCallback

Use of org.apache.nifi.processor.io.InputStreamCallback in project nifi by apache: the PutFileTransfer class, onTrigger method.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    final String hostname = context.getProperty(FileTransfer.HOSTNAME).evaluateAttributeExpressions(flowFile).getValue();
    final int maxNumberOfFiles = context.getProperty(FileTransfer.BATCH_SIZE).asInteger();
    int fileCount = 0;
    try (final T transfer = getFileTransfer(context)) {
        do {
            final String rootPath = context.getProperty(FileTransfer.REMOTE_PATH).evaluateAttributeExpressions(flowFile).getValue();
            final String workingDirPath;
            if (rootPath == null) {
                workingDirPath = null;
            } else {
                File workingDirectory = new File(rootPath);
                if (!workingDirectory.getPath().startsWith("/") && !workingDirectory.getPath().startsWith("\\")) {
                    workingDirectory = new File(transfer.getHomeDirectory(flowFile), workingDirectory.getPath());
                }
                workingDirPath = workingDirectory.getPath().replace("\\", "/");
            }
            final boolean rejectZeroByteFiles = context.getProperty(FileTransfer.REJECT_ZERO_BYTE).asBoolean();
            final ConflictResult conflictResult = identifyAndResolveConflictFile(context.getProperty(FileTransfer.CONFLICT_RESOLUTION).getValue(), transfer, workingDirPath, flowFile, rejectZeroByteFiles, logger);
            if (conflictResult.isTransfer()) {
                final StopWatch stopWatch = new StopWatch();
                stopWatch.start();
                beforePut(flowFile, context, transfer);
                final FlowFile flowFileToTransfer = flowFile;
                final AtomicReference<String> fullPathRef = new AtomicReference<>(null);
                session.read(flowFile, new InputStreamCallback() {

                    @Override
                    public void process(final InputStream in) throws IOException {
                        try (final InputStream bufferedIn = new BufferedInputStream(in)) {
                            if (workingDirPath != null && context.getProperty(SFTPTransfer.CREATE_DIRECTORY).asBoolean()) {
                                transfer.ensureDirectoryExists(flowFileToTransfer, new File(workingDirPath));
                            }
                            fullPathRef.set(transfer.put(flowFileToTransfer, workingDirPath, conflictResult.getFileName(), bufferedIn));
                        }
                    }
                });
                afterPut(flowFile, context, transfer);
                stopWatch.stop();
                final String dataRate = stopWatch.calculateDataRate(flowFile.getSize());
                final long millis = stopWatch.getDuration(TimeUnit.MILLISECONDS);
                logger.info("Successfully transferred {} to {} on remote host {} in {} milliseconds at a rate of {}", new Object[] { flowFile, fullPathRef.get(), hostname, millis, dataRate });
                String fullPathWithSlash = fullPathRef.get();
                if (!fullPathWithSlash.startsWith("/")) {
                    fullPathWithSlash = "/" + fullPathWithSlash;
                }
                final String destinationUri = transfer.getProtocolName() + "://" + hostname + fullPathWithSlash;
                session.getProvenanceReporter().send(flowFile, destinationUri, millis);
            }
            if (conflictResult.isPenalize()) {
                flowFile = session.penalize(flowFile);
            }
            session.transfer(flowFile, conflictResult.getRelationship());
            session.commit();
        } while (isScheduled() && (getRelationships().size() == context.getAvailableRelationships().size()) && (++fileCount < maxNumberOfFiles) && ((flowFile = session.get()) != null));
    } catch (final IOException e) {
        context.yield();
        logger.error("Unable to transfer {} to remote host {} due to {}", new Object[] { flowFile, hostname, e });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    } catch (final FlowFileAccessException e) {
        context.yield();
        logger.error("Unable to transfer {} to remote host {} due to {}", new Object[] { flowFile, hostname, e.getCause() });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    } catch (final ProcessException e) {
        context.yield();
        logger.error("Unable to transfer {} to remote host {} due to {}: {}; routing to failure", new Object[] { flowFile, hostname, e, e.getCause() });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }
}
Also used: FlowFile (org.apache.nifi.flowfile.FlowFile), FlowFileAccessException (org.apache.nifi.processor.exception.FlowFileAccessException), BufferedInputStream (org.apache.nifi.stream.io.BufferedInputStream), InputStream (java.io.InputStream), AtomicReference (java.util.concurrent.atomic.AtomicReference), IOException (java.io.IOException), ComponentLog (org.apache.nifi.logging.ComponentLog), StopWatch (org.apache.nifi.util.StopWatch), ProcessException (org.apache.nifi.processor.exception.ProcessException), InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback), File (java.io.File)
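
Because InputStreamCallback.process returns void, the example above passes the remote path out of the callback through an AtomicReference. A minimal sketch of that extraction pattern, where computeResult is a hypothetical stand-in for the real work (transfer.put in the example):

import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;

public class CallbackResultSketch {

    static String readAndExtract(final ProcessSession session, final FlowFile flowFile) {
        // The callback cannot return a value, so capture it in a holder that the
        // enclosing method reads after session.read(...) completes synchronously.
        final AtomicReference<String> resultRef = new AtomicReference<>(null);
        session.read(flowFile, in -> resultRef.set(computeResult(in)));
        return resultRef.get();
    }

    // Hypothetical stand-in for the work done inside the callback, e.g.
    // transfer.put(...) in the PutFileTransfer example above.
    private static String computeResult(final InputStream in) throws IOException {
        long bytes = 0;
        while (in.read() != -1) {
            bytes++;
        }
        return bytes + " bytes read";
    }
}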

Example 15 with InputStreamCallback

Use of org.apache.nifi.processor.io.InputStreamCallback in project nifi by apache: the PutJMS class, onTrigger method.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final List<FlowFile> flowFiles = session.get(context.getProperty(BATCH_SIZE).asInteger().intValue());
    if (flowFiles.isEmpty()) {
        return;
    }
    WrappedMessageProducer wrappedProducer = producerQueue.poll();
    if (wrappedProducer == null) {
        try {
            wrappedProducer = JmsFactory.createMessageProducer(context, true);
            logger.info("Connected to JMS server {}", new Object[] { context.getProperty(URL).getValue() });
        } catch (final JMSException e) {
            logger.error("Failed to connect to JMS Server due to {}", new Object[] { e });
            session.transfer(flowFiles, REL_FAILURE);
            context.yield();
            return;
        }
    }
    final Session jmsSession = wrappedProducer.getSession();
    final MessageProducer producer = wrappedProducer.getProducer();
    final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
    try {
        final Set<FlowFile> successfulFlowFiles = new HashSet<>();
        for (FlowFile flowFile : flowFiles) {
            if (flowFile.getSize() > maxBufferSize) {
                session.transfer(flowFile, REL_FAILURE);
                logger.warn("Routing {} to failure because its size exceeds the configured max", new Object[] { flowFile });
                continue;
            }
            // Read the contents of the FlowFile into a byte array
            final byte[] messageContent = new byte[(int) flowFile.getSize()];
            session.read(flowFile, new InputStreamCallback() {

                @Override
                public void process(final InputStream in) throws IOException {
                    StreamUtils.fillBuffer(in, messageContent, true);
                }
            });
            final Long ttl = context.getProperty(MESSAGE_TTL).asTimePeriod(TimeUnit.MILLISECONDS);
            final String replyToQueueName = context.getProperty(REPLY_TO_QUEUE).evaluateAttributeExpressions(flowFile).getValue();
            final Destination replyToQueue = replyToQueueName == null ? null : JmsFactory.createQueue(context, replyToQueueName);
            int priority = DEFAULT_MESSAGE_PRIORITY;
            try {
                final Integer priorityInt = context.getProperty(MESSAGE_PRIORITY).evaluateAttributeExpressions(flowFile).asInteger();
                priority = priorityInt == null ? priority : priorityInt;
            } catch (final NumberFormatException e) {
                logger.warn("Invalid value for JMS Message Priority: {}; defaulting to priority of {}", new Object[] { context.getProperty(MESSAGE_PRIORITY).evaluateAttributeExpressions(flowFile).getValue(), DEFAULT_MESSAGE_PRIORITY });
            }
            try {
                final Message message = createMessage(jmsSession, context, messageContent, flowFile, replyToQueue, priority);
                if (ttl == null) {
                    producer.setTimeToLive(0L);
                } else {
                    producer.setTimeToLive(ttl);
                }
                producer.send(message);
            } catch (final JMSException e) {
                logger.error("Failed to send {} to JMS Server due to {}", new Object[] { flowFile, e });
                session.transfer(flowFiles, REL_FAILURE);
                context.yield();
                try {
                    jmsSession.rollback();
                } catch (final JMSException jmse) {
                    logger.warn("Unable to roll back JMS Session due to {}", new Object[] { jmse });
                }
                wrappedProducer.close(logger);
                return;
            }
            successfulFlowFiles.add(flowFile);
            session.getProvenanceReporter().send(flowFile, context.getProperty(URL).getValue());
        }
        try {
            jmsSession.commit();
            session.transfer(successfulFlowFiles, REL_SUCCESS);
            final String flowFileDescription = successfulFlowFiles.size() > 10 ? successfulFlowFiles.size() + " FlowFiles" : successfulFlowFiles.toString();
            logger.info("Sent {} to JMS Server and transferred to 'success'", new Object[] { flowFileDescription });
        } catch (JMSException e) {
            logger.error("Failed to commit JMS Session due to {} and transferred to 'failure'", new Object[] { e });
            session.transfer(flowFiles, REL_FAILURE);
            context.yield();
            wrappedProducer.close(logger);
        }
    } finally {
        if (!wrappedProducer.isClosed()) {
            producerQueue.offer(wrappedProducer);
        }
    }
}
Also used: FlowFile (org.apache.nifi.flowfile.FlowFile), Destination (javax.jms.Destination), Message (javax.jms.Message), StreamMessage (javax.jms.StreamMessage), BytesMessage (javax.jms.BytesMessage), InputStream (java.io.InputStream), JMSException (javax.jms.JMSException), IOException (java.io.IOException), ComponentLog (org.apache.nifi.logging.ComponentLog), WrappedMessageProducer (org.apache.nifi.processors.standard.util.WrappedMessageProducer), InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback), MessageProducer (javax.jms.MessageProducer), Session (javax.jms.Session), ProcessSession (org.apache.nifi.processor.ProcessSession), HashSet (java.util.HashSet)
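
The read in this example shows the standard way to materialize a FlowFile's content as a byte array: size the buffer from flowFile.getSize() and fill it with StreamUtils.fillBuffer. A minimal sketch of that pattern on its own; the explicit size guard is an addition here, since getSize() returns a long.

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.stream.io.StreamUtils;

public class FillBufferSketch {

    static byte[] readFully(final ProcessSession session, final FlowFile flowFile) {
        // Guard before narrowing the long size to an int array length.
        if (flowFile.getSize() > Integer.MAX_VALUE) {
            throw new IllegalArgumentException("FlowFile too large to buffer in memory: " + flowFile);
        }
        final byte[] content = new byte[(int) flowFile.getSize()];
        // The 'true' argument makes fillBuffer throw if the stream ends early,
        // so a short read cannot silently pass as success.
        session.read(flowFile, in -> StreamUtils.fillBuffer(in, content, true));
        return content;
    }
}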

Aggregations

Types most often used alongside InputStreamCallback across the 80 examples:

IOException (java.io.IOException): 80
InputStream (java.io.InputStream): 80
InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback): 80
FlowFile (org.apache.nifi.flowfile.FlowFile): 62
ProcessException (org.apache.nifi.processor.exception.ProcessException): 35
ComponentLog (org.apache.nifi.logging.ComponentLog): 27
HashMap (java.util.HashMap): 25
AtomicReference (java.util.concurrent.atomic.AtomicReference): 23
OutputStream (java.io.OutputStream): 19
BufferedInputStream (java.io.BufferedInputStream): 18
ArrayList (java.util.ArrayList): 17
Map (java.util.Map): 17
OutputStreamCallback (org.apache.nifi.processor.io.OutputStreamCallback): 13
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 11
BufferedInputStream (org.apache.nifi.stream.io.BufferedInputStream): 10
StopWatch (org.apache.nifi.util.StopWatch): 10
HashSet (java.util.HashSet): 9
Charset (java.nio.charset.Charset): 8
FileInputStream (java.io.FileInputStream): 7
ProcessSession (org.apache.nifi.processor.ProcessSession): 7