Example 1 with FlowFileHandlingException

Use of org.apache.nifi.processor.exception.FlowFileHandlingException in project nifi by apache.

Class ExtractEmailAttachments, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final ComponentLog logger = getLogger();
    final FlowFile originalFlowFile = session.get();
    if (originalFlowFile == null) {
        return;
    }
    final List<FlowFile> attachmentsList = new ArrayList<>();
    final List<FlowFile> invalidFlowFilesList = new ArrayList<>();
    final List<FlowFile> originalFlowFilesList = new ArrayList<>();
    // Controls the "mail.mime.address.strict" JavaMail session property set below
    final String requireStrictAddresses = "false";
    session.read(originalFlowFile, new InputStreamCallback() {

        @Override
        public void process(final InputStream rawIn) throws IOException {
            try (final InputStream in = new BufferedInputStream(rawIn)) {
                Properties props = new Properties();
                props.put("mail.mime.address.strict", requireStrictAddresses);
                Session mailSession = Session.getInstance(props);
                MimeMessage originalMessage = new MimeMessage(mailSession, in);
                MimeMessageParser parser = new MimeMessageParser(originalMessage).parse();
                // RFC 2822 requires every message to carry a "From:" header;
                // if the field is missing, the message is flagged as invalid
                Address[] from = originalMessage.getFrom();
                if (from == null) {
                    throw new MessagingException("Message failed RFC-2822 validation: No Sender");
                }
                Date sentDate = originalMessage.getSentDate();
                if (sentDate == null) {
                    // Throws MessagingException due to lack of minimum required headers
                    throw new MessagingException("Message failed RFC-2822 validation: No Sent Date");
                }
                originalFlowFilesList.add(originalFlowFile);
                if (parser.hasAttachments()) {
                    final String originalFlowFileName = originalFlowFile.getAttribute(CoreAttributes.FILENAME.key());
                    try {
                        for (final DataSource data : parser.getAttachmentList()) {
                            FlowFile split = session.create(originalFlowFile);
                            final Map<String, String> attributes = new HashMap<>();
                            if (StringUtils.isNotBlank(data.getName())) {
                                attributes.put(CoreAttributes.FILENAME.key(), data.getName());
                            }
                            if (StringUtils.isNotBlank(data.getContentType())) {
                                attributes.put(CoreAttributes.MIME_TYPE.key(), data.getContentType());
                            }
                            String parentUuid = originalFlowFile.getAttribute(CoreAttributes.UUID.key());
                            attributes.put(ATTACHMENT_ORIGINAL_UUID, parentUuid);
                            attributes.put(ATTACHMENT_ORIGINAL_FILENAME, originalFlowFileName);
                            split = session.append(split, new OutputStreamCallback() {

                                @Override
                                public void process(OutputStream out) throws IOException {
                                    IOUtils.copy(data.getInputStream(), out);
                                }
                            });
                            split = session.putAllAttributes(split, attributes);
                            attachmentsList.add(split);
                        }
                    } catch (FlowFileHandlingException e) {
                        // Something went wrong
                        // Removing splits that may have been created
                        session.remove(attachmentsList);
                        // Removing the original flow from its list
                        originalFlowFilesList.remove(originalFlowFile);
                        logger.error("Flowfile {} triggered error {} while processing message removing generated FlowFiles from sessions", new Object[] { originalFlowFile, e });
                        invalidFlowFilesList.add(originalFlowFile);
                    }
                }
            } catch (Exception e) {
                // Parsing failed entirely, so the message is treated as invalid;
                // remove the original FlowFile from the 'original' list
                originalFlowFilesList.remove(originalFlowFile);
                logger.error("Could not parse the FlowFile {} as an email, treating as failure: {}", new Object[] { originalFlowFile, e });
                invalidFlowFilesList.add(originalFlowFile);
            }
            }
        }
    });
    session.transfer(attachmentsList, REL_ATTACHMENTS);
    // As per the code above, the original FlowFile is routed to failure or
    // original depending on RFC 2822 compliance.
    session.transfer(invalidFlowFilesList, REL_FAILURE);
    session.transfer(originalFlowFilesList, REL_ORIGINAL);
    // For large splits, omit the attachment list from the log message to keep it readable
    if (attachmentsList.size() > 10) {
        logger.info("Split {} into {} files", new Object[] { originalFlowFile, attachmentsList.size() });
    } else if (attachmentsList.size() > 1) {
        logger.info("Split {} into {} files: {}", new Object[] { originalFlowFile, attachmentsList.size(), attachmentsList });
    }
}
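A quick way to exercise this processor is NiFi's nifi-mock test harness. The sketch below is illustrative rather than taken from the project: it assumes the standard TestRunner API and the relationship constants (REL_ORIGINAL, REL_ATTACHMENTS, REL_FAILURE) referenced in onTrigger above, and the sample message bytes are made up.

@Test
public void testSimpleMessageRoutesToOriginal() {
    // Hedged sketch: TestRunners/TestRunner come from the nifi-mock module
    final TestRunner runner = TestRunners.newTestRunner(new ExtractEmailAttachments());
    // Minimal RFC 2822-compliant message: both "From:" and "Date:" headers present
    final String simpleEmail = "From: alice@example.com\r\n" + "Date: Tue, 01 Jan 2019 00:00:00 +0000\r\n" + "Subject: no attachments\r\n" + "\r\n" + "body\r\n";
    runner.enqueue(simpleEmail.getBytes(StandardCharsets.UTF_8));
    runner.run();
    // A parseable message without attachments routes only to 'original'
    runner.assertTransferCount(ExtractEmailAttachments.REL_ORIGINAL, 1);
    runner.assertTransferCount(ExtractEmailAttachments.REL_ATTACHMENTS, 0);
    runner.assertTransferCount(ExtractEmailAttachments.REL_FAILURE, 0);
}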

Example 2 with FlowFileHandlingException

Use of org.apache.nifi.processor.exception.FlowFileHandlingException in project nifi by apache.

Class TestMockProcessSession, method testReadWithoutCloseThrowsExceptionOnCommit:

@Test
public void testReadWithoutCloseThrowsExceptionOnCommit() throws IOException {
    final Processor processor = new PoorlyBehavedProcessor();
    final MockProcessSession session = new MockProcessSession(new SharedSessionState(processor, new AtomicLong(0L)), processor);
    FlowFile flowFile = session.createFlowFile("hello, world".getBytes());
    final InputStream in = session.read(flowFile);
    final byte[] buffer = new byte[12];
    StreamUtils.fillBuffer(in, buffer);
    assertEquals("hello, world", new String(buffer));
    session.remove(flowFile);
    try {
        session.commit();
        Assert.fail("Was able to commit session without closing InputStream");
    } catch (final FlowFileHandlingException ffhe) {
        System.out.println(ffhe.toString());
    }
}
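For contrast, a hedged sketch of the well-behaved pattern using the same scaffolding: closing the InputStream via try-with-resources before commit() leaves no open streams, so the commit succeeds without a FlowFileHandlingException.

@Test
public void testReadWithCloseCommitsCleanly() throws IOException {
    final Processor processor = new PoorlyBehavedProcessor();
    final MockProcessSession session = new MockProcessSession(new SharedSessionState(processor, new AtomicLong(0L)), processor);
    final FlowFile flowFile = session.createFlowFile("hello, world".getBytes());
    // try-with-resources guarantees the stream is closed before commit()
    try (final InputStream in = session.read(flowFile)) {
        final byte[] buffer = new byte[12];
        StreamUtils.fillBuffer(in, buffer);
        assertEquals("hello, world", new String(buffer));
    }
    session.remove(flowFile);
    // No open streams remain, so this no longer throws
    session.commit();
}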

Example 3 with FlowFileHandlingException

Use of org.apache.nifi.processor.exception.FlowFileHandlingException in project nifi by apache.

Class MockProcessSession, method migrate:

private void migrate(final MockProcessSession newOwner, final Collection<MockFlowFile> flowFiles) {
    // First pass: validate every FlowFile up front so no state is mutated
    // if any of them cannot be migrated
    for (final FlowFile flowFile : flowFiles) {
        if (openInputStreams.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " cannot be migrated to a new Process Session because this session currently " + "has an open InputStream for the FlowFile, created by calling ProcessSession.read(FlowFile)");
        }
        if (openOutputStreams.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " cannot be migrated to a new Process Session because this session currently " + "has an open OutputStream for the FlowFile, created by calling ProcessSession.write(FlowFile)");
        }
        if (readRecursionSet.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " already in use for an active callback or InputStream created by ProcessSession.read(FlowFile) has not been closed");
        }
        if (writeRecursionSet.contains(flowFile)) {
            throw new IllegalStateException(flowFile + " already in use for an active callback or OutputStream created by ProcessSession.write(FlowFile) has not been closed");
        }
        final FlowFile currentVersion = currentVersions.get(flowFile.getId());
        if (currentVersion == null) {
            throw new FlowFileHandlingException(flowFile + " is not known in this session");
        }
    }
    // Move any already-transferred FlowFiles into the new owner's transfer map
    for (final Map.Entry<Relationship, List<MockFlowFile>> entry : transferMap.entrySet()) {
        final Relationship relationship = entry.getKey();
        final List<MockFlowFile> transferredFlowFiles = entry.getValue();
        for (final MockFlowFile flowFile : flowFiles) {
            if (transferredFlowFiles.remove(flowFile)) {
                newOwner.transferMap.computeIfAbsent(relationship, rel -> new ArrayList<>()).add(flowFile);
            }
        }
    }
    // Migrate per-FlowFile bookkeeping: processing state, penalties, versions, removals
    for (final MockFlowFile flowFile : flowFiles) {
        if (beingProcessed.remove(flowFile.getId())) {
            newOwner.beingProcessed.add(flowFile.getId());
        }
        if (penalized.remove(flowFile)) {
            newOwner.penalized.add(flowFile);
        }
        if (currentVersions.containsKey(flowFile.getId())) {
            newOwner.currentVersions.put(flowFile.getId(), currentVersions.remove(flowFile.getId()));
        }
        if (originalVersions.containsKey(flowFile.getId())) {
            newOwner.originalVersions.put(flowFile.getId(), originalVersions.remove(flowFile.getId()));
        }
        if (removedFlowFiles.remove(flowFile.getId())) {
            newOwner.removedFlowFiles.add(flowFile.getId());
        }
    }
    final Set<String> flowFileIds = flowFiles.stream().map(ff -> ff.getAttribute(CoreAttributes.UUID.key())).collect(Collectors.toSet());
    provenanceReporter.migrate(newOwner.provenanceReporter, flowFileIds);
}
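The private helper above backs the public ProcessSession.migrate(ProcessSession, Collection) API. Below is a hedged caller-side sketch; the names session and binSession are illustrative, standing for two sessions obtained from the same ProcessSessionFactory, as in a processor that bins FlowFiles across onTrigger invocations.

// Sketch: hand a FlowFile off from one session to another
final FlowFile flowFile = session.get();
if (flowFile != null) {
    // After migrate(), binSession owns the FlowFile; committing or rolling back
    // the source session no longer affects it. Migrating a FlowFile the source
    // session does not know about throws FlowFileHandlingException.
    session.migrate(binSession, Collections.singleton(flowFile));
    binSession.commit();
}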

Example 4 with FlowFileHandlingException

Use of org.apache.nifi.processor.exception.FlowFileHandlingException in project nifi by apache.

Class StandardProcessSession, method enrich:

@Override
public StandardProvenanceEventRecord enrich(final ProvenanceEventRecord rawEvent, final FlowFile flowFile) {
    verifyTaskActive();
    final StandardRepositoryRecord repoRecord = records.get(flowFile);
    if (repoRecord == null) {
        throw new FlowFileHandlingException(flowFile + " is not known in this session (" + toString() + ")");
    }
    final StandardProvenanceEventRecord.Builder recordBuilder = new StandardProvenanceEventRecord.Builder().fromEvent(rawEvent);
    // Describe the FlowFile's current content claim, if it has one
    if (repoRecord.getCurrent() != null && repoRecord.getCurrentClaim() != null) {
        final ContentClaim currentClaim = repoRecord.getCurrentClaim();
        final long currentOffset = repoRecord.getCurrentClaimOffset();
        final long size = flowFile.getSize();
        final ResourceClaim resourceClaim = currentClaim.getResourceClaim();
        recordBuilder.setCurrentContentClaim(resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), currentOffset + currentClaim.getOffset(), size);
    }
    // Describe the previous (pre-modification) content claim, if it had one
    if (repoRecord.getOriginal() != null && repoRecord.getOriginalClaim() != null) {
        final ContentClaim originalClaim = repoRecord.getOriginalClaim();
        final long originalOffset = repoRecord.getOriginal().getContentClaimOffset();
        final long originalSize = repoRecord.getOriginal().getSize();
        final ResourceClaim resourceClaim = originalClaim.getResourceClaim();
        recordBuilder.setPreviousContentClaim(resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), originalOffset + originalClaim.getOffset(), originalSize);
    }
    final FlowFileQueue originalQueue = repoRecord.getOriginalQueue();
    if (originalQueue != null) {
        recordBuilder.setSourceQueueIdentifier(originalQueue.getIdentifier());
    }
    recordBuilder.setAttributes(repoRecord.getOriginalAttributes(), repoRecord.getUpdatedAttributes());
    return recordBuilder.build();
}
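From a processor author's perspective, this enrichment happens implicitly: raw events registered through the session's ProvenanceReporter are passed through enrich() when the session commits. A hedged sketch, where REL_SUCCESS is a hypothetical relationship rather than anything from the snippet:

// Sketch: the framework enriches this raw event via enrich() above, filling in
// the current/previous content claims and the FlowFile's attributes. Reporting
// against a FlowFile this session does not own raises FlowFileHandlingException.
session.getProvenanceReporter().modifyContent(flowFile, "re-encoded payload");
// REL_SUCCESS is hypothetical, for illustration only
session.transfer(flowFile, REL_SUCCESS);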

Example 5 with FlowFileHandlingException

Use of org.apache.nifi.processor.exception.FlowFileHandlingException in project nifi by apache.

Class StandardProcessSession, method migrate:

private void migrate(final StandardProcessSession newOwner, Collection<FlowFile> flowFiles) {
    // We don't call validateRecordState() here because we want to allow migration of FlowFiles that have already been marked as removed or transferred, etc.
    flowFiles = flowFiles.stream().map(this::getMostRecent).collect(Collectors.toList());
    for (final FlowFile flowFile : flowFiles) {
        if (openInputStreams.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " cannot be migrated to a new Process Session because this session currently " + "has an open InputStream for the FlowFile, created by calling ProcessSession.read(FlowFile)");
        }
        if (openOutputStreams.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " cannot be migrated to a new Process Session because this session currently " + "has an open OutputStream for the FlowFile, created by calling ProcessSession.write(FlowFile)");
        }
        if (readRecursionSet.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " already in use for an active callback or InputStream created by ProcessSession.read(FlowFile) has not been closed");
        }
        if (writeRecursionSet.contains(flowFile)) {
            throw new IllegalStateException(flowFile + " already in use for an active callback or OutputStream created by ProcessSession.write(FlowFile) has not been closed");
        }
        final StandardRepositoryRecord record = records.get(flowFile);
        if (record == null) {
            throw new FlowFileHandlingException(flowFile + " is not known in this session (" + toString() + ")");
        }
    }
    // If we have a FORK event for one of the given FlowFiles, then all children must also be migrated. Otherwise, we
    // could have a case where we have FlowFile A transferred and eventually exiting the flow and later the 'newOwner'
    // ProcessSession is committed, claiming to have created FlowFiles from the parent, which is no longer even in
    // the flow. This would be very confusing when looking at the provenance for the FlowFile, so it is best to avoid this.
    final Set<String> flowFileIds = flowFiles.stream().map(ff -> ff.getAttribute(CoreAttributes.UUID.key())).collect(Collectors.toSet());
    for (final Map.Entry<FlowFile, ProvenanceEventBuilder> entry : forkEventBuilders.entrySet()) {
        final FlowFile eventFlowFile = entry.getKey();
        if (flowFiles.contains(eventFlowFile)) {
            final ProvenanceEventBuilder eventBuilder = entry.getValue();
            for (final String childId : eventBuilder.getChildFlowFileIds()) {
                if (!flowFileIds.contains(childId)) {
                    throw new IllegalStateException("Cannot migrate " + eventFlowFile + " to a new session because it was forked to create " + eventBuilder.getChildFlowFileIds().size() + " children and not all children are being migrated. If any FlowFile is forked, all of its children must also be migrated at the same time as the forked FlowFile");
                }
            }
        }
    }
    // For each child FlowFile being migrated, copy the fork event builder over to
    // the new owner of the FlowFile and remove the child from our fork event builder.
    for (final Map.Entry<FlowFile, ProvenanceEventBuilder> entry : forkEventBuilders.entrySet()) {
        final FlowFile eventFlowFile = entry.getKey();
        final ProvenanceEventBuilder eventBuilder = entry.getValue();
        final Set<String> childrenIds = new HashSet<>(eventBuilder.getChildFlowFileIds());
        ProvenanceEventBuilder copy = null;
        for (final FlowFile flowFile : flowFiles) {
            final String flowFileId = flowFile.getAttribute(CoreAttributes.UUID.key());
            if (childrenIds.contains(flowFileId)) {
                eventBuilder.removeChildFlowFile(flowFile);
                if (copy == null) {
                    copy = eventBuilder.copy();
                    copy.getChildFlowFileIds().clear();
                }
                copy.addChildFlowFile(flowFileId);
            }
        }
        if (copy != null) {
            newOwner.forkEventBuilders.put(eventFlowFile, copy);
        }
    }
    newOwner.processingStartTime = Math.min(newOwner.processingStartTime, processingStartTime);
    for (final FlowFile flowFile : flowFiles) {
        final FlowFileRecord flowFileRecord = (FlowFileRecord) flowFile;
        final StandardRepositoryRecord repoRecord = this.records.remove(flowFile);
        newOwner.records.put(flowFileRecord, repoRecord);
        // Adjust the counts for Connections for each FlowFile that was pulled from a Connection.
        // We do not have to worry about accounting for 'input counts' on connections because those
        // are incremented only during a checkpoint, and anything that's been checkpointed has
        // also been committed above.
        final FlowFileQueue inputQueue = repoRecord.getOriginalQueue();
        if (inputQueue != null) {
            final String connectionId = inputQueue.getIdentifier();
            incrementConnectionOutputCounts(connectionId, -1, -repoRecord.getOriginal().getSize());
            newOwner.incrementConnectionOutputCounts(connectionId, 1, repoRecord.getOriginal().getSize());
            unacknowledgedFlowFiles.get(inputQueue).remove(flowFile);
            newOwner.unacknowledgedFlowFiles.computeIfAbsent(inputQueue, queue -> new HashSet<>()).add(flowFileRecord);
            flowFilesIn--;
            contentSizeIn -= flowFile.getSize();
            newOwner.flowFilesIn++;
            newOwner.contentSizeIn += flowFile.getSize();
        }
        final String flowFileId = flowFile.getAttribute(CoreAttributes.UUID.key());
        if (removedFlowFiles.remove(flowFileId)) {
            newOwner.removedFlowFiles.add(flowFileId);
            newOwner.removedCount++;
            newOwner.removedBytes += flowFile.getSize();
            removedCount--;
            removedBytes -= flowFile.getSize();
        }
        if (createdFlowFiles.remove(flowFileId)) {
            newOwner.createdFlowFiles.add(flowFileId);
        }
        if (repoRecord.getTransferRelationship() != null) {
            flowFilesOut--;
            contentSizeOut -= flowFile.getSize();
            newOwner.flowFilesOut++;
            newOwner.contentSizeOut += flowFile.getSize();
        }
        final List<ProvenanceEventRecord> events = generatedProvenanceEvents.remove(flowFile);
        if (events != null) {
            newOwner.generatedProvenanceEvents.put(flowFile, events);
        }
        final ContentClaim currentClaim = repoRecord.getCurrentClaim();
        if (currentClaim != null) {
            final ByteCountingOutputStream appendableStream = appendableStreams.remove(currentClaim);
            if (appendableStream != null) {
                newOwner.appendableStreams.put(currentClaim, appendableStream);
            }
        }
        final Path toDelete = deleteOnCommit.remove(flowFile);
        if (toDelete != null) {
            newOwner.deleteOnCommit.put(flowFile, toDelete);
        }
    }
    provenanceReporter.migrate(newOwner.provenanceReporter, flowFileIds);
}
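The fork constraint described in the comments above is visible from the caller side as well. A hedged sketch, with parent and newOwner as illustrative names:

// Sketch: session.create(parent) registers a FORK event, so the parent and
// all of its children must be migrated together.
final FlowFile child1 = session.create(parent);
final FlowFile child2 = session.create(parent);
// OK: the parent and all of its children move as one unit
session.migrate(newOwner, Arrays.asList(parent, child1, child2));
// Would throw IllegalStateException: child2 would be left behind
// session.migrate(newOwner, Arrays.asList(parent, child1));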
