Example 16 with ContentClaim

Use of org.apache.nifi.controller.repository.claim.ContentClaim in project nifi by apache.

From the class StandardProcessSession, method handleContentNotFound.

private void handleContentNotFound(final ContentNotFoundException nfe, final StandardRepositoryRecord suspectRecord) {
    final ContentClaim registeredClaim = suspectRecord.getOriginalClaim();
    final ContentClaim transientClaim = suspectRecord.getWorkingClaim();
    final ContentClaim missingClaim = nfe.getMissingClaim();
    final ProvenanceEventRecord dropEvent = provenanceReporter.drop(suspectRecord.getCurrent(), nfe.getMessage() == null ? "Content Not Found" : nfe.getMessage());
    if (dropEvent != null) {
        context.getProvenanceRepository().registerEvent(dropEvent);
    }
    if (missingClaim == registeredClaim) {
        suspectRecord.markForAbort();
        rollback();
        throw new MissingFlowFileException("Unable to find content for FlowFile", nfe);
    }
    if (missingClaim == transientClaim) {
        rollback();
        throw new MissingFlowFileException("Unable to find content for FlowFile", nfe);
    }
}
Also used : ContentClaim(org.apache.nifi.controller.repository.claim.ContentClaim) StandardProvenanceEventRecord(org.apache.nifi.provenance.StandardProvenanceEventRecord) ProvenanceEventRecord(org.apache.nifi.provenance.ProvenanceEventRecord) MissingFlowFileException(org.apache.nifi.processor.exception.MissingFlowFileException)
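
Note how the method distinguishes the FlowFile's registered (original) claim from the transient working claim: in either case the session is rolled back, but the DROP provenance event is emitted first so the repository records why the content vanished. For context, here is a minimal processor-side sketch of catching the resulting MissingFlowFileException; the processor class, relationship name, and buffer handling are illustrative, while ProcessSession.read, StreamUtils.fillBuffer, and the exception type are the real NiFi APIs.

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.MissingFlowFileException;
import org.apache.nifi.stream.io.StreamUtils;

public class CopyContentProcessor extends AbstractProcessor {

    static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").build();

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        final FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }
        try {
            final byte[] buffer = new byte[(int) flowFile.getSize()];
            session.read(flowFile, in -> StreamUtils.fillBuffer(in, buffer));
            session.transfer(flowFile, REL_SUCCESS);
        } catch (final MissingFlowFileException mffe) {
            // handleContentNotFound() has already rolled the session back and
            // registered a DROP provenance event; the FlowFile no longer exists,
            // so just log and yield rather than trying to transfer it.
            getLogger().error("Content repository has no content for {}", new Object[] {flowFile}, mffe);
            context.yield();
        }
    }
}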

Example 17 with ContentClaim

Use of org.apache.nifi.controller.repository.claim.ContentClaim in project nifi by apache.

From the class StandardProcessSession, method write.

@Override
public OutputStream write(FlowFile source) {
    verifyTaskActive();
    source = validateRecordState(source);
    final StandardRepositoryRecord record = records.get(source);
    ContentClaim newClaim = null;
    try {
        newClaim = claimCache.getContentClaim();
        claimLog.debug("Creating ContentClaim {} for 'write' for {}", newClaim, source);
        ensureNotAppending(newClaim);
        final OutputStream rawStream = claimCache.write(newClaim);
        final OutputStream disableOnClose = new DisableOnCloseOutputStream(rawStream);
        final ByteCountingOutputStream countingOut = new ByteCountingOutputStream(disableOnClose);
        final FlowFile sourceFlowFile = source;
        final ContentClaim updatedClaim = newClaim;
        final OutputStream errorHandlingOutputStream = new OutputStream() {

            private boolean closed = false;

            @Override
            public void write(final int b) throws IOException {
                try {
                    countingOut.write(b);
                } catch (final IOException ioe) {
                    LOG.error("Failed to write content to " + sourceFlowFile + "; rolling back session", ioe);
                    rollback(true);
                    close();
                    throw new FlowFileAccessException("Failed to write to Content Repository for " + sourceFlowFile, ioe);
                }
            }

            @Override
            public void write(final byte[] b) throws IOException {
                try {
                    countingOut.write(b);
                } catch (final IOException ioe) {
                    LOG.error("Failed to write content to " + sourceFlowFile + "; rolling back session", ioe);
                    rollback(true);
                    close();
                    throw new FlowFileAccessException("Failed to write to Content Repository for " + sourceFlowFile, ioe);
                }
            }

            @Override
            public void write(final byte[] b, final int off, final int len) throws IOException {
                try {
                    countingOut.write(b, off, len);
                } catch (final IOException ioe) {
                    LOG.error("Failed to write content to " + sourceFlowFile + "; rolling back session", ioe);
                    rollback(true);
                    close();
                    throw new FlowFileAccessException("Failed to write to Content Repository for " + sourceFlowFile, ioe);
                }
            }

            @Override
            public void flush() throws IOException {
                try {
                    countingOut.flush();
                } catch (final IOException ioe) {
                    LOG.error("Failed to write content to " + sourceFlowFile + "; rolling back session", ioe);
                    rollback(true);
                    close();
                    throw new FlowFileAccessException("Failed to write to Content Repository for " + sourceFlowFile, ioe);
                }
            }

            @Override
            public void close() throws IOException {
                if (closed) {
                    return;
                }
                closed = true;
                writeRecursionSet.remove(sourceFlowFile);
                final long bytesWritten = countingOut.getBytesWritten();
                StandardProcessSession.this.bytesWritten += bytesWritten;
                final OutputStream removed = openOutputStreams.remove(sourceFlowFile);
                if (removed == null) {
                    LOG.error("Closed Session's OutputStream but there was no entry for it in the map; sourceFlowFile={}; map={}", sourceFlowFile, openOutputStreams);
                }
                flush();
                removeTemporaryClaim(record);
                final FlowFileRecord newFile = new StandardFlowFileRecord.Builder()
                    .fromFlowFile(record.getCurrent())
                    .contentClaim(updatedClaim)
                    .contentClaimOffset(Math.max(0, updatedClaim.getLength() - bytesWritten))
                    .size(bytesWritten)
                    .build();
                record.setWorking(newFile);
            }
        };
        writeRecursionSet.add(source);
        openOutputStreams.put(source, errorHandlingOutputStream);
        return createTaskTerminationStream(errorHandlingOutputStream);
    } catch (final ContentNotFoundException nfe) {
        // need to reset write claim before we can remove the claim
        resetWriteClaims();
        destroyContent(newClaim);
        handleContentNotFound(nfe, record);
        throw nfe;
    } catch (final FlowFileAccessException ffae) {
        // need to reset write claim before we can remove the claim
        resetWriteClaims();
        destroyContent(newClaim);
        throw ffae;
    } catch (final IOException ioe) {
        // need to reset write claim before we can remove the claim
        resetWriteClaims();
        destroyContent(newClaim);
        throw new ProcessException("IOException thrown from " + connectableDescription + ": " + ioe.toString(), ioe);
    } catch (final Throwable t) {
        // need to reset write claim before we can remove the claim
        resetWriteClaims();
        destroyContent(newClaim);
        throw t;
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) FlowFileAccessException(org.apache.nifi.processor.exception.FlowFileAccessException) ByteCountingOutputStream(org.apache.nifi.stream.io.ByteCountingOutputStream) DisableOnCloseOutputStream(org.apache.nifi.controller.repository.io.DisableOnCloseOutputStream) BufferedOutputStream(java.io.BufferedOutputStream) FlowFileAccessOutputStream(org.apache.nifi.controller.repository.io.FlowFileAccessOutputStream) OutputStream(java.io.OutputStream) TaskTerminationOutputStream(org.apache.nifi.controller.repository.io.TaskTerminationOutputStream) DisableOnCloseOutputStream(org.apache.nifi.controller.repository.io.DisableOnCloseOutputStream) IOException(java.io.IOException) ByteCountingOutputStream(org.apache.nifi.stream.io.ByteCountingOutputStream) ContentClaim(org.apache.nifi.controller.repository.claim.ContentClaim) ProcessException(org.apache.nifi.processor.exception.ProcessException)
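
The anonymous wrapper above is what ProcessSession.write(FlowFile) hands back to processor code: every write and flush funnels through the content-repository stream, an IOException rolls the session back and resurfaces as FlowFileAccessException, and close() is the step that installs the updated content claim on the repository record. A minimal caller-side sketch; the payload and helper method are illustrative, the try-with-resources close() contract is the real API.

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;

public final class StreamingWriteExample {

    private StreamingWriteExample() {
    }

    // Writes a fixed payload to a new FlowFile. The 'success' relationship is
    // supplied by the surrounding processor; session.write(FlowFile) is the
    // stream-returning API shown above.
    public static FlowFile writeGreeting(final ProcessSession session, final Relationship success) throws IOException {
        FlowFile flowFile = session.create();
        // close() must run before transfer: per the close() override above, it
        // records bytesWritten and swaps the new content claim into the record.
        try (OutputStream out = session.write(flowFile)) {
            out.write("hello, nifi".getBytes(StandardCharsets.UTF_8));
        }
        session.transfer(flowFile, success);
        return flowFile;
    }
}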

Example 18 with ContentClaim

Use of org.apache.nifi.controller.repository.claim.ContentClaim in project nifi by apache.

From the class StandardProcessSession, method enrich.

@Override
public StandardProvenanceEventRecord enrich(final ProvenanceEventRecord rawEvent, final FlowFile flowFile) {
    verifyTaskActive();
    final StandardRepositoryRecord repoRecord = records.get(flowFile);
    if (repoRecord == null) {
        throw new FlowFileHandlingException(flowFile + " is not known in this session (" + toString() + ")");
    }
    final StandardProvenanceEventRecord.Builder recordBuilder = new StandardProvenanceEventRecord.Builder().fromEvent(rawEvent);
    if (repoRecord.getCurrent() != null && repoRecord.getCurrentClaim() != null) {
        final ContentClaim currentClaim = repoRecord.getCurrentClaim();
        final long currentOffset = repoRecord.getCurrentClaimOffset();
        final long size = flowFile.getSize();
        final ResourceClaim resourceClaim = currentClaim.getResourceClaim();
        recordBuilder.setCurrentContentClaim(resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), currentOffset + currentClaim.getOffset(), size);
    }
    if (repoRecord.getOriginal() != null && repoRecord.getOriginalClaim() != null) {
        final ContentClaim originalClaim = repoRecord.getOriginalClaim();
        final long originalOffset = repoRecord.getOriginal().getContentClaimOffset();
        final long originalSize = repoRecord.getOriginal().getSize();
        final ResourceClaim resourceClaim = originalClaim.getResourceClaim();
        recordBuilder.setPreviousContentClaim(resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), originalOffset + originalClaim.getOffset(), originalSize);
    }
    final FlowFileQueue originalQueue = repoRecord.getOriginalQueue();
    if (originalQueue != null) {
        recordBuilder.setSourceQueueIdentifier(originalQueue.getIdentifier());
    }
    recordBuilder.setAttributes(repoRecord.getOriginalAttributes(), repoRecord.getUpdatedAttributes());
    return recordBuilder.build();
}
Also used : StandardProvenanceEventRecord(org.apache.nifi.provenance.StandardProvenanceEventRecord) ContentClaim(org.apache.nifi.controller.repository.claim.ContentClaim) FlowFileHandlingException(org.apache.nifi.processor.exception.FlowFileHandlingException) ResourceClaim(org.apache.nifi.controller.repository.claim.ResourceClaim) FlowFileQueue(org.apache.nifi.controller.queue.FlowFileQueue)
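
The interesting detail in enrich() is the offset arithmetic: a ContentClaim is a slice of a shared, append-only ResourceClaim, and the FlowFile content is in turn a slice of the ContentClaim, so the absolute position written into the provenance event is the sum of the two offsets. A plain-Java illustration with invented numbers:

public final class ClaimOffsetExample {
    public static void main(final String[] args) {
        final long claimOffsetInResource = 4_096L; // currentClaim.getOffset()
        final long flowFileOffsetInClaim = 128L;   // repoRecord.getCurrentClaimOffset()
        final long flowFileSize = 512L;            // flowFile.getSize()

        // enrich() passes (container, section, id, offset, size) to
        // setCurrentContentClaim(); the offset combines both slices.
        final long absoluteOffset = flowFileOffsetInClaim + claimOffsetInResource;
        System.out.printf("content occupies [%d, %d) within the resource claim%n",
                absoluteOffset, absoluteOffset + flowFileSize);
    }
}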

Example 19 with ContentClaim

Use of org.apache.nifi.controller.repository.claim.ContentClaim in project nifi by apache.

From the class StandardProcessSession, method migrate.

private void migrate(final StandardProcessSession newOwner, Collection<FlowFile> flowFiles) {
    // We don't call validateRecordState() here because we want to allow migration of FlowFiles that have already been marked as removed or transferred, etc.
    flowFiles = flowFiles.stream().map(this::getMostRecent).collect(Collectors.toList());
    for (final FlowFile flowFile : flowFiles) {
        if (openInputStreams.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " cannot be migrated to a new Process Session because this session currently " + "has an open InputStream for the FlowFile, created by calling ProcessSession.read(FlowFile)");
        }
        if (openOutputStreams.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " cannot be migrated to a new Process Session because this session currently " + "has an open OutputStream for the FlowFile, created by calling ProcessSession.write(FlowFile)");
        }
        if (readRecursionSet.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " already in use for an active callback or InputStream created by ProcessSession.read(FlowFile) has not been closed");
        }
        if (writeRecursionSet.contains(flowFile)) {
            throw new IllegalStateException(flowFile + " already in use for an active callback or OutputStream created by ProcessSession.write(FlowFile) has not been closed");
        }
        final StandardRepositoryRecord record = records.get(flowFile);
        if (record == null) {
            throw new FlowFileHandlingException(flowFile + " is not known in this session (" + toString() + ")");
        }
    }
    // If we have a FORK event for one of the given FlowFiles, then all children must also be migrated. Otherwise, we
    // could have a case where we have FlowFile A transferred and eventually exiting the flow and later the 'newOwner'
    // ProcessSession is committed, claiming to have created FlowFiles from the parent, which is no longer even in
    // the flow. This would be very confusing when looking at the provenance for the FlowFile, so it is best to avoid this.
    final Set<String> flowFileIds = flowFiles.stream().map(ff -> ff.getAttribute(CoreAttributes.UUID.key())).collect(Collectors.toSet());
    for (final Map.Entry<FlowFile, ProvenanceEventBuilder> entry : forkEventBuilders.entrySet()) {
        final FlowFile eventFlowFile = entry.getKey();
        if (flowFiles.contains(eventFlowFile)) {
            final ProvenanceEventBuilder eventBuilder = entry.getValue();
            for (final String childId : eventBuilder.getChildFlowFileIds()) {
                if (!flowFileIds.contains(childId)) {
                    throw new IllegalStateException("Cannot migrate " + eventFlowFile + " to a new session because it was forked to create " + eventBuilder.getChildFlowFileIds().size() + " children and not all children are being migrated. If any FlowFile is forked, all of its children must also be migrated at the same time as the forked FlowFile");
                }
            }
        }
    }
    // If we have a FORK event where a FlowFile is a child of the FlowFile being migrated, we need to create a new FORK
    // event builder for the new owner of the FlowFile and remove the child from our fork event builder.
    for (final Map.Entry<FlowFile, ProvenanceEventBuilder> entry : forkEventBuilders.entrySet()) {
        final FlowFile eventFlowFile = entry.getKey();
        final ProvenanceEventBuilder eventBuilder = entry.getValue();
        final Set<String> childrenIds = new HashSet<>(eventBuilder.getChildFlowFileIds());
        ProvenanceEventBuilder copy = null;
        for (final FlowFile flowFile : flowFiles) {
            final String flowFileId = flowFile.getAttribute(CoreAttributes.UUID.key());
            if (childrenIds.contains(flowFileId)) {
                eventBuilder.removeChildFlowFile(flowFile);
                if (copy == null) {
                    copy = eventBuilder.copy();
                    copy.getChildFlowFileIds().clear();
                }
                copy.addChildFlowFile(flowFileId);
            }
        }
        if (copy != null) {
            newOwner.forkEventBuilders.put(eventFlowFile, copy);
        }
    }
    newOwner.processingStartTime = Math.min(newOwner.processingStartTime, processingStartTime);
    for (final FlowFile flowFile : flowFiles) {
        final FlowFileRecord flowFileRecord = (FlowFileRecord) flowFile;
        final StandardRepositoryRecord repoRecord = this.records.remove(flowFile);
        newOwner.records.put(flowFileRecord, repoRecord);
        // Adjust the counts for Connections for each FlowFile that was pulled from a Connection.
        // We do not have to worry about accounting for 'input counts' on connections because those
        // are incremented only during a checkpoint, and anything that's been checkpointed has
        // also been committed above.
        final FlowFileQueue inputQueue = repoRecord.getOriginalQueue();
        if (inputQueue != null) {
            final String connectionId = inputQueue.getIdentifier();
            incrementConnectionOutputCounts(connectionId, -1, -repoRecord.getOriginal().getSize());
            newOwner.incrementConnectionOutputCounts(connectionId, 1, repoRecord.getOriginal().getSize());
            unacknowledgedFlowFiles.get(inputQueue).remove(flowFile);
            newOwner.unacknowledgedFlowFiles.computeIfAbsent(inputQueue, queue -> new HashSet<>()).add(flowFileRecord);
            flowFilesIn--;
            contentSizeIn -= flowFile.getSize();
            newOwner.flowFilesIn++;
            newOwner.contentSizeIn += flowFile.getSize();
        }
        final String flowFileId = flowFile.getAttribute(CoreAttributes.UUID.key());
        if (removedFlowFiles.remove(flowFileId)) {
            newOwner.removedFlowFiles.add(flowFileId);
            newOwner.removedCount++;
            newOwner.removedBytes += flowFile.getSize();
            removedCount--;
            removedBytes -= flowFile.getSize();
        }
        if (createdFlowFiles.remove(flowFileId)) {
            newOwner.createdFlowFiles.add(flowFileId);
        }
        if (repoRecord.getTransferRelationship() != null) {
            flowFilesOut--;
            contentSizeOut -= flowFile.getSize();
            newOwner.flowFilesOut++;
            newOwner.contentSizeOut += flowFile.getSize();
        }
        final List<ProvenanceEventRecord> events = generatedProvenanceEvents.remove(flowFile);
        if (events != null) {
            newOwner.generatedProvenanceEvents.put(flowFile, events);
        }
        final ContentClaim currentClaim = repoRecord.getCurrentClaim();
        if (currentClaim != null) {
            final ByteCountingOutputStream appendableStream = appendableStreams.remove(currentClaim);
            if (appendableStream != null) {
                newOwner.appendableStreams.put(currentClaim, appendableStream);
            }
        }
        final Path toDelete = deleteOnCommit.remove(flowFile);
        if (toDelete != null) {
            newOwner.deleteOnCommit.put(flowFile, toDelete);
        }
    }
    provenanceReporter.migrate(newOwner.provenanceReporter, flowFileIds);
}
Also used : Path(java.nio.file.Path) FlowFile(org.apache.nifi.flowfile.FlowFile) FlowFileQueue(org.apache.nifi.controller.queue.FlowFileQueue) ByteCountingOutputStream(org.apache.nifi.stream.io.ByteCountingOutputStream) ContentClaim(org.apache.nifi.controller.repository.claim.ContentClaim) StandardProvenanceEventRecord(org.apache.nifi.provenance.StandardProvenanceEventRecord) ProvenanceEventRecord(org.apache.nifi.provenance.ProvenanceEventRecord) FlowFileHandlingException(org.apache.nifi.processor.exception.FlowFileHandlingException) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ProvenanceEventBuilder(org.apache.nifi.provenance.ProvenanceEventBuilder) HashSet(java.util.HashSet) LinkedHashSet(java.util.LinkedHashSet)
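
This private method is reached through the public ProcessSession.migrate(ProcessSession, Collection<FlowFile>) API, which bin-packing processors such as MergeContent use to park FlowFiles in a longer-lived session across onTrigger invocations. A sketch of that caller side; the method and session names are illustrative, the migrate and commit calls are the real API.

import java.util.Collections;

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;

public final class MigrateExample {

    private MigrateExample() {
    }

    // Moves one FlowFile from the per-trigger session into a longer-lived one.
    // Per the checks above, this throws IllegalStateException if a stream from
    // read()/write() is still open for the FlowFile, and it carries the
    // repository record, fork-event children, and connection counts along.
    public static void moveToBin(final ProcessSession incoming, final ProcessSession binSession) {
        final FlowFile flowFile = incoming.get();
        if (flowFile == null) {
            return;
        }
        incoming.migrate(binSession, Collections.singleton(flowFile));
        // The per-trigger session can now commit without touching the FlowFile;
        // binSession is responsible for transferring or removing it later.
        incoming.commit();
    }
}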

Example 20 with ContentClaim

Use of org.apache.nifi.controller.repository.claim.ContentClaim in project nifi by apache.

From the class StandardProcessSession, method summarizeEvents.

private String summarizeEvents(final Checkpoint checkpoint) {
    // map each Relationship to the IDs of the FlowFiles transferred to it
    final Map<Relationship, Set<String>> transferMap = new HashMap<>();
    final Set<String> modifiedFlowFileIds = new HashSet<>();
    int largestTransferSetSize = 0;
    for (final Map.Entry<FlowFileRecord, StandardRepositoryRecord> entry : checkpoint.records.entrySet()) {
        final FlowFile flowFile = entry.getKey();
        final StandardRepositoryRecord record = entry.getValue();
        final Relationship relationship = record.getTransferRelationship();
        if (Relationship.SELF.equals(relationship)) {
            continue;
        }
        Set<String> transferIds = transferMap.get(relationship);
        if (transferIds == null) {
            transferIds = new HashSet<>();
            transferMap.put(relationship, transferIds);
        }
        transferIds.add(flowFile.getAttribute(CoreAttributes.UUID.key()));
        largestTransferSetSize = Math.max(largestTransferSetSize, transferIds.size());
        final ContentClaim workingClaim = record.getWorkingClaim();
        if (workingClaim != null && workingClaim != record.getOriginalClaim() && record.getTransferRelationship() != null) {
            modifiedFlowFileIds.add(flowFile.getAttribute(CoreAttributes.UUID.key()));
        }
    }
    final int numRemoved = checkpoint.removedFlowFiles.size();
    final int numModified = modifiedFlowFileIds.size();
    final int numCreated = checkpoint.createdFlowFiles.size();
    final StringBuilder sb = new StringBuilder(512);
    if (!LOG.isDebugEnabled() && (largestTransferSetSize > VERBOSE_LOG_THRESHOLD || numModified > VERBOSE_LOG_THRESHOLD || numCreated > VERBOSE_LOG_THRESHOLD || numRemoved > VERBOSE_LOG_THRESHOLD)) {
        if (numCreated > 0) {
            sb.append("created ").append(numCreated).append(" FlowFiles, ");
        }
        if (numModified > 0) {
            sb.append("modified ").append(modifiedFlowFileIds.size()).append(" FlowFiles, ");
        }
        if (numRemoved > 0) {
            sb.append("removed ").append(numRemoved).append(" FlowFiles, ");
        }
        for (final Map.Entry<Relationship, Set<String>> entry : transferMap.entrySet()) {
            if (entry.getKey() != null) {
                sb.append("Transferred ").append(entry.getValue().size()).append(" FlowFiles");
                final Relationship relationship = entry.getKey();
                if (relationship != Relationship.ANONYMOUS) {
                    sb.append(" to '").append(relationship.getName()).append("', ");
                }
            }
        }
    } else {
        if (numCreated > 0) {
            sb.append("created FlowFiles ").append(checkpoint.createdFlowFiles).append(", ");
        }
        if (numModified > 0) {
            sb.append("modified FlowFiles ").append(modifiedFlowFileIds).append(", ");
        }
        if (numRemoved > 0) {
            sb.append("removed FlowFiles ").append(checkpoint.removedFlowFiles).append(", ");
        }
        for (final Map.Entry<Relationship, Set<String>> entry : transferMap.entrySet()) {
            if (entry.getKey() != null) {
                sb.append("Transferred FlowFiles ").append(entry.getValue());
                final Relationship relationship = entry.getKey();
                if (relationship != Relationship.ANONYMOUS) {
                    sb.append(" to '").append(relationship.getName()).append("', ");
                }
            }
        }
    }
    if (sb.length() > 2 && sb.subSequence(sb.length() - 2, sb.length()).equals(", ")) {
        sb.delete(sb.length() - 2, sb.length());
    }
    // don't add processing time if we did nothing, because we don't log the summary anyway
    if (sb.length() > 0) {
        final long processingNanos = checkpoint.processingTime;
        sb.append(", Processing Time = ");
        formatNanos(processingNanos, sb);
    }
    return sb.toString();
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) Set(java.util.Set) HashSet(java.util.HashSet) LinkedHashSet(java.util.LinkedHashSet) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ContentClaim(org.apache.nifi.controller.repository.claim.ContentClaim) Relationship(org.apache.nifi.processor.Relationship) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) HashSet(java.util.HashSet) LinkedHashSet(java.util.LinkedHashSet)
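
One small idiom worth noting: each summary segment is appended with a trailing ", " and the final separator is trimmed in a single pass at the end, which avoids special-casing the last segment in four different loops. A standalone sketch of that trim, with invented segment text:

public final class SummaryTrimExample {
    public static void main(final String[] args) {
        final StringBuilder sb = new StringBuilder(512);
        sb.append("created 3 FlowFiles, ");
        sb.append("removed 1 FlowFiles, ");
        sb.append("Transferred 2 FlowFiles to 'success', ");
        // Same check as above; StringBuilder.subSequence returns a String here,
        // so equals(", ") compares the last two characters as expected.
        if (sb.length() > 2 && sb.subSequence(sb.length() - 2, sb.length()).equals(", ")) {
            sb.delete(sb.length() - 2, sb.length());
        }
        System.out.println(sb);
    }
}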

Aggregations

ContentClaim (org.apache.nifi.controller.repository.claim.ContentClaim) 79
StandardContentClaim (org.apache.nifi.controller.repository.claim.StandardContentClaim) 51
Test (org.junit.Test) 40
OutputStream (java.io.OutputStream) 39
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 30
IOException (java.io.IOException) 26
InputStream (java.io.InputStream) 22
ResourceClaim (org.apache.nifi.controller.repository.claim.ResourceClaim) 22
ByteArrayInputStream (java.io.ByteArrayInputStream) 20
FlowFile (org.apache.nifi.flowfile.FlowFile) 19
Path (java.nio.file.Path) 18
ArrayList (java.util.ArrayList) 16
HashMap (java.util.HashMap) 16
FlowFileQueue (org.apache.nifi.controller.queue.FlowFileQueue) 14
Map (java.util.Map) 13
FileOutputStream (java.io.FileOutputStream) 12
FilterOutputStream (java.io.FilterOutputStream) 12
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap) 12
FlowFileAccessException (org.apache.nifi.processor.exception.FlowFileAccessException) 12
ProvenanceEventRecord (org.apache.nifi.provenance.ProvenanceEventRecord) 12