
Example 21 with Connectable

Use of org.apache.nifi.connectable.Connectable in project nifi by apache.

The class StandardProcessGroup, method isDisconnected:

private boolean isDisconnected(final Snippet snippet) {
    // Gather every connectable component in the snippet, including the ports of any remote process groups it contains.
    final Set<Connectable> connectables = getAllConnectables(snippet);
    for (final String id : getKeys(snippet.getRemoteProcessGroups())) {
        final RemoteProcessGroup remoteGroup = getRemoteProcessGroup(id);
        connectables.addAll(remoteGroup.getInputPorts());
        connectables.addAll(remoteGroup.getOutputPorts());
    }
    final Set<String> connectionIds = snippet.getConnections().keySet();
    // Every connection entering or leaving a snippet component must itself be part of the snippet.
    for (final Connectable connectable : connectables) {
        for (final Connection conn : connectable.getIncomingConnections()) {
            if (!connectionIds.contains(conn.getIdentifier())) {
                return false;
            }
        }
        for (final Connection conn : connectable.getConnections()) {
            if (!connectionIds.contains(conn.getIdentifier())) {
                return false;
            }
        }
    }
    // Include the components of any child groups so that connections into nested groups are accounted for.
    final Set<Connectable> recursiveConnectables = new HashSet<>(connectables);
    for (final String id : snippet.getProcessGroups().keySet()) {
        final ProcessGroup childGroup = getProcessGroup(id);
        recursiveConnectables.addAll(findAllConnectables(childGroup, true));
    }
    // Finally, both endpoints of every snippet connection must belong to the snippet or one of its child groups.
    for (final String id : connectionIds) {
        final Connection connection = getConnection(id);
        if (!recursiveConnectables.contains(connection.getSource()) || !recursiveConnectables.contains(connection.getDestination())) {
            return false;
        }
    }
    return true;
}
Also used: VersionedRemoteProcessGroup(org.apache.nifi.registry.flow.VersionedRemoteProcessGroup) Connectable(org.apache.nifi.connectable.Connectable) Connection(org.apache.nifi.connectable.Connection) VersionedConnection(org.apache.nifi.registry.flow.VersionedConnection) VersionedProcessGroup(org.apache.nifi.registry.flow.VersionedProcessGroup) LinkedHashSet(java.util.LinkedHashSet) HashSet(java.util.HashSet)
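
Stripped of the NiFi types, isDisconnected is a graph-isolation test: a snippet is disconnected when no connection crosses its boundary in either direction. Below is a minimal, self-contained sketch of that test; the Node and Edge classes and the method names are hypothetical stand-ins for Connectable and Connection, not NiFi code.

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Hypothetical graph model standing in for Connectable/Connection.
public class SnippetIsolationCheck {

    static final class Node {
        final String id;
        final Set<Edge> incoming = new HashSet<>();
        final Set<Edge> outgoing = new HashSet<>();
        Node(final String id) { this.id = id; }
    }

    static final class Edge {
        final String id;
        final Node source;
        final Node destination;
        Edge(final String id, final Node source, final Node destination) {
            this.id = id;
            this.source = source;
            this.destination = destination;
            source.outgoing.add(this);
            destination.incoming.add(this);
        }
    }

    // True when no edge crosses the snippet boundary in either direction.
    static boolean isDisconnected(final Set<Node> snippetNodes, final Set<String> snippetEdgeIds, final Map<String, Edge> allEdges) {
        // 1) Every edge touching a snippet node must itself be part of the snippet.
        for (final Node node : snippetNodes) {
            for (final Edge edge : node.incoming) {
                if (!snippetEdgeIds.contains(edge.id)) {
                    return false;
                }
            }
            for (final Edge edge : node.outgoing) {
                if (!snippetEdgeIds.contains(edge.id)) {
                    return false;
                }
            }
        }
        // 2) Every snippet edge must start and end on a snippet node.
        for (final String edgeId : snippetEdgeIds) {
            final Edge edge = allEdges.get(edgeId);
            if (!snippetNodes.contains(edge.source) || !snippetNodes.contains(edge.destination)) {
                return false;
            }
        }
        return true;
    }

    public static void main(final String[] args) {
        final Node a = new Node("a");
        final Node b = new Node("b");
        final Node c = new Node("c");
        final Edge ab = new Edge("ab", a, b);
        final Edge bc = new Edge("bc", b, c);
        final Map<String, Edge> edges = Map.of("ab", ab, "bc", bc);

        // {a, b} with only edge ab is not isolated: edge bc leaves the snippet.
        System.out.println(isDisconnected(Set.of(a, b), Set.of("ab"), edges));          // false
        // {a, b, c} with both edges is fully self-contained.
        System.out.println(isDisconnected(Set.of(a, b, c), Set.of("ab", "bc"), edges)); // true
    }
}

The two loops mirror the two checks in the original method: connections touching a snippet component must be inside the snippet, and snippet connections must have both endpoints inside it.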

Example 22 with Connectable

Use of org.apache.nifi.connectable.Connectable in project nifi by apache.

The class StandardProcessGroup, method findLocalConnectable:

private static Connectable findLocalConnectable(final String identifier, final ProcessGroup group) {
    // Check each kind of component owned directly by this group before recursing into child groups.
    final ProcessorNode procNode = group.getProcessor(identifier);
    if (procNode != null) {
        return procNode;
    }
    final Port inPort = group.getInputPort(identifier);
    if (inPort != null) {
        return inPort;
    }
    final Port outPort = group.getOutputPort(identifier);
    if (outPort != null) {
        return outPort;
    }
    final Funnel funnel = group.getFunnel(identifier);
    if (funnel != null) {
        return funnel;
    }
    for (final RemoteProcessGroup remoteProcessGroup : group.getRemoteProcessGroups()) {
        final RemoteGroupPort remoteInputPort = remoteProcessGroup.getInputPort(identifier);
        if (remoteInputPort != null) {
            return remoteInputPort;
        }
        final RemoteGroupPort remoteOutputPort = remoteProcessGroup.getOutputPort(identifier);
        if (remoteOutputPort != null) {
            return remoteOutputPort;
        }
    }
    // Not found at this level; search the child groups depth-first.
    for (final ProcessGroup childGroup : group.getProcessGroups()) {
        final Connectable childGroupConnectable = findLocalConnectable(identifier, childGroup);
        if (childGroupConnectable != null) {
            return childGroupConnectable;
        }
    }
    return null;
}
Also used: VersionedFunnel(org.apache.nifi.registry.flow.VersionedFunnel) Funnel(org.apache.nifi.connectable.Funnel) VersionedRemoteProcessGroup(org.apache.nifi.registry.flow.VersionedRemoteProcessGroup) ProcessorNode(org.apache.nifi.controller.ProcessorNode) VersionedRemoteGroupPort(org.apache.nifi.registry.flow.VersionedRemoteGroupPort) RemoteGroupPort(org.apache.nifi.remote.RemoteGroupPort) Connectable(org.apache.nifi.connectable.Connectable) RootGroupPort(org.apache.nifi.remote.RootGroupPort) Port(org.apache.nifi.connectable.Port) VersionedPort(org.apache.nifi.registry.flow.VersionedPort) LocalPort(org.apache.nifi.connectable.LocalPort) VersionedProcessGroup(org.apache.nifi.registry.flow.VersionedProcessGroup)
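
findLocalConnectable tries each kind of component owned by the current group (processor, input port, output port, funnel, and the remote group ports) and only then recurses into child groups. The sketch below isolates that depth-first lookup pattern with hypothetical Group and Component classes; it is an illustration of the pattern, not the NiFi API.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical component tree standing in for ProcessGroup and its members.
public class RecursiveLookupSketch {

    static final class Component {
        final String id;
        Component(final String id) { this.id = id; }
    }

    static final class Group {
        final Map<String, Component> components = new HashMap<>();
        final List<Group> childGroups = new ArrayList<>();
    }

    // Depth-first search: check this group's own components, then recurse into its children.
    static Component findComponent(final String identifier, final Group group) {
        final Component local = group.components.get(identifier);
        if (local != null) {
            return local;
        }
        for (final Group child : group.childGroups) {
            final Component found = findComponent(identifier, child);
            if (found != null) {
                return found;
            }
        }
        return null; // not present anywhere under this group
    }

    public static void main(final String[] args) {
        final Group root = new Group();
        final Group child = new Group();
        root.childGroups.add(child);
        child.components.put("proc-1", new Component("proc-1"));

        System.out.println(findComponent("proc-1", root).id); // proc-1
        System.out.println(findComponent("missing", root));   // null
    }
}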

Example 23 with Connectable

Use of org.apache.nifi.connectable.Connectable in project nifi by apache.

The class StandardProcessSession, method removeExpired:

private void removeExpired(final Set<FlowFileRecord> flowFiles, final Connection connection) {
    if (flowFiles.isEmpty()) {
        return;
    }
    LOG.info("{} {} FlowFiles have expired and will be removed", new Object[] { this, flowFiles.size() });
    final List<RepositoryRecord> expiredRecords = new ArrayList<>(flowFiles.size());
    final Connectable connectable = context.getConnectable();
    final String processorType = connectable.getComponentType();
    final StandardProvenanceReporter expiredReporter = new StandardProvenanceReporter(this, connectable.getIdentifier(), processorType, context.getProvenanceRepository(), this);
    final Map<String, FlowFileRecord> recordIdMap = new HashMap<>();
    // Build a delete record for each expired FlowFile and report the expiration to provenance.
    for (final FlowFileRecord flowFile : flowFiles) {
        recordIdMap.put(flowFile.getAttribute(CoreAttributes.UUID.key()), flowFile);
        final StandardRepositoryRecord record = new StandardRepositoryRecord(connection.getFlowFileQueue(), flowFile);
        record.markForDelete();
        expiredRecords.add(record);
        expiredReporter.expire(flowFile, "Expiration Threshold = " + connection.getFlowFileQueue().getFlowFileExpiration());
        decrementClaimCount(flowFile.getContentClaim());
        final long flowFileLife = System.currentTimeMillis() - flowFile.getEntryDate();
        final Object terminator = connectable instanceof ProcessorNode ? ((ProcessorNode) connectable).getProcessor() : connectable;
        LOG.info("{} terminated by {} due to FlowFile expiration; life of FlowFile = {} ms", new Object[] { flowFile, terminator, flowFileLife });
    }
    try {
        // Enrich each expiration event lazily, as the provenance repository iterates, with the FlowFile's content claim and attributes.
        final Iterable<ProvenanceEventRecord> iterable = new Iterable<ProvenanceEventRecord>() {

            @Override
            public Iterator<ProvenanceEventRecord> iterator() {
                final Iterator<ProvenanceEventRecord> expiredEventIterator = expiredReporter.getEvents().iterator();
                final Iterator<ProvenanceEventRecord> enrichingIterator = new Iterator<ProvenanceEventRecord>() {

                    @Override
                    public boolean hasNext() {
                        return expiredEventIterator.hasNext();
                    }

                    @Override
                    public ProvenanceEventRecord next() {
                        final ProvenanceEventRecord event = expiredEventIterator.next();
                        final StandardProvenanceEventRecord.Builder enriched = new StandardProvenanceEventRecord.Builder().fromEvent(event);
                        final FlowFileRecord record = recordIdMap.get(event.getFlowFileUuid());
                        if (record == null) {
                            return null;
                        }
                        final ContentClaim claim = record.getContentClaim();
                        if (claim != null) {
                            final ResourceClaim resourceClaim = claim.getResourceClaim();
                            enriched.setCurrentContentClaim(resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), record.getContentClaimOffset() + claim.getOffset(), record.getSize());
                            enriched.setPreviousContentClaim(resourceClaim.getContainer(), resourceClaim.getSection(), resourceClaim.getId(), record.getContentClaimOffset() + claim.getOffset(), record.getSize());
                        }
                        enriched.setAttributes(record.getAttributes(), Collections.<String, String>emptyMap());
                        return enriched.build();
                    }

                    @Override
                    public void remove() {
                        throw new UnsupportedOperationException();
                    }
                };
                return enrichingIterator;
            }
        };
        context.getProvenanceRepository().registerEvents(iterable);
        context.getFlowFileRepository().updateRepository(expiredRecords);
    } catch (final IOException e) {
        LOG.error("Failed to update FlowFile Repository to record expired records due to {}", e);
    }
}
Also used: ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) IOException(java.io.IOException) StandardProvenanceEventRecord(org.apache.nifi.provenance.StandardProvenanceEventRecord) ContentClaim(org.apache.nifi.controller.repository.claim.ContentClaim) ProcessorNode(org.apache.nifi.controller.ProcessorNode) Connectable(org.apache.nifi.connectable.Connectable) ProvenanceEventRecord(org.apache.nifi.provenance.ProvenanceEventRecord) Iterator(java.util.Iterator) ResourceClaim(org.apache.nifi.controller.repository.claim.ResourceClaim)
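
The notable part of removeExpired is how the expiration events reach the provenance repository: rather than materializing an enriched list, it wraps the reporter's iterator so each event is decorated with claim and attribute details only as the repository consumes it. Below is a stripped-down sketch of that lazy-decoration idiom; the enriching helper and the event strings are illustrative, not NiFi classes.

import java.util.Iterator;
import java.util.List;
import java.util.function.Function;

// Minimal sketch of an "enriching iterator": elements are decorated lazily as a
// downstream consumer iterates, with no intermediate collection.
public class EnrichingIteratorSketch {

    static <T, R> Iterable<R> enriching(final Iterable<T> source, final Function<T, R> enricher) {
        return () -> {
            final Iterator<T> delegate = source.iterator();
            return new Iterator<R>() {
                @Override
                public boolean hasNext() {
                    return delegate.hasNext();
                }

                @Override
                public R next() {
                    // Decorate each element only when the consumer asks for it.
                    return enricher.apply(delegate.next());
                }
            };
        };
    }

    public static void main(final String[] args) {
        final List<String> rawEvents = List.of("EXPIRE ff-1", "EXPIRE ff-2");
        final Iterable<String> enriched = enriching(rawEvents, event -> event + " (claim=none, size=0)");
        enriched.forEach(System.out::println);
    }
}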

Example 24 with Connectable

Use of org.apache.nifi.connectable.Connectable in project nifi by apache.

The class StandardProcessSession, method registerForkEvent:

private void registerForkEvent(final FlowFile parent, final FlowFile child) {
    // Lazily create one FORK event builder per parent FlowFile; each child is appended to the same builder.
    ProvenanceEventBuilder eventBuilder = forkEventBuilders.get(parent);
    if (eventBuilder == null) {
        eventBuilder = context.getProvenanceRepository().eventBuilder();
        eventBuilder.setEventType(ProvenanceEventType.FORK);
        eventBuilder.setFlowFileEntryDate(parent.getEntryDate());
        eventBuilder.setLineageStartDate(parent.getLineageStartDate());
        eventBuilder.setFlowFileUUID(parent.getAttribute(CoreAttributes.UUID.key()));
        eventBuilder.setComponentId(context.getConnectable().getIdentifier());
        final Connectable connectable = context.getConnectable();
        final String processorType = connectable.getComponentType();
        eventBuilder.setComponentType(processorType);
        eventBuilder.addParentFlowFile(parent);
        updateEventContentClaims(eventBuilder, parent, records.get(parent));
        forkEventBuilders.put(parent, eventBuilder);
    }
    eventBuilder.addChildFlowFile(child);
}
Also used: Connectable(org.apache.nifi.connectable.Connectable) ProvenanceEventBuilder(org.apache.nifi.provenance.ProvenanceEventBuilder)
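
registerForkEvent accumulates a single FORK event per parent FlowFile: a builder is created lazily the first time a parent forks, and every later child is appended to that same builder so the commit path can emit one event per parent. A small sketch of that accumulate-by-key shape, using a hypothetical ForkEvent type rather than NiFi's ProvenanceEventBuilder, is shown below.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical accumulator showing one fork event per parent, built up incrementally.
public class ForkAccumulationSketch {

    static final class ForkEvent {
        final String parentId;
        final List<String> childIds = new ArrayList<>();
        ForkEvent(final String parentId) { this.parentId = parentId; }

        @Override
        public String toString() {
            return "FORK " + parentId + " -> " + childIds;
        }
    }

    private final Map<String, ForkEvent> forkEventsByParent = new HashMap<>();

    void registerFork(final String parentId, final String childId) {
        // Create the event on first use for this parent, then keep appending children to it.
        forkEventsByParent.computeIfAbsent(parentId, ForkEvent::new).childIds.add(childId);
    }

    public static void main(final String[] args) {
        final ForkAccumulationSketch session = new ForkAccumulationSketch();
        session.registerFork("parent-1", "child-a");
        session.registerFork("parent-1", "child-b");
        session.registerFork("parent-2", "child-c");
        session.forkEventsByParent.values().forEach(System.out::println);
    }
}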

Example 25 with Connectable

Use of org.apache.nifi.connectable.Connectable in project nifi by apache.

The class StandardProcessSession, method rollback:

private void rollback(final boolean penalize, final boolean rollbackCheckpoint) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("{} session rollback called, FlowFile records are {} {}", this, loggableFlowfileInfo(), new Throwable("Stack Trace on rollback"));
    }
    deleteOnCommit.clear();
    closeStreams(openInputStreams, "rolled back", "input");
    closeStreams(openOutputStreams, "rolled back", "output");
    try {
        claimCache.reset();
    } catch (IOException e1) {
        LOG.warn("{} Attempted to close Output Stream for {} due to session rollback but close failed", this, this.connectableDescription, e1);
    }
    // Collect the records from this session and, if requested, from the existing checkpoint.
    final Set<StandardRepositoryRecord> recordsToHandle = new HashSet<>();
    recordsToHandle.addAll(records.values());
    if (rollbackCheckpoint) {
        final Checkpoint existingCheckpoint = this.checkpoint;
        this.checkpoint = null;
        if (existingCheckpoint != null && existingCheckpoint.records != null) {
            recordsToHandle.addAll(existingCheckpoint.records.values());
        }
    }
    resetWriteClaims();
    resetReadClaim();
    if (recordsToHandle.isEmpty()) {
        LOG.trace("{} was rolled back, but no events were performed by this ProcessSession", this);
        acknowledgeRecords();
        resetState();
        return;
    }
    for (final StandardRepositoryRecord record : recordsToHandle) {
        // remove the working claims if they are different than the originals.
        removeTemporaryClaim(record);
    }
    // Partition the records: those marked for abort are dropped, everything else is returned to its original queue.
    final Set<RepositoryRecord> abortedRecords = new HashSet<>();
    final Set<StandardRepositoryRecord> transferRecords = new HashSet<>();
    for (final StandardRepositoryRecord record : recordsToHandle) {
        if (record.isMarkedForAbort()) {
            decrementClaimCount(record.getWorkingClaim());
            if (record.getCurrentClaim() != null && !record.getCurrentClaim().equals(record.getWorkingClaim())) {
                // if working & original claim are same, don't remove twice; we only want to remove the original
                // if it's different from the working. Otherwise, we remove two claimant counts. This causes
                // an issue if we only updated the flowfile attributes.
                decrementClaimCount(record.getCurrentClaim());
            }
            abortedRecords.add(record);
        } else {
            transferRecords.add(record);
        }
    }
    // Put the FlowFiles that are not marked for abort back to their original queues
    for (final StandardRepositoryRecord record : transferRecords) {
        if (record.getOriginal() != null) {
            final FlowFileQueue originalQueue = record.getOriginalQueue();
            if (originalQueue != null) {
                if (penalize) {
                    final long expirationEpochMillis = System.currentTimeMillis() + context.getConnectable().getPenalizationPeriod(TimeUnit.MILLISECONDS);
                    final FlowFileRecord newFile = new StandardFlowFileRecord.Builder().fromFlowFile(record.getOriginal()).penaltyExpirationTime(expirationEpochMillis).build();
                    originalQueue.put(newFile);
                } else {
                    originalQueue.put(record.getOriginal());
                }
            }
        }
    }
    if (!abortedRecords.isEmpty()) {
        try {
            context.getFlowFileRepository().updateRepository(abortedRecords);
        } catch (final IOException ioe) {
            LOG.error("Unable to update FlowFile repository for aborted records due to {}", ioe.toString());
            if (LOG.isDebugEnabled()) {
                LOG.error("", ioe);
            }
        }
    }
    // If we have transient claims that need to be cleaned up, do so.
    final List<ContentClaim> transientClaims = recordsToHandle.stream().flatMap(record -> record.getTransientClaims().stream()).collect(Collectors.toList());
    if (!transientClaims.isEmpty()) {
        final RepositoryRecord repoRecord = new TransientClaimRepositoryRecord(transientClaims);
        try {
            context.getFlowFileRepository().updateRepository(Collections.singletonList(repoRecord));
        } catch (final IOException ioe) {
            LOG.error("Unable to update FlowFile repository to cleanup transient claims due to {}", ioe.toString());
            if (LOG.isDebugEnabled()) {
                LOG.error("", ioe);
            }
        }
    }
    final Connectable connectable = context.getConnectable();
    final StandardFlowFileEvent flowFileEvent = new StandardFlowFileEvent(connectable.getIdentifier());
    flowFileEvent.setBytesRead(bytesRead);
    flowFileEvent.setBytesWritten(bytesWritten);
    flowFileEvent.setCounters(immediateCounters);
    // update event repository
    try {
        context.getFlowFileEventRepository().updateRepository(flowFileEvent);
    } catch (final Exception e) {
        LOG.error("Failed to update FlowFileEvent Repository due to " + e);
        if (LOG.isDebugEnabled()) {
            LOG.error("", e);
        }
    }
    acknowledgeRecords();
    resetState();
}
Also used: OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback) FlowFileFilter(org.apache.nifi.processor.FlowFileFilter) TerminatedTaskException(org.apache.nifi.processor.exception.TerminatedTaskException) LoggerFactory(org.slf4j.LoggerFactory) QueueSize(org.apache.nifi.controller.queue.QueueSize) ByteCountingOutputStream(org.apache.nifi.stream.io.ByteCountingOutputStream) ByteArrayInputStream(java.io.ByteArrayInputStream) DisableOnCloseOutputStream(org.apache.nifi.controller.repository.io.DisableOnCloseOutputStream) TaskTerminationInputStream(org.apache.nifi.controller.repository.io.TaskTerminationInputStream) Map(java.util.Map) Connectable(org.apache.nifi.connectable.Connectable) Connection(org.apache.nifi.connectable.Connection) Path(java.nio.file.Path) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) ByteCountingInputStream(org.apache.nifi.stream.io.ByteCountingInputStream) FlowFileAccessException(org.apache.nifi.processor.exception.FlowFileAccessException) FlowFile(org.apache.nifi.flowfile.FlowFile) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) UUID(java.util.UUID) EOFException(java.io.EOFException) Collectors(java.util.stream.Collectors) MissingFlowFileException(org.apache.nifi.processor.exception.MissingFlowFileException) StandardProvenanceEventRecord(org.apache.nifi.provenance.StandardProvenanceEventRecord) Objects(java.util.Objects) TaskTermination(org.apache.nifi.controller.lifecycle.TaskTermination) List(java.util.List) ProvenanceEventBuilder(org.apache.nifi.provenance.ProvenanceEventBuilder) Pattern(java.util.regex.Pattern) StandardFlowFileEvent(org.apache.nifi.controller.repository.metrics.StandardFlowFileEvent) StreamCallback(org.apache.nifi.processor.io.StreamCallback) ContentClaim(org.apache.nifi.controller.repository.claim.ContentClaim) ProcessorNode(org.apache.nifi.controller.ProcessorNode) FlowFileAccessInputStream(org.apache.nifi.controller.repository.io.FlowFileAccessInputStream) HashMap(java.util.HashMap) ProvenanceEventRepository(org.apache.nifi.provenance.ProvenanceEventRepository) FlowFileHandlingException(org.apache.nifi.processor.exception.FlowFileHandlingException) ProcessException(org.apache.nifi.processor.exception.ProcessException) BufferedOutputStream(java.io.BufferedOutputStream) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) ProvenanceReporter(org.apache.nifi.provenance.ProvenanceReporter) Relationship(org.apache.nifi.processor.Relationship) ResourceClaim(org.apache.nifi.controller.repository.claim.ResourceClaim) ProvenanceEventRecord(org.apache.nifi.provenance.ProvenanceEventRecord) ContentClaimWriteCache(org.apache.nifi.controller.repository.claim.ContentClaimWriteCache) FlowFileAccessOutputStream(org.apache.nifi.controller.repository.io.FlowFileAccessOutputStream) NoSuchElementException(java.util.NoSuchElementException) LimitedInputStream(org.apache.nifi.controller.repository.io.LimitedInputStream) LinkedHashSet(java.util.LinkedHashSet) OutputStream(java.io.OutputStream) Logger(org.slf4j.Logger) Iterator(java.util.Iterator) Files(java.nio.file.Files) ProvenanceEventType(org.apache.nifi.provenance.ProvenanceEventType) IOException(java.io.IOException) ProcessSession(org.apache.nifi.processor.ProcessSession) TaskTerminationOutputStream(org.apache.nifi.controller.repository.io.TaskTerminationOutputStream) TimeUnit(java.util.concurrent.TimeUnit) AtomicLong(java.util.concurrent.atomic.AtomicLong) StreamUtils(org.apache.nifi.stream.io.StreamUtils) Closeable(java.io.Closeable) CoreAttributes(org.apache.nifi.flowfile.attributes.CoreAttributes) Collections(java.util.Collections) FlowFileQueue(org.apache.nifi.controller.queue.FlowFileQueue) DisableOnCloseInputStream(org.apache.nifi.controller.repository.io.DisableOnCloseInputStream) InputStream(java.io.InputStream)
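
At its core, the rollback path partitions the session's records: records marked for abort are dropped (after their claim counts are decremented), while everything else is returned to the queue it originally came from, optionally stamped with a penalty expiration. The sketch below models only that partition-and-requeue step with hypothetical types; the real method also resets claims, updates repositories, and records FlowFile events, as shown above.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Deque;
import java.util.List;

// Hypothetical record/queue types standing in for StandardRepositoryRecord and FlowFileQueue.
public class RollbackSketch {

    static final class RepoRecord {
        final String id;
        final boolean markedForAbort;
        final Deque<RepoRecord> originalQueue;
        long penaltyExpiration;

        RepoRecord(final String id, final boolean markedForAbort, final Deque<RepoRecord> originalQueue) {
            this.id = id;
            this.markedForAbort = markedForAbort;
            this.originalQueue = originalQueue;
        }
    }

    static void rollback(final Collection<RepoRecord> records, final boolean penalize, final long penalizationMillis) {
        final List<RepoRecord> aborted = new ArrayList<>();
        for (final RepoRecord record : records) {
            if (record.markedForAbort) {
                // A real implementation would also release the record's content claims here.
                aborted.add(record);
            } else if (record.originalQueue != null) {
                if (penalize) {
                    // Penalized FlowFiles carry an expiration timestamp so the queue holds them back for a while.
                    record.penaltyExpiration = System.currentTimeMillis() + penalizationMillis;
                }
                record.originalQueue.addLast(record); // put the FlowFile back where it came from
            }
        }
        System.out.println("aborted=" + aborted.size());
    }

    public static void main(final String[] args) {
        final Deque<RepoRecord> queue = new ArrayDeque<>();
        final RepoRecord keep = new RepoRecord("keep", false, queue);
        final RepoRecord drop = new RepoRecord("drop", true, queue);
        rollback(List.of(keep, drop), true, 30_000L);
        System.out.println("requeued=" + queue.size()); // requeued=1
    }
}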

Aggregations

Connectable (org.apache.nifi.connectable.Connectable): 35 usages
Connection (org.apache.nifi.connectable.Connection): 18 usages
ArrayList (java.util.ArrayList): 12 usages
HashSet (java.util.HashSet): 12 usages
ProcessGroup (org.apache.nifi.groups.ProcessGroup): 12 usages
VersionedProcessGroup (org.apache.nifi.registry.flow.VersionedProcessGroup): 10 usages
RemoteGroupPort (org.apache.nifi.remote.RemoteGroupPort): 10 usages
RemoteProcessGroup (org.apache.nifi.groups.RemoteProcessGroup): 9 usages
VersionedConnection (org.apache.nifi.registry.flow.VersionedConnection): 9 usages
Relationship (org.apache.nifi.processor.Relationship): 8 usages
HashMap (java.util.HashMap): 7 usages
Collection (java.util.Collection): 6 usages
LinkedHashSet (java.util.LinkedHashSet): 6 usages
Funnel (org.apache.nifi.connectable.Funnel): 6 usages
Port (org.apache.nifi.connectable.Port): 6 usages
ProcessorNode (org.apache.nifi.controller.ProcessorNode): 6 usages
RootGroupPort (org.apache.nifi.remote.RootGroupPort): 6 usages
IOException (java.io.IOException): 5 usages
List (java.util.List): 5 usages
VersionedRemoteProcessGroup (org.apache.nifi.registry.flow.VersionedRemoteProcessGroup): 5 usages