
Example 16 with DataFlow

Use of org.apache.nifi.cluster.protocol.DataFlow in project nifi by apache.

From the class StandardFlowService, method loadFromConnectionResponse.

private void loadFromConnectionResponse(final ConnectionResponse response) throws ConnectionException {
    writeLock.lock();
    try {
        if (response.getNodeConnectionStatuses() != null) {
            clusterCoordinator.resetNodeStatuses(response.getNodeConnectionStatuses().stream().collect(Collectors.toMap(status -> status.getNodeIdentifier(), status -> status)));
        }
        // get the dataflow from the response
        final DataFlow dataFlow = response.getDataFlow();
        if (logger.isTraceEnabled()) {
            logger.trace("ResponseFlow = " + new String(dataFlow.getFlow(), StandardCharsets.UTF_8));
        }
        // load new controller state
        loadFromBytes(dataFlow, true);
        // set node ID on controller before we start heartbeating because heartbeat needs node ID
        nodeId = response.getNodeIdentifier();
        logger.info("Setting Flow Controller's Node ID: " + nodeId);
        controller.setNodeId(nodeId);
        clusterCoordinator.setLocalNodeIdentifier(nodeId);
        clusterCoordinator.setConnected(true);
        revisionManager.reset(response.getComponentRevisions().stream().map(rev -> rev.toRevision()).collect(Collectors.toList()));
        // mark the node as clustered
        controller.setClustered(true, response.getInstanceId());
        controller.setConnectionStatus(new NodeConnectionStatus(nodeId, NodeConnectionState.CONNECTED));
        // Initialize the controller after the flow is loaded so we don't take any actions on repos until everything is good
        initializeController();
        // start the processors as indicated by the dataflow
        controller.onFlowInitialized(autoResumeState);
        loadSnippets(dataFlow.getSnippets());
        controller.startHeartbeating();
    } catch (final UninheritableFlowException ufe) {
        throw new UninheritableFlowException(CONNECTION_EXCEPTION_MSG_PREFIX + "local flow is different than cluster flow.", ufe);
    } catch (final MissingBundleException mbe) {
        throw new MissingBundleException(CONNECTION_EXCEPTION_MSG_PREFIX + "cluster flow contains bundles that do not exist on the current node", mbe);
    } catch (final FlowSerializationException fse) {
        throw new ConnectionException(CONNECTION_EXCEPTION_MSG_PREFIX + "local or cluster flow is malformed.", fse);
    } catch (final FlowSynchronizationException fse) {
        throw new FlowSynchronizationException(CONNECTION_EXCEPTION_MSG_PREFIX + "local flow controller partially updated. " + "Administrator should disconnect node and review flow for corruption.", fse);
    } catch (final Exception ex) {
        throw new ConnectionException("Failed to connect node to cluster due to: " + ex, ex);
    } finally {
        writeLock.unlock();
    }
}
Also used : FlowSynchronizationException(org.apache.nifi.controller.serialization.FlowSynchronizationException) FlowSerializationException(org.apache.nifi.controller.serialization.FlowSerializationException) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) DataFlow(org.apache.nifi.cluster.protocol.DataFlow) NodeConnectionStatus(org.apache.nifi.cluster.coordination.node.NodeConnectionStatus) ConnectionException(org.apache.nifi.cluster.ConnectionException) LifeCycleStartException(org.apache.nifi.lifecycle.LifeCycleStartException) NoClusterCoordinatorException(org.apache.nifi.cluster.exception.NoClusterCoordinatorException) IOException(java.io.IOException) ProtocolException(org.apache.nifi.cluster.protocol.ProtocolException)
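
Below is a minimal, self-contained sketch (not taken from the NiFi sources above) of how a DataFlow like the one carried in a ConnectionResponse can be built and inspected. The StandardDataFlow constructor and the accessor names come from the snippets on this page; the sample flow bytes and the class name are illustrative only.

import java.nio.charset.StandardCharsets;
import java.util.Collections;

import org.apache.nifi.cluster.protocol.DataFlow;
import org.apache.nifi.cluster.protocol.StandardDataFlow;

public class DataFlowInspection {

    public static void main(String[] args) {
        // Wrap raw flow XML bytes in a StandardDataFlow, as loadFromBytes() does above.
        final byte[] flowXml = "<flowController/>".getBytes(StandardCharsets.UTF_8);
        final DataFlow dataFlow = new StandardDataFlow(
                flowXml,                  // serialized flow
                new byte[0],              // snippets
                null,                     // authorizer fingerprint (none)
                Collections.emptySet());  // missing component ids

        // The same accessors that loadFromConnectionResponse() uses for trace logging.
        System.out.println("Flow bytes: " + dataFlow.getFlow().length);
        System.out.println("Flow as text: " + new String(dataFlow.getFlow(), StandardCharsets.UTF_8));
        System.out.println("Missing components: " + dataFlow.getMissingComponents());
    }
}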

Example 17 with DataFlow

Use of org.apache.nifi.cluster.protocol.DataFlow in project nifi by apache.

From the class StandardFlowService, method load.

@Override
public void load(final DataFlow dataFlow) throws IOException, FlowSerializationException, FlowSynchronizationException, UninheritableFlowException, MissingBundleException {
    if (configuredForClustering) {
        // Create the initial flow from disk if it exists, or from serializing the empty root group in flow controller
        final DataFlow initialFlow = (dataFlow == null) ? createDataFlow() : dataFlow;
        if (logger.isTraceEnabled()) {
            logger.trace("InitialFlow = " + new String(initialFlow.getFlow(), StandardCharsets.UTF_8));
        }
        // Sync the initial flow into the flow controller so that if the flow came from disk we loaded the
        // whole flow into the flow controller and applied any bundle upgrades
        writeLock.lock();
        try {
            loadFromBytes(initialFlow, true);
        } finally {
            writeLock.unlock();
        }
        // Get the proposed flow by serializing the flow controller which now has the synced version from above
        final DataFlow proposedFlow = createDataFlowFromController();
        if (logger.isTraceEnabled()) {
            logger.trace("ProposedFlow = " + new String(proposedFlow.getFlow(), StandardCharsets.UTF_8));
        }
        /*
             * Attempt to connect to the cluster. If the manager is able to
             * provide a data flow, then the manager will send a connection
             * response. If the manager was unable to be located, then
             * the response will be null and we should load the local dataflow
             * and heartbeat until a manager is located.
             */
        final boolean localFlowEmpty = StandardFlowSynchronizer.isEmpty(proposedFlow);
        final ConnectionResponse response = connect(true, localFlowEmpty, proposedFlow);
        // obtain write lock while we are updating the controller. We need to ensure that we don't
        // obtain the lock before calling connect(), though, or we will end up getting a deadlock
        // because the node that is receiving the connection request won't be able to get the current
        // flow, as that requires a read lock.
        writeLock.lock();
        try {
            if (response == null || response.shouldTryLater()) {
                logger.info("Flow controller will load local dataflow and suspend connection handshake until a cluster connection response is received.");
                // set node ID on controller before we start heartbeating because heartbeat needs node ID
                controller.setNodeId(nodeId);
                clusterCoordinator.setLocalNodeIdentifier(nodeId);
                // set node as clustered, since it is trying to connect to a cluster
                controller.setClustered(true, null);
                clusterCoordinator.setConnected(false);
                controller.setConnectionStatus(new NodeConnectionStatus(nodeId, DisconnectionCode.NOT_YET_CONNECTED));
                /*
                     * Start heartbeating. Heartbeats will fail because we can't reach
                     * the manager, but when we locate the manager, the node will
                     * reconnect and establish a connection to the cluster. The
                     * heartbeat is the trigger that will cause the manager to
                     * issue a reconnect request.
                     */
                controller.startHeartbeating();
                // Initialize the controller after the flow is loaded so we don't take any actions on repos until everything is good
                initializeController();
                // notify controller that flow is initialized
                try {
                    controller.onFlowInitialized(autoResumeState);
                } catch (final Exception ex) {
                    logger.warn("Unable to start all processors due to invalid flow configuration.");
                    if (logger.isDebugEnabled()) {
                        logger.warn(StringUtils.EMPTY, ex);
                    }
                }
            } else {
                try {
                    loadFromConnectionResponse(response);
                } catch (final Exception e) {
                    logger.error("Failed to load flow from cluster due to: " + e, e);
                    handleConnectionFailure(e);
                    throw new IOException(e);
                }
            }
            // save the flow in the controller so we write out the latest flow with any updated bundles to disk
            dao.save(controller, true);
        } finally {
            writeLock.unlock();
        }
    } else {
        writeLock.lock();
        try {
            // operating in standalone mode, so load proposed flow and initialize the controller
            loadFromBytes(dataFlow, true);
            initializeController();
            dao.save(controller, true);
        } finally {
            writeLock.unlock();
        }
    }
}
Also used : IOException(java.io.IOException) ConnectionResponse(org.apache.nifi.cluster.protocol.ConnectionResponse) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) DataFlow(org.apache.nifi.cluster.protocol.DataFlow) NodeConnectionStatus(org.apache.nifi.cluster.coordination.node.NodeConnectionStatus) FlowSerializationException(org.apache.nifi.controller.serialization.FlowSerializationException) ConnectionException(org.apache.nifi.cluster.ConnectionException) FlowSynchronizationException(org.apache.nifi.controller.serialization.FlowSynchronizationException) LifeCycleStartException(org.apache.nifi.lifecycle.LifeCycleStartException) NoClusterCoordinatorException(org.apache.nifi.cluster.exception.NoClusterCoordinatorException) ProtocolException(org.apache.nifi.cluster.protocol.ProtocolException)
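
The load method above wraps every controller mutation in writeLock.lock()/unlock() inside try/finally, and deliberately calls connect() outside the lock because the node serving the connection request needs a read lock on its own flow. A small stand-alone sketch of that locking discipline follows; the class and field names are hypothetical and not part of StandardFlowService.

import java.util.concurrent.locks.ReentrantReadWriteLock;

public class LockDisciplineSketch {

    private final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock();

    // Mutating work: acquire the write lock and always release it in finally.
    public void mutateState(Runnable mutation) {
        rwLock.writeLock().lock();
        try {
            mutation.run();
        } finally {
            rwLock.writeLock().unlock();
        }
    }

    // Long-running remote I/O (like connect() above) runs outside the write lock;
    // holding the lock across the call could deadlock against a peer that needs the read lock.
    public void loadLikeFlow(Runnable remoteCall, Runnable applyResponse) {
        remoteCall.run();           // no lock held here
        mutateState(applyResponse); // lock only while applying the result
    }
}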

Example 18 with DataFlow

Use of org.apache.nifi.cluster.protocol.DataFlow in project nifi by apache.

From the class StandardFlowService, method loadFromBytes.

// write lock must already be acquired
private void loadFromBytes(final DataFlow proposedFlow, final boolean allowEmptyFlow) throws IOException, FlowSerializationException, FlowSynchronizationException, UninheritableFlowException, MissingBundleException {
    logger.trace("Loading flow from bytes");
    // resolve the given flow (null means load flow from disk)
    final DataFlow actualProposedFlow;
    final byte[] flowBytes;
    final byte[] authorizerFingerprint;
    final Set<String> missingComponents;
    if (proposedFlow == null) {
        final ByteArrayOutputStream flowOnDisk = new ByteArrayOutputStream();
        copyCurrentFlow(flowOnDisk);
        flowBytes = flowOnDisk.toByteArray();
        authorizerFingerprint = getAuthorizerFingerprint();
        missingComponents = new HashSet<>();
        logger.debug("Loaded Flow from bytes");
    } else {
        flowBytes = proposedFlow.getFlow();
        authorizerFingerprint = proposedFlow.getAuthorizerFingerprint();
        missingComponents = proposedFlow.getMissingComponents();
        logger.debug("Loaded flow from proposed flow");
    }
    actualProposedFlow = new StandardDataFlow(flowBytes, null, authorizerFingerprint, missingComponents);
    // load the flow
    logger.debug("Loading proposed flow into FlowController");
    dao.load(controller, actualProposedFlow);
    final ProcessGroup rootGroup = controller.getGroup(controller.getRootGroupId());
    if (rootGroup.isEmpty() && !allowEmptyFlow) {
        throw new FlowSynchronizationException("Failed to load flow because unable to connect to cluster and local flow is empty");
    }
    final List<Template> templates = loadTemplates();
    for (final Template template : templates) {
        final Template existing = rootGroup.getTemplate(template.getIdentifier());
        if (existing == null) {
            logger.info("Imported Template '{}' to Root Group", template.getDetails().getName());
            rootGroup.addTemplate(template);
        } else {
            logger.info("Template '{}' was already present in Root Group so will not import from file", template.getDetails().getName());
        }
    }
}
Also used : StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) FlowSynchronizationException(org.apache.nifi.controller.serialization.FlowSynchronizationException) ProcessGroup(org.apache.nifi.groups.ProcessGroup) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DataFlow(org.apache.nifi.cluster.protocol.DataFlow)
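
When proposedFlow is null, loadFromBytes copies the flow currently on disk into a byte array and wraps it in a StandardDataFlow. The sketch below shows roughly that step for a gzip-compressed flow file using only standard Java I/O plus the StandardDataFlow constructor seen above; the file path and helper class are assumptions, not NiFi's actual copyCurrentFlow implementation.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.zip.GZIPInputStream;

import org.apache.nifi.cluster.protocol.DataFlow;
import org.apache.nifi.cluster.protocol.StandardDataFlow;

public class FlowFromDiskSketch {

    // Decompress a gzipped flow file into raw XML bytes and wrap them as a DataFlow,
    // roughly what the proposedFlow == null branch of loadFromBytes() achieves via copyCurrentFlow().
    public static DataFlow readFlow(Path gzippedFlow) throws IOException {
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (InputStream in = new GZIPInputStream(Files.newInputStream(gzippedFlow))) {
            final byte[] buffer = new byte[8192];
            int read;
            while ((read = in.read(buffer)) != -1) {
                baos.write(buffer, 0, read);
            }
        }
        return new StandardDataFlow(baos.toByteArray(), null, null, Collections.emptySet());
    }

    public static void main(String[] args) throws IOException {
        // Hypothetical path; adjust to the actual nifi.flow.configuration.file setting.
        final DataFlow flow = readFlow(Paths.get("conf/flow.xml.gz"));
        System.out.println("Loaded " + flow.getFlow().length + " bytes of flow configuration");
    }
}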

Example 19 with DataFlow

Use of org.apache.nifi.cluster.protocol.DataFlow in project nifi by apache.

From the class StandardFlowSynchronizer, method sync.

@Override
public void sync(final FlowController controller, final DataFlow proposedFlow, final StringEncryptor encryptor) throws FlowSerializationException, UninheritableFlowException, FlowSynchronizationException, MissingBundleException {
    // handle corner cases involving no proposed flow
    if (proposedFlow == null) {
        if (controller.getGroup(controller.getRootGroupId()).isEmpty()) {
            // no sync to perform
            return;
        } else {
            throw new UninheritableFlowException("Proposed configuration is empty, but the controller contains a data flow.");
        }
    }
    // determine if the controller already had flow sync'd to it
    final boolean flowAlreadySynchronized = controller.isFlowSynchronized();
    logger.debug("Synching FlowController with proposed flow: Controller is Already Synchronized = {}", flowAlreadySynchronized);
    // serialize controller state to bytes
    final byte[] existingFlow;
    final boolean existingFlowEmpty;
    try {
        if (flowAlreadySynchronized) {
            existingFlow = toBytes(controller);
            existingFlowEmpty = controller.getGroup(controller.getRootGroupId()).isEmpty() && controller.getAllReportingTasks().isEmpty() && controller.getAllControllerServices().isEmpty() && controller.getFlowRegistryClient().getRegistryIdentifiers().isEmpty();
        } else {
            existingFlow = readFlowFromDisk();
            if (existingFlow == null || existingFlow.length == 0) {
                existingFlowEmpty = true;
            } else {
                final Document document = parseFlowBytes(existingFlow);
                final Element rootElement = document.getDocumentElement();
                final FlowEncodingVersion encodingVersion = FlowEncodingVersion.parse(rootElement);
                logger.trace("Setting controller thread counts");
                final Integer maxThreadCount = getInteger(rootElement, "maxThreadCount");
                if (maxThreadCount == null) {
                    controller.setMaxTimerDrivenThreadCount(getInt(rootElement, "maxTimerDrivenThreadCount"));
                    controller.setMaxEventDrivenThreadCount(getInt(rootElement, "maxEventDrivenThreadCount"));
                } else {
                    controller.setMaxTimerDrivenThreadCount(maxThreadCount * 2 / 3);
                    controller.setMaxEventDrivenThreadCount(maxThreadCount / 3);
                }
                final Element reportingTasksElement = DomUtils.getChild(rootElement, "reportingTasks");
                final List<Element> taskElements;
                if (reportingTasksElement == null) {
                    taskElements = Collections.emptyList();
                } else {
                    taskElements = DomUtils.getChildElementsByTagName(reportingTasksElement, "reportingTask");
                }
                final Element controllerServicesElement = DomUtils.getChild(rootElement, "controllerServices");
                final List<Element> unrootedControllerServiceElements;
                if (controllerServicesElement == null) {
                    unrootedControllerServiceElements = Collections.emptyList();
                } else {
                    unrootedControllerServiceElements = DomUtils.getChildElementsByTagName(controllerServicesElement, "controllerService");
                }
                final boolean registriesPresent;
                final Element registriesElement = DomUtils.getChild(rootElement, "registries");
                if (registriesElement == null) {
                    registriesPresent = false;
                } else {
                    final List<Element> flowRegistryElems = DomUtils.getChildElementsByTagName(registriesElement, "flowRegistry");
                    registriesPresent = !flowRegistryElems.isEmpty();
                }
                logger.trace("Parsing process group from DOM");
                final Element rootGroupElement = (Element) rootElement.getElementsByTagName("rootGroup").item(0);
                final ProcessGroupDTO rootGroupDto = FlowFromDOMFactory.getProcessGroup(null, rootGroupElement, encryptor, encodingVersion);
                existingFlowEmpty = taskElements.isEmpty() && unrootedControllerServiceElements.isEmpty() && isEmpty(rootGroupDto) && !registriesPresent;
                logger.debug("Existing Flow Empty = {}", existingFlowEmpty);
            }
        }
    } catch (final IOException e) {
        throw new FlowSerializationException(e);
    }
    logger.trace("Exporting snippets from controller");
    final byte[] existingSnippets = controller.getSnippetManager().export();
    logger.trace("Getting Authorizer fingerprint from controller");
    final byte[] existingAuthFingerprint;
    final ManagedAuthorizer managedAuthorizer;
    final Authorizer authorizer = controller.getAuthorizer();
    if (AuthorizerCapabilityDetection.isManagedAuthorizer(authorizer)) {
        managedAuthorizer = (ManagedAuthorizer) authorizer;
        existingAuthFingerprint = managedAuthorizer.getFingerprint().getBytes(StandardCharsets.UTF_8);
    } else {
        existingAuthFingerprint = null;
        managedAuthorizer = null;
    }
    final Set<String> missingComponents = new HashSet<>();
    controller.getAllControllerServices().stream().filter(cs -> cs.isExtensionMissing()).forEach(cs -> missingComponents.add(cs.getIdentifier()));
    controller.getAllReportingTasks().stream().filter(r -> r.isExtensionMissing()).forEach(r -> missingComponents.add(r.getIdentifier()));
    controller.getRootGroup().findAllProcessors().stream().filter(p -> p.isExtensionMissing()).forEach(p -> missingComponents.add(p.getIdentifier()));
    final DataFlow existingDataFlow = new StandardDataFlow(existingFlow, existingSnippets, existingAuthFingerprint, missingComponents);
    Document configuration = null;
    // check that the proposed flow is inheritable by the controller
    try {
        if (existingFlowEmpty) {
            configuration = parseFlowBytes(proposedFlow.getFlow());
            if (configuration != null) {
                logger.trace("Checking bundle compatibility");
                checkBundleCompatibility(configuration);
            }
        } else {
            logger.trace("Checking flow inheritability");
            final String problemInheritingFlow = checkFlowInheritability(existingDataFlow, proposedFlow, controller);
            if (problemInheritingFlow != null) {
                throw new UninheritableFlowException("Proposed configuration is not inheritable by the flow controller because of flow differences: " + problemInheritingFlow);
            }
        }
    } catch (final FingerprintException fe) {
        throw new FlowSerializationException("Failed to generate flow fingerprints", fe);
    }
    logger.trace("Checking missing component inheritability");
    final String problemInheritingMissingComponents = checkMissingComponentsInheritability(existingDataFlow, proposedFlow);
    if (problemInheritingMissingComponents != null) {
        throw new UninheritableFlowException("Proposed Flow is not inheritable by the flow controller because of differences in missing components: " + problemInheritingMissingComponents);
    }
    logger.trace("Checking authorizer inheritability");
    final AuthorizerInheritability authInheritability = checkAuthorizerInheritability(authorizer, existingDataFlow, proposedFlow);
    if (!authInheritability.isInheritable() && authInheritability.getReason() != null) {
        throw new UninheritableFlowException("Proposed Authorizer is not inheritable by the flow controller because of Authorizer differences: " + authInheritability.getReason());
    }
    // create document by parsing proposed flow bytes
    logger.trace("Parsing proposed flow bytes as DOM document");
    if (configuration == null) {
        configuration = parseFlowBytes(proposedFlow.getFlow());
    }
    // attempt to sync controller with proposed flow
    try {
        if (configuration != null) {
            synchronized (configuration) {
                // get the root element
                final Element rootElement = (Element) configuration.getElementsByTagName("flowController").item(0);
                final FlowEncodingVersion encodingVersion = FlowEncodingVersion.parse(rootElement);
                // set controller config
                logger.trace("Updating flow config");
                final Integer maxThreadCount = getInteger(rootElement, "maxThreadCount");
                if (maxThreadCount == null) {
                    controller.setMaxTimerDrivenThreadCount(getInt(rootElement, "maxTimerDrivenThreadCount"));
                    controller.setMaxEventDrivenThreadCount(getInt(rootElement, "maxEventDrivenThreadCount"));
                } else {
                    controller.setMaxTimerDrivenThreadCount(maxThreadCount * 2 / 3);
                    controller.setMaxEventDrivenThreadCount(maxThreadCount / 3);
                }
                // get the root group XML element
                final Element rootGroupElement = (Element) rootElement.getElementsByTagName("rootGroup").item(0);
                if (!flowAlreadySynchronized || existingFlowEmpty) {
                    final Element registriesElement = DomUtils.getChild(rootElement, "registries");
                    if (registriesElement != null) {
                        final List<Element> flowRegistryElems = DomUtils.getChildElementsByTagName(registriesElement, "flowRegistry");
                        for (final Element flowRegistryElement : flowRegistryElems) {
                            final String registryId = getString(flowRegistryElement, "id");
                            final String registryName = getString(flowRegistryElement, "name");
                            final String registryUrl = getString(flowRegistryElement, "url");
                            final String description = getString(flowRegistryElement, "description");
                            final FlowRegistryClient client = controller.getFlowRegistryClient();
                            client.addFlowRegistry(registryId, registryName, registryUrl, description);
                        }
                    }
                }
                // if this controller isn't initialized or its empty, add the root group, otherwise update
                final ProcessGroup rootGroup;
                if (!flowAlreadySynchronized || existingFlowEmpty) {
                    logger.trace("Adding root process group");
                    rootGroup = addProcessGroup(controller, /* parent group */ null, rootGroupElement, encryptor, encodingVersion);
                } else {
                    logger.trace("Updating root process group");
                    rootGroup = updateProcessGroup(controller, /* parent group */ null, rootGroupElement, encryptor, encodingVersion);
                }
                rootGroup.findAllRemoteProcessGroups().forEach(RemoteProcessGroup::initialize);
                // If there are any Templates that do not exist in the Proposed Flow that do exist in the 'existing flow', we need
                // to ensure that we also add those to the appropriate Process Groups, so that we don't lose them.
                final Document existingFlowConfiguration = parseFlowBytes(existingFlow);
                if (existingFlowConfiguration != null) {
                    final Element existingRootElement = (Element) existingFlowConfiguration.getElementsByTagName("flowController").item(0);
                    if (existingRootElement != null) {
                        final Element existingRootGroupElement = (Element) existingRootElement.getElementsByTagName("rootGroup").item(0);
                        if (existingRootGroupElement != null) {
                            final FlowEncodingVersion existingEncodingVersion = FlowEncodingVersion.parse(existingFlowConfiguration.getDocumentElement());
                            addLocalTemplates(existingRootGroupElement, rootGroup, existingEncodingVersion);
                        }
                    }
                }
                // get all the reporting task elements
                final Element reportingTasksElement = DomUtils.getChild(rootElement, "reportingTasks");
                final List<Element> reportingTaskElements = new ArrayList<>();
                if (reportingTasksElement != null) {
                    reportingTaskElements.addAll(DomUtils.getChildElementsByTagName(reportingTasksElement, "reportingTask"));
                }
                // get/create all the reporting task nodes and DTOs, but don't apply their scheduled state yet
                final Map<ReportingTaskNode, ReportingTaskDTO> reportingTaskNodesToDTOs = new HashMap<>();
                for (final Element taskElement : reportingTaskElements) {
                    final ReportingTaskDTO dto = FlowFromDOMFactory.getReportingTask(taskElement, encryptor);
                    final ReportingTaskNode reportingTask = getOrCreateReportingTask(controller, dto, flowAlreadySynchronized, existingFlowEmpty);
                    reportingTaskNodesToDTOs.put(reportingTask, dto);
                }
                final Element controllerServicesElement = DomUtils.getChild(rootElement, "controllerServices");
                if (controllerServicesElement != null) {
                    final List<Element> serviceElements = DomUtils.getChildElementsByTagName(controllerServicesElement, "controllerService");
                    if (!flowAlreadySynchronized || existingFlowEmpty) {
                        // If the encoding version is null, we are loading a flow from NiFi 0.x, where Controller
                        // Services could not be scoped by Process Group. As a result, we want to move the Process Groups
                        // to the root Group. Otherwise, we want to use a null group, which indicates a Controller-level
                        // Controller Service.
                        final ProcessGroup group = (encodingVersion == null) ? rootGroup : null;
                        final Map<ControllerServiceNode, Element> controllerServices = ControllerServiceLoader.loadControllerServices(serviceElements, controller, group, encryptor);
                        // if we are moving controller services to the root group, check whether any reporting tasks
                        // reference them, and if so we need to clone the CS and update the reporting task reference
                        if (group != null) {
                            // find all the controller service ids referenced by reporting tasks
                            final Set<String> controllerServicesInReportingTasks = reportingTaskNodesToDTOs.keySet().stream().flatMap(r -> r.getProperties().entrySet().stream()).filter(e -> e.getKey().getControllerServiceDefinition() != null).map(e -> e.getValue()).collect(Collectors.toSet());
                            // find the controller service nodes for each id referenced by a reporting task
                            final Set<ControllerServiceNode> controllerServicesToClone = controllerServices.keySet().stream().filter(cs -> controllerServicesInReportingTasks.contains(cs.getIdentifier())).collect(Collectors.toSet());
                            // clone the controller services and map the original id to the clone
                            final Map<String, ControllerServiceNode> controllerServiceMapping = new HashMap<>();
                            for (ControllerServiceNode controllerService : controllerServicesToClone) {
                                final ControllerServiceNode clone = ControllerServiceLoader.cloneControllerService(controller, controllerService);
                                controller.addRootControllerService(clone);
                                controllerServiceMapping.put(controllerService.getIdentifier(), clone);
                            }
                            // update the reporting tasks to reference the cloned controller services
                            updateReportingTaskControllerServices(reportingTaskNodesToDTOs.keySet(), controllerServiceMapping);
                            // enable all the cloned controller services
                            ControllerServiceLoader.enableControllerServices(controllerServiceMapping.values(), controller, autoResumeState);
                        }
                        // enable all the original controller services
                        ControllerServiceLoader.enableControllerServices(controllerServices, controller, encryptor, autoResumeState);
                    }
                }
                scaleRootGroup(rootGroup, encodingVersion);
                // now that controller services are loaded and enabled we can apply the scheduled state to each reporting task
                for (Map.Entry<ReportingTaskNode, ReportingTaskDTO> entry : reportingTaskNodesToDTOs.entrySet()) {
                    applyReportingTaskScheduleState(controller, entry.getValue(), entry.getKey(), flowAlreadySynchronized, existingFlowEmpty);
                }
            }
        }
        // clear the snippets that are currently in memory
        logger.trace("Clearing existing snippets");
        final SnippetManager snippetManager = controller.getSnippetManager();
        snippetManager.clear();
        // if proposed flow has any snippets, load them
        logger.trace("Loading proposed snippets");
        final byte[] proposedSnippets = proposedFlow.getSnippets();
        if (proposedSnippets != null && proposedSnippets.length > 0) {
            for (final StandardSnippet snippet : SnippetManager.parseBytes(proposedSnippets)) {
                snippetManager.addSnippet(snippet);
            }
        }
        // if auths are inheritable and we have a policy based authorizer, then inherit
        if (authInheritability.isInheritable() && managedAuthorizer != null) {
            logger.trace("Inheriting authorizations");
            final String proposedAuthFingerprint = new String(proposedFlow.getAuthorizerFingerprint(), StandardCharsets.UTF_8);
            managedAuthorizer.inheritFingerprint(proposedAuthFingerprint);
        }
        logger.debug("Finished syncing flows");
    } catch (final Exception ex) {
        throw new FlowSynchronizationException(ex);
    }
}
Also used : Arrays(java.util.Arrays) GZIPInputStream(java.util.zip.GZIPInputStream) Size(org.apache.nifi.connectable.Size) ConnectionDTO(org.apache.nifi.web.api.dto.ConnectionDTO) StringUtils(org.apache.commons.lang3.StringUtils) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) PositionDTO(org.apache.nifi.web.api.dto.PositionDTO) ProcessGroupDTO(org.apache.nifi.web.api.dto.ProcessGroupDTO) Document(org.w3c.dom.Document) Map(java.util.Map) FlowSerializationException(org.apache.nifi.controller.serialization.FlowSerializationException) RootGroupPort(org.apache.nifi.remote.RootGroupPort) Connectable(org.apache.nifi.connectable.Connectable) Connection(org.apache.nifi.connectable.Connection) Path(java.nio.file.Path) FunnelDTO(org.apache.nifi.web.api.dto.FunnelDTO) LoggingXmlParserErrorHandler(org.apache.nifi.util.LoggingXmlParserErrorHandler) FlowFilePrioritizer(org.apache.nifi.flowfile.FlowFilePrioritizer) FileUtils(org.apache.nifi.util.file.FileUtils) Set(java.util.Set) StandardFlowSerializer(org.apache.nifi.controller.serialization.StandardFlowSerializer) StandardCharsets(java.nio.charset.StandardCharsets) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) PortDTO(org.apache.nifi.web.api.dto.PortDTO) AuthorizerCapabilityDetection(org.apache.nifi.authorization.AuthorizerCapabilityDetection) Position(org.apache.nifi.connectable.Position) RemoteProcessGroup(org.apache.nifi.groups.RemoteProcessGroup) DocumentBuilderFactory(javax.xml.parsers.DocumentBuilderFactory) StandardVersionControlInformation(org.apache.nifi.registry.flow.StandardVersionControlInformation) ByteArrayOutputStream(java.io.ByteArrayOutputStream) SiteToSiteTransportProtocol(org.apache.nifi.remote.protocol.SiteToSiteTransportProtocol) ReportingInitializationContext(org.apache.nifi.reporting.ReportingInitializationContext) Schema(javax.xml.validation.Schema) CollectionUtils(org.apache.commons.collections4.CollectionUtils) ArrayList(java.util.ArrayList) Relationship(org.apache.nifi.processor.Relationship) VersionedFlowState(org.apache.nifi.registry.flow.VersionedFlowState) SchemaFactory(javax.xml.validation.SchemaFactory) Files(java.nio.file.Files) FlowEncodingVersion(org.apache.nifi.controller.serialization.FlowEncodingVersion) IOException(java.io.IOException) ExecutionNode(org.apache.nifi.scheduling.ExecutionNode) BulletinFactory(org.apache.nifi.events.BulletinFactory) Authorizer(org.apache.nifi.authorization.Authorizer) NiFiProperties(org.apache.nifi.util.NiFiProperties) FlowFromDOMFactory(org.apache.nifi.controller.serialization.FlowFromDOMFactory) DocumentBuilder(javax.xml.parsers.DocumentBuilder) Severity(org.apache.nifi.reporting.Severity) ProcessorInstantiationException(org.apache.nifi.controller.exception.ProcessorInstantiationException) ControllerServiceLoader(org.apache.nifi.controller.service.ControllerServiceLoader) ProcessGroup(org.apache.nifi.groups.ProcessGroup) BundleCoordinate(org.apache.nifi.bundle.BundleCoordinate) ProcessorConfigDTO(org.apache.nifi.web.api.dto.ProcessorConfigDTO) URL(java.net.URL) InitializationException(org.apache.nifi.reporting.InitializationException) ConnectableType(org.apache.nifi.connectable.ConnectableType) LoggerFactory(org.slf4j.LoggerFactory) Port(org.apache.nifi.connectable.Port) FingerprintException(org.apache.nifi.fingerprint.FingerprintException) BundleDTO(org.apache.nifi.web.api.dto.BundleDTO) ReportingTaskInstantiationException(org.apache.nifi.controller.reporting.ReportingTaskInstantiationException) 
FlowSynchronizer(org.apache.nifi.controller.serialization.FlowSynchronizer) LabelDTO(org.apache.nifi.web.api.dto.LabelDTO) ByteArrayInputStream(java.io.ByteArrayInputStream) TemplateDTO(org.apache.nifi.web.api.dto.TemplateDTO) SchedulingStrategy(org.apache.nifi.scheduling.SchedulingStrategy) Label(org.apache.nifi.controller.label.Label) FlowRegistryClient(org.apache.nifi.registry.flow.FlowRegistryClient) ControllerServiceDTO(org.apache.nifi.web.api.dto.ControllerServiceDTO) StandardOpenOption(java.nio.file.StandardOpenOption) RemoteProcessGroupPortDescriptor(org.apache.nifi.groups.RemoteProcessGroupPortDescriptor) BundleUtils(org.apache.nifi.util.BundleUtils) Collectors(java.util.stream.Collectors) List(java.util.List) UninheritableAuthorizationsException(org.apache.nifi.authorization.exception.UninheritableAuthorizationsException) FingerprintFactory(org.apache.nifi.fingerprint.FingerprintFactory) SAXException(org.xml.sax.SAXException) ProcessorDTO(org.apache.nifi.web.api.dto.ProcessorDTO) ControllerServiceState(org.apache.nifi.controller.service.ControllerServiceState) ReportingTaskDTO(org.apache.nifi.web.api.dto.ReportingTaskDTO) FlowSnippetDTO(org.apache.nifi.web.api.dto.FlowSnippetDTO) DataFlow(org.apache.nifi.cluster.protocol.DataFlow) RemoteProcessGroupDTO(org.apache.nifi.web.api.dto.RemoteProcessGroupDTO) Funnel(org.apache.nifi.connectable.Funnel) ControllerServiceNode(org.apache.nifi.controller.service.ControllerServiceNode) SimpleProcessLogger(org.apache.nifi.processor.SimpleProcessLogger) HashMap(java.util.HashMap) ComponentLog(org.apache.nifi.logging.ComponentLog) DomUtils(org.apache.nifi.util.DomUtils) HashSet(java.util.HashSet) FlowRegistry(org.apache.nifi.registry.flow.FlowRegistry) StringEncryptor(org.apache.nifi.encrypt.StringEncryptor) VersionControlInformationDTO(org.apache.nifi.web.api.dto.VersionControlInformationDTO) Node(org.w3c.dom.Node) XMLConstants(javax.xml.XMLConstants) LogLevel(org.apache.nifi.logging.LogLevel) FlowSynchronizationException(org.apache.nifi.controller.serialization.FlowSynchronizationException) ManagedAuthorizer(org.apache.nifi.authorization.ManagedAuthorizer) Logger(org.slf4j.Logger) NodeList(org.w3c.dom.NodeList) RemoteGroupPort(org.apache.nifi.remote.RemoteGroupPort) TimeUnit(java.util.concurrent.TimeUnit) Element(org.w3c.dom.Element) ParserConfigurationException(javax.xml.parsers.ParserConfigurationException) Collections(java.util.Collections) StandardReportingInitializationContext(org.apache.nifi.controller.reporting.StandardReportingInitializationContext) ConnectableDTO(org.apache.nifi.web.api.dto.ConnectableDTO) InputStream(java.io.InputStream)
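
One detail of sync() worth calling out: when the legacy maxThreadCount element is present, it is split two thirds to timer-driven threads and one third to event-driven threads; otherwise the two explicit counts are read directly. The following stand-alone sketch reproduces that arithmetic with plain DOM parsing; the sample XML and the getInteger helper are illustrative stand-ins, not the NiFi utilities themselves.

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

public class ThreadCountSplitSketch {

    public static void main(String[] args) throws Exception {
        // Sample flowController root, illustrative only.
        final String xml = "<flowController><maxThreadCount>30</maxThreadCount></flowController>";
        final Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
        final Element root = doc.getDocumentElement();

        final Integer maxThreadCount = getInteger(root, "maxThreadCount");
        if (maxThreadCount == null) {
            // Newer flows carry separate maxTimerDrivenThreadCount / maxEventDrivenThreadCount elements.
            System.out.println("No legacy maxThreadCount element present");
        } else {
            // Same split as StandardFlowSynchronizer.sync(): 2/3 timer-driven, 1/3 event-driven.
            final int timerDriven = maxThreadCount * 2 / 3;
            final int eventDriven = maxThreadCount / 3;
            System.out.println("timer-driven=" + timerDriven + ", event-driven=" + eventDriven);
        }
    }

    // Minimal stand-in for the getInteger(Element, String) helper used in the snippet above.
    private static Integer getInteger(final Element element, final String childName) {
        final NodeList children = element.getElementsByTagName(childName);
        if (children.getLength() == 0) {
            return null;
        }
        return Integer.valueOf(children.item(0).getTextContent().trim());
    }
}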

Example 20 with DataFlow

Use of org.apache.nifi.cluster.protocol.DataFlow in project nifi by apache.

From the class TestJaxbProtocolUtils, method testRoundTripConnectionResponse.

@Test
public void testRoundTripConnectionResponse() throws JAXBException {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final ConnectionResponseMessage msg = new ConnectionResponseMessage();
    final NodeIdentifier nodeId = new NodeIdentifier("id", "localhost", 8000, "localhost", 8001, "localhost", 8002, 8003, true);
    final DataFlow dataFlow = new StandardDataFlow(new byte[0], new byte[0], new byte[0], new HashSet<>());
    final List<NodeConnectionStatus> nodeStatuses = Collections.singletonList(new NodeConnectionStatus(nodeId, DisconnectionCode.NOT_YET_CONNECTED));
    final List<ComponentRevision> componentRevisions = Collections.singletonList(ComponentRevision.fromRevision(new Revision(8L, "client-1", "component-1")));
    msg.setConnectionResponse(new ConnectionResponse(nodeId, dataFlow, "instance-1", nodeStatuses, componentRevisions));
    JaxbProtocolUtils.JAXB_CONTEXT.createMarshaller().marshal(msg, baos);
    final Object unmarshalled = JaxbProtocolUtils.JAXB_CONTEXT.createUnmarshaller().unmarshal(new ByteArrayInputStream(baos.toByteArray()));
    assertTrue(unmarshalled instanceof ConnectionResponseMessage);
    final ConnectionResponseMessage unmarshalledMsg = (ConnectionResponseMessage) unmarshalled;
    final List<ComponentRevision> revisions = msg.getConnectionResponse().getComponentRevisions();
    assertEquals(1, revisions.size());
    assertEquals(8L, revisions.get(0).getVersion().longValue());
    assertEquals("client-1", revisions.get(0).getClientId());
    assertEquals("component-1", revisions.get(0).getComponentId());
    assertEquals(revisions, unmarshalledMsg.getConnectionResponse().getComponentRevisions());
}
Also used : ByteArrayOutputStream(java.io.ByteArrayOutputStream) ConnectionResponse(org.apache.nifi.cluster.protocol.ConnectionResponse) DataFlow(org.apache.nifi.cluster.protocol.DataFlow) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) NodeConnectionStatus(org.apache.nifi.cluster.coordination.node.NodeConnectionStatus) ComponentRevision(org.apache.nifi.cluster.protocol.ComponentRevision) Revision(org.apache.nifi.web.Revision) ByteArrayInputStream(java.io.ByteArrayInputStream) NodeIdentifier(org.apache.nifi.cluster.protocol.NodeIdentifier) ConnectionResponseMessage(org.apache.nifi.cluster.protocol.message.ConnectionResponseMessage) Test(org.junit.Test)
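
The test above round-trips a ConnectionResponseMessage through NiFi's shared JaxbProtocolUtils.JAXB_CONTEXT. A generic JAXB round trip along the same lines follows, using a hypothetical @XmlRootElement class in place of the NiFi message types; it assumes a Java 8 style javax.xml.bind runtime on the classpath.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.annotation.XmlRootElement;

public class JaxbRoundTripSketch {

    // Hypothetical payload type, standing in for ConnectionResponseMessage.
    @XmlRootElement
    public static class Greeting {
        public String text;
    }

    public static void main(String[] args) throws JAXBException {
        final JAXBContext context = JAXBContext.newInstance(Greeting.class);

        final Greeting original = new Greeting();
        original.text = "hello";

        // Marshal to bytes, as the test does with JaxbProtocolUtils.JAXB_CONTEXT.
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        context.createMarshaller().marshal(original, baos);

        // Unmarshal from those bytes and verify the round trip.
        final Object unmarshalled = context.createUnmarshaller()
                .unmarshal(new ByteArrayInputStream(baos.toByteArray()));
        final Greeting copy = (Greeting) unmarshalled;
        System.out.println("round-tripped text = " + copy.text);
    }
}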

Aggregations

DataFlow (org.apache.nifi.cluster.protocol.DataFlow)20 Test (org.junit.Test)14 StandardDataFlow (org.apache.nifi.cluster.protocol.StandardDataFlow)11 FlowSynchronizer (org.apache.nifi.controller.serialization.FlowSynchronizer)9 FingerprintFactory (org.apache.nifi.fingerprint.FingerprintFactory)6 IOException (java.io.IOException)5 ProcessGroup (org.apache.nifi.groups.ProcessGroup)5 InputStream (java.io.InputStream)4 HashSet (java.util.HashSet)4 FlowSynchronizationException (org.apache.nifi.controller.serialization.FlowSynchronizationException)4 ControllerServiceNode (org.apache.nifi.controller.service.ControllerServiceNode)4 StringEncryptor (org.apache.nifi.encrypt.StringEncryptor)4 ByteArrayOutputStream (java.io.ByteArrayOutputStream)3 FileInputStream (java.io.FileInputStream)3 URL (java.net.URL)3 StandardCharsets (java.nio.charset.StandardCharsets)3 ArrayList (java.util.ArrayList)3 Arrays (java.util.Arrays)3 Collections (java.util.Collections)3 HashMap (java.util.HashMap)3