Search in sources :

Example 11 with StandardDataFlow

Use of org.apache.nifi.cluster.protocol.StandardDataFlow in the Apache NiFi project.

From the class TestNodeClusterCoordinator, method testProposedIdentifierResolvedIfConflict.

/**
 * Verifies that when a second node requests to join with the same proposed node id ("1234")
 * but a different API port, the coordinator resolves the conflict: the first requester keeps
 * its proposed identifier, while the second is assigned a new id value with its originally
 * proposed addresses and ports preserved.
 */
@Test
public void testProposedIdentifierResolvedIfConflict() {
    final NodeIdentifier id1 = new NodeIdentifier("1234", "localhost", 8000, "localhost", 9000, "localhost", 10000, 11000, false);
    final NodeIdentifier conflictingId = new NodeIdentifier("1234", "localhost", 8001, "localhost", 9000, "localhost", 10000, 11000, false);
    final ConnectionRequest connectionRequest = new ConnectionRequest(id1, new StandardDataFlow(new byte[0], new byte[0], new byte[0], new HashSet<>()));
    final ConnectionRequestMessage crm = new ConnectionRequestMessage();
    crm.setConnectionRequest(connectionRequest);
    final ProtocolMessage response = coordinator.handle(crm);
    assertNotNull(response);
    assertTrue(response instanceof ConnectionResponseMessage);
    final ConnectionResponseMessage responseMessage = (ConnectionResponseMessage) response;
    final NodeIdentifier resolvedNodeId = responseMessage.getConnectionResponse().getNodeIdentifier();
    // The first requester keeps its proposed identifier unchanged.
    assertEquals(id1, resolvedNodeId);
    final ConnectionRequest conRequest2 = new ConnectionRequest(conflictingId, new StandardDataFlow(new byte[0], new byte[0], new byte[0], new HashSet<>()));
    final ConnectionRequestMessage crm2 = new ConnectionRequestMessage();
    crm2.setConnectionRequest(conRequest2);
    final ProtocolMessage conflictingResponse = coordinator.handle(crm2);
    assertNotNull(conflictingResponse);
    assertTrue(conflictingResponse instanceof ConnectionResponseMessage);
    final ConnectionResponseMessage conflictingResponseMessage = (ConnectionResponseMessage) conflictingResponse;
    final NodeIdentifier conflictingNodeId = conflictingResponseMessage.getConnectionResponse().getNodeIdentifier();
    // BUG FIX: the original used assertNotSame(id1.getId(), conflictingNodeId.getId()), which
    // only checks reference identity. Two distinct String instances satisfy it even when their
    // values are equal, so it never verified that a *different* id value was assigned. Assert
    // value inequality instead (using assertTrue, which is already statically imported).
    assertTrue(!id1.getId().equals(conflictingNodeId.getId()));
    assertEquals(conflictingId.getApiAddress(), conflictingNodeId.getApiAddress());
    assertEquals(conflictingId.getApiPort(), conflictingNodeId.getApiPort());
    assertEquals(conflictingId.getSiteToSiteAddress(), conflictingNodeId.getSiteToSiteAddress());
    assertEquals(conflictingId.getSiteToSitePort(), conflictingNodeId.getSiteToSitePort());
    assertEquals(conflictingId.getSocketAddress(), conflictingNodeId.getSocketAddress());
    assertEquals(conflictingId.getSocketPort(), conflictingNodeId.getSocketPort());
}
Also used : ConnectionRequest(org.apache.nifi.cluster.protocol.ConnectionRequest) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) ConnectionRequestMessage(org.apache.nifi.cluster.protocol.message.ConnectionRequestMessage) NodeIdentifier(org.apache.nifi.cluster.protocol.NodeIdentifier) ConnectionResponseMessage(org.apache.nifi.cluster.protocol.message.ConnectionResponseMessage) ProtocolMessage(org.apache.nifi.cluster.protocol.message.ProtocolMessage) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 12 with StandardDataFlow

Use of org.apache.nifi.cluster.protocol.StandardDataFlow in the Apache NiFi project.

From the class TestNodeClusterCoordinator, method setup.

@Before
public void setup() throws IOException {
    // Point NiFiProperties at the test configuration before anything reads it.
    System.setProperty(NiFiProperties.PROPERTIES_FILE_PATH, "src/test/resources/conf/nifi.properties");
    senderListener = Mockito.mock(ClusterCoordinationProtocolSenderListener.class);
    nodeStatuses = Collections.synchronizedList(new ArrayList<>());
    final EventReporter reporter = Mockito.mock(EventReporter.class);
    final RevisionManager revisions = Mockito.mock(RevisionManager.class);
    Mockito.when(revisions.getAllRevisions()).thenReturn(Collections.emptyList());
    // Override the broadcast hook so status changes are recorded locally rather than
    // sent to a (non-existent) cluster.
    coordinator = new NodeClusterCoordinator(senderListener, reporter, null, new FirstVoteWinsFlowElection(), null, revisions, createProperties(), null) {

        @Override
        void notifyOthersOfNodeStatusChange(NodeConnectionStatus updatedStatus, boolean notifyAllNodes, boolean waitForCoordinator) {
            nodeStatuses.add(updatedStatus);
        }
    };
    // Give the coordinator a FlowService that always hands back a small dummy data flow.
    final FlowService flowService = Mockito.mock(FlowService.class);
    final StandardDataFlow dummyFlow = new StandardDataFlow(new byte[50], new byte[50], new byte[50], new HashSet<>());
    Mockito.when(flowService.createDataFlow()).thenReturn(dummyFlow);
    coordinator.setFlowService(flowService);
}
Also used : StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) RevisionManager(org.apache.nifi.web.revision.RevisionManager) ArrayList(java.util.ArrayList) ClusterCoordinationProtocolSenderListener(org.apache.nifi.cluster.protocol.impl.ClusterCoordinationProtocolSenderListener) EventReporter(org.apache.nifi.events.EventReporter) FlowService(org.apache.nifi.services.FlowService) Before(org.junit.Before)

Example 13 with StandardDataFlow

Use of org.apache.nifi.cluster.protocol.StandardDataFlow in the Apache NiFi project.

From the class StandardFlowService, method loadFromBytes.

/**
 * Loads a flow into the controller. A null {@code proposedFlow} means "load whatever is
 * currently on disk". After loading, imports any file-based templates that are not already
 * present in the root group. The write lock must already be held by the caller.
 */
private void loadFromBytes(final DataFlow proposedFlow, final boolean allowEmptyFlow) throws IOException, FlowSerializationException, FlowSynchronizationException, UninheritableFlowException, MissingBundleException {
    logger.trace("Loading flow from bytes");
    final byte[] flowBytes;
    final byte[] authorizerFingerprint;
    final Set<String> missingComponents;
    // Resolve the flow to load: use the proposed flow when given, otherwise fall back to disk.
    if (proposedFlow != null) {
        flowBytes = proposedFlow.getFlow();
        authorizerFingerprint = proposedFlow.getAuthorizerFingerprint();
        missingComponents = proposedFlow.getMissingComponents();
        logger.debug("Loaded flow from proposed flow");
    } else {
        final ByteArrayOutputStream currentFlow = new ByteArrayOutputStream();
        copyCurrentFlow(currentFlow);
        flowBytes = currentFlow.toByteArray();
        authorizerFingerprint = getAuthorizerFingerprint();
        missingComponents = new HashSet<>();
        logger.debug("Loaded Flow from bytes");
    }
    final DataFlow actualProposedFlow = new StandardDataFlow(flowBytes, null, authorizerFingerprint, missingComponents);
    // Hand the resolved flow to the DAO for loading into the controller.
    logger.debug("Loading proposed flow into FlowController");
    dao.load(controller, actualProposedFlow);
    final ProcessGroup rootGroup = controller.getGroup(controller.getRootGroupId());
    if (rootGroup.isEmpty() && !allowEmptyFlow) {
        throw new FlowSynchronizationException("Failed to load flow because unable to connect to cluster and local flow is empty");
    }
    // Import any templates from disk that the root group does not already contain.
    for (final Template template : loadTemplates()) {
        final Template existing = rootGroup.getTemplate(template.getIdentifier());
        if (existing != null) {
            logger.info("Template '{}' was already present in Root Group so will not import from file", template.getDetails().getName());
            continue;
        }
        logger.info("Imported Template '{}' to Root Group", template.getDetails().getName());
        rootGroup.addTemplate(template);
    }
}
Also used : StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) FlowSynchronizationException(org.apache.nifi.controller.serialization.FlowSynchronizationException) ProcessGroup(org.apache.nifi.groups.ProcessGroup) ByteArrayOutputStream(java.io.ByteArrayOutputStream) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) DataFlow(org.apache.nifi.cluster.protocol.DataFlow)

Example 14 with StandardDataFlow

Use of org.apache.nifi.cluster.protocol.StandardDataFlow in the Apache NiFi project.

From the class StandardFlowSynchronizer, method sync.

/**
 * Synchronizes the given FlowController with the proposed flow.
 * <p>
 * First checks that the proposed flow is inheritable by the controller (flow fingerprint,
 * missing components, and authorizations), then applies it: sets thread counts, adds or
 * updates the root process group, preserves local templates absent from the proposed flow,
 * loads reporting tasks and controller services, replaces snippets, and inherits the
 * authorizer fingerprint when possible.
 *
 * @throws FlowSerializationException   if either flow cannot be serialized/parsed
 * @throws UninheritableFlowException   if the proposed flow or authorizations cannot be inherited
 * @throws FlowSynchronizationException if applying the proposed flow to the controller fails
 * @throws MissingBundleException       if a required bundle is missing
 */
@Override
public void sync(final FlowController controller, final DataFlow proposedFlow, final StringEncryptor encryptor) throws FlowSerializationException, UninheritableFlowException, FlowSynchronizationException, MissingBundleException {
    // handle corner cases involving no proposed flow
    if (proposedFlow == null) {
        if (controller.getGroup(controller.getRootGroupId()).isEmpty()) {
            // no sync to perform
            return;
        } else {
            throw new UninheritableFlowException("Proposed configuration is empty, but the controller contains a data flow.");
        }
    }
    // determine if the controller already had flow sync'd to it
    final boolean flowAlreadySynchronized = controller.isFlowSynchronized();
    logger.debug("Synching FlowController with proposed flow: Controller is Already Synchronized = {}", flowAlreadySynchronized);
    // serialize controller state to bytes
    final byte[] existingFlow;
    final boolean existingFlowEmpty;
    try {
        if (flowAlreadySynchronized) {
            existingFlow = toBytes(controller);
            existingFlowEmpty = controller.getGroup(controller.getRootGroupId()).isEmpty() && controller.getAllReportingTasks().isEmpty() && controller.getAllControllerServices().isEmpty() && controller.getFlowRegistryClient().getRegistryIdentifiers().isEmpty();
        } else {
            existingFlow = readFlowFromDisk();
            if (existingFlow == null || existingFlow.length == 0) {
                existingFlowEmpty = true;
            } else {
                final Document document = parseFlowBytes(existingFlow);
                final Element rootElement = document.getDocumentElement();
                final FlowEncodingVersion encodingVersion = FlowEncodingVersion.parse(rootElement);
                logger.trace("Setting controller thread counts");
                final Integer maxThreadCount = getInteger(rootElement, "maxThreadCount");
                if (maxThreadCount == null) {
                    controller.setMaxTimerDrivenThreadCount(getInt(rootElement, "maxTimerDrivenThreadCount"));
                    controller.setMaxEventDrivenThreadCount(getInt(rootElement, "maxEventDrivenThreadCount"));
                } else {
                    controller.setMaxTimerDrivenThreadCount(maxThreadCount * 2 / 3);
                    controller.setMaxEventDrivenThreadCount(maxThreadCount / 3);
                }
                final Element reportingTasksElement = DomUtils.getChild(rootElement, "reportingTasks");
                final List<Element> taskElements;
                if (reportingTasksElement == null) {
                    taskElements = Collections.emptyList();
                } else {
                    taskElements = DomUtils.getChildElementsByTagName(reportingTasksElement, "reportingTask");
                }
                final Element controllerServicesElement = DomUtils.getChild(rootElement, "controllerServices");
                final List<Element> unrootedControllerServiceElements;
                if (controllerServicesElement == null) {
                    unrootedControllerServiceElements = Collections.emptyList();
                } else {
                    unrootedControllerServiceElements = DomUtils.getChildElementsByTagName(controllerServicesElement, "controllerService");
                }
                final boolean registriesPresent;
                final Element registriesElement = DomUtils.getChild(rootElement, "registries");
                if (registriesElement == null) {
                    registriesPresent = false;
                } else {
                    final List<Element> flowRegistryElems = DomUtils.getChildElementsByTagName(registriesElement, "flowRegistry");
                    registriesPresent = !flowRegistryElems.isEmpty();
                }
                logger.trace("Parsing process group from DOM");
                final Element rootGroupElement = (Element) rootElement.getElementsByTagName("rootGroup").item(0);
                final ProcessGroupDTO rootGroupDto = FlowFromDOMFactory.getProcessGroup(null, rootGroupElement, encryptor, encodingVersion);
                existingFlowEmpty = taskElements.isEmpty() && unrootedControllerServiceElements.isEmpty() && isEmpty(rootGroupDto) && !registriesPresent;
                logger.debug("Existing Flow Empty = {}", existingFlowEmpty);
            }
        }
    } catch (final IOException e) {
        throw new FlowSerializationException(e);
    }
    logger.trace("Exporting snippets from controller");
    final byte[] existingSnippets = controller.getSnippetManager().export();
    logger.trace("Getting Authorizer fingerprint from controller");
    final byte[] existingAuthFingerprint;
    final ManagedAuthorizer managedAuthorizer;
    final Authorizer authorizer = controller.getAuthorizer();
    if (AuthorizerCapabilityDetection.isManagedAuthorizer(authorizer)) {
        managedAuthorizer = (ManagedAuthorizer) authorizer;
        existingAuthFingerprint = managedAuthorizer.getFingerprint().getBytes(StandardCharsets.UTF_8);
    } else {
        existingAuthFingerprint = null;
        managedAuthorizer = null;
    }
    final Set<String> missingComponents = new HashSet<>();
    controller.getAllControllerServices().stream().filter(cs -> cs.isExtensionMissing()).forEach(cs -> missingComponents.add(cs.getIdentifier()));
    controller.getAllReportingTasks().stream().filter(r -> r.isExtensionMissing()).forEach(r -> missingComponents.add(r.getIdentifier()));
    controller.getRootGroup().findAllProcessors().stream().filter(p -> p.isExtensionMissing()).forEach(p -> missingComponents.add(p.getIdentifier()));
    final DataFlow existingDataFlow = new StandardDataFlow(existingFlow, existingSnippets, existingAuthFingerprint, missingComponents);
    Document configuration = null;
    // check that the proposed flow is inheritable by the controller
    try {
        if (existingFlowEmpty) {
            configuration = parseFlowBytes(proposedFlow.getFlow());
            if (configuration != null) {
                logger.trace("Checking bundle compatibility");
                checkBundleCompatibility(configuration);
            }
        } else {
            logger.trace("Checking flow inheritability");
            final String problemInheritingFlow = checkFlowInheritability(existingDataFlow, proposedFlow, controller);
            if (problemInheritingFlow != null) {
                throw new UninheritableFlowException("Proposed configuration is not inheritable by the flow controller because of flow differences: " + problemInheritingFlow);
            }
        }
    } catch (final FingerprintException fe) {
        throw new FlowSerializationException("Failed to generate flow fingerprints", fe);
    }
    logger.trace("Checking missing component inheritability");
    final String problemInheritingMissingComponents = checkMissingComponentsInheritability(existingDataFlow, proposedFlow);
    if (problemInheritingMissingComponents != null) {
        throw new UninheritableFlowException("Proposed Flow is not inheritable by the flow controller because of differences in missing components: " + problemInheritingMissingComponents);
    }
    logger.trace("Checking authorizer inheritability");
    final AuthorizerInheritability authInheritability = checkAuthorizerInheritability(authorizer, existingDataFlow, proposedFlow);
    if (!authInheritability.isInheritable() && authInheritability.getReason() != null) {
        throw new UninheritableFlowException("Proposed Authorizer is not inheritable by the flow controller because of Authorizer differences: " + authInheritability.getReason());
    }
    // create document by parsing proposed flow bytes
    logger.trace("Parsing proposed flow bytes as DOM document");
    if (configuration == null) {
        configuration = parseFlowBytes(proposedFlow.getFlow());
    }
    // attempt to sync controller with proposed flow
    try {
        if (configuration != null) {
            synchronized (configuration) {
                // get the root element
                final Element rootElement = (Element) configuration.getElementsByTagName("flowController").item(0);
                final FlowEncodingVersion encodingVersion = FlowEncodingVersion.parse(rootElement);
                // set controller config
                logger.trace("Updating flow config");
                final Integer maxThreadCount = getInteger(rootElement, "maxThreadCount");
                if (maxThreadCount == null) {
                    controller.setMaxTimerDrivenThreadCount(getInt(rootElement, "maxTimerDrivenThreadCount"));
                    controller.setMaxEventDrivenThreadCount(getInt(rootElement, "maxEventDrivenThreadCount"));
                } else {
                    controller.setMaxTimerDrivenThreadCount(maxThreadCount * 2 / 3);
                    controller.setMaxEventDrivenThreadCount(maxThreadCount / 3);
                }
                // get the root group XML element
                final Element rootGroupElement = (Element) rootElement.getElementsByTagName("rootGroup").item(0);
                if (!flowAlreadySynchronized || existingFlowEmpty) {
                    final Element registriesElement = DomUtils.getChild(rootElement, "registries");
                    if (registriesElement != null) {
                        final List<Element> flowRegistryElems = DomUtils.getChildElementsByTagName(registriesElement, "flowRegistry");
                        for (final Element flowRegistryElement : flowRegistryElems) {
                            final String registryId = getString(flowRegistryElement, "id");
                            final String registryName = getString(flowRegistryElement, "name");
                            final String registryUrl = getString(flowRegistryElement, "url");
                            final String description = getString(flowRegistryElement, "description");
                            final FlowRegistryClient client = controller.getFlowRegistryClient();
                            client.addFlowRegistry(registryId, registryName, registryUrl, description);
                        }
                    }
                }
                // if this controller isn't initialized or its empty, add the root group, otherwise update
                final ProcessGroup rootGroup;
                if (!flowAlreadySynchronized || existingFlowEmpty) {
                    logger.trace("Adding root process group");
                    rootGroup = addProcessGroup(controller, /* parent group */ null, rootGroupElement, encryptor, encodingVersion);
                } else {
                    logger.trace("Updating root process group");
                    rootGroup = updateProcessGroup(controller, /* parent group */ null, rootGroupElement, encryptor, encodingVersion);
                }
                rootGroup.findAllRemoteProcessGroups().forEach(RemoteProcessGroup::initialize);
                // If there are any Templates that do not exist in the Proposed Flow that do exist in the 'existing flow', we need
                // to ensure that we also add those to the appropriate Process Groups, so that we don't lose them.
                final Document existingFlowConfiguration = parseFlowBytes(existingFlow);
                if (existingFlowConfiguration != null) {
                    final Element existingRootElement = (Element) existingFlowConfiguration.getElementsByTagName("flowController").item(0);
                    if (existingRootElement != null) {
                        final Element existingRootGroupElement = (Element) existingRootElement.getElementsByTagName("rootGroup").item(0);
                        // BUG FIX: the original re-checked existingRootElement here (always true at this
                        // point), so a missing "rootGroup" element would pass null to addLocalTemplates.
                        // Guard on the element we actually just looked up.
                        if (existingRootGroupElement != null) {
                            final FlowEncodingVersion existingEncodingVersion = FlowEncodingVersion.parse(existingFlowConfiguration.getDocumentElement());
                            addLocalTemplates(existingRootGroupElement, rootGroup, existingEncodingVersion);
                        }
                    }
                }
                // get all the reporting task elements
                final Element reportingTasksElement = DomUtils.getChild(rootElement, "reportingTasks");
                final List<Element> reportingTaskElements = new ArrayList<>();
                if (reportingTasksElement != null) {
                    reportingTaskElements.addAll(DomUtils.getChildElementsByTagName(reportingTasksElement, "reportingTask"));
                }
                // get/create all the reporting task nodes and DTOs, but don't apply their scheduled state yet
                final Map<ReportingTaskNode, ReportingTaskDTO> reportingTaskNodesToDTOs = new HashMap<>();
                for (final Element taskElement : reportingTaskElements) {
                    final ReportingTaskDTO dto = FlowFromDOMFactory.getReportingTask(taskElement, encryptor);
                    final ReportingTaskNode reportingTask = getOrCreateReportingTask(controller, dto, flowAlreadySynchronized, existingFlowEmpty);
                    reportingTaskNodesToDTOs.put(reportingTask, dto);
                }
                final Element controllerServicesElement = DomUtils.getChild(rootElement, "controllerServices");
                if (controllerServicesElement != null) {
                    final List<Element> serviceElements = DomUtils.getChildElementsByTagName(controllerServicesElement, "controllerService");
                    if (!flowAlreadySynchronized || existingFlowEmpty) {
                        // If the encoding version is null, we are loading a flow from NiFi 0.x, where Controller
                        // Services could not be scoped by Process Group. As a result, we want to move the Process Groups
                        // to the root Group. Otherwise, we want to use a null group, which indicates a Controller-level
                        // Controller Service.
                        final ProcessGroup group = (encodingVersion == null) ? rootGroup : null;
                        final Map<ControllerServiceNode, Element> controllerServices = ControllerServiceLoader.loadControllerServices(serviceElements, controller, group, encryptor);
                        // reference them, and if so we need to clone the CS and update the reporting task reference
                        if (group != null) {
                            // find all the controller service ids referenced by reporting tasks
                            final Set<String> controllerServicesInReportingTasks = reportingTaskNodesToDTOs.keySet().stream().flatMap(r -> r.getProperties().entrySet().stream()).filter(e -> e.getKey().getControllerServiceDefinition() != null).map(e -> e.getValue()).collect(Collectors.toSet());
                            // find the controller service nodes for each id referenced by a reporting task
                            final Set<ControllerServiceNode> controllerServicesToClone = controllerServices.keySet().stream().filter(cs -> controllerServicesInReportingTasks.contains(cs.getIdentifier())).collect(Collectors.toSet());
                            // clone the controller services and map the original id to the clone
                            final Map<String, ControllerServiceNode> controllerServiceMapping = new HashMap<>();
                            for (ControllerServiceNode controllerService : controllerServicesToClone) {
                                final ControllerServiceNode clone = ControllerServiceLoader.cloneControllerService(controller, controllerService);
                                controller.addRootControllerService(clone);
                                controllerServiceMapping.put(controllerService.getIdentifier(), clone);
                            }
                            // update the reporting tasks to reference the cloned controller services
                            updateReportingTaskControllerServices(reportingTaskNodesToDTOs.keySet(), controllerServiceMapping);
                            // enable all the cloned controller services
                            ControllerServiceLoader.enableControllerServices(controllerServiceMapping.values(), controller, autoResumeState);
                        }
                        // enable all the original controller services
                        ControllerServiceLoader.enableControllerServices(controllerServices, controller, encryptor, autoResumeState);
                    }
                }
                scaleRootGroup(rootGroup, encodingVersion);
                // now that controller services are loaded and enabled we can apply the scheduled state to each reporting task
                for (Map.Entry<ReportingTaskNode, ReportingTaskDTO> entry : reportingTaskNodesToDTOs.entrySet()) {
                    applyReportingTaskScheduleState(controller, entry.getValue(), entry.getKey(), flowAlreadySynchronized, existingFlowEmpty);
                }
            }
        }
        // clear the snippets that are currently in memory
        logger.trace("Clearing existing snippets");
        final SnippetManager snippetManager = controller.getSnippetManager();
        snippetManager.clear();
        // if proposed flow has any snippets, load them
        logger.trace("Loading proposed snippets");
        final byte[] proposedSnippets = proposedFlow.getSnippets();
        if (proposedSnippets != null && proposedSnippets.length > 0) {
            for (final StandardSnippet snippet : SnippetManager.parseBytes(proposedSnippets)) {
                snippetManager.addSnippet(snippet);
            }
        }
        // if auths are inheritable and we have a policy based authorizer, then inherit
        if (authInheritability.isInheritable() && managedAuthorizer != null) {
            logger.trace("Inheriting authorizations");
            final String proposedAuthFingerprint = new String(proposedFlow.getAuthorizerFingerprint(), StandardCharsets.UTF_8);
            managedAuthorizer.inheritFingerprint(proposedAuthFingerprint);
        }
        logger.debug("Finished syncing flows");
    } catch (final Exception ex) {
        throw new FlowSynchronizationException(ex);
    }
}
Also used : Arrays(java.util.Arrays) GZIPInputStream(java.util.zip.GZIPInputStream) Size(org.apache.nifi.connectable.Size) ConnectionDTO(org.apache.nifi.web.api.dto.ConnectionDTO) StringUtils(org.apache.commons.lang3.StringUtils) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) PositionDTO(org.apache.nifi.web.api.dto.PositionDTO) ProcessGroupDTO(org.apache.nifi.web.api.dto.ProcessGroupDTO) Document(org.w3c.dom.Document) Map(java.util.Map) FlowSerializationException(org.apache.nifi.controller.serialization.FlowSerializationException) RootGroupPort(org.apache.nifi.remote.RootGroupPort) Connectable(org.apache.nifi.connectable.Connectable) Connection(org.apache.nifi.connectable.Connection) Path(java.nio.file.Path) FunnelDTO(org.apache.nifi.web.api.dto.FunnelDTO) LoggingXmlParserErrorHandler(org.apache.nifi.util.LoggingXmlParserErrorHandler) FlowFilePrioritizer(org.apache.nifi.flowfile.FlowFilePrioritizer) FileUtils(org.apache.nifi.util.file.FileUtils) Set(java.util.Set) StandardFlowSerializer(org.apache.nifi.controller.serialization.StandardFlowSerializer) StandardCharsets(java.nio.charset.StandardCharsets) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) PortDTO(org.apache.nifi.web.api.dto.PortDTO) AuthorizerCapabilityDetection(org.apache.nifi.authorization.AuthorizerCapabilityDetection) Position(org.apache.nifi.connectable.Position) RemoteProcessGroup(org.apache.nifi.groups.RemoteProcessGroup) DocumentBuilderFactory(javax.xml.parsers.DocumentBuilderFactory) StandardVersionControlInformation(org.apache.nifi.registry.flow.StandardVersionControlInformation) ByteArrayOutputStream(java.io.ByteArrayOutputStream) SiteToSiteTransportProtocol(org.apache.nifi.remote.protocol.SiteToSiteTransportProtocol) ReportingInitializationContext(org.apache.nifi.reporting.ReportingInitializationContext) Schema(javax.xml.validation.Schema) CollectionUtils(org.apache.commons.collections4.CollectionUtils) ArrayList(java.util.ArrayList) 
Relationship(org.apache.nifi.processor.Relationship) VersionedFlowState(org.apache.nifi.registry.flow.VersionedFlowState) SchemaFactory(javax.xml.validation.SchemaFactory) Files(java.nio.file.Files) FlowEncodingVersion(org.apache.nifi.controller.serialization.FlowEncodingVersion) IOException(java.io.IOException) ExecutionNode(org.apache.nifi.scheduling.ExecutionNode) BulletinFactory(org.apache.nifi.events.BulletinFactory) Authorizer(org.apache.nifi.authorization.Authorizer) NiFiProperties(org.apache.nifi.util.NiFiProperties) FlowFromDOMFactory(org.apache.nifi.controller.serialization.FlowFromDOMFactory) DocumentBuilder(javax.xml.parsers.DocumentBuilder) Severity(org.apache.nifi.reporting.Severity) ProcessorInstantiationException(org.apache.nifi.controller.exception.ProcessorInstantiationException) ControllerServiceLoader(org.apache.nifi.controller.service.ControllerServiceLoader) ProcessGroup(org.apache.nifi.groups.ProcessGroup) BundleCoordinate(org.apache.nifi.bundle.BundleCoordinate) ProcessorConfigDTO(org.apache.nifi.web.api.dto.ProcessorConfigDTO) URL(java.net.URL) InitializationException(org.apache.nifi.reporting.InitializationException) ConnectableType(org.apache.nifi.connectable.ConnectableType) LoggerFactory(org.slf4j.LoggerFactory) Port(org.apache.nifi.connectable.Port) FingerprintException(org.apache.nifi.fingerprint.FingerprintException) BundleDTO(org.apache.nifi.web.api.dto.BundleDTO) ReportingTaskInstantiationException(org.apache.nifi.controller.reporting.ReportingTaskInstantiationException) FlowSynchronizer(org.apache.nifi.controller.serialization.FlowSynchronizer) LabelDTO(org.apache.nifi.web.api.dto.LabelDTO) ByteArrayInputStream(java.io.ByteArrayInputStream) TemplateDTO(org.apache.nifi.web.api.dto.TemplateDTO) SchedulingStrategy(org.apache.nifi.scheduling.SchedulingStrategy) Label(org.apache.nifi.controller.label.Label) FlowRegistryClient(org.apache.nifi.registry.flow.FlowRegistryClient) 
ControllerServiceDTO(org.apache.nifi.web.api.dto.ControllerServiceDTO) StandardOpenOption(java.nio.file.StandardOpenOption) RemoteProcessGroupPortDescriptor(org.apache.nifi.groups.RemoteProcessGroupPortDescriptor) BundleUtils(org.apache.nifi.util.BundleUtils) Collectors(java.util.stream.Collectors) List(java.util.List) UninheritableAuthorizationsException(org.apache.nifi.authorization.exception.UninheritableAuthorizationsException) FingerprintFactory(org.apache.nifi.fingerprint.FingerprintFactory) SAXException(org.xml.sax.SAXException) ProcessorDTO(org.apache.nifi.web.api.dto.ProcessorDTO) ControllerServiceState(org.apache.nifi.controller.service.ControllerServiceState) ReportingTaskDTO(org.apache.nifi.web.api.dto.ReportingTaskDTO) FlowSnippetDTO(org.apache.nifi.web.api.dto.FlowSnippetDTO) DataFlow(org.apache.nifi.cluster.protocol.DataFlow) RemoteProcessGroupDTO(org.apache.nifi.web.api.dto.RemoteProcessGroupDTO) Funnel(org.apache.nifi.connectable.Funnel) ControllerServiceNode(org.apache.nifi.controller.service.ControllerServiceNode) SimpleProcessLogger(org.apache.nifi.processor.SimpleProcessLogger) HashMap(java.util.HashMap) ComponentLog(org.apache.nifi.logging.ComponentLog) DomUtils(org.apache.nifi.util.DomUtils) HashSet(java.util.HashSet) FlowRegistry(org.apache.nifi.registry.flow.FlowRegistry) StringEncryptor(org.apache.nifi.encrypt.StringEncryptor) VersionControlInformationDTO(org.apache.nifi.web.api.dto.VersionControlInformationDTO) Node(org.w3c.dom.Node) XMLConstants(javax.xml.XMLConstants) LogLevel(org.apache.nifi.logging.LogLevel) FlowSynchronizationException(org.apache.nifi.controller.serialization.FlowSynchronizationException) ManagedAuthorizer(org.apache.nifi.authorization.ManagedAuthorizer) Logger(org.slf4j.Logger) NodeList(org.w3c.dom.NodeList) RemoteGroupPort(org.apache.nifi.remote.RemoteGroupPort) TimeUnit(java.util.concurrent.TimeUnit) Element(org.w3c.dom.Element) ParserConfigurationException(javax.xml.parsers.ParserConfigurationException) 
Collections(java.util.Collections) StandardReportingInitializationContext(org.apache.nifi.controller.reporting.StandardReportingInitializationContext) ConnectableDTO(org.apache.nifi.web.api.dto.ConnectableDTO) InputStream(java.io.InputStream) HashMap(java.util.HashMap) Element(org.w3c.dom.Element) FlowRegistryClient(org.apache.nifi.registry.flow.FlowRegistryClient) ArrayList(java.util.ArrayList) FlowEncodingVersion(org.apache.nifi.controller.serialization.FlowEncodingVersion) Document(org.w3c.dom.Document) Authorizer(org.apache.nifi.authorization.Authorizer) ManagedAuthorizer(org.apache.nifi.authorization.ManagedAuthorizer) ProcessGroupDTO(org.apache.nifi.web.api.dto.ProcessGroupDTO) RemoteProcessGroupDTO(org.apache.nifi.web.api.dto.RemoteProcessGroupDTO) FingerprintException(org.apache.nifi.fingerprint.FingerprintException) HashSet(java.util.HashSet) RemoteProcessGroup(org.apache.nifi.groups.RemoteProcessGroup) FlowSynchronizationException(org.apache.nifi.controller.serialization.FlowSynchronizationException) FlowSerializationException(org.apache.nifi.controller.serialization.FlowSerializationException) IOException(java.io.IOException) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) DataFlow(org.apache.nifi.cluster.protocol.DataFlow) FlowSerializationException(org.apache.nifi.controller.serialization.FlowSerializationException) IOException(java.io.IOException) ProcessorInstantiationException(org.apache.nifi.controller.exception.ProcessorInstantiationException) InitializationException(org.apache.nifi.reporting.InitializationException) FingerprintException(org.apache.nifi.fingerprint.FingerprintException) ReportingTaskInstantiationException(org.apache.nifi.controller.reporting.ReportingTaskInstantiationException) UninheritableAuthorizationsException(org.apache.nifi.authorization.exception.UninheritableAuthorizationsException) SAXException(org.xml.sax.SAXException) 
FlowSynchronizationException(org.apache.nifi.controller.serialization.FlowSynchronizationException) ParserConfigurationException(javax.xml.parsers.ParserConfigurationException) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) ControllerServiceNode(org.apache.nifi.controller.service.ControllerServiceNode) ManagedAuthorizer(org.apache.nifi.authorization.ManagedAuthorizer) RemoteProcessGroup(org.apache.nifi.groups.RemoteProcessGroup) ProcessGroup(org.apache.nifi.groups.ProcessGroup) ReportingTaskDTO(org.apache.nifi.web.api.dto.ReportingTaskDTO) Map(java.util.Map) HashMap(java.util.HashMap)

Example 15 with StandardDataFlow

Use of org.apache.nifi.cluster.protocol.StandardDataFlow in the Apache NiFi project.

From the class TestJaxbProtocolUtils, method testRoundTripConnectionResponse.

@Test
public void testRoundTripConnectionResponse() throws JAXBException {
    // Verifies that a ConnectionResponseMessage survives a JAXB marshal/unmarshal
    // round trip with its component revisions intact.
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final ConnectionResponseMessage msg = new ConnectionResponseMessage();
    final NodeIdentifier nodeId = new NodeIdentifier("id", "localhost", 8000, "localhost", 8001, "localhost", 8002, 8003, true);
    final DataFlow dataFlow = new StandardDataFlow(new byte[0], new byte[0], new byte[0], new HashSet<>());
    final List<NodeConnectionStatus> nodeStatuses = Collections.singletonList(new NodeConnectionStatus(nodeId, DisconnectionCode.NOT_YET_CONNECTED));
    final List<ComponentRevision> componentRevisions = Collections.singletonList(ComponentRevision.fromRevision(new Revision(8L, "client-1", "component-1")));
    msg.setConnectionResponse(new ConnectionResponse(nodeId, dataFlow, "instance-1", nodeStatuses, componentRevisions));

    JaxbProtocolUtils.JAXB_CONTEXT.createMarshaller().marshal(msg, baos);
    final Object unmarshalled = JaxbProtocolUtils.JAXB_CONTEXT.createUnmarshaller().unmarshal(new ByteArrayInputStream(baos.toByteArray()));
    assertTrue(unmarshalled instanceof ConnectionResponseMessage);
    final ConnectionResponseMessage unmarshalledMsg = (ConnectionResponseMessage) unmarshalled;

    // Assert against the unmarshalled copy, not the original message, so that the
    // round trip itself is what is being verified. (The original read these values
    // from 'msg', which would pass even if marshalling dropped the revisions.)
    final List<ComponentRevision> revisions = unmarshalledMsg.getConnectionResponse().getComponentRevisions();
    assertEquals(1, revisions.size());
    assertEquals(8L, revisions.get(0).getVersion().longValue());
    assertEquals("client-1", revisions.get(0).getClientId());
    assertEquals("component-1", revisions.get(0).getComponentId());
    // Cross-check the full list against what was originally sent.
    assertEquals(msg.getConnectionResponse().getComponentRevisions(), revisions);
}
Also used: ByteArrayOutputStream(java.io.ByteArrayOutputStream) ConnectionResponse(org.apache.nifi.cluster.protocol.ConnectionResponse) DataFlow(org.apache.nifi.cluster.protocol.DataFlow) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) NodeConnectionStatus(org.apache.nifi.cluster.coordination.node.NodeConnectionStatus) ComponentRevision(org.apache.nifi.cluster.protocol.ComponentRevision) Revision(org.apache.nifi.web.Revision) ComponentRevision(org.apache.nifi.cluster.protocol.ComponentRevision) StandardDataFlow(org.apache.nifi.cluster.protocol.StandardDataFlow) ByteArrayInputStream(java.io.ByteArrayInputStream) NodeIdentifier(org.apache.nifi.cluster.protocol.NodeIdentifier) ConnectionResponseMessage(org.apache.nifi.cluster.protocol.message.ConnectionResponseMessage) Test(org.junit.Test)

Aggregations

StandardDataFlow (org.apache.nifi.cluster.protocol.StandardDataFlow)15 HashSet (java.util.HashSet)10 ByteArrayOutputStream (java.io.ByteArrayOutputStream)9 Test (org.junit.Test)9 NodeIdentifier (org.apache.nifi.cluster.protocol.NodeIdentifier)5 StandardFlowSerializer (org.apache.nifi.controller.serialization.StandardFlowSerializer)5 FlowSerializer (org.apache.nifi.controller.serialization.FlowSerializer)4 ArrayList (java.util.ArrayList)3 ConnectionRequest (org.apache.nifi.cluster.protocol.ConnectionRequest)3 DataFlow (org.apache.nifi.cluster.protocol.DataFlow)3 ConnectionRequestMessage (org.apache.nifi.cluster.protocol.message.ConnectionRequestMessage)3 RevisionManager (org.apache.nifi.web.revision.RevisionManager)3 ByteArrayInputStream (java.io.ByteArrayInputStream)2 IOException (java.io.IOException)2 InputStream (java.io.InputStream)2 StandardCharsets (java.nio.charset.StandardCharsets)2 Files (java.nio.file.Files)2 Path (java.nio.file.Path)2 StandardOpenOption (java.nio.file.StandardOpenOption)2 Collections (java.util.Collections)2