Use of org.apache.nifi.registry.flow.VersionedPort in project nifi by apache.
From the class StandardProcessGroup, method addInputPort.
private Port addInputPort(final ProcessGroup destination, final VersionedPort proposed, final String componentIdSeed) {
    final Port port = flowController.createLocalInputPort(generateUuid(proposed.getIdentifier(), destination.getIdentifier(), componentIdSeed), proposed.getName());
    port.setVersionedComponentId(proposed.getIdentifier());
    destination.addInputPort(port);
    updatePort(port, proposed);
    return port;
}
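The identifier passed to createLocalInputPort comes from generateUuid(proposed.getIdentifier(), destination.getIdentifier(), componentIdSeed), whose implementation is not shown in this snippet. As a rough illustration of the idea, the hypothetical helper below derives a deterministic, seed-dependent identifier with a name-based UUID; treat it as a sketch of the concept, not as the actual generateUuid method.

import java.nio.charset.StandardCharsets;
import java.util.UUID;

public final class SeededIdExample {

    // Hypothetical helper: the same proposed id, destination id and seed always produce the same
    // UUID, so importing the same versioned flow with the same seed yields stable component ids.
    static String deterministicId(final String proposedId, final String destinationId, final String seed) {
        final String name = proposedId + "-" + destinationId + "-" + seed;
        return UUID.nameUUIDFromBytes(name.getBytes(StandardCharsets.UTF_8)).toString();
    }

    public static void main(final String[] args) {
        System.out.println(deterministicId("versioned-port-1", "destination-group-2", "seed-123"));
    }
}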
Use of org.apache.nifi.registry.flow.VersionedPort in project nifi by apache.
From the class StandardProcessGroup, method verifyCanUpdate.
@Override
public void verifyCanUpdate(final VersionedFlowSnapshot updatedFlow, final boolean verifyConnectionRemoval, final boolean verifyNotDirty) {
    readLock.lock();
    try {
        final VersionControlInformation versionControlInfo = getVersionControlInformation();
        if (versionControlInfo != null) {
            if (!versionControlInfo.getFlowIdentifier().equals(updatedFlow.getSnapshotMetadata().getFlowIdentifier())) {
                throw new IllegalStateException(this + " is under version control but the given flow does not match the flow that this Process Group is synchronized with");
            }
            if (verifyNotDirty) {
                final VersionedFlowState flowState = versionControlInfo.getStatus().getState();
                final boolean modified = flowState == VersionedFlowState.LOCALLY_MODIFIED || flowState == VersionedFlowState.LOCALLY_MODIFIED_AND_STALE;
                final Set<FlowDifference> modifications = getModifications();
                if (modified) {
                    final String changes = modifications.stream().map(FlowDifference::toString).collect(Collectors.joining("\n"));
                    LOG.error("Cannot change the Version of the flow for {} because the Process Group has been modified ({} modifications) " + "since it was last synchronized with the Flow Registry. The following differences were found:\n{}", this, modifications.size(), changes);
                    throw new IllegalStateException("Cannot change the Version of the flow for " + this + " because the Process Group has been modified (" + modifications.size() + " modifications) since it was last synchronized with the Flow Registry. The Process Group must be" + " reverted to its original form before changing the version.");
                }
            }
            verifyNoDescendantsWithLocalModifications("be updated");
        }
        final VersionedProcessGroup flowContents = updatedFlow.getFlowContents();
        if (verifyConnectionRemoval) {
            // Determine which Connections have been removed.
            final Map<String, Connection> removedConnectionByVersionedId = new HashMap<>();
            // Populate the 'removedConnectionByVersionedId' map with all Connections. We key off of the connection's VersionedComponentID
            // if it is populated. Otherwise, we key off of its actual ID. We do this because it allows us to then remove from this Map
            // any connection that does exist in the proposed flow. This results in us having a Map whose values are those Connections
            // that were removed. We can then check for any connections that have data in them. If any Connection is to be removed but
            // has data, then we should throw an IllegalStateException.
            findAllConnections().stream().forEach(conn -> removedConnectionByVersionedId.put(conn.getVersionedComponentId().orElse(conn.getIdentifier()), conn));
            final Set<String> proposedFlowConnectionIds = new HashSet<>();
            findAllConnectionIds(flowContents, proposedFlowConnectionIds);
            for (final String proposedConnectionId : proposedFlowConnectionIds) {
                removedConnectionByVersionedId.remove(proposedConnectionId);
            }
            // If any connection that was removed has data in it, throw an IllegalStateException
            for (final Connection connection : removedConnectionByVersionedId.values()) {
                final FlowFileQueue flowFileQueue = connection.getFlowFileQueue();
                if (!flowFileQueue.isEmpty()) {
                    throw new IllegalStateException(this + " cannot be updated to the proposed version of the flow because the " + "proposed version does not contain " + connection + " and the connection currently has data in the queue.");
                }
            }
        }
        // Determine which input ports were removed from this process group
        final Map<String, Port> removedInputPortsByVersionId = new HashMap<>();
        getInputPorts().stream().filter(port -> port.getVersionedComponentId().isPresent()).forEach(port -> removedInputPortsByVersionId.put(port.getVersionedComponentId().get(), port));
        flowContents.getInputPorts().stream().map(VersionedPort::getIdentifier).forEach(id -> removedInputPortsByVersionId.remove(id));
        // Ensure that there are no incoming connections for any Input Port that was removed.
        for (final Port inputPort : removedInputPortsByVersionId.values()) {
            final List<Connection> incomingConnections = inputPort.getIncomingConnections();
            if (!incomingConnections.isEmpty()) {
throw new IllegalStateException(this + " cannot be updated to the proposed version of the flow because the proposed version does not contain the Input Port " + inputPort + " and the Input Port currently has an incoming connections");
            }
        }
        // Determine which output ports were removed from this process group
        final Map<String, Port> removedOutputPortsByVersionId = new HashMap<>();
        getOutputPorts().stream().filter(port -> port.getVersionedComponentId().isPresent()).forEach(port -> removedOutputPortsByVersionId.put(port.getVersionedComponentId().get(), port));
        flowContents.getOutputPorts().stream().map(VersionedPort::getIdentifier).forEach(id -> removedOutputPortsByVersionId.remove(id));
        // Ensure that there are no outgoing connections for any Output Port that was removed.
        for (final Port outputPort : removedOutputPortsByVersionId.values()) {
            final Set<Connection> outgoingConnections = outputPort.getConnections();
            if (!outgoingConnections.isEmpty()) {
throw new IllegalStateException(this + " cannot be updated to the proposed version of the flow because the proposed version does not contain the Output Port " + outputPort + " and the Output Port currently has an outgoing connections");
            }
        }
        // Find any Process Groups that may have been deleted. If we find any Process Group that was deleted, and that Process Group
        // has Templates, then we fail because the Templates have to be removed first.
        final Map<String, VersionedProcessGroup> proposedProcessGroups = new HashMap<>();
        findAllProcessGroups(updatedFlow.getFlowContents(), proposedProcessGroups);
        for (final ProcessGroup childGroup : findAllProcessGroups()) {
            if (childGroup.getTemplates().isEmpty()) {
                continue;
            }
            final Optional<String> versionedIdOption = childGroup.getVersionedComponentId();
            if (!versionedIdOption.isPresent()) {
                continue;
            }
            final String versionedId = versionedIdOption.get();
            if (!proposedProcessGroups.containsKey(versionedId)) {
                // Process Group was removed.
                throw new IllegalStateException(this + " cannot be updated to the proposed version of the flow because the child " + childGroup + " that exists locally has one or more Templates, and the proposed flow does not contain these templates. " + "A Process Group cannot be deleted while it contains Templates. Please remove the Templates before attempting to change the version of the flow.");
            }
        }
        // Ensure that all Processors are instantiate-able.
        final Map<String, VersionedProcessor> proposedProcessors = new HashMap<>();
        findAllProcessors(updatedFlow.getFlowContents(), proposedProcessors);
        findAllProcessors().stream().filter(proc -> proc.getVersionedComponentId().isPresent()).forEach(proc -> proposedProcessors.remove(proc.getVersionedComponentId().get()));
        for (final VersionedProcessor processorToAdd : proposedProcessors.values()) {
            final BundleCoordinate coordinate = toCoordinate(processorToAdd.getBundle());
            try {
                flowController.createProcessor(processorToAdd.getType(), UUID.randomUUID().toString(), coordinate, false);
            } catch (Exception e) {
                throw new IllegalArgumentException("Unable to create Processor of type " + processorToAdd.getType(), e);
            }
        }
        // Ensure that all Controller Services are instantiate-able.
        final Map<String, VersionedControllerService> proposedServices = new HashMap<>();
        findAllControllerServices(updatedFlow.getFlowContents(), proposedServices);
        findAllControllerServices().stream().filter(service -> service.getVersionedComponentId().isPresent()).forEach(service -> proposedServices.remove(service.getVersionedComponentId().get()));
        for (final VersionedControllerService serviceToAdd : proposedServices.values()) {
            final BundleCoordinate coordinate = toCoordinate(serviceToAdd.getBundle());
            try {
                flowController.createControllerService(serviceToAdd.getType(), UUID.randomUUID().toString(), coordinate, Collections.emptySet(), false);
            } catch (Exception e) {
                throw new IllegalArgumentException("Unable to create Controller Service of type " + serviceToAdd.getType(), e);
            }
        }
        // Ensure that all Prioritizers are instantiate-able.
        final Map<String, VersionedConnection> proposedConnections = new HashMap<>();
        findAllConnections(updatedFlow.getFlowContents(), proposedConnections);
        findAllConnections().stream().filter(conn -> conn.getVersionedComponentId().isPresent()).forEach(conn -> proposedConnections.remove(conn.getVersionedComponentId().get()));
        for (final VersionedConnection connectionToAdd : proposedConnections.values()) {
            if (connectionToAdd.getPrioritizers() != null) {
                for (final String prioritizerType : connectionToAdd.getPrioritizers()) {
                    try {
                        flowController.createPrioritizer(prioritizerType);
                    } catch (Exception e) {
                        throw new IllegalArgumentException("Unable to create Prioritizer of type " + prioritizerType, e);
                    }
                }
            }
        }
    } finally {
        readLock.unlock();
    }
}
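The same removal-detection pattern appears several times in this method: build a map of all local components keyed by their versioned component id (falling back to the local id), strip out every id that the proposed flow still contains, and treat whatever is left as removed. The following standalone sketch isolates that pattern; the Component interface is a made-up stand-in for NiFi's Connection and Port types, used here only so the example compiles on its own.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

public final class RemovalDetectionExample {

    // Made-up stand-in for a NiFi component that may or may not carry a versioned id.
    interface Component {
        String getIdentifier();
        Optional<String> getVersionedComponentId();
    }

    // Returns the local components that the proposed flow no longer contains.
    static Map<String, Component> findRemoved(final List<Component> localComponents, final Set<String> proposedIds) {
        final Map<String, Component> removedByVersionedId = new HashMap<>();
        for (final Component component : localComponents) {
            removedByVersionedId.put(component.getVersionedComponentId().orElse(component.getIdentifier()), component);
        }
        proposedIds.forEach(removedByVersionedId::remove);
        return removedByVersionedId;
    }
}

Each leftover entry can then be checked for anything that would make removal unsafe, such as a non-empty FlowFile queue or a live incoming connection, exactly as the verification above does.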
Use of org.apache.nifi.registry.flow.VersionedPort in project nifi by apache.
From the class StandardProcessGroup, method updateProcessGroup.
private void updateProcessGroup(final ProcessGroup group, final VersionedProcessGroup proposed, final String componentIdSeed, final Set<String> updatedVersionedComponentIds, final boolean updatePosition, final boolean updateName, final boolean updateDescendantVersionedGroups, final Set<String> variablesToSkip) throws ProcessorInstantiationException {
    group.setComments(proposed.getComments());
    if (updateName) {
        group.setName(proposed.getName());
    }
    if (updatePosition && proposed.getPosition() != null) {
        group.setPosition(new Position(proposed.getPosition().getX(), proposed.getPosition().getY()));
    }
    // Determine which variables have been added/removed and add/remove them from this group's variable registry.
    // We don't worry about whether a variable value has changed, because variables are designed to be 'environment specific.'
    // As a result, once imported, we won't update variables to match the remote flow, but we will add any missing variables
    // and remove any variables that are no longer part of the remote flow.
    final Set<String> existingVariableNames = group.getVariableRegistry().getVariableMap().keySet().stream().map(VariableDescriptor::getName).collect(Collectors.toSet());
    final Map<String, String> updatedVariableMap = new HashMap<>();
    // If any new variables exist in the proposed flow, add those to the variable registry.
    for (final Map.Entry<String, String> entry : proposed.getVariables().entrySet()) {
        if (!existingVariableNames.contains(entry.getKey()) && !variablesToSkip.contains(entry.getKey())) {
            updatedVariableMap.put(entry.getKey(), entry.getValue());
        }
    }
    group.setVariables(updatedVariableMap);
    final VersionedFlowCoordinates remoteCoordinates = proposed.getVersionedFlowCoordinates();
    if (remoteCoordinates == null) {
        group.disconnectVersionControl(false);
    } else {
        final String registryId = flowController.getFlowRegistryClient().getFlowRegistryId(remoteCoordinates.getRegistryUrl());
        final String bucketId = remoteCoordinates.getBucketId();
        final String flowId = remoteCoordinates.getFlowId();
        final int version = remoteCoordinates.getVersion();
        final FlowRegistry flowRegistry = flowController.getFlowRegistryClient().getFlowRegistry(registryId);
        final String registryName = flowRegistry == null ? registryId : flowRegistry.getName();
        final VersionedFlowState flowState = remoteCoordinates.getLatest() ? VersionedFlowState.UP_TO_DATE : VersionedFlowState.STALE;
        final VersionControlInformation vci = new StandardVersionControlInformation.Builder().registryId(registryId).registryName(registryName).bucketId(bucketId).bucketName(bucketId).flowId(flowId).flowName(flowId).version(version).flowSnapshot(proposed).status(new StandardVersionedFlowStatus(flowState, flowState.getDescription())).build();
        group.setVersionControlInformation(vci, Collections.emptyMap());
    }
    // Controller Services
    // Controller Services have to be handled a bit differently than other components. This is because Processors and Controller
    // Services may reference other Controller Services. Since we may be adding Service A, which depends on Service B, before adding
    // Service B, we need to ensure that we create all Controller Services first and then call updateControllerService for each
    // Controller Service. This way, we ensure that all services have been created before setting the properties. This allows us to
    // properly obtain the correct mapping of Controller Service VersionedComponentID to Controller Service instance id.
    final Map<String, ControllerServiceNode> servicesByVersionedId = group.getControllerServices(false).stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> controllerServicesRemoved = new HashSet<>(servicesByVersionedId.keySet());
    final Map<ControllerServiceNode, VersionedControllerService> services = new HashMap<>();
    // Add any Controller Service that does not yet exist.
    for (final VersionedControllerService proposedService : proposed.getControllerServices()) {
        ControllerServiceNode service = servicesByVersionedId.get(proposedService.getIdentifier());
        if (service == null) {
            service = addControllerService(group, proposedService, componentIdSeed);
            LOG.info("Added {} to {}", service, this);
        }
        services.put(service, proposedService);
    }
    // Update all of the Controller Services to match the VersionedControllerService
    for (final Map.Entry<ControllerServiceNode, VersionedControllerService> entry : services.entrySet()) {
        final ControllerServiceNode service = entry.getKey();
        final VersionedControllerService proposedService = entry.getValue();
        if (updatedVersionedComponentIds.contains(proposedService.getIdentifier())) {
            updateControllerService(service, proposedService);
            LOG.info("Updated {}", service);
        }
        controllerServicesRemoved.remove(proposedService.getIdentifier());
    }
    // Child groups
    final Map<String, ProcessGroup> childGroupsByVersionedId = group.getProcessGroups().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> childGroupsRemoved = new HashSet<>(childGroupsByVersionedId.keySet());
    for (final VersionedProcessGroup proposedChildGroup : proposed.getProcessGroups()) {
        final ProcessGroup childGroup = childGroupsByVersionedId.get(proposedChildGroup.getIdentifier());
        final VersionedFlowCoordinates childCoordinates = proposedChildGroup.getVersionedFlowCoordinates();
        if (childGroup == null) {
            final ProcessGroup added = addProcessGroup(group, proposedChildGroup, componentIdSeed, variablesToSkip);
            flowController.onProcessGroupAdded(added);
            added.findAllRemoteProcessGroups().stream().forEach(RemoteProcessGroup::initialize);
            LOG.info("Added {} to {}", added, this);
        } else if (childCoordinates == null || updateDescendantVersionedGroups) {
            updateProcessGroup(childGroup, proposedChildGroup, componentIdSeed, updatedVersionedComponentIds, true, true, updateDescendantVersionedGroups, variablesToSkip);
            LOG.info("Updated {}", childGroup);
        }
        childGroupsRemoved.remove(proposedChildGroup.getIdentifier());
    }
    // Funnels
    final Map<String, Funnel> funnelsByVersionedId = group.getFunnels().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> funnelsRemoved = new HashSet<>(funnelsByVersionedId.keySet());
    for (final VersionedFunnel proposedFunnel : proposed.getFunnels()) {
        final Funnel funnel = funnelsByVersionedId.get(proposedFunnel.getIdentifier());
        if (funnel == null) {
            final Funnel added = addFunnel(group, proposedFunnel, componentIdSeed);
            flowController.onFunnelAdded(added);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedFunnel.getIdentifier())) {
            updateFunnel(funnel, proposedFunnel);
            LOG.info("Updated {}", funnel);
        } else {
            funnel.setPosition(new Position(proposedFunnel.getPosition().getX(), proposedFunnel.getPosition().getY()));
        }
        funnelsRemoved.remove(proposedFunnel.getIdentifier());
    }
    // Input Ports
    final Map<String, Port> inputPortsByVersionedId = group.getInputPorts().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> inputPortsRemoved = new HashSet<>(inputPortsByVersionedId.keySet());
    for (final VersionedPort proposedPort : proposed.getInputPorts()) {
        final Port port = inputPortsByVersionedId.get(proposedPort.getIdentifier());
        if (port == null) {
            final Port added = addInputPort(group, proposedPort, componentIdSeed);
            flowController.onInputPortAdded(added);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedPort.getIdentifier())) {
            updatePort(port, proposedPort);
            LOG.info("Updated {}", port);
        } else {
            port.setPosition(new Position(proposedPort.getPosition().getX(), proposedPort.getPosition().getY()));
        }
        inputPortsRemoved.remove(proposedPort.getIdentifier());
    }
    // Output Ports
    final Map<String, Port> outputPortsByVersionedId = group.getOutputPorts().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> outputPortsRemoved = new HashSet<>(outputPortsByVersionedId.keySet());
    for (final VersionedPort proposedPort : proposed.getOutputPorts()) {
        final Port port = outputPortsByVersionedId.get(proposedPort.getIdentifier());
        if (port == null) {
            final Port added = addOutputPort(group, proposedPort, componentIdSeed);
            flowController.onOutputPortAdded(added);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedPort.getIdentifier())) {
            updatePort(port, proposedPort);
            LOG.info("Updated {}", port);
        } else {
            port.setPosition(new Position(proposedPort.getPosition().getX(), proposedPort.getPosition().getY()));
        }
        outputPortsRemoved.remove(proposedPort.getIdentifier());
    }
    // Labels
    final Map<String, Label> labelsByVersionedId = group.getLabels().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> labelsRemoved = new HashSet<>(labelsByVersionedId.keySet());
    for (final VersionedLabel proposedLabel : proposed.getLabels()) {
        final Label label = labelsByVersionedId.get(proposedLabel.getIdentifier());
        if (label == null) {
            final Label added = addLabel(group, proposedLabel, componentIdSeed);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedLabel.getIdentifier())) {
            updateLabel(label, proposedLabel);
            LOG.info("Updated {}", label);
        } else {
            label.setPosition(new Position(proposedLabel.getPosition().getX(), proposedLabel.getPosition().getY()));
        }
        labelsRemoved.remove(proposedLabel.getIdentifier());
    }
    // Processors
    final Map<String, ProcessorNode> processorsByVersionedId = group.getProcessors().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> processorsRemoved = new HashSet<>(processorsByVersionedId.keySet());
    final Map<ProcessorNode, Set<Relationship>> autoTerminatedRelationships = new HashMap<>();
    for (final VersionedProcessor proposedProcessor : proposed.getProcessors()) {
        final ProcessorNode processor = processorsByVersionedId.get(proposedProcessor.getIdentifier());
        if (processor == null) {
            final ProcessorNode added = addProcessor(group, proposedProcessor, componentIdSeed);
            flowController.onProcessorAdded(added);
            final Set<Relationship> proposedAutoTerminated = proposedProcessor.getAutoTerminatedRelationships() == null ? Collections.emptySet() : proposedProcessor.getAutoTerminatedRelationships().stream().map(relName -> added.getRelationship(relName)).collect(Collectors.toSet());
            autoTerminatedRelationships.put(added, proposedAutoTerminated);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedProcessor.getIdentifier())) {
            updateProcessor(processor, proposedProcessor);
            final Set<Relationship> proposedAutoTerminated = proposedProcessor.getAutoTerminatedRelationships() == null ? Collections.emptySet() : proposedProcessor.getAutoTerminatedRelationships().stream().map(relName -> processor.getRelationship(relName)).collect(Collectors.toSet());
            if (!processor.getAutoTerminatedRelationships().equals(proposedAutoTerminated)) {
                autoTerminatedRelationships.put(processor, proposedAutoTerminated);
            }
            LOG.info("Updated {}", processor);
        } else {
            processor.setPosition(new Position(proposedProcessor.getPosition().getX(), proposedProcessor.getPosition().getY()));
        }
        processorsRemoved.remove(proposedProcessor.getIdentifier());
    }
    // Remote Groups
    final Map<String, RemoteProcessGroup> rpgsByVersionedId = group.getRemoteProcessGroups().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> rpgsRemoved = new HashSet<>(rpgsByVersionedId.keySet());
    for (final VersionedRemoteProcessGroup proposedRpg : proposed.getRemoteProcessGroups()) {
        final RemoteProcessGroup rpg = rpgsByVersionedId.get(proposedRpg.getIdentifier());
        if (rpg == null) {
            final RemoteProcessGroup added = addRemoteProcessGroup(group, proposedRpg, componentIdSeed);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedRpg.getIdentifier())) {
            updateRemoteProcessGroup(rpg, proposedRpg, componentIdSeed);
            LOG.info("Updated {}", rpg);
        } else {
            rpg.setPosition(new Position(proposedRpg.getPosition().getX(), proposedRpg.getPosition().getY()));
        }
        rpgsRemoved.remove(proposedRpg.getIdentifier());
    }
    // Connections
    final Map<String, Connection> connectionsByVersionedId = group.getConnections().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> connectionsRemoved = new HashSet<>(connectionsByVersionedId.keySet());
    for (final VersionedConnection proposedConnection : proposed.getConnections()) {
        final Connection connection = connectionsByVersionedId.get(proposedConnection.getIdentifier());
        if (connection == null) {
            final Connection added = addConnection(group, proposedConnection, componentIdSeed);
            flowController.onConnectionAdded(added);
            LOG.info("Added {} to {}", added, this);
        } else if (isUpdateable(connection)) {
            // If the connection needs to be updated, then the source and destination will already have
            // been stopped (else, the validation above would fail). So if the source or the destination is running,
            // then we know that we don't need to update the connection.
            updateConnection(connection, proposedConnection);
            LOG.info("Updated {}", connection);
        }
        connectionsRemoved.remove(proposedConnection.getIdentifier());
    }
    // Remove any Connection that is no longer present in the proposed flow. Connections must be removed
    // first because it is not possible to remove a component if it has a connection going to it!
    for (final String removedVersionedId : connectionsRemoved) {
        final Connection connection = connectionsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", connection, group);
        group.removeConnection(connection);
        flowController.onConnectionRemoved(connection);
    }
    // Once the appropriate connections have been removed, we may now update Processors' auto-terminated relationships.
    // We cannot do this above, in the 'updateProcessor' call because if a connection is removed and changed to auto-terminated,
    // then updating this in the updateProcessor call above would attempt to set the Relationship to being auto-terminated while a
    // Connection for that relationship exists. This will throw an Exception.
    autoTerminatedRelationships.forEach((proc, rels) -> proc.setAutoTerminatedRelationships(rels));
    // Remove all controller services no longer in use
    for (final String removedVersionedId : controllerServicesRemoved) {
        final ControllerServiceNode service = servicesByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", service, group);
        // Must remove Controller Service through Flow Controller in order to remove from cache
        flowController.removeControllerService(service);
    }
    for (final String removedVersionedId : funnelsRemoved) {
        final Funnel funnel = funnelsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", funnel, group);
        group.removeFunnel(funnel);
    }
    for (final String removedVersionedId : inputPortsRemoved) {
        final Port port = inputPortsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", port, group);
        group.removeInputPort(port);
    }
    for (final String removedVersionedId : outputPortsRemoved) {
        final Port port = outputPortsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", port, group);
        group.removeOutputPort(port);
    }
    for (final String removedVersionedId : labelsRemoved) {
        final Label label = labelsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", label, group);
        group.removeLabel(label);
    }
    for (final String removedVersionedId : processorsRemoved) {
        final ProcessorNode processor = processorsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", processor, group);
        group.removeProcessor(processor);
    }
    for (final String removedVersionedId : rpgsRemoved) {
        final RemoteProcessGroup rpg = rpgsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", rpg, group);
        group.removeRemoteProcessGroup(rpg);
    }
    for (final String removedVersionedId : childGroupsRemoved) {
        final ProcessGroup childGroup = childGroupsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", childGroup, group);
        group.removeProcessGroup(childGroup);
    }
}
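Every component family in this method (controller services, child groups, funnels, ports, labels, processors, remote groups and connections) is indexed the same way: by its versioned component id when one is present, otherwise by its local instance id. The generic helper below is only an illustration of that idiom; VersionedAware is a made-up interface so the sketch compiles on its own, whereas StandardProcessGroup simply repeats the inline expression for each concrete NiFi type.

import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

public final class VersionedKeyingExample {

    // Made-up stand-in for any NiFi component that may carry a versioned component id.
    interface VersionedAware {
        String getIdentifier();
        Optional<String> getVersionedComponentId();
    }

    // Key by the versioned component id when the component is under version control,
    // otherwise fall back to its local instance id.
    static <T extends VersionedAware> Map<String, T> byVersionedId(final Set<T> components) {
        return components.stream()
                .collect(Collectors.toMap(c -> c.getVersionedComponentId().orElse(c.getIdentifier()), Function.identity()));
    }
}

Keying by the versioned id first is what lets a locally instantiated component be matched back to its counterpart in the registry snapshot, while the local-id fallback keeps components that were never version-controlled from colliding under a missing key.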
Use of org.apache.nifi.registry.flow.VersionedPort in project nifi by apache.
From the class NiFiRegistryFlowMapper, method mapPort.
public VersionedPort mapPort(final Port port) {
    final VersionedPort versionedPort = new InstantiatedVersionedPort(port.getIdentifier(), port.getProcessGroupIdentifier());
    versionedPort.setIdentifier(getId(port.getVersionedComponentId(), port.getIdentifier()));
    versionedPort.setGroupIdentifier(getGroupId(port.getProcessGroupIdentifier()));
    versionedPort.setComments(port.getComments());
    versionedPort.setConcurrentlySchedulableTaskCount(port.getMaxConcurrentTasks());
    versionedPort.setName(port.getName());
    versionedPort.setPosition(mapPosition(port.getPosition()));
    versionedPort.setType(PortType.valueOf(port.getConnectableType().name()));
    return versionedPort;
}
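The identifier written into the VersionedPort comes from getId(port.getVersionedComponentId(), port.getIdentifier()), whose body is not part of this snippet. The sketch below only illustrates the Optional-based fallback idea that the call signature suggests (prefer an existing versioned id, otherwise derive one from the local instance id); treat the helper and its name-based UUID fallback as assumptions, not as the actual NiFiRegistryFlowMapper.getId implementation.

import java.nio.charset.StandardCharsets;
import java.util.Optional;
import java.util.UUID;

public final class IdResolutionExample {

    // Hypothetical helper: reuse the versioned id the component already carries, or derive a
    // deterministic one from the local instance id when the component has never been mapped.
    static String resolveId(final Optional<String> versionedComponentId, final String instanceId) {
        return versionedComponentId.orElseGet(() -> UUID.nameUUIDFromBytes(instanceId.getBytes(StandardCharsets.UTF_8)).toString());
    }
}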
Use of org.apache.nifi.registry.flow.VersionedPort in project nifi-minifi by apache.
From the class VersionedProcessGroupEnricher, method enrich.
public void enrich(final VersionedProcessGroup versionedProcessGroup) {
    List<VersionedProcessGroup> allVersionedProcessGroups = getAllVersionedProcessGroups(versionedProcessGroup);
    Set<VersionedRemoteProcessGroup> remoteProcessGroups = getAll(allVersionedProcessGroups, VersionedProcessGroup::getRemoteProcessGroups).collect(Collectors.toSet());
    Map<String, String> connectableNameMap = getAll(allVersionedProcessGroups, VersionedProcessGroup::getProcessors).collect(Collectors.toMap(VersionedComponent::getIdentifier, VersionedComponent::getName));
    Map<String, String> rpgIdToTargetIdMap = new HashMap<>();
    for (VersionedRemoteProcessGroup remoteProcessGroup : remoteProcessGroups) {
        final Set<VersionedRemoteGroupPort> rpgInputPorts = nullToEmpty(remoteProcessGroup.getInputPorts());
        final Set<VersionedRemoteGroupPort> rpgOutputPorts = nullToEmpty(remoteProcessGroup.getOutputPorts());
        // Map all port DTOs to their respective targetIds
        rpgIdToTargetIdMap.putAll(Stream.concat(rpgInputPorts.stream(), rpgOutputPorts.stream()).collect(Collectors.toMap(VersionedRemoteGroupPort::getIdentifier, VersionedRemoteGroupPort::getTargetId)));
        addConnectables(connectableNameMap, rpgInputPorts, VersionedRemoteGroupPort::getIdentifier, VersionedRemoteGroupPort::getIdentifier);
        addConnectables(connectableNameMap, rpgOutputPorts, VersionedRemoteGroupPort::getIdentifier, VersionedRemoteGroupPort::getIdentifier);
    }
    addConnectables(connectableNameMap, getAll(allVersionedProcessGroups, VersionedProcessGroup::getInputPorts).collect(Collectors.toList()), VersionedPort::getIdentifier, VersionedPort::getName);
    addConnectables(connectableNameMap, getAll(allVersionedProcessGroups, VersionedProcessGroup::getOutputPorts).collect(Collectors.toList()), VersionedPort::getIdentifier, VersionedPort::getName);
    final Set<VersionedConnection> connections = getAll(allVersionedProcessGroups, VersionedProcessGroup::getConnections).collect(Collectors.toSet());
    // Enrich connection endpoints using known names and overriding with targetIds for remote ports
    for (VersionedConnection connection : connections) {
        setName(connectableNameMap, connection.getSource(), rpgIdToTargetIdMap);
        setName(connectableNameMap, connection.getDestination(), rpgIdToTargetIdMap);
    }
    // Override any ids that are for Remote Ports to use their target Ids where available
    connections.stream().flatMap(connectionDTO -> Stream.of(connectionDTO.getSource(), connectionDTO.getDestination())).filter(connectable -> (connectable.getType() == ConnectableComponentType.REMOTE_OUTPUT_PORT || connectable.getType() == ConnectableComponentType.REMOTE_INPUT_PORT)).forEach(connectable -> connectable.setId(Optional.ofNullable(rpgIdToTargetIdMap.get(connectable.getId())).orElse(connectable.getId())));
    // Establish unique names for connections
    for (VersionedConnection connection : connections) {
        if (StringUtil.isNullOrEmpty(connection.getName())) {
            StringBuilder name = new StringBuilder();
            ConnectableComponent connectionSource = connection.getSource();
            name.append(determineValueForConnectable(connectionSource, rpgIdToTargetIdMap));
            name.append("/");
            if (connection.getSelectedRelationships() != null && connection.getSelectedRelationships().size() > 0) {
                name.append(connection.getSelectedRelationships().iterator().next());
            }
            name.append("/");
            ConnectableComponent connectionDestination = connection.getDestination();
            name.append(determineValueForConnectable(connectionDestination, rpgIdToTargetIdMap));
            connection.setName(name.toString());
        }
    }
    nullToEmpty(versionedProcessGroup.getProcessGroups()).stream().forEach(pg -> enrich(pg));
}
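When a connection has no explicit name, the enricher builds one of the form source/relationship/destination from the resolved endpoint names and the first selected relationship. The small sketch below shows just that formatting rule in isolation; the three String parameters stand in for the values that determineValueForConnectable(...) and getSelectedRelationships() supply in the real method.

public final class ConnectionNameExample {

    // Builds the "<source>/<relationship>/<destination>" default name used when a connection
    // has no explicit name; the relationship segment is left empty when none is selected.
    static String defaultConnectionName(final String sourceName, final String firstRelationship, final String destinationName) {
        return sourceName + "/" + (firstRelationship == null ? "" : firstRelationship) + "/" + destinationName;
    }

    public static void main(final String[] args) {
        // Prints "GenerateFlowFile/success/UpdateAttribute"
        System.out.println(defaultConnectionName("GenerateFlowFile", "success", "UpdateAttribute"));
    }
}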