Use of org.apache.nifi.web.api.dto.ConnectionDTO in project kylo by Teradata.
The class CreateFeedBuilder, method connectFeedToReusableTemplatexx.
private void connectFeedToReusableTemplatexx(ProcessGroupDTO feedProcessGroup, ProcessGroupDTO categoryProcessGroup) throws NifiComponentNotFoundException {
    Stopwatch stopwatch = Stopwatch.createStarted();
    String categoryProcessGroupId = categoryProcessGroup.getId();
    String categoryParentGroupId = categoryProcessGroup.getParentGroupId();
    String categoryProcessGroupName = categoryProcessGroup.getName();
    String feedProcessGroupId = feedProcessGroup.getId();
    String feedProcessGroupName = feedProcessGroup.getName();
    ProcessGroupDTO reusableTemplateCategory = niFiObjectCache.getReusableTemplateCategoryProcessGroup();
    if (reusableTemplateCategory == null) {
        throw new NifiClientRuntimeException(
            "Unable to find the Reusable Template Group. Please ensure NiFi has the 'reusable_templates' processgroup and appropriate reusable flow for this feed."
            + " You may need to import the base reusable template for this feed.");
    }
    String reusableTemplateCategoryGroupId = reusableTemplateCategory.getId();
    stopwatch.stop();
    log.debug("Time to get reusableTemplateCategory: {} ", stopwatch.elapsed(TimeUnit.MILLISECONDS));
    stopwatch.reset();
    Stopwatch totalStopWatch = Stopwatch.createUnstarted();
    for (InputOutputPort port : inputOutputPorts) {
        totalStopWatch.start();
        stopwatch.start();
        PortDTO reusableTemplatePort = niFiObjectCache.getReusableTemplateInputPort(port.getInputPortName());
        stopwatch.stop();
        log.debug("Time to get reusableTemplate inputPort {} : {} ", port.getInputPortName(), stopwatch.elapsed(TimeUnit.MILLISECONDS));
        stopwatch.reset();
        if (reusableTemplatePort != null) {
            String categoryOutputPortName = categoryProcessGroupName + " to " + port.getInputPortName();
            stopwatch.start();
            PortDTO categoryOutputPort = niFiObjectCache.getCategoryOutputPort(categoryProcessGroupId, categoryOutputPortName);
            stopwatch.stop();
            log.debug("Time to get categoryOutputPort {} : {} ", categoryOutputPortName, stopwatch.elapsed(TimeUnit.MILLISECONDS));
            stopwatch.reset();
            if (categoryOutputPort == null) {
                stopwatch.start();
                // create it
                PortDTO portDTO = new PortDTO();
                portDTO.setParentGroupId(categoryProcessGroupId);
                portDTO.setName(categoryOutputPortName);
                categoryOutputPort = restClient.getNiFiRestClient().processGroups().createOutputPort(categoryProcessGroupId, portDTO);
                niFiObjectCache.addCategoryOutputPort(categoryProcessGroupId, categoryOutputPort);
                stopwatch.stop();
                log.debug("Time to create categoryOutputPort {} : {} ", categoryOutputPortName, stopwatch.elapsed(TimeUnit.MILLISECONDS));
                stopwatch.reset();
            }
            stopwatch.start();
            Set<PortDTO> feedOutputPorts = feedProcessGroup.getContents().getOutputPorts();
            String feedOutputPortName = port.getOutputPortName();
            if (feedOutputPorts == null || feedOutputPorts.isEmpty()) {
                feedOutputPorts = restClient.getNiFiRestClient().processGroups().getOutputPorts(feedProcessGroup.getId());
            }
            PortDTO feedOutputPort = NifiConnectionUtil.findPortMatchingName(feedOutputPorts, feedOutputPortName);
            stopwatch.stop();
            log.debug("Time to create feedOutputPort {} : {} ", feedOutputPortName, stopwatch.elapsed(TimeUnit.MILLISECONDS));
            stopwatch.reset();
            if (feedOutputPort != null) {
                stopwatch.start();
                // make the connection on the category from feed to category
                ConnectionDTO feedOutputToCategoryOutputConnection = niFiObjectCache.getConnection(categoryProcessGroupId, feedOutputPort.getId(), categoryOutputPort.getId());
                stopwatch.stop();
                log.debug("Time to get feedOutputToCategoryOutputConnection: {} ", stopwatch.elapsed(TimeUnit.MILLISECONDS));
                stopwatch.reset();
                if (feedOutputToCategoryOutputConnection == null) {
                    stopwatch.start();
                    // CONNECT FEED OUTPUT PORT TO THE Category output port
                    ConnectableDTO source = new ConnectableDTO();
                    source.setGroupId(feedProcessGroupId);
                    source.setId(feedOutputPort.getId());
                    source.setName(feedProcessGroupName);
                    source.setType(NifiConstants.NIFI_PORT_TYPE.OUTPUT_PORT.name());
                    ConnectableDTO dest = new ConnectableDTO();
                    dest.setGroupId(categoryProcessGroupId);
                    dest.setName(categoryOutputPort.getName());
                    dest.setId(categoryOutputPort.getId());
                    dest.setType(NifiConstants.NIFI_PORT_TYPE.OUTPUT_PORT.name());
                    feedOutputToCategoryOutputConnection = restClient.createConnection(categoryProcessGroupId, source, dest);
                    niFiObjectCache.addConnection(categoryProcessGroupId, feedOutputToCategoryOutputConnection);
                    nifiFlowCache.addConnectionToCache(feedOutputToCategoryOutputConnection);
                    stopwatch.stop();
                    log.debug("Time to create feedOutputToCategoryOutputConnection: {} ", stopwatch.elapsed(TimeUnit.MILLISECONDS));
                    stopwatch.reset();
                }
                stopwatch.start();
                // connection made on parent (root) to reusable template
                ConnectionDTO categoryToReusableTemplateConnection = niFiObjectCache.getConnection(categoryProcessGroup.getParentGroupId(), categoryOutputPort.getId(), reusableTemplatePort.getId());
                stopwatch.stop();
                log.debug("Time to get categoryToReusableTemplateConnection: {} ", stopwatch.elapsed(TimeUnit.MILLISECONDS));
                stopwatch.reset();
                // Now connect the category ProcessGroup to the global template
                if (categoryToReusableTemplateConnection == null) {
                    stopwatch.start();
                    ConnectableDTO categorySource = new ConnectableDTO();
                    categorySource.setGroupId(categoryProcessGroupId);
                    categorySource.setId(categoryOutputPort.getId());
                    categorySource.setName(categoryOutputPortName);
                    categorySource.setType(NifiConstants.NIFI_PORT_TYPE.OUTPUT_PORT.name());
                    ConnectableDTO categoryToGlobalTemplate = new ConnectableDTO();
                    categoryToGlobalTemplate.setGroupId(reusableTemplateCategoryGroupId);
                    categoryToGlobalTemplate.setId(reusableTemplatePort.getId());
                    categoryToGlobalTemplate.setName(reusableTemplatePort.getName());
                    categoryToGlobalTemplate.setType(NifiConstants.NIFI_PORT_TYPE.INPUT_PORT.name());
                    categoryToReusableTemplateConnection = restClient.createConnection(categoryParentGroupId, categorySource, categoryToGlobalTemplate);
                    niFiObjectCache.addConnection(categoryParentGroupId, categoryToReusableTemplateConnection);
                    nifiFlowCache.addConnectionToCache(categoryToReusableTemplateConnection);
                    stopwatch.stop();
                    log.debug("Time to create categoryToReusableTemplateConnection: {} ", stopwatch.elapsed(TimeUnit.MILLISECONDS));
                    stopwatch.reset();
                }
            }
        }
        totalStopWatch.stop();
        log.debug("Time to connect feed to {} port. ElapsedTime: {} ", port.getInputPortName(), totalStopWatch.elapsed(TimeUnit.MILLISECONDS));
        totalStopWatch.reset();
    }
}
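The method above ultimately repeats one pattern twice: build a ConnectableDTO for each endpoint, then ask the REST client to create the ConnectionDTO inside the parent group. Below is a minimal sketch of that payload assembly using only the NiFi DTO setters seen above; the ids, names, and port-type strings are hypothetical placeholders, and the REST call itself is omitted.

import org.apache.nifi.web.api.dto.ConnectableDTO;
import org.apache.nifi.web.api.dto.ConnectionDTO;

public class PortConnectionSketch {

    // Assemble the source/destination pair the way connectFeedToReusableTemplatexx does
    // before handing them to restClient.createConnection(parentGroupId, source, dest).
    // Every literal below is a hypothetical placeholder, not a value from Kylo or NiFi.
    public static ConnectionDTO feedOutputToCategoryOutput() {
        ConnectableDTO source = new ConnectableDTO();
        source.setGroupId("feed-process-group-id");       // group that owns the feed output port
        source.setId("feed-output-port-id");
        source.setName("my-feed");
        source.setType("OUTPUT_PORT");

        ConnectableDTO dest = new ConnectableDTO();
        dest.setGroupId("category-process-group-id");     // group that owns the category output port
        dest.setId("category-output-port-id");
        dest.setName("my-category to some-input-port");
        dest.setType("OUTPUT_PORT");

        // Locally assembled DTO showing the shape of the connection payload;
        // in the builder this pair is passed to the NiFi REST client instead.
        ConnectionDTO connection = new ConnectionDTO();
        connection.setSource(source);
        connection.setDestination(dest);
        return connection;
    }
}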
Use of org.apache.nifi.web.api.dto.ConnectionDTO in project kylo by Teradata.
The class ImportReusableTemplate, method removeConnectionsAndInputs.
private boolean removeConnectionsAndInputs() {
    Optional<TemplateRemoteInputPortConnections> existingRemoteProcessInputPortInformation = getExistingRemoteProcessInputPortInformation();
    if (existingRemoteProcessInputPortInformation.isPresent()) {
        // find the input ports that are 'existing' but not 'selected'. These should be deleted
        Set<String> inputPortNamesToRemove = remoteProcessGroupInputPortMap.values().stream()
            .filter((remoteInputPort -> !remoteInputPort.isSelected()
                                        && existingRemoteProcessInputPortInformation.get().getExistingRemoteInputPortNames().contains(remoteInputPort.getInputPortName())))
            .map(remoteInputPort -> remoteInputPort.getInputPortName())
            .collect(Collectors.toSet());
        // Find the connections that match the input ports that are to be removed
        Set<ConnectionDTO> connectionsToRemove = existingRemoteProcessInputPortInformation.get().getExistingRemoteConnectionsToTemplate().stream()
            .filter(connectionDTO -> inputPortNamesToRemove.contains(connectionDTO.getSource().getName()))
            .collect(Collectors.toSet());
        log.info("Removing input ports {}", inputPortNamesToRemove);
        Set<ConnectionDTO> connectionsWithQueue = new HashSet<>();
        // first validate the queues are empty ... if not, warn the user that the following ports can't be deleted and rollback
        connectionsToRemove.stream().forEach(connection -> {
            Optional<ConnectionStatusEntity> connectionStatus = nifiRestClient.getNiFiRestClient().connections().getConnectionStatus(connection.getId());
            if (connectionStatus.isPresent() && connectionStatus.get().getConnectionStatus().getAggregateSnapshot().getFlowFilesQueued() > 0) {
                connectionsWithQueue.add(connection);
            }
        });
        if (!connectionsWithQueue.isEmpty()) {
            UploadProgressMessage importStatusMessage =
                uploadProgressService.addUploadStatus(importTemplateOptions.getUploadKey(),
                                                      "Unable to remove inputPort and connection for :"
                                                      + connectionsWithQueue.stream().map(c -> c.getSource().getName()).collect(Collectors.joining(","))
                                                      + ". The Queues are not empty. Failed to import template: " + importTemplate.getTemplateName(),
                                                      true, false);
            importTemplate.setValid(false);
            importTemplate.setSuccess(false);
            return false;
        } else {
            connectionsToRemove.stream().forEach(connection -> {
                nifiRestClient.deleteConnection(connection, false);
                getItemsCreated().addDeletedRemoteInputPortConnection(connection);
                try {
                    PortDTO deletedPort = nifiRestClient.getNiFiRestClient().ports().deleteInputPort(connection.getSource().getId());
                    if (deletedPort != null) {
                        getItemsCreated().addDeletedRemoteInputPort(deletedPort);
                    }
                } catch (NifiComponentNotFoundException e) {
                    // this is ok to catch as it's deleted already
                }
            });
            UploadProgressMessage importStatusMessage =
                uploadProgressService.addUploadStatus(importTemplateOptions.getUploadKey(),
                                                      "Removed inputPort and connection for :"
                                                      + connectionsToRemove.stream().map(c -> c.getSource().getName()).collect(Collectors.joining(","))
                                                      + " for template: " + importTemplate.getTemplateName(),
                                                      true, true);
        }
        return true;
    }
    return true;
}
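The core of the cleanup above is a name-based filter: connections whose source port matches one of the deselected input ports become removal candidates, and only those with empty queues are deleted. Below is a minimal sketch of that filter, assuming the caller supplies the existing connections and the port names; the queue check and deletion stay with the project's REST client as shown above.

import java.util.Set;
import java.util.stream.Collectors;
import org.apache.nifi.web.api.dto.ConnectionDTO;

public class RemoteInputPortCleanupSketch {

    // Keep only the connections whose source port name is in the removal set --
    // the same selection removeConnectionsAndInputs makes before checking queues.
    // Both arguments are assumed to be provided by the caller.
    static Set<ConnectionDTO> connectionsForPorts(Set<ConnectionDTO> existingConnections, Set<String> portNamesToRemove) {
        return existingConnections.stream()
            .filter(connection -> connection.getSource() != null
                                  && portNamesToRemove.contains(connection.getSource().getName()))
            .collect(Collectors.toSet());
    }
}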
Use of org.apache.nifi.web.api.dto.ConnectionDTO in project nifi-minifi by apache.
The class FlowSnippetDTOEnricher, method enrich.
public void enrich(FlowSnippetDTO flowSnippetDTO, final String encodingVersion) {
    List<FlowSnippetDTO> allFlowSnippets = getAllFlowSnippets(flowSnippetDTO);
    Set<RemoteProcessGroupDTO> remoteProcessGroups = getAll(allFlowSnippets, FlowSnippetDTO::getRemoteProcessGroups).collect(Collectors.toSet());
    Map<String, String> connectableNameMap = getAll(allFlowSnippets, FlowSnippetDTO::getProcessors).collect(Collectors.toMap(ComponentDTO::getId, ProcessorDTO::getName));
    Map<String, String> rpgIdToTargetIdMap = new HashMap<>();
    for (RemoteProcessGroupDTO remoteProcessGroupDTO : remoteProcessGroups) {
        final RemoteProcessGroupContentsDTO contents = remoteProcessGroupDTO.getContents();
        final Set<RemoteProcessGroupPortDTO> rpgInputPortDtos = nullToEmpty(contents.getInputPorts());
        final Set<RemoteProcessGroupPortDTO> rpgOutputPortDtos = nullToEmpty(contents.getOutputPorts());
        switch (encodingVersion) {
            case "1.2":
                // Map all port DTOs to their respective targetIds
                rpgIdToTargetIdMap.putAll(Stream.concat(rpgInputPortDtos.stream(), rpgOutputPortDtos.stream())
                                                .collect(Collectors.toMap(RemoteProcessGroupPortDTO::getId, RemoteProcessGroupPortDTO::getTargetId)));
                break;
            default:
                break;
        }
        addConnectables(connectableNameMap, rpgInputPortDtos, RemoteProcessGroupPortDTO::getId, RemoteProcessGroupPortDTO::getId);
        addConnectables(connectableNameMap, rpgOutputPortDtos, RemoteProcessGroupPortDTO::getId, RemoteProcessGroupPortDTO::getId);
    }
    addConnectables(connectableNameMap, getAll(allFlowSnippets, FlowSnippetDTO::getInputPorts).collect(Collectors.toList()), PortDTO::getId, PortDTO::getName);
    addConnectables(connectableNameMap, getAll(allFlowSnippets, FlowSnippetDTO::getOutputPorts).collect(Collectors.toList()), PortDTO::getId, PortDTO::getName);
    final Set<ConnectionDTO> connections = getAll(allFlowSnippets, FlowSnippetDTO::getConnections).collect(Collectors.toSet());
    // Enrich connection endpoints using known names and overriding with targetIds for remote ports
    for (ConnectionDTO connection : connections) {
        setName(connectableNameMap, connection.getSource(), rpgIdToTargetIdMap);
        setName(connectableNameMap, connection.getDestination(), rpgIdToTargetIdMap);
    }
    // Override any ids that are for Remote Ports to use their target Ids where available
    connections.stream()
               .flatMap(connectionDTO -> Stream.of(connectionDTO.getSource(), connectionDTO.getDestination()))
               .filter(connectable -> connectable.getType().equals(ConnectableType.REMOTE_OUTPUT_PORT.toString())
                                      || connectable.getType().equals(ConnectableType.REMOTE_INPUT_PORT.toString()))
               .forEach(connectable -> connectable.setId(Optional.ofNullable(rpgIdToTargetIdMap.get(connectable.getId())).orElse(connectable.getId())));
    // Establish unique names for connections
    for (ConnectionDTO connection : connections) {
        if (StringUtil.isNullOrEmpty(connection.getName())) {
            StringBuilder name = new StringBuilder();
            ConnectableDTO connectionSource = connection.getSource();
            name.append(determineValueForConnectable(connectionSource, rpgIdToTargetIdMap));
            name.append("/");
            if (connection.getSelectedRelationships() != null && connection.getSelectedRelationships().size() > 0) {
                name.append(connection.getSelectedRelationships().iterator().next());
            }
            name.append("/");
            ConnectableDTO connectionDestination = connection.getDestination();
            name.append(determineValueForConnectable(connectionDestination, rpgIdToTargetIdMap));
            connection.setName(name.toString());
        }
    }
    nullToEmpty(flowSnippetDTO.getProcessGroups()).stream().map(ProcessGroupDTO::getContents).forEach(snippetDTO -> enrich(snippetDTO, encodingVersion));
}
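The last loop above gives unnamed connections a "source/relationship/destination" name. Below is a condensed sketch of that convention which assumes the connectable names have already been enriched; the id-to-targetId remapping handled by determineValueForConnectable is skipped here, and the helper itself is hypothetical.

import org.apache.nifi.web.api.dto.ConnectableDTO;
import org.apache.nifi.web.api.dto.ConnectionDTO;

public class ConnectionNamingSketch {

    // Build the "source/relationship/destination" name the enricher assigns when
    // a connection has no name, reading the connectable names directly.
    static String defaultName(ConnectionDTO connection) {
        ConnectableDTO source = connection.getSource();
        ConnectableDTO destination = connection.getDestination();
        String relationship = connection.getSelectedRelationships() != null && !connection.getSelectedRelationships().isEmpty()
                              ? connection.getSelectedRelationships().iterator().next()
                              : "";
        return source.getName() + "/" + relationship + "/" + destination.getName();
    }
}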
Use of org.apache.nifi.web.api.dto.ConnectionDTO in project kylo by Teradata.
The class NiFiFlowConnectionConverter, method toConnection.
public static ConnectionDTO toConnection(NifiFlowConnection nifiFlowConnection) {
    ConnectionDTO connectionDTO = new ConnectionDTO();
    ConnectableDTO source = new ConnectableDTO();
    source.setId(nifiFlowConnection.getSourceIdentifier());
    connectionDTO.setSource(source);
    ConnectableDTO dest = new ConnectableDTO();
    dest.setId(nifiFlowConnection.getDestinationIdentifier());
    connectionDTO.setDestination(dest);
    connectionDTO.setId(nifiFlowConnection.getConnectionIdentifier());
    connectionDTO.setName(nifiFlowConnection.getName());
    return connectionDTO;
}
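The converter is a plain field mapping, so reading a ConnectionDTO back into its four identifiers is the mirror image. Below is a small sketch of that reverse read using only the getters that correspond to the setters above; the describe helper and its output format are assumptions for illustration, not Kylo API.

import org.apache.nifi.web.api.dto.ConnectionDTO;

public class ConnectionDescribeSketch {

    // Read back the fields toConnection populates: connection id, name, and the
    // ids of the source/destination connectables. Null checks guard bare DTOs.
    static String describe(ConnectionDTO connectionDTO) {
        String sourceId = connectionDTO.getSource() != null ? connectionDTO.getSource().getId() : null;
        String destinationId = connectionDTO.getDestination() != null ? connectionDTO.getDestination().getId() : null;
        return connectionDTO.getId() + " (" + connectionDTO.getName() + "): " + sourceId + " -> " + destinationId;
    }
}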
Use of org.apache.nifi.web.api.dto.ConnectionDTO in project kylo by Teradata.
The class FeedManagerMetadataService, method deleteFeed.
@Override
public void deleteFeed(@Nonnull final String feedId) {
    // First check if this should be allowed.
    accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ADMIN_FEEDS);
    feedProvider.checkFeedPermission(feedId, FeedAccessControl.DELETE);
    // Step 1: Fetch feed metadata
    final FeedMetadata feed = feedProvider.getFeedById(feedId);
    if (feed == null) {
        throw new IllegalArgumentException("Unknown feed: " + feedId);
    }
    // Step 2: Check category permissions
    categoryProvider.checkCategoryPermission(feed.getCategoryId(), CategoryAccessControl.CREATE_FEED);
    // Step 3: Check for dependent feeds
    if (feed.getUsedByFeeds() != null && !feed.getUsedByFeeds().isEmpty()) {
        final List<String> systemNames = feed.getUsedByFeeds().stream().map(FeedSummary::getCategoryAndFeedSystemName).collect(Collectors.toList());
        throw new IllegalStateException("Feed is referenced by " + feed.getUsedByFeeds().size() + " other feeds: " + systemNames);
    }
    // check SLAs
    metadataAccess.read(() -> {
        boolean hasSlas = serviceLevelAgreementService.hasServiceLevelAgreements(feedProvider.resolveFeed(feedId));
        if (hasSlas) {
            log.error("Unable to delete " + feed.getCategoryAndFeedDisplayName() + ". 1 or more SLAs exist for this feed. ");
            throw new IllegalStateException("Unable to delete the feed. 1 or more Service Level agreements exist for this feed "
                                            + feed.getCategoryAndFeedDisplayName()
                                            + ". Please delete the SLA's, or remove the feed from the SLA's and try again.");
        }
    }, MetadataAccess.SERVICE);
    // Step 4: Delete hadoop authorization security policies if they exist
    if (hadoopAuthorizationService != null) {
        metadataAccess.read(() -> {
            Feed domainFeed = feedModelTransform.feedToDomain(feed);
            String hdfsPaths = (String) domainFeed.getProperties().get(HadoopAuthorizationService.REGISTRATION_HDFS_FOLDERS);
            hadoopAuthorizationService.deleteHivePolicy(feed.getSystemCategoryName(), feed.getSystemFeedName());
            hadoopAuthorizationService.deleteHdfsPolicy(feed.getSystemCategoryName(), feed.getSystemFeedName(),
                                                        HadoopAuthorizationService.convertNewlineDelimetedTextToList(hdfsPaths));
        });
    }
    // Step 5: Enable NiFi cleanup flow
    boolean hasCleanupFlow = false;
    final ProcessGroupDTO feedProcessGroup;
    final ProcessGroupDTO categoryProcessGroup = nifiRestClient.getProcessGroupByName("root", feed.getSystemCategoryName(), false, true);
    if (categoryProcessGroup != null) {
        feedProcessGroup = NifiProcessUtil.findFirstProcessGroupByName(categoryProcessGroup.getContents().getProcessGroups(), feed.getSystemFeedName());
        if (feedProcessGroup != null) {
            hasCleanupFlow = nifiRestClient.setInputAsRunningByProcessorMatchingType(feedProcessGroup.getId(), "com.thinkbiganalytics.nifi.v2.metadata.TriggerCleanup");
        }
    }
    if (hasCleanupFlow) {
        // Wait for input processor to start
        try {
            Thread.sleep(cleanupDelay);
        } catch (InterruptedException e) {
            // ignored
        }
    }
    // Step 6: Signal the cleanup event.
    notifyFeedCleanup(feed);
    // Step 7: Optionally wait for the cleanup job to finish if this feed has a cleanup flow.
    if (hasCleanupFlow) {
        waitForFeedCleanup(feed);
    }
    // Step 8: Remove feed from NiFi
    if (categoryProcessGroup != null) {
        final Set<ConnectionDTO> connections = categoryProcessGroup.getContents().getConnections();
        for (ProcessGroupDTO processGroup : NifiProcessUtil.findProcessGroupsByFeedName(categoryProcessGroup.getContents().getProcessGroups(), feed.getSystemFeedName())) {
            nifiRestClient.deleteProcessGroupAndConnections(processGroup, connections);
        }
        // disable any ports that are not connected to anything
        nifiRestClient.disableDisconnectedPorts(categoryProcessGroup);
    }
    // Step 9: Delete database entries
    feedProvider.deleteFeed(feedId);
}
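In Step 8 the category's full connection set is handed to deleteProcessGroupAndConnections so the feed group's wiring can be removed along with the group itself. Below is a sketch of how the connections touching one feed process group could be isolated from that set via the ConnectableDTO group ids; the helper and its arguments are illustrative assumptions, not part of the Kylo client.

import java.util.Set;
import java.util.stream.Collectors;
import org.apache.nifi.web.api.dto.ConnectionDTO;

public class FeedConnectionFilterSketch {

    // Select the connections whose source or destination lives in the given feed
    // process group; categoryConnections and feedProcessGroupId are assumed inputs.
    static Set<ConnectionDTO> connectionsTouchingGroup(Set<ConnectionDTO> categoryConnections, String feedProcessGroupId) {
        return categoryConnections.stream()
            .filter(connection -> (connection.getSource() != null && feedProcessGroupId.equals(connection.getSource().getGroupId()))
                                  || (connection.getDestination() != null && feedProcessGroupId.equals(connection.getDestination().getGroupId())))
            .collect(Collectors.toSet());
    }
}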