Use of org.apache.nifi.web.api.dto.ProcessorDTO in project kylo by Teradata.
Class CreateFeedBuilder, method build:
/**
 * Build the NiFi flow instance
 *
 * @return an object indicating if the feed flow was successfully built or not
 */
public NifiProcessGroup build() throws FeedCreationException {
    try {
        log.info("Creating the feed {}.{} ", category, feedName);
        newProcessGroup = null;
        Stopwatch totalTime = Stopwatch.createStarted();
        Stopwatch eventTime = Stopwatch.createStarted();
        TemplateDTO template = getTemplate();
        if (template != null) {
            log.debug("Time to get Template {}. ElapsedTime: {} ms", template.getName(), eventTime(eventTime));
            // create the encompassing process group
            eventTime.start();
            ProcessGroupDTO feedProcessGroup = createProcessGroupForFeed();
            log.debug("Time to create process group. ElapsedTime: {} ms", eventTime(eventTime));
            if (feedProcessGroup != null) {
                String processGroupId = feedProcessGroup.getId();
                // snapshot the existing controller services
                eventTime.start();
                templateCreationHelper.snapshotControllerServiceReferences();
                log.debug("Time to snapshotControllerServices. ElapsedTime: {} ms", eventTime(eventTime));
                // create the flow from the template
                eventTime.start();
                TemplateInstance instance = templateCreationHelper.instantiateFlowFromTemplate(processGroupId, templateId);
                FlowSnippetDTO feedInstance = instance.getFlowSnippetDTO();
                feedProcessGroup.setContents(feedInstance);
                log.debug("Time to instantiateFlowFromTemplate. ElapsedTime: {} ms", eventTime(eventTime));
                eventTime.start();
                String feedCategoryId = feedProcessGroup.getParentGroupId();
                ProcessGroupDTO categoryGroup = this.categoryGroup;
                if (categoryGroup == null) {
                    categoryGroup = this.categoryGroup = restClient.getProcessGroup(feedCategoryId, false, false);
                }
                // update the group with this template?
                updatePortConnectionsForProcessGroup(feedProcessGroup, categoryGroup);
                log.debug("Time to updatePortConnectionsForProcessGroup. ElapsedTime: {} ms", eventTime(eventTime));
                eventTime.start();
                // mark the new services that were created as a result of creating the new flow from the template
                templateCreationHelper.identifyNewlyCreatedControllerServiceReferences(instance);
                log.debug("Time to identifyNewlyCreatedControllerServiceReferences. ElapsedTime: {} ms", eventTime(eventTime));
                eventTime.start();
                // match the properties incoming to the defined properties
                updateProcessGroupProperties(processGroupId, feedProcessGroup.getName());
                log.debug("Time to updateProcessGroupProperties. ElapsedTime: {} ms", eventTime(eventTime));
                eventTime.start();
                // Fetch the Feed Group now that it has the flow in it
                ProcessGroupDTO entity = restClient.getProcessGroup(processGroupId, true, true);
                log.debug("Time to getProcessGroup. ElapsedTime: {} ms", eventTime(eventTime));
                eventTime.start();
                ProcessorDTO input = fetchInputProcessorForProcessGroup(entity);
                ProcessorDTO cleanupProcessor = NifiProcessUtil.findFirstProcessorsByType(NifiProcessUtil.getInputProcessors(entity), "com.thinkbiganalytics.nifi.v2.metadata.TriggerCleanup");
                List<ProcessorDTO> nonInputProcessors = NifiProcessUtil.getNonInputProcessors(entity);
                log.debug("Time to fetchInputProcessorForProcessGroup. ElapsedTime: {} ms", eventTime(eventTime));
                eventTime.start();
                List<NifiProperty> updatedControllerServiceProperties = new ArrayList<>();
                // update any references to the controller services and try to assign the value to an enabled service if it is not already
                if (input != null) {
                    updatedControllerServiceProperties.addAll(templateCreationHelper.updateControllerServiceReferences(Lists.newArrayList(input), instance));
                }
                if (cleanupProcessor != null) {
                    updatedControllerServiceProperties.addAll(templateCreationHelper.updateControllerServiceReferences(Collections.singletonList(cleanupProcessor), instance));
                }
                updatedControllerServiceProperties.addAll(templateCreationHelper.updateControllerServiceReferences(nonInputProcessors, instance));
                log.debug("Time to updatedControllerServiceProperties. ElapsedTime: {} ms", eventTime(eventTime));
                eventTime.start();
                // refetch processors for updated errors
                entity = restClient.getProcessGroup(processGroupId, true, true);
                input = fetchInputProcessorForProcessGroup(entity);
                nonInputProcessors = NifiProcessUtil.getNonInputProcessors(entity);
                newProcessGroup = new NifiProcessGroup(entity, input, nonInputProcessors);
                log.debug("Time to re-fetchInputProcessorForProcessGroup. ElapsedTime: {} ms", eventTime(eventTime));
                // Validate and if invalid Delete the process group
                if (newProcessGroup.hasFatalErrors()) {
                    eventTime.start();
                    removeProcessGroup(entity);
                    // cleanupControllerServices();
                    newProcessGroup.setSuccess(false);
                    log.debug("Time to removeProcessGroup. Errors found. ElapsedTime: {} ms", eventTime(eventTime));
                } else {
                    eventTime.start();
                    // update the input schedule
                    updateFeedSchedule(newProcessGroup, input);
                    log.debug("Time to update feed schedule. ElapsedTime: {} ms", eventTime(eventTime));
                    eventTime.start();
                    // just need to update for this processgroup
                    Collection<ProcessorDTO> processors = NifiProcessUtil.getProcessors(entity);
                    Collection<ConnectionDTO> connections = NifiConnectionUtil.getAllConnections(entity);
                    nifiFlowCache.updateFlowForFeed(feedMetadata, entity.getId(), processors, connections);
                    log.debug("Time to build flow graph with {} processors and {} connections. ElapsedTime: {} ms", processors.size(), connections.size(), eventTime(eventTime));
                    /*
                    //Cache the processorIds to the respective flowIds for availability in the ProvenanceReportingTask
                    NifiVisitableProcessGroup group = nifiFlowCache.getFlowOrder(newProcessGroup.getProcessGroupEntity(), true);
                    log.debug("Time to get the flow order. ElapsedTime: {} ms", eventTime(eventTime));
                    eventTime.start();
                    NifiFlowProcessGroup flow = new NifiFlowBuilder().build(group);
                    log.debug("Time to build flow graph with {} processors. ElapsedTime: {} ms", flow.getProcessorMap().size(), eventTime(eventTime));
                    eventTime.start();
                    nifiFlowCache.updateFlow(feedMetadata, flow);
                    log.debug("Time to update NiFiFlowCache with {} processors. ElapsedTime: {} ms", flow.getProcessorMap().size(), eventTime(eventTime));
                    */
                    eventTime.start();
                    // disable all inputs
                    restClient.disableInputProcessors(newProcessGroup.getProcessGroupEntity().getId());
                    log.debug("Time to disableInputProcessors. ElapsedTime: {} ms", eventTime(eventTime));
                    eventTime.start();
                    // mark everything else as running
                    templateCreationHelper.markProcessorsAsRunning(newProcessGroup);
                    log.debug("Time to markNonInputsAsRunning. ElapsedTime: {} ms", eventTime(eventTime));
                    // if desired start the input processor
                    if (input != null) {
                        eventTime.start();
                        if (enabled) {
                            markInputAsRunning(newProcessGroup, input);
                            // mark the input/output ports in the category group as running
                            if (hasConnectionPorts()) {
                                templateCreationHelper.markConnectionPortsAsRunning(entity);
                            }
                        } else {
                            // mark the input/output ports in the category group as running
                            if (hasConnectionPorts()) {
                                templateCreationHelper.markConnectionPortsAsRunning(entity);
                            }
                            markInputAsStopped(newProcessGroup, input);
                        }
                        log.debug("Time to mark input as {}. ElapsedTime: {} ms", (enabled ? "Running" : "Stopped"), eventTime(eventTime));
                    }
                    if (newProcessGroup.hasFatalErrors()) {
                        eventTime.start();
                        rollback();
                        newProcessGroup.setRolledBack(true);
                        // cleanupControllerServices();
                        newProcessGroup.setSuccess(false);
                        log.debug("Time to rollback on Fatal Errors. ElapsedTime: {} ms", eventTime(eventTime));
                    }
                    List<NifiError> templateCreationErrors = templateCreationHelper.getErrors();
                    if (templateCreationErrors != null) {
                        errors.addAll(templateCreationErrors);
                    }
                    // add any global errors to the object
                    if (errors != null && !errors.isEmpty()) {
                        for (NifiError error : errors) {
                            newProcessGroup.addError(error);
                            if (error.isFatal()) {
                                newProcessGroup.setSuccess(false);
                                if (!newProcessGroup.isRolledBack()) {
                                    rollback();
                                    newProcessGroup.setRolledBack(true);
                                }
                            }
                        }
                    }
                }
                eventTime.start();
                templateCreationHelper.cleanupControllerServices();
                // fix the feed metadata controller service references
                updateFeedMetadataControllerServiceReferences(updatedControllerServiceProperties);
                log.debug("Time cleanup controller services. ElapsedTime: {} ms", eventTime(eventTime));
                // align items
                if (this.autoAlign) {
                    eventTime.start();
                    log.info("Aligning Feed flows in NiFi ");
                    AlignProcessGroupComponents alignProcessGroupComponents = new AlignProcessGroupComponents(restClient.getNiFiRestClient(), entity.getParentGroupId());
                    alignProcessGroupComponents.autoLayout();
                    // fetch the parent to get that id to align
                    if (newCategory) {
                        log.info("This is the first feed created in the category {}. Aligning the categories. ", feedMetadata.getCategory().getSystemName());
                        new AlignProcessGroupComponents(restClient.getNiFiRestClient(), this.categoryGroup.getParentGroupId()).autoLayout();
                    }
                    log.info("Time align feed process groups. ElapsedTime: {} ms", eventTime(eventTime));
                } else {
                    log.info("Skipping auto alignment in NiFi. You can always manually align this category and all of its feeds by using the rest api: /v1/feedmgr/nifi/auto-align/{}", entity.getParentGroupId());
                    if (newCategory) {
                        log.info("To re align the categories: /v1/feedmgr/nifi/auto-align/{}", this.categoryGroup.getParentGroupId());
                    }
                }
            }
        } else {
            log.error("Unable to create/save the feed {}. Unable to find a template for id {}", feedName, templateId);
            throw new FeedCreationException("Unable to create the feed [" + feedName + "]. Unable to find a template with id " + templateId);
        }
        log.info("Time save Feed flow in NiFi. ElapsedTime: {} ms", eventTime(totalTime));
        return newProcessGroup;
    } catch (NifiClientRuntimeException e) {
        throw new FeedCreationException("Unable to create the feed [" + feedName + "]. " + e.getMessage(), e);
    }
}
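The build() method above times every step through a small eventTime(...) helper that is not included in this snippet. Below is a minimal sketch of what such a helper could look like, assuming Guava's Stopwatch; the name and behavior are inferred from how it is called (stop, report, reset so start() can run again), not copied from the Kylo source.

// Assumed imports for this sketch:
// import com.google.common.base.Stopwatch;
// import java.util.concurrent.TimeUnit;

// Hypothetical helper: stop the stopwatch, report elapsed milliseconds, and reset it
// so the caller can immediately call start() again for the next timed step.
private long eventTime(Stopwatch eventTime) {
    eventTime.stop();
    long elapsedMs = eventTime.elapsed(TimeUnit.MILLISECONDS);
    eventTime.reset();
    return elapsedMs;
}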
Use of org.apache.nifi.web.api.dto.ProcessorDTO in project kylo by Teradata.
Class NifiFlowCacheImpl, method applyClusterUpdates:
/**
 * if Kylo is clustered it needs to sync any updates from the other Kylo instances before proceeding
 */
public synchronized void applyClusterUpdates() {
    List<NifiFlowCacheClusterUpdateMessage> updates = nifiFlowCacheClusterManager.findUpdates();
    Set<String> templateUpdates = new HashSet<>();
    boolean needsUpdates = !updates.isEmpty();
    if (needsUpdates) {
        log.info("Kylo Cluster Update: Detected changes. About to apply {} updates ", updates.size());
    }
    updates.stream().forEach(update -> {
        switch (update.getType()) {
            case FEED:
                NifiFlowCacheFeedUpdate feedUpdate = nifiFlowCacheClusterManager.getFeedUpdate(update.getMessage());
                log.info("Kylo Cluster Update: Applying Feed Change update for {}", feedUpdate.getFeedName());
                updateFlow(feedUpdate);
                break;
            case FEED2:
                NifiFlowCacheFeedUpdate2 feedUpdate2 = nifiFlowCacheClusterManager.getFeedUpdate2(update.getMessage());
                log.info("Kylo Cluster Update: Applying Feed Change update for {}", feedUpdate2.getFeedName());
                updateFlow(feedUpdate2);
                break;
            case CONNECTION:
                Collection<ConnectionDTO> connectionDTOS = nifiFlowCacheClusterManager.getConnectionsUpdate(update.getMessage());
                log.info("Kylo Cluster Update: Applying Connection list update");
                updateConnectionMap(connectionDTOS, false);
                if (connectionDTOS != null) {
                    connectionDTOS.stream().forEach(c -> {
                        niFiObjectCache.addConnection(c.getParentGroupId(), c);
                    });
                }
                break;
            case PROCESSOR:
                Collection<ProcessorDTO> processorDTOS = nifiFlowCacheClusterManager.getProcessorsUpdate(update.getMessage());
                log.info("Kylo Cluster Update: Applying Processor list update");
                updateProcessorIdNames(processorDTOS, false);
                break;
            case TEMPLATE:
                if (!templateUpdates.contains(update.getMessage())) {
                    RegisteredTemplate template = nifiFlowCacheClusterManager.getTemplate(update.getMessage());
                    log.info("Kylo Cluster Update: Applying Template update for {} ", template.getTemplateName());
                    updateRegisteredTemplate(template, false);
                    templateUpdates.add(update.getMessage());
                }
                break;
            default:
                break;
        }
    });
    if (needsUpdates) {
        nifiFlowCacheClusterManager.appliedUpdates(updates);
        lastUpdated = DateTime.now();
        log.info("Kylo Cluster Update: NiFi Flow File Cache is in sync. All {} updates have been applied to the cache. ", updates.size());
    }
}
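For context, a clustered caller would be expected to apply pending peer updates before trusting a cache read. The sketch below only illustrates that calling pattern; the syncIfClustered() wrapper and the isClustered() guard are assumptions for illustration, not the exact Kylo API.

// Illustrative pattern only (hypothetical wrapper): make sure updates from other
// Kylo nodes are applied before callers read from this cache.
public void syncIfClustered() {
    if (nifiFlowCacheClusterManager.isClustered()) {  // assumed guard; the real check may differ
        applyClusterUpdates();                        // method shown above
    }
}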
Use of org.apache.nifi.web.api.dto.ProcessorDTO in project kylo by Teradata.
Class ImportReusableTemplate, method rollbackTemplateImportInNifi:
/**
 * Restore the previous Template back to Nifi
 */
private void rollbackTemplateImportInNifi() {
    UploadProgressMessage rollbackMessage = restoreOldTemplateXml();
    // If we are working with a reusable flow we need to recreate the old one
    if (importTemplate.getTemplateResults() != null && importTemplate.getTemplateResults().isReusableFlowInstance()) {
        UploadProgressMessage progressMessage = uploadProgressService.addUploadStatus(importTemplate.getImportOptions().getUploadKey(), "Attempting to restore old instance for: " + importTemplate.getTemplateName());
        VersionedProcessGroup versionedProcessGroup = null;
        if (importTemplate.getTemplateResults().getVersionedProcessGroup() != null) {
            versionedProcessGroup = importTemplate.getTemplateResults().getVersionedProcessGroup();
        }
        // rename the one we created to a temp name
        ProcessGroupDTO groupDTO = nifiRestClient.getNiFiRestClient().processGroups().findById(importTemplate.getTemplateResults().getProcessGroupEntity().getId(), false, false).orElse(null);
        if (groupDTO != null) {
            String tmpName = groupDTO.getName() + "- " + System.currentTimeMillis();
            groupDTO.setName(tmpName);
            nifiRestClient.getNiFiRestClient().processGroups().update(groupDTO);
            log.info("Rollback Template: {}. Renamed template instance that was just created to a temporary name of {}. This will get deleted later. ", importTemplate.getTemplateName(), tmpName);
        }
        if (versionedProcessGroup != null) {
            progressMessage.update("Rollback Status: Attempting to initialize and verify prior template instance for " + importTemplate.getTemplateName());
            // rename the versioned one back
            ProcessGroupDTO oldProcessGroup = nifiRestClient.getNiFiRestClient().processGroups().findById(versionedProcessGroup.getProcessGroupPriorToVersioning().getId(), true, true).orElse(null);
            if (oldProcessGroup != null) {
                oldProcessGroup.setName(versionedProcessGroup.getProcessGroupName());
                nifiRestClient.getNiFiRestClient().processGroups().update(oldProcessGroup);
                progressMessage.update("Rollback Status: Renamed template process group " + versionedProcessGroup.getVersionedProcessGroupName() + " back to " + versionedProcessGroup.getProcessGroupName());
            }
            // add back in the connections
            List<ConnectionDTO> createdConnections = new ArrayList<>();
            List<ConnectionDTO> connections = versionedProcessGroup.getDeletedInputPortConnections();
            if (connections != null) {
                connections.stream().forEach(connectionDTO -> {
                    createdConnections.add(nifiRestClient.getNiFiRestClient().processGroups().createConnection(connectionDTO.getParentGroupId(), connectionDTO.getSource(), connectionDTO.getDestination()));
                });
                uploadProgressService.addUploadStatus(importTemplate.getImportOptions().getUploadKey(), "Rollback Status: Recreated " + createdConnections.size() + " connections ", true, true);
            }
            List<ProcessorDTO> inputs = versionedProcessGroup.getInputProcessorsPriorToDisabling();
            if (inputs != null) {
                // update the state
                progressMessage.update("Rollback Status: Marking the process group " + versionedProcessGroup.getProcessGroupName() + " as running");
            }
        }
        if (groupDTO != null) {
            progressMessage.update("Rollback Status: Removing invalid template instance process group: " + groupDTO.getName());
            // delete the new one
            try {
                nifiRestClient.removeProcessGroup(groupDTO.getId(), groupDTO.getParentGroupId());
            } catch (Exception e) {
                log.error("Error trying to remove invalid template instance {}", groupDTO.getName(), e);
            }
            Optional<ProcessGroupDTO> deletedGroup = nifiRestClient.getNiFiRestClient().processGroups().findById(groupDTO.getId(), false, false);
            if (deletedGroup.isPresent()) {
                progressMessage.update("Rollback Status: Failure", false);
                rollbackMessage.update("Rollback Unsuccessful!! The invalid group " + deletedGroup.get().getName() + " still exists. You will need to login to NiFi and verify your reusable templates are correct!", false);
            } else {
                String message = "Rollback Status: Success.";
                if (versionedProcessGroup != null) {
                    message += " Restored '" + versionedProcessGroup.getVersionedProcessGroupName() + "' back to '" + importTemplate.getTemplateName() + "'";
                }
                progressMessage.update(message, true);
                rollbackMessage.update("Rollback Successful!", true);
            }
        }
    } else {
        rollbackMessage.update("Rollback Successful!", true);
    }
}
Use of org.apache.nifi.web.api.dto.ProcessorDTO in project nifi by apache.
Class StandardNiFiServiceFacade, method updateProcessor:
@Override
public ProcessorEntity updateProcessor(final Revision revision, final ProcessorDTO processorDTO) {
    // get the component, ensure we have access to it, and perform the update request
    final ProcessorNode processorNode = processorDAO.getProcessor(processorDTO.getId());
    final RevisionUpdate<ProcessorDTO> snapshot = updateComponent(revision, processorNode, () -> processorDAO.updateProcessor(processorDTO), proc -> dtoFactory.createProcessorDto(proc));
    final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processorNode);
    final ProcessorStatusDTO status = dtoFactory.createProcessorStatusDto(controllerFacade.getProcessorStatus(processorNode.getIdentifier()));
    final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(processorNode.getIdentifier()));
    final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
    return entityFactory.createProcessorEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, status, bulletinEntities);
}
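A caller of this facade method supplies the component's current Revision (NiFi's optimistic-locking token) along with a ProcessorDTO that carries the fields to change. The following is a hedged usage sketch; the revision version, client id, processor id, and serviceFacade variable are placeholders, not values from the NiFi source.

// Hedged usage sketch: rename a processor through the facade.
final ProcessorDTO dto = new ProcessorDTO();
dto.setId("5c20ca29-0166-1000-0000-000012345678");   // placeholder processor id
dto.setName("Renamed Processor");                     // field to change

// The version must match the component's current revision or the update is rejected.
final Revision revision = new Revision(3L, "my-client-id", dto.getId());  // placeholder values
final ProcessorEntity updated = serviceFacade.updateProcessor(revision, dto);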
Use of org.apache.nifi.web.api.dto.ProcessorDTO in project nifi by apache.
Class TestFlowController, method testInstantiateSnippetWithProcessor:
@Test
public void testInstantiateSnippetWithProcessor() throws ProcessorInstantiationException {
    final String id = UUID.randomUUID().toString();
    final BundleCoordinate coordinate = systemBundle.getBundleDetails().getCoordinate();
    final ProcessorNode processorNode = controller.createProcessor(DummyProcessor.class.getName(), id, coordinate);
    // create a processor dto
    final ProcessorDTO processorDTO = new ProcessorDTO();
    // use a different id here
    processorDTO.setId(UUID.randomUUID().toString());
    processorDTO.setPosition(new PositionDTO(new Double(0), new Double(0)));
    processorDTO.setStyle(processorNode.getStyle());
    processorDTO.setParentGroupId("1234");
    processorDTO.setInputRequirement(processorNode.getInputRequirement().name());
    processorDTO.setPersistsState(processorNode.getProcessor().getClass().isAnnotationPresent(Stateful.class));
    processorDTO.setRestricted(processorNode.isRestricted());
    processorDTO.setExtensionMissing(processorNode.isExtensionMissing());
    processorDTO.setType(processorNode.getCanonicalClassName());
    processorDTO.setBundle(new BundleDTO(coordinate.getGroup(), coordinate.getId(), coordinate.getVersion()));
    processorDTO.setName(processorNode.getName());
    processorDTO.setState(processorNode.getScheduledState().toString());
    processorDTO.setRelationships(new ArrayList<>());
    processorDTO.setDescription("description");
    processorDTO.setSupportsParallelProcessing(!processorNode.isTriggeredSerially());
    processorDTO.setSupportsEventDriven(processorNode.isEventDrivenSupported());
    processorDTO.setSupportsBatching(processorNode.isSessionBatchingSupported());
    ProcessorConfigDTO configDTO = new ProcessorConfigDTO();
    configDTO.setSchedulingPeriod(processorNode.getSchedulingPeriod());
    configDTO.setPenaltyDuration(processorNode.getPenalizationPeriod());
    configDTO.setYieldDuration(processorNode.getYieldPeriod());
    configDTO.setRunDurationMillis(processorNode.getRunDuration(TimeUnit.MILLISECONDS));
    configDTO.setConcurrentlySchedulableTaskCount(processorNode.getMaxConcurrentTasks());
    configDTO.setLossTolerant(processorNode.isLossTolerant());
    configDTO.setComments(processorNode.getComments());
    configDTO.setBulletinLevel(processorNode.getBulletinLevel().name());
    configDTO.setSchedulingStrategy(processorNode.getSchedulingStrategy().name());
    configDTO.setExecutionNode(processorNode.getExecutionNode().name());
    configDTO.setAnnotationData(processorNode.getAnnotationData());
    processorDTO.setConfig(configDTO);
    // create the snippet with the processor
    final FlowSnippetDTO flowSnippetDTO = new FlowSnippetDTO();
    flowSnippetDTO.setProcessors(Collections.singleton(processorDTO));
    // instantiate the snippet
    assertEquals(0, controller.getRootGroup().getProcessors().size());
    controller.instantiateSnippet(controller.getRootGroup(), flowSnippetDTO);
    assertEquals(1, controller.getRootGroup().getProcessors().size());
}