Usage of org.alien4cloud.tosca.model.workflow.Workflow in project alien4cloud (by alien4cloud): class GraphPathsTest, method testOrphan.
/**
 * Two steps linked to each other, forming a single cycle:
 *
 * <pre>
 * -- a --
 * /       \
 * \       /
 * -- b --
 * </pre>
 *
 * The cycle detector must report exactly one cycle path for the a -> b -> a loop.
 */
@Test
public void testOrphan() {
Workflow wf = new Workflow();
wf.setName(INSTALL);
WorkflowStep a = wf.addStep(new SimpleStep("a"));
WorkflowStep b = wf.addStep(new SimpleStep("b"));
// Link a -> b and b -> a to create the cycle under test.
WorkflowUtils.linkSteps(a, b);
WorkflowUtils.linkSteps(b, a);
List<Path> paths = WorkflowGraphUtils.getWorkflowGraphCycles(wf);
// Exactly one cycle is expected; log it (at INFO) instead of the previous
// leftover System.out.println debug statement.
Assert.assertEquals(1, paths.size());
log.info(paths.toString());
}
Usage of org.alien4cloud.tosca.model.workflow.Workflow in project alien4cloud (by alien4cloud): class EditorTopologyUploadService, method processTopologyParseResult.
/**
 * Validates the result of parsing an uploaded archive and, on success, installs the parsed
 * topology into the current edition context.
 *
 * @param archivePath path of the uploaded archive on disk, handed to the parsing post-processor.
 * @param parsingResult raw parsing result for the archive; re-assigned to the post-processed result.
 * @param workspace workspace in which the archive is being processed.
 * @throws EditorToscaYamlParsingException if the parsed YAML contains blocker (ERROR level) issues.
 * @throws EditorToscaYamlNotSupportedException if the YAML defines TOSCA types, lacks a topology
 *         template, or changes the archive name/version of the currently edited topology.
 */
private void processTopologyParseResult(Path archivePath, ParsingResult<ArchiveRoot> parsingResult, String workspace) {
// parse the archive.
parsingResult = postProcessor.process(archivePath, parsingResult, workspace);
// check if any blocker error has been found during parsing process.
if (parsingResult.hasError(ParsingErrorLevel.ERROR)) {
// do not save anything if any blocker error has been found during import.
throw new EditorToscaYamlParsingException("Uploaded yaml files is not a valid tosca template", ArchiveParserUtil.toSimpleResult(parsingResult));
}
// The editor only accepts pure topology templates: no type definitions allowed.
if (parsingResult.getResult().hasToscaTypes()) {
throw new EditorToscaYamlNotSupportedException("Tosca types are currently not supported in the topology editor context.");
}
if (!parsingResult.getResult().hasToscaTopologyTemplate()) {
throw new EditorToscaYamlNotSupportedException("A topology template is required in the topology edition context.");
}
Topology currentTopology = EditionContextManager.getTopology();
Topology parsedTopology = parsingResult.getResult().getTopology();
// Captured as final so the anonymous TopologyContext below can return it from getDSLVersion().
final String definitionVersion = parsingResult.getResult().getArchive().getToscaDefinitionsVersion();
// The uploaded template must keep the archive name/version of the topology being edited.
if (!currentTopology.getArchiveName().equals(parsedTopology.getArchiveName()) || !currentTopology.getArchiveVersion().equals(parsedTopology.getArchiveVersion())) {
throw new EditorToscaYamlNotSupportedException("Template name and version must be set to [" + currentTopology.getArchiveName() + ":" + currentTopology.getArchiveVersion() + "] and cannot be updated to [" + parsedTopology.getArchiveName() + ":" + parsedTopology.getArchiveVersion() + "]");
}
// Copy static elements from the topology
parsedTopology.setId(currentTopology.getId());
// Update editor tosca context
ToscaContext.get().resetDependencies(parsedTopology.getDependencies());
// init the workflows for the topology based on the yaml
TopologyContext topologyContext = workflowBuilderService.buildCachedTopologyContext(new TopologyContext() {
@Override
public String getDSLVersion() {
return definitionVersion;
}
@Override
public Topology getTopology() {
return parsedTopology;
}
@Override
public <T extends AbstractToscaType> T findElement(Class<T> clazz, String id) {
return ToscaContext.get(clazz, id);
}
});
// Flag every parsed workflow as user-modified before (re)initializing workflows —
// presumably so initWorkflows preserves them rather than regenerating; TODO confirm.
for (Workflow wf : safe(topologyContext.getTopology().getWorkflows()).values()) {
wf.setHasCustomModifications(true);
}
workflowBuilderService.initWorkflows(topologyContext);
// update the topology in the edition context with the new one
EditionContextManager.get().setTopology(parsingResult.getResult().getTopology());
}
Usage of org.alien4cloud.tosca.model.workflow.Workflow in project alien4cloud (by alien4cloud): class WorkflowPostProcessor, method splitMultipleActivitiesSteps.
/**
 * Called after yaml parsing.
 *
 * Add support of activities on alien-dsl-2.0.0 and higher.
 * For activity, other than the first, we create 1 step per activity.
 *
 * @param topologyContext parsing context giving access to the topology and its DSL version.
 */
private void splitMultipleActivitiesSteps(TopologyContext topologyContext) {
// NOTE(review): the javadoc says "2.0.0 and higher" but only ALIEN_DSL_200 is matched here —
// confirm whether later DSL versions should be included.
if (!ToscaParser.ALIEN_DSL_200.equals(topologyContext.getDSLVersion()) || MapUtils.isEmpty(topologyContext.getTopology().getWorkflows())) {
return;
}
for (Workflow wf : topologyContext.getTopology().getWorkflows().values()) {
if (wf.getSteps() != null) {
// Steps generated during this pass; added to the workflow only after iteration,
// to avoid mutating wf.getSteps() while it is being iterated.
Map<String, WorkflowStep> stepsToAdd = new HashMap<>();
// original step name -> names of the steps generated from its extra activities (in order).
Map<String, LinkedList<String>> newStepsNames = new HashMap<>();
for (WorkflowStep step : wf.getSteps().values()) {
// A step with no activity at all is a parsing error.
if (step.getActivities() == null) {
Node node = ParsingContextExecution.getObjectToNodeMap().get(step);
ParsingContextExecution.getParsingErrors().add(new ParsingError(ParsingErrorLevel.ERROR, ErrorCode.WORKFLOW_HAS_ERRORS, null, getSafeNodeStartMark(node), "Step should have at least one activity", getSafeNodeEndMark(node), step.getName()));
continue;
} else if (step.getActivities().size() < 2) {
// Single-activity steps need no splitting.
continue;
}
// We have a step with multiple activities we'll call it old step
// We will split this step into multiple steps, the first activity will be contained in the step with the same name
LinkedList<String> newStepsNamesForCurrentStep = newStepsNames.computeIfAbsent(step.getName(), k -> new LinkedList<>());
for (int i = 1; i < step.getActivities().size(); i++) {
// here we iterate on activities to create new step
WorkflowStep singleActivityStep = WorkflowUtils.cloneStep(step);
singleActivityStep.setActivities(Lists.newArrayList(step.getActivities().get(i)));
// Name must be unique among both existing steps and steps already generated this pass.
String wfStepName = WorkflowUtils.generateNewWfStepName(wf.getSteps().keySet(), stepsToAdd.keySet(), step.getName());
singleActivityStep.setName(wfStepName);
// Generated steps start without successors; the sequential chaining is wired below.
singleActivityStep.getOnSuccess().clear();
stepsToAdd.put(wfStepName, singleActivityStep);
newStepsNamesForCurrentStep.add(wfStepName);
}
// new steps are created, we can clean activities
step.getActivities().subList(1, step.getActivities().size()).clear();
}
// Generated steps must be executed in a sequential manner
newStepsNames.forEach((stepName, generatedStepsNames) -> {
// first old step is chained to the first generated step
WorkflowStep firstStep = wf.getSteps().get(stepName);
Set<String> currentFirstStepOnSuccess = firstStep.getOnSuccess();
firstStep.setOnSuccess(Sets.newHashSet(generatedStepsNames.getFirst()));
// The last generated step inherits the original step's successors,
// so the overall workflow ordering around the split step is preserved.
WorkflowStep lastGeneratedStep = stepsToAdd.get(generatedStepsNames.getLast());
lastGeneratedStep.setOnSuccess(currentFirstStepOnSuccess);
for (int i = 0; i < generatedStepsNames.size() - 1; i++) {
// Each generated step is chained with the preceding to create a sequence
stepsToAdd.get(generatedStepsNames.get(i)).addFollowing(generatedStepsNames.get(i + 1));
}
});
// add new steps to the workflow
wf.addAllSteps(stepsToAdd);
}
}
}
Usage of org.alien4cloud.tosca.model.workflow.Workflow in project alien4cloud (by alien4cloud): class WorkflowPostProcessor, method normalizeWorkflowNames.
/**
 * Renames every workflow whose name fails validation: accents are stripped, forbidden
 * characters are replaced with '_', and a numeric suffix is appended on collision.
 * A WARNING parsing error is reported for each rename.
 *
 * @param workflows workflow map keyed by name; renamed entries are re-keyed in place.
 */
private void normalizeWorkflowNames(Map<String, Workflow> workflows) {
// Snapshot the key set: renaming mutates the map while we iterate.
for (String originalName : Sets.newHashSet(workflows.keySet())) {
if (NameValidationUtils.isValid(originalName)) {
continue;
}
// Sanitize: drop accents, then replace every invalid character with '_'.
String sanitized = NameValidationUtils.DEFAULT_NAME_REPLACE_PATTERN.matcher(StringUtils.stripAccents(originalName)).replaceAll("_");
// Append _1, _2, ... until the candidate no longer clashes with an existing key.
String candidate = sanitized;
int suffix = 1;
while (workflows.containsKey(candidate)) {
candidate = sanitized + "_" + suffix++;
}
// Re-key the workflow under its normalized name.
Workflow workflow = workflows.remove(originalName);
workflow.setName(candidate);
workflows.put(candidate, workflow);
// Report the rename as a WARNING, anchored on the original name's YAML node when known.
Node node = ParsingContextExecution.getObjectToNodeMap().get(originalName);
ParsingContextExecution.getParsingErrors().add(new ParsingError(ParsingErrorLevel.WARNING, ErrorCode.INVALID_NAME, "Workflow", getSafeNodeStartMark(node), originalName, getSafeNodeEndMark(node), candidate));
}
}
Usage of org.alien4cloud.tosca.model.workflow.Workflow in project alien4cloud (by alien4cloud): class WorkflowPostProcessor, method finalizeParsedWorkflows.
/**
 * Called after yaml parsing.
 *
 * Normalizes workflow names, prunes successor references to unknown steps (reported as
 * warnings), wires the reverse "preceding" links, fills host ids and validates each workflow.
 *
 * @param topologyContext parsing context giving access to the topology and its workflows.
 * @param node YAML node used to anchor parsing errors reported by this pass.
 */
private void finalizeParsedWorkflows(TopologyContext topologyContext, Node node) {
if (MapUtils.isEmpty(topologyContext.getTopology().getWorkflows())) {
return;
}
// Must run first so all later processing sees the normalized workflow names.
normalizeWorkflowNames(topologyContext.getTopology().getWorkflows());
for (Workflow wf : topologyContext.getTopology().getWorkflows().values()) {
wf.setStandard(WorkflowUtils.isStandardWorkflow(wf));
if (wf.getSteps() != null) {
for (WorkflowStep step : wf.getSteps().values()) {
if (step.getOnSuccess() != null) {
// Iterator (not for-each) so unknown successors can be removed in place.
Iterator<String> followingIds = step.getOnSuccess().iterator();
while (followingIds.hasNext()) {
String followingId = followingIds.next();
WorkflowStep followingStep = wf.getSteps().get(followingId);
if (followingStep == null) {
// Successor points at a step that does not exist: drop the link, warn.
followingIds.remove();
ParsingContextExecution.getParsingErrors().add(new ParsingError(ParsingErrorLevel.WARNING, ErrorCode.UNKNWON_WORKFLOW_STEP, null, getSafeNodeStartMark(node), null, getSafeNodeEndMark(node), followingId));
} else {
// Mirror the onSuccess edge with the reverse "preceding" link.
followingStep.addPreceding(step.getName());
}
}
}
}
}
try {
WorkflowUtils.fillHostId(wf, topologyContext);
} catch (NotFoundException e) {
// Deliberately swallowed: validation below reports such problems as workflow errors.
log.trace("Not found exception during fill host id occurs when a relationship specified in workflow does not exist. This exception is ignored as the workflow validation trigger errors for such situations.", e);
}
int errorCount = workflowBuilderService.validateWorkflow(topologyContext, wf);
if (errorCount > 0) {
processWorkflowErrors(wf, wf.getErrors(), node);
}
}
}
Aggregations