use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
the class TestDeciderService method createConditionalWF.
private WorkflowDef createConditionalWF() {
    WorkflowTask workflowTask1 = new WorkflowTask();
    workflowTask1.setName("junit_task_1");
    Map<String, Object> inputParams1 = new HashMap<>();
    inputParams1.put("p1", "workflow.input.param1");
    inputParams1.put("p2", "workflow.input.param2");
    workflowTask1.setInputParameters(inputParams1);
    workflowTask1.setTaskReferenceName("t1");
    workflowTask1.setTaskDefinition(new TaskDef("junit_task_1"));

    WorkflowTask workflowTask2 = new WorkflowTask();
    workflowTask2.setName("junit_task_2");
    Map<String, Object> inputParams2 = new HashMap<>();
    inputParams2.put("tp1", "workflow.input.param1");
    workflowTask2.setInputParameters(inputParams2);
    workflowTask2.setTaskReferenceName("t2");
    workflowTask2.setTaskDefinition(new TaskDef("junit_task_2"));

    WorkflowTask workflowTask3 = new WorkflowTask();
    workflowTask3.setName("junit_task_3");
    Map<String, Object> inputParams3 = new HashMap<>();
    inputParams3.put("tp3", "workflow.input.param2");
    workflowTask3.setInputParameters(inputParams3);
    workflowTask3.setTaskReferenceName("t3");
    workflowTask3.setTaskDefinition(new TaskDef("junit_task_3"));

    WorkflowDef workflowDef = new WorkflowDef();
    workflowDef.setName("Conditional Workflow");
    workflowDef.setDescription("Conditional Workflow");
    workflowDef.setInputParameters(Arrays.asList("param1", "param2"));

    // nested DECISION: switches on workflow.input.param2
    WorkflowTask decisionTask2 = new WorkflowTask();
    decisionTask2.setType(TaskType.DECISION.name());
    decisionTask2.setCaseValueParam("case");
    decisionTask2.setName("conditional2");
    decisionTask2.setTaskReferenceName("conditional2");
    Map<String, List<WorkflowTask>> dc = new HashMap<>();
    dc.put("one", Arrays.asList(workflowTask1, workflowTask3));
    dc.put("two", Collections.singletonList(workflowTask2));
    decisionTask2.setDecisionCases(dc);
    decisionTask2.getInputParameters().put("case", "workflow.input.param2");

    // top-level DECISION: switches on workflow.input.param1; its "nested" case contains the inner decision
    WorkflowTask decisionTask = new WorkflowTask();
    decisionTask.setType(TaskType.DECISION.name());
    decisionTask.setCaseValueParam("case");
    decisionTask.setName("conditional");
    decisionTask.setTaskReferenceName("conditional");
    Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
    decisionCases.put("nested", Collections.singletonList(decisionTask2));
    decisionCases.put("three", Collections.singletonList(workflowTask3));
    decisionTask.setDecisionCases(decisionCases);
    decisionTask.getInputParameters().put("case", "workflow.input.param1");
    decisionTask.getDefaultCase().add(workflowTask2);
    workflowDef.getTasks().add(decisionTask);

    WorkflowTask notifyTask = new WorkflowTask();
    notifyTask.setName("junit_task_4");
    notifyTask.setTaskReferenceName("junit_task_4");
    notifyTask.setTaskDefinition(new TaskDef("junit_task_4"));

    // second top-level DECISION: switches on workflow.input.finalCase
    WorkflowTask finalDecisionTask = new WorkflowTask();
    finalDecisionTask.setName("finalcondition");
    finalDecisionTask.setTaskReferenceName("tf");
    finalDecisionTask.setType(TaskType.DECISION.name());
    finalDecisionTask.setCaseValueParam("finalCase");
    Map<String, Object> fi = new HashMap<>();
    fi.put("finalCase", "workflow.input.finalCase");
    finalDecisionTask.setInputParameters(fi);
    finalDecisionTask.getDecisionCases().put("notify", Collections.singletonList(notifyTask));
    workflowDef.getTasks().add(finalDecisionTask);

    return workflowDef;
}
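For orientation, this is how the fixture might drive a single decide pass. A minimal sketch only: deciderService is assumed to be the instance wired up elsewhere in TestDeciderService, the public decide(Workflow) entry point is assumed, and the input values are chosen so that the top-level DECISION selects its "nested" case and the inner DECISION selects "one".

WorkflowDef def = createConditionalWF();

Workflow workflow = new Workflow();
workflow.setWorkflowDefinition(def);
workflow.setWorkflowId("test-conditional-wf");
workflow.setStatus(Workflow.WorkflowStatus.RUNNING);

Map<String, Object> input = new HashMap<>();
input.put("param1", "nested"); // read by the top-level DECISION through its "case" input parameter
input.put("param2", "one");    // read by the nested DECISION
workflow.setInput(input);

DeciderService.DeciderOutcome outcome = deciderService.decide(workflow);
// Inspect outcome.tasksToBeScheduled to see which branch of the conditional the decider selected.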
use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
the class WorkflowExecutor method skipTasksAffectedByTerminateTask.
/**
 * When a TERMINATE task runs, it only affects the workflow in which it runs; it does nothing to in-progress
 * tasks and sub-workflows that are still running. This recursive method sets every non-terminal task in every
 * sub-workflow to SKIPPED so that the whole workflow hierarchy can reach a terminal state.
 *
 * @param workflow a sub-workflow within the hierarchy of the original workflow containing the TERMINATE task
 */
private void skipTasksAffectedByTerminateTask(Workflow workflow) {
    if (!workflow.getStatus().isTerminal()) {
        List<Task> tasksToBeUpdated = new ArrayList<>();
        for (Task workflowTask : workflow.getTasks()) {
            if (!workflowTask.getStatus().isTerminal()) {
                workflowTask.setStatus(SKIPPED);
                tasksToBeUpdated.add(workflowTask);
            }
            if (TaskType.SUB_WORKFLOW.name().equals(workflowTask.getTaskType()) && StringUtils.isNotBlank(workflowTask.getSubWorkflowId())) {
                Workflow subWorkflow = executionDAOFacade.getWorkflowById(workflowTask.getSubWorkflowId(), true);
                if (subWorkflow != null) {
                    skipTasksAffectedByTerminateTask(subWorkflow);
                }
            }
        }
        if (!tasksToBeUpdated.isEmpty()) {
            executionDAOFacade.updateTasks(tasksToBeUpdated);
            workflow.setStatus(Workflow.WorkflowStatus.TERMINATED);
            workflow.setReasonForIncompletion("Parent workflow was terminated with a TERMINATE task");
            executionDAOFacade.updateWorkflow(workflow);
        }
    }
}
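For reference, the TERMINATE task that triggers this cleanup is declared like any other WorkflowTask in a workflow definition. A minimal sketch; the "terminationStatus" and "workflowOutput" input parameter names follow the Conductor documentation for the Terminate system task and are not taken from the code above.

WorkflowTask terminateTask = new WorkflowTask();
terminateTask.setName("terminate_workflow");
terminateTask.setTaskReferenceName("terminate_workflow");
terminateTask.setType(TaskType.TERMINATE.name());

Map<String, Object> terminateInput = new HashMap<>();
terminateInput.put("terminationStatus", "COMPLETED"); // or "FAILED"
terminateInput.put("workflowOutput", "workflow.input.result"); // optional; hypothetical expression for the terminated workflow's output
terminateTask.setInputParameters(terminateInput);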
use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
the class DeciderService method checkForWorkflowCompletion.
@VisibleForTesting
boolean checkForWorkflowCompletion(final Workflow workflow) throws TerminateWorkflowException {
    List<Task> allTasks = workflow.getTasks();
    if (allTasks.isEmpty()) {
        return false;
    }
    if (containsSuccessfulTerminateTask.test(workflow)) {
        return true;
    }
    Map<String, Status> taskStatusMap = new HashMap<>();
    workflow.getTasks().forEach(task -> taskStatusMap.put(task.getReferenceTaskName(), task.getStatus()));
    List<WorkflowTask> workflowTasks = workflow.getWorkflowDefinition().getTasks();
    boolean allCompletedSuccessfully = workflowTasks.stream().parallel().allMatch(wftask -> {
        Status status = taskStatusMap.get(wftask.getTaskReferenceName());
        return status != null && status.isSuccessful() && status.isTerminal();
    });
    boolean noPendingTasks = taskStatusMap.values().stream().allMatch(Status::isTerminal);
    boolean noPendingSchedule = workflow.getTasks().stream().parallel().noneMatch(wftask -> {
        String next = getNextTasksToBeScheduled(workflow, wftask);
        return next != null && !taskStatusMap.containsKey(next);
    });
    return allCompletedSuccessfully && noPendingTasks && noPendingSchedule;
}
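containsSuccessfulTerminateTask is referenced here (and again in decide below) but not defined in this excerpt. A Predicate<Workflow> along the following lines would match the way it is used; this is a sketch, not the actual field from DeciderService.

private final Predicate<Workflow> containsSuccessfulTerminateTask = workflow ->
    workflow.getTasks().stream().anyMatch(task ->
        TaskType.TERMINATE.name().equals(task.getTaskType())
            && task.getStatus().isTerminal()
            && task.getStatus().isSuccessful());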
use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
the class DeciderService method decide.
private DeciderOutcome decide(final Workflow workflow, List<Task> preScheduledTasks) throws TerminateWorkflowException {
    DeciderOutcome outcome = new DeciderOutcome();
    if (workflow.getStatus().isTerminal()) {
        // you cannot evaluate a terminal workflow
        LOGGER.debug("Workflow {} is already finished. Reason: {}", workflow, workflow.getReasonForIncompletion());
        return outcome;
    }
    checkWorkflowTimeout(workflow);
    if (workflow.getStatus().equals(WorkflowStatus.PAUSED)) {
        LOGGER.debug("Workflow " + workflow.getWorkflowId() + " is paused");
        return outcome;
    }
    // Filter the list of tasks and include only tasks that are not retried, not executed,
    // not marked to be skipped, and not system tasks such as DECISION, FORK or JOIN.
    // This list will be empty for a new workflow being started
    List<Task> pendingTasks = workflow.getTasks().stream().filter(isNonPendingTask).collect(Collectors.toList());
    // Collect the reference names of tasks that have already completed their lifecycle (isExecuted);
    // entries are removed below for tasks that must be re-scheduled. This set will be empty for a new workflow
    Set<String> executedTaskRefNames = workflow.getTasks().stream().filter(Task::isExecuted).map(Task::getReferenceTaskName).collect(Collectors.toSet());
    Map<String, Task> tasksToBeScheduled = new LinkedHashMap<>();
    preScheduledTasks.forEach(preScheduledTask -> {
        tasksToBeScheduled.put(preScheduledTask.getReferenceTaskName(), preScheduledTask);
    });
    // A new workflow does not enter this code branch
    for (Task pendingTask : pendingTasks) {
        if (SystemTaskType.is(pendingTask.getTaskType()) && !pendingTask.getStatus().isTerminal()) {
            tasksToBeScheduled.putIfAbsent(pendingTask.getReferenceTaskName(), pendingTask);
            executedTaskRefNames.remove(pendingTask.getReferenceTaskName());
        }
        Optional<TaskDef> taskDefinition = pendingTask.getTaskDefinition();
        if (!taskDefinition.isPresent()) {
            taskDefinition = Optional.ofNullable(workflow.getWorkflowDefinition().getTaskByRefName(pendingTask.getReferenceTaskName())).map(WorkflowTask::getTaskDefinition);
        }
        if (taskDefinition.isPresent()) {
            checkTaskTimeout(taskDefinition.get(), pendingTask);
            checkTaskPollTimeout(taskDefinition.get(), pendingTask);
            // If the task has not been updated for "responseTimeoutSeconds" then mark task as TIMED_OUT
            if (isResponseTimedOut(taskDefinition.get(), pendingTask)) {
                timeoutTask(taskDefinition.get(), pendingTask);
            }
        }
        if (!pendingTask.getStatus().isSuccessful()) {
            WorkflowTask workflowTask = pendingTask.getWorkflowTask();
            if (workflowTask == null) {
                workflowTask = workflow.getWorkflowDefinition().getTaskByRefName(pendingTask.getReferenceTaskName());
            }
            Optional<Task> retryTask = retry(taskDefinition.orElse(null), workflowTask, pendingTask, workflow);
            if (retryTask.isPresent()) {
                tasksToBeScheduled.put(retryTask.get().getReferenceTaskName(), retryTask.get());
                executedTaskRefNames.remove(retryTask.get().getReferenceTaskName());
                outcome.tasksToBeUpdated.add(pendingTask);
            } else {
                pendingTask.setStatus(COMPLETED_WITH_ERRORS);
            }
        }
        if (!pendingTask.isExecuted() && !pendingTask.isRetried() && pendingTask.getStatus().isTerminal()) {
            pendingTask.setExecuted(true);
            List<Task> nextTasks = getNextTask(workflow, pendingTask);
            if (pendingTask.isLoopOverTask() && !TaskType.DO_WHILE.name().equals(pendingTask.getTaskType()) && !nextTasks.isEmpty()) {
                nextTasks = filterNextLoopOverTasks(nextTasks, pendingTask, workflow);
            }
            nextTasks.forEach(nextTask -> tasksToBeScheduled.putIfAbsent(nextTask.getReferenceTaskName(), nextTask));
            outcome.tasksToBeUpdated.add(pendingTask);
            LOGGER.debug("Scheduling Tasks from {}, next = {} for workflowId: {}", pendingTask.getTaskDefName(), nextTasks.stream().map(Task::getTaskDefName).collect(Collectors.toList()), workflow.getWorkflowId());
        }
    }
    // All the tasks that still need to be scheduled, and have not already been executed, are added to the outcome
    List<Task> unScheduledTasks = tasksToBeScheduled.values().stream().filter(task -> !executedTaskRefNames.contains(task.getReferenceTaskName())).collect(Collectors.toList());
    if (!unScheduledTasks.isEmpty()) {
        LOGGER.debug("Scheduling Tasks: {} for workflow: {}", unScheduledTasks.stream().map(Task::getTaskDefName).collect(Collectors.toList()), workflow.getWorkflowId());
        outcome.tasksToBeScheduled.addAll(unScheduledTasks);
    }
    if (containsSuccessfulTerminateTask.test(workflow) || (outcome.tasksToBeScheduled.isEmpty() && checkForWorkflowCompletion(workflow))) {
        LOGGER.debug("Marking workflow: {} as complete.", workflow);
        outcome.isComplete = true;
    }
    return outcome;
}
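The DeciderOutcome built up above is a plain value holder. Judging only from the fields this method touches, its shape is roughly the following; a sketch, and the real class may carry additional state.

public static class DeciderOutcome {

    // tasks the executor should persist and start as a result of this decide pass
    List<Task> tasksToBeScheduled = new LinkedList<>();

    // existing tasks whose state changed during the pass and must be written back
    List<Task> tasksToBeUpdated = new LinkedList<>();

    // set when there is nothing left to schedule and the workflow can be completed
    boolean isComplete;
}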
use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
the class DeciderService method getTasksToBeScheduled.
public List<Task> getTasksToBeScheduled(Workflow workflow, WorkflowTask taskToSchedule, int retryCount, String retriedTaskId) {
    workflow = populateWorkflowAndTaskData(workflow);
    Map<String, Object> input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflow, null, null);
    TaskType taskType = TaskType.USER_DEFINED;
    String type = taskToSchedule.getType();
    if (TaskType.isSystemTask(type)) {
        taskType = TaskType.valueOf(type);
    }
    // get tasks already scheduled (in progress/terminal) for this workflow instance
    List<String> tasksInWorkflow = workflow.getTasks().stream()
        .filter(runningTask -> runningTask.getStatus().equals(Status.IN_PROGRESS) || runningTask.getStatus().isTerminal())
        .map(Task::getReferenceTaskName)
        .collect(Collectors.toList());
    String taskId = IDGenerator.generate();
    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
        .withWorkflowDefinition(workflow.getWorkflowDefinition())
        .withWorkflowInstance(workflow)
        .withTaskDefinition(taskToSchedule.getTaskDefinition())
        .withTaskToSchedule(taskToSchedule)
        .withTaskInput(input)
        .withRetryCount(retryCount)
        .withRetryTaskId(retriedTaskId)
        .withTaskId(taskId)
        .withDeciderService(this)
        .build();
    // for static forks, each branch of the fork creates a join task upon completion
    // for dynamic forks, a join task is created with the fork and also with each branch of the fork
    // a new task must only be scheduled if a task with the same reference name is not already in this workflow instance
    List<Task> tasks = taskMappers.get(taskType.name()).getMappedTasks(taskMapperContext).stream()
        .filter(task -> !tasksInWorkflow.contains(task.getReferenceTaskName()))
        .collect(Collectors.toList());
    tasks.forEach(this::externalizeTaskData);
    return tasks;
}
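A minimal usage sketch, assuming a deciderService instance and a running workflow whose definition contains a task with reference name "t1" (both hypothetical here); retryCount is 0 and retriedTaskId is null on a first attempt.

WorkflowTask taskToSchedule = workflow.getWorkflowDefinition().getTaskByRefName("t1");
if (taskToSchedule != null) {
    List<Task> toSchedule = deciderService.getTasksToBeScheduled(workflow, taskToSchedule, 0, null);
    toSchedule.forEach(task ->
        LOGGER.debug("Mapped task {} of type {} for workflow {}",
            task.getReferenceTaskName(), task.getTaskType(), workflow.getWorkflowId()));
}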