Search in sources :

Example 66 with WorkflowDef

use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.

the class ForkJoinDynamicTaskMapper method getMappedTasks.

/**
 * This method gets the list of tasks that need to be scheduled when the task to be scheduled is of type {@link TaskType#FORK_JOIN_DYNAMIC}.
 * Creates a Fork Task, followed by the Dynamic tasks and a final JOIN task.
 * <p>The definitions of the dynamic forks that need to be scheduled are available via {@link WorkflowTask#getInputParameters()},
 * which is accessed using {@link TaskMapperContext#getTaskToSchedule()}. The dynamic fork task definitions are referenced by a key supplied either by
 * {@link WorkflowTask#getDynamicForkTasksParam()} or by {@link WorkflowTask#getDynamicForkJoinTasksParam()}.
 * </p>
 * When creating the list of tasks to be scheduled, a set of preconditions is validated:
 * <ul>
 * <li>If the input parameter representing the dynamic fork tasks is supplied via {@link WorkflowTask#getDynamicForkTasksParam()}, then
 * the input for the dynamic tasks, looked up via {@link WorkflowTask#getDynamicForkTasksInputParamName()}, is validated to be a {@link Map}</li>
 * <li>If the input parameter representing the dynamic fork tasks is supplied via {@link WorkflowTask#getDynamicForkJoinTasksParam()}, then
 * the input for the dynamic tasks is taken from the payload of the task definitions.
 * </li>
 * <li>A check is performed that the next task in the {@link WorkflowDef} is a {@link TaskType#JOIN}</li>
 * </ul>
 *
 * @param taskMapperContext A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId
 * @throws TerminateWorkflowException In case of:
 *                                    <ul>
 *                                    <li>
 *                                    When the task after {@link TaskType#FORK_JOIN_DYNAMIC} is not a {@link TaskType#JOIN}
 *                                    </li>
 *                                    <li>
 *                                    When the input parameters for the dynamic tasks are not of type {@link Map}
 *                                    </li>
 *                                    </ul>
 * @return List of tasks in the following order:
 * <ul>
 * <li>
 * {@link SystemTaskType#FORK} with {@link Task.Status#COMPLETED}
 * </li>
 * <li>
 * Might be any kind of task, but in most cases this is a UserDefinedTask with {@link Task.Status#SCHEDULED}
 * </li>
 * <li>
 * {@link SystemTaskType#JOIN} with {@link Task.Status#IN_PROGRESS}
 * </li>
 * </ul>
 */
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException {
    logger.debug("TaskMapperContext {} in ForkJoinDynamicTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    String taskId = taskMapperContext.getTaskId();
    int retryCount = taskMapperContext.getRetryCount();
    List<Task> mappedTasks = new LinkedList<>();
    // Get the list of dynamic tasks and the input for the tasks
    Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> workflowTasksAndInputPair = Optional.ofNullable(taskToSchedule.getDynamicForkTasksParam()).map(dynamicForkTaskParam -> getDynamicForkTasksAndInput(taskToSchedule, workflowInstance, dynamicForkTaskParam)).orElseGet(() -> getDynamicForkJoinTasksAndInput(taskToSchedule, workflowInstance));
    List<WorkflowTask> dynForkTasks = workflowTasksAndInputPair.getLeft();
    Map<String, Map<String, Object>> tasksInput = workflowTasksAndInputPair.getRight();
    // Create Fork Task which needs to be followed by the dynamic tasks
    Task forkDynamicTask = createDynamicForkTask(taskToSchedule, workflowInstance, taskId, dynForkTasks);
    mappedTasks.add(forkDynamicTask);
    List<String> joinOnTaskRefs = new LinkedList<>();
    // Schedule the tasks for each dynamic fork branch and record the last task in each branch, since the JOIN that follows must wait on those references
    for (WorkflowTask dynForkTask : dynForkTasks) {
        // TODO this is a cyclic dependency, break it out using function composition
        List<Task> forkedTasks = taskMapperContext.getDeciderService().getTasksToBeScheduled(workflowInstance, dynForkTask, retryCount);
        // If no tasks could be scheduled for this branch, it is usually because a task with the same reference name has already been created in the Workflow.
        if (forkedTasks == null || forkedTasks.isEmpty()) {
            Optional<String> existingTaskRefName = workflowInstance.getTasks().stream().filter(runningTask -> runningTask.getStatus().equals(Task.Status.IN_PROGRESS) || runningTask.getStatus().isTerminal()).map(Task::getReferenceTaskName).filter(refTaskName -> refTaskName.equals(dynForkTask.getTaskReferenceName())).findAny();
            // Construct an informative error message
            String terminateMessage = "No dynamic tasks could be created for the Workflow: " + workflowInstance.toShortString() + ", Dynamic Fork Task: " + dynForkTask;
            if (existingTaskRefName.isPresent()) {
                terminateMessage += ". Attempted to create a duplicate task reference name: " + existingTaskRefName.get();
            }
            throw new TerminateWorkflowException(terminateMessage);
        }
        for (Task forkedTask : forkedTasks) {
            Map<String, Object> forkedTaskInput = tasksInput.get(forkedTask.getReferenceTaskName());
            forkedTask.getInputData().putAll(forkedTaskInput);
        }
        mappedTasks.addAll(forkedTasks);
        // Get the last of the dynamic tasks so that the join can be performed once this task is done
        Task last = forkedTasks.get(forkedTasks.size() - 1);
        joinOnTaskRefs.add(last.getReferenceTaskName());
    }
    // From the workflow definition get the next task and make sure that it is a JOIN task.
    // The dynamic fork tasks need to be followed by a join task
    WorkflowTask joinWorkflowTask = workflowInstance.getWorkflowDefinition().getNextTask(taskToSchedule.getTaskReferenceName());
    if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(TaskType.JOIN.name())) {
        throw new TerminateWorkflowException("Dynamic join definition is not followed by a join task. Check the blueprint");
    }
    // Create Join task
    HashMap<String, Object> joinInput = new HashMap<>();
    joinInput.put("joinOn", joinOnTaskRefs);
    Task joinTask = createJoinTask(workflowInstance, joinWorkflowTask, joinInput);
    mappedTasks.add(joinTask);
    return mappedTasks;
}
Also used : IDGenerator(com.netflix.conductor.core.utils.IDGenerator) LoggerFactory(org.slf4j.LoggerFactory) HashMap(java.util.HashMap) MetadataDAO(com.netflix.conductor.dao.MetadataDAO) Task(com.netflix.conductor.common.metadata.tasks.Task) StringUtils(org.apache.commons.lang3.StringUtils) TerminateWorkflowException(com.netflix.conductor.core.execution.TerminateWorkflowException) ArrayList(java.util.ArrayList) ParametersUtils(com.netflix.conductor.core.execution.ParametersUtils) Pair(org.apache.commons.lang3.tuple.Pair) Workflow(com.netflix.conductor.common.run.Workflow) Map(java.util.Map) LinkedList(java.util.LinkedList) TypeReference(com.fasterxml.jackson.core.type.TypeReference) Logger(org.slf4j.Logger) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) WorkflowDef(com.netflix.conductor.common.metadata.workflow.WorkflowDef) Collectors(java.util.stream.Collectors) ImmutablePair(org.apache.commons.lang3.tuple.ImmutablePair) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) List(java.util.List) DynamicForkJoinTaskList(com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList) SystemTaskType(com.netflix.conductor.core.execution.SystemTaskType) Optional(java.util.Optional) TaskType(com.netflix.conductor.common.metadata.workflow.TaskType) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
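
The preconditions above are easiest to see from the definition side. Below is a minimal, hypothetical sketch of a WorkflowDef that getMappedTasks would accept: a FORK_JOIN_DYNAMIC task whose dynamicForkTasksParam and dynamicForkTasksInputParamName name the runtime parameters carrying the expanded task list and their inputs, immediately followed by a JOIN task. The task reference names, workflow name and the ${...} expressions are illustrative assumptions, not taken from the Conductor sources.

WorkflowTask fork = new WorkflowTask();
fork.setName("dynamicFanout");
fork.setTaskReferenceName("dynamicFanout");
fork.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC);
// Keys under which the expanded task list and the per-task input maps are supplied at runtime
fork.setDynamicForkTasksParam("dynamicTasks");
fork.setDynamicForkTasksInputParamName("dynamicTasksInput");
fork.getInputParameters().put("dynamicTasks", "${previousTask.output.dynamicTasks}");
fork.getInputParameters().put("dynamicTasksInput", "${previousTask.output.dynamicTasksInput}");
// The next task in the WorkflowDef must be a JOIN, otherwise getMappedTasks throws TerminateWorkflowException
WorkflowTask join = new WorkflowTask();
join.setName("dynamicFanoutJoin");
join.setTaskReferenceName("dynamicFanoutJoin");
join.setWorkflowTaskType(TaskType.JOIN);
WorkflowDef def = new WorkflowDef();
def.setName("dynamic_fork_example");
def.setSchemaVersion(2);
def.setTasks(Arrays.asList(fork, join));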

Example 67 with WorkflowDef

use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.

the class ExecutionDAOTest method createTestWorkflow.

protected Workflow createTestWorkflow() {
    WorkflowDef def = new WorkflowDef();
    def.setName("Junit Workflow");
    def.setVersion(3);
    def.setSchemaVersion(2);
    Workflow workflow = new Workflow();
    workflow.setWorkflowDefinition(def);
    workflow.setCorrelationId("correlationX");
    workflow.setCreatedBy("junit_tester");
    workflow.setEndTime(200L);
    Map<String, Object> input = new HashMap<>();
    input.put("param1", "param1 value");
    input.put("param2", 100);
    workflow.setInput(input);
    Map<String, Object> output = new HashMap<>();
    output.put("ouput1", "output 1 value");
    output.put("op2", 300);
    workflow.setOutput(output);
    workflow.setOwnerApp("workflow");
    workflow.setParentWorkflowId("parentWorkflowId");
    workflow.setParentWorkflowTaskId("parentWFTaskId");
    workflow.setReasonForIncompletion("missing recipe");
    workflow.setReRunFromWorkflowId("re-run from id1");
    workflow.setStartTime(90L);
    workflow.setStatus(Workflow.WorkflowStatus.FAILED);
    workflow.setWorkflowId(UUID.randomUUID().toString());
    List<Task> tasks = new LinkedList<>();
    Task task = new Task();
    task.setScheduledTime(1L);
    task.setSeq(1);
    task.setTaskId(UUID.randomUUID().toString());
    task.setReferenceTaskName("t1");
    task.setWorkflowInstanceId(workflow.getWorkflowId());
    task.setTaskDefName("task1");
    Task task2 = new Task();
    task2.setScheduledTime(2L);
    task2.setSeq(2);
    task2.setTaskId(UUID.randomUUID().toString());
    task2.setReferenceTaskName("t2");
    task2.setWorkflowInstanceId(workflow.getWorkflowId());
    task2.setTaskDefName("task2");
    Task task3 = new Task();
    task3.setScheduledTime(2L);
    task3.setSeq(3);
    task3.setTaskId(UUID.randomUUID().toString());
    task3.setReferenceTaskName("t3");
    task3.setWorkflowInstanceId(workflow.getWorkflowId());
    task3.setTaskDefName("task3");
    tasks.add(task);
    tasks.add(task2);
    tasks.add(task3);
    workflow.setTasks(tasks);
    workflow.setUpdatedBy("junit_tester");
    workflow.setUpdateTime(800L);
    return workflow;
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) WorkflowDef(com.netflix.conductor.common.metadata.workflow.WorkflowDef) HashMap(java.util.HashMap) Workflow(com.netflix.conductor.common.run.Workflow) LinkedList(java.util.LinkedList)
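
A hedged usage sketch of this fixture: DAO tests typically persist it through the DAO under test and read it back for assertions. The test method below is illustrative only; it assumes ExecutionDAO exposes createWorkflow(Workflow) and getWorkflow(String, boolean), as the other tests in ExecutionDAOTest suggest, and it is not quoted from the Conductor sources.

@Test
public void createTestWorkflowRoundTrip() {
    Workflow workflow = createTestWorkflow();
    // Persist the fixture through the DAO under test (assumed ExecutionDAO method)
    String workflowId = getExecutionDAO().createWorkflow(workflow);
    // Read it back, including tasks, and spot-check a couple of persisted fields
    Workflow found = getExecutionDAO().getWorkflow(workflowId, true);
    assertEquals(workflow.getCorrelationId(), found.getCorrelationId());
    assertEquals(Workflow.WorkflowStatus.FAILED, found.getStatus());
}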

Example 68 with WorkflowDef

use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.

the class ExecutionDAOTest method testPending.

@Test
public void testPending() {
    WorkflowDef def = new WorkflowDef();
    def.setName("pending_count_test");
    Workflow workflow = createTestWorkflow();
    workflow.setWorkflowDefinition(def);
    List<String> workflowIds = generateWorkflows(workflow, 10);
    long count = getExecutionDAO().getPendingWorkflowCount(def.getName());
    assertEquals(10, count);
    for (int i = 0; i < 10; i++) {
        getExecutionDAO().removeFromPendingWorkflow(def.getName(), workflowIds.get(i));
    }
    count = getExecutionDAO().getPendingWorkflowCount(def.getName());
    assertEquals(0, count);
}
Also used : WorkflowDef(com.netflix.conductor.common.metadata.workflow.WorkflowDef) Workflow(com.netflix.conductor.common.run.Workflow) Test(org.junit.Test)
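
generateWorkflows is a helper defined elsewhere in ExecutionDAOTest, so its body is not shown here. The sketch below is a hypothetical version of what such a helper has to do for the assertions above to hold: persist count copies of the template with fresh ids and a non-terminal status, so that every copy registers in the pending set.

// Hypothetical sketch only; the real generateWorkflows helper may differ.
protected List<String> generateWorkflowsSketch(Workflow base, int count) {
    List<String> workflowIds = new ArrayList<>(count);
    for (int i = 0; i < count; i++) {
        String workflowId = UUID.randomUUID().toString();
        base.setWorkflowId(workflowId);
        // A non-terminal status is assumed so the workflow counts towards getPendingWorkflowCount
        base.setStatus(Workflow.WorkflowStatus.RUNNING);
        getExecutionDAO().createWorkflow(base);
        workflowIds.add(workflowId);
    }
    return workflowIds;
}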

Example 69 with WorkflowDef

use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.

the class MetadataMapperServiceTest method createWorkflowDefinition.

private WorkflowDef createWorkflowDefinition(String name) {
    WorkflowDef workflowDefinition = new WorkflowDef();
    workflowDefinition.setName(name);
    return workflowDefinition;
}
Also used : WorkflowDef(com.netflix.conductor.common.metadata.workflow.WorkflowDef)

Example 70 with WorkflowDef

use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.

the class MetadataMapperServiceTest method testVersionPopulationForSubworkflowTaskIfVersionIsNotAvailable.

@Test
public void testVersionPopulationForSubworkflowTaskIfVersionIsNotAvailable() {
    String nameTaskDefinition = "taskSubworkflow6";
    String workflowDefinitionName = "subworkflow";
    Integer version = 3;
    WorkflowDef subWorkflowDefinition = createWorkflowDefinition(workflowDefinitionName);
    subWorkflowDefinition.setVersion(version);
    WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition);
    workflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW);
    SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
    subWorkflowParams.setName(workflowDefinitionName);
    workflowTask.setSubWorkflowParam(subWorkflowParams);
    WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation");
    workflowDefinition.setTasks(ImmutableList.of(workflowTask));
    when(metadataDAO.getLatestWorkflowDef(workflowDefinitionName)).thenReturn(Optional.of(subWorkflowDefinition));
    metadataMapperService.populateTaskDefinitions(workflowDefinition);
    assertEquals(1, workflowDefinition.getTasks().size());
    List<WorkflowTask> workflowTasks = workflowDefinition.getTasks();
    SubWorkflowParams params = workflowTasks.get(0).getSubWorkflowParam();
    assertEquals(workflowDefinitionName, params.getName());
    assertEquals(version, params.getVersion());
    verify(metadataDAO).getLatestWorkflowDef(workflowDefinitionName);
    verify(metadataDAO).getTaskDef(nameTaskDefinition);
    verifyNoMoreInteractions(metadataDAO);
}
Also used : SubWorkflowParams(com.netflix.conductor.common.metadata.workflow.SubWorkflowParams) WorkflowDef(com.netflix.conductor.common.metadata.workflow.WorkflowDef) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) Test(org.junit.Test)
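
The behaviour asserted here is that, when a SUB_WORKFLOW task carries no version, MetadataMapperService resolves the latest registered WorkflowDef by name and copies its version onto the SubWorkflowParams. The snippet below is a hedged sketch of that resolution rule, not the actual MetadataMapperService implementation; in particular, the real error handling may differ.

// Illustrative sketch of the version-population rule exercised by the test above.
private void populateSubWorkflowVersion(WorkflowTask workflowTask) {
    SubWorkflowParams subWorkflowParams = workflowTask.getSubWorkflowParam();
    if (subWorkflowParams.getVersion() == null) {
        String subWorkflowName = subWorkflowParams.getName();
        WorkflowDef latestDefinition = metadataDAO.getLatestWorkflowDef(subWorkflowName)
                // Placeholder error handling; the real service may throw a different exception
                .orElseThrow(() -> new IllegalStateException("No workflow definition found for: " + subWorkflowName));
        subWorkflowParams.setVersion(latestDefinition.getVersion());
    }
}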

Aggregations

WorkflowDef (com.netflix.conductor.common.metadata.workflow.WorkflowDef): 247 usages
Test (org.junit.Test): 185 usages
WorkflowTask (com.netflix.conductor.common.metadata.workflow.WorkflowTask): 173 usages
Workflow (com.netflix.conductor.common.run.Workflow): 128 usages
HashMap (java.util.HashMap): 123 usages
Task (com.netflix.conductor.common.metadata.tasks.Task): 100 usages
TaskDef (com.netflix.conductor.common.metadata.tasks.TaskDef): 71 usages
SubWorkflow (com.netflix.conductor.core.execution.tasks.SubWorkflow): 49 usages
ArrayList (java.util.ArrayList): 43 usages
LinkedList (java.util.LinkedList): 37 usages
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString): 35 usages
List (java.util.List): 33 usages
UserTask (com.netflix.conductor.tests.utils.UserTask): 28 usages
Map (java.util.Map): 25 usages
SubWorkflowParams (com.netflix.conductor.common.metadata.workflow.SubWorkflowParams): 20 usages
ConstraintViolation (javax.validation.ConstraintViolation): 14 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 13 usages
ApplicationException (com.netflix.conductor.core.execution.ApplicationException): 13 usages
ValidatorFactory (javax.validation.ValidatorFactory): 13 usages
DynamicForkJoinTaskList (com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList): 12 usages