
Example 56 with WorkflowTask

Use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.

The class DeciderService, method startWorkflow.

private List<Task> startWorkflow(Workflow workflow) throws TerminateWorkflowException {
    final WorkflowDef workflowDef = workflow.getWorkflowDefinition();
    LOGGER.debug("Starting workflow: {}", workflow);
    // The tasks will be empty in case of new workflow
    List<Task> tasks = workflow.getTasks();
    // Check if the workflow is a re-run case or if it is a new workflow execution
    if (workflow.getReRunFromWorkflowId() == null || tasks.isEmpty()) {
        if (workflowDef.getTasks().isEmpty()) {
            throw new TerminateWorkflowException("No tasks found to be executed", WorkflowStatus.COMPLETED);
        }
        // Nothing is running yet - so schedule the first task
        WorkflowTask taskToSchedule = workflowDef.getTasks().get(0);
        // Loop until a non-skipped task is found
        while (isTaskSkipped(taskToSchedule, workflow)) {
            taskToSchedule = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName());
        }
        // In case of a new workflow, the first non-skippable task will be scheduled
        return getTasksToBeScheduled(workflow, taskToSchedule, 0);
    }
    // Get the first task to schedule
    Task rerunFromTask = tasks.stream().findFirst().map(task -> {
        task.setStatus(SCHEDULED);
        task.setRetried(true);
        task.setRetryCount(0);
        return task;
    }).orElseThrow(() -> {
        String reason = String.format("The workflow %s is marked for re-run from %s but could not find the starting task", workflow.getWorkflowId(), workflow.getReRunFromWorkflowId());
        return new TerminateWorkflowException(reason);
    });
    return Collections.singletonList(rerunFromTask);
}
Also used : TaskUtils(com.netflix.conductor.common.utils.TaskUtils) TaskMapper(com.netflix.conductor.core.execution.mapper.TaskMapper) IDGenerator(com.netflix.conductor.core.utils.IDGenerator) Status(com.netflix.conductor.common.metadata.tasks.Task.Status) LoggerFactory(org.slf4j.LoggerFactory) TaskMapperContext(com.netflix.conductor.core.execution.mapper.TaskMapperContext) HashMap(java.util.HashMap) MetadataDAO(com.netflix.conductor.dao.MetadataDAO) Task(com.netflix.conductor.common.metadata.tasks.Task) StringUtils(org.apache.commons.lang3.StringUtils) LinkedHashMap(java.util.LinkedHashMap) Inject(javax.inject.Inject) SUB_WORKFLOW(com.netflix.conductor.common.metadata.workflow.TaskType.SUB_WORKFLOW) COMPLETED_WITH_ERRORS(com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED_WITH_ERRORS) ExternalPayloadStorageUtils(com.netflix.conductor.core.utils.ExternalPayloadStorageUtils) Workflow(com.netflix.conductor.common.run.Workflow) IN_PROGRESS(com.netflix.conductor.common.metadata.tasks.Task.Status.IN_PROGRESS) Map(java.util.Map) SKIPPED(com.netflix.conductor.common.metadata.tasks.Task.Status.SKIPPED) Operation(com.netflix.conductor.common.utils.ExternalPayloadStorage.Operation) Named(javax.inject.Named) LinkedList(java.util.LinkedList) Nullable(javax.annotation.Nullable) TaskDef(com.netflix.conductor.common.metadata.tasks.TaskDef) Logger(org.slf4j.Logger) WorkflowStatus(com.netflix.conductor.common.run.Workflow.WorkflowStatus) Predicate(java.util.function.Predicate) WorkflowDef(com.netflix.conductor.common.metadata.workflow.WorkflowDef) Set(java.util.Set) Collectors(java.util.stream.Collectors) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) SCHEDULED(com.netflix.conductor.common.metadata.tasks.Task.Status.SCHEDULED) Monitors(com.netflix.conductor.metrics.Monitors) TERMINATE(com.netflix.conductor.common.metadata.workflow.TaskType.TERMINATE) List(java.util.List) TIMED_OUT(com.netflix.conductor.common.metadata.tasks.Task.Status.TIMED_OUT) Optional(java.util.Optional) TaskType(com.netflix.conductor.common.metadata.workflow.TaskType) VisibleForTesting(com.google.common.annotations.VisibleForTesting) Configuration(com.netflix.conductor.core.config.Configuration) PayloadType(com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType) Collections(java.util.Collections)
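For context, the snippet above only schedules anything if the WorkflowDef carries at least one WorkflowTask. Below is a minimal sketch of how such a definition might be built programmatically; the workflow and task names are illustrative assumptions, not taken from the Conductor sources.

import java.util.Collections;

import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;

public class MinimalWorkflowDefSketch {

    static WorkflowDef buildGreetingWorkflow() {
        // A single SIMPLE task; DeciderService.startWorkflow() would schedule it first
        WorkflowTask greet = new WorkflowTask();
        greet.setName("greet_task");
        greet.setTaskReferenceName("greet_task_ref");
        greet.setInputParameters(Collections.<String, Object>singletonMap("name", "${workflow.input.name}"));

        WorkflowDef def = new WorkflowDef();
        def.setName("greeting_workflow");
        def.setVersion(1);
        def.setSchemaVersion(2);
        def.setTasks(Collections.singletonList(greet));
        return def;
    }
}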

Example 57 with WorkflowTask

Use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.

The class DecisionTaskMapper, method getMappedTasks.

/**
 * This method gets the list of tasks that need to be scheduled when the task to be scheduled is of type {@link TaskType#DECISION}.
 *
 * @param taskMapperContext A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId
 * @return List of tasks in the following order:
 * <ul>
 * <li>
 * {@link SystemTaskType#DECISION} with {@link Task.Status#IN_PROGRESS}
 * </li>
 * <li>
 * The list of tasks selected by evaluating {@link WorkflowTask#getCaseExpression()}
 * </li>
 * <li>
 * If the evaluation of {@link WorkflowTask#getCaseExpression()} yields no matching case, the {@link WorkflowTask#getDefaultCase()}
 * tasks are scheduled instead
 * </li>
 * </ul>
 */
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) {
    logger.debug("TaskMapperContext {} in DecisionTaskMapper", taskMapperContext);
    List<Task> tasksToBeScheduled = new LinkedList<>();
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    Map<String, Object> taskInput = taskMapperContext.getTaskInput();
    int retryCount = taskMapperContext.getRetryCount();
    String taskId = taskMapperContext.getTaskId();
    // get the expression to be evaluated
    String caseValue = getEvaluatedCaseValue(taskToSchedule, taskInput);
    // The evaluated case value is recorded both as the task input ("case") and the task output ("caseOutput")
    Task decisionTask = new Task();
    decisionTask.setTaskType(SystemTaskType.DECISION.name());
    decisionTask.setTaskDefName(SystemTaskType.DECISION.name());
    decisionTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    decisionTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    decisionTask.setWorkflowType(workflowInstance.getWorkflowName());
    decisionTask.setCorrelationId(workflowInstance.getCorrelationId());
    decisionTask.setScheduledTime(System.currentTimeMillis());
    decisionTask.getInputData().put("case", caseValue);
    decisionTask.getOutputData().put("caseOutput", Collections.singletonList(caseValue));
    decisionTask.setTaskId(taskId);
    decisionTask.setStartTime(System.currentTimeMillis());
    decisionTask.setStatus(Task.Status.IN_PROGRESS);
    decisionTask.setWorkflowTask(taskToSchedule);
    decisionTask.setWorkflowPriority(workflowInstance.getPriority());
    tasksToBeScheduled.add(decisionTask);
    // get the list of tasks based on the decision
    List<WorkflowTask> selectedTasks = taskToSchedule.getDecisionCases().get(caseValue);
    // if the tasks returned are empty based on evaluated case value, then get the default case if there is one
    if (selectedTasks == null || selectedTasks.isEmpty()) {
        selectedTasks = taskToSchedule.getDefaultCase();
    }
    // Once tasks have been selected as part of the decision, get the next task to be scheduled by using the decider service
    if (selectedTasks != null && !selectedTasks.isEmpty()) {
        // Schedule the first task to be executed...
        WorkflowTask selectedTask = selectedTasks.get(0);
        // TODO break out this recursive call using function composition of what needs to be done and then walk back the condition tree
        List<Task> caseTasks = taskMapperContext.getDeciderService().getTasksToBeScheduled(workflowInstance, selectedTask, retryCount, taskMapperContext.getRetryTaskId());
        tasksToBeScheduled.addAll(caseTasks);
        decisionTask.getInputData().put("hasChildren", "true");
    }
    return tasksToBeScheduled;
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) Workflow(com.netflix.conductor.common.run.Workflow) LinkedList(java.util.LinkedList)
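For reference, here is a rough sketch of the kind of DECISION WorkflowTask this mapper consumes, built programmatically. The task names, the case value "MANUAL" and the "approvalType" parameter are illustrative assumptions, not taken from the Conductor sources.

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.netflix.conductor.common.metadata.workflow.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;

public class DecisionTaskSketch {

    static WorkflowTask buildDecisionTask() {
        // Branch taken when the evaluated case value is "MANUAL"
        WorkflowTask manualApproval = new WorkflowTask();
        manualApproval.setName("manual_approval");
        manualApproval.setTaskReferenceName("manual_approval_ref");

        // Branch taken when no case matches
        WorkflowTask autoApprove = new WorkflowTask();
        autoApprove.setName("auto_approve");
        autoApprove.setTaskReferenceName("auto_approve_ref");

        WorkflowTask decision = new WorkflowTask();
        decision.setName("approval_decision");
        decision.setTaskReferenceName("approval_decision_ref");
        decision.setType(TaskType.DECISION.name());
        // The mapper evaluates this input parameter (or a case expression) to pick a branch
        decision.setCaseValueParam("approvalType");
        decision.setInputParameters(Collections.<String, Object>singletonMap("approvalType", "${workflow.input.approvalType}"));

        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
        decisionCases.put("MANUAL", Collections.singletonList(manualApproval));
        decision.setDecisionCases(decisionCases);
        decision.setDefaultCase(Collections.singletonList(autoApprove));
        return decision;
    }
}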

Example 58 with WorkflowTask

Use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.

The class DoWhileTaskMapper, method getMappedTasks.

/**
 * This {@link TaskMapper} implementation maps a {@link WorkflowTask} of type {@link TaskType#DO_WHILE} to a {@link Task} of type {@link SystemTaskType#DO_WHILE}
 * with a status of {@link Task.Status#IN_PROGRESS}.
 *
 * @param taskMapperContext A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId
 * @return A {@link List} containing the {@link SystemTaskType#DO_WHILE} {@link Task}, followed by the tasks scheduled for the first iteration of the loop
 */
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) {
    logger.debug("TaskMapperContext {} in DoWhileTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    Task task = workflowInstance.getTaskByRefName(taskToSchedule.getTaskReferenceName());
    if (task != null && task.getStatus().isTerminal()) {
        // Since loopTask is already completed no need to schedule task again.
        return Collections.emptyList();
    }
    String taskId = taskMapperContext.getTaskId();
    List<Task> tasksToBeScheduled = new ArrayList<>();
    int retryCount = taskMapperContext.getRetryCount();
    TaskDef taskDefinition = Optional.ofNullable(taskMapperContext.getTaskDefinition()).orElseGet(() -> Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())).orElseGet(TaskDef::new));
    Task loopTask = new Task();
    loopTask.setTaskType(SystemTaskType.DO_WHILE.name());
    loopTask.setTaskDefName(taskToSchedule.getName());
    loopTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    loopTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    loopTask.setCorrelationId(workflowInstance.getCorrelationId());
    loopTask.setWorkflowType(workflowInstance.getWorkflowName());
    loopTask.setScheduledTime(System.currentTimeMillis());
    loopTask.setTaskId(taskId);
    loopTask.setIteration(1);
    loopTask.setStatus(Task.Status.IN_PROGRESS);
    loopTask.setWorkflowTask(taskToSchedule);
    loopTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
    loopTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds());
    tasksToBeScheduled.add(loopTask);
    List<WorkflowTask> loopOverTasks = taskToSchedule.getLoopOver();
    List<Task> tasks2 = taskMapperContext.getDeciderService().getTasksToBeScheduled(workflowInstance, loopOverTasks.get(0), retryCount);
    tasks2.forEach(t -> {
        t.setReferenceTaskName(TaskUtils.appendIteration(t.getReferenceTaskName(), loopTask.getIteration()));
        t.setIteration(loopTask.getIteration());
    });
    tasksToBeScheduled.addAll(tasks2);
    return tasksToBeScheduled;
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) TaskDef(com.netflix.conductor.common.metadata.tasks.TaskDef) ArrayList(java.util.ArrayList) Workflow(com.netflix.conductor.common.run.Workflow)
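For reference, a rough sketch of a DO_WHILE WorkflowTask that this mapper would turn into the loop task plus its first iteration. The task names and the loop condition string are illustrative assumptions rather than anything from the Conductor sources.

import java.util.Collections;

import com.netflix.conductor.common.metadata.workflow.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;

public class DoWhileTaskSketch {

    static WorkflowTask buildDoWhileTask() {
        // The task executed on every iteration of the loop
        WorkflowTask fetchPage = new WorkflowTask();
        fetchPage.setName("fetch_page");
        fetchPage.setTaskReferenceName("fetch_page_ref");

        WorkflowTask doWhile = new WorkflowTask();
        doWhile.setName("fetch_all_pages");
        doWhile.setTaskReferenceName("fetch_all_pages_ref");
        doWhile.setType(TaskType.DO_WHILE.name());
        // Condition evaluated after each iteration; looping continues while it returns true
        doWhile.setLoopCondition("if ($.fetch_all_pages_ref['iteration'] < 3) { true; } else { false; }");
        doWhile.setLoopOver(Collections.singletonList(fetchPage));
        return doWhile;
    }
}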

Example 59 with WorkflowTask

Use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.

The class DynamicTaskMapper, method getMappedTasks.

/**
 * This method maps a dynamic task to a {@link Task} based on the input params.
 *
 * @param taskMapperContext A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId
 * @return A {@link List} that contains a single {@link Task} with status {@link Task.Status#SCHEDULED}
 */
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException {
    logger.debug("TaskMapperContext {} in DynamicTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Map<String, Object> taskInput = taskMapperContext.getTaskInput();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    int retryCount = taskMapperContext.getRetryCount();
    String retriedTaskId = taskMapperContext.getRetryTaskId();
    String taskNameParam = taskToSchedule.getDynamicTaskNameParam();
    String taskName = getDynamicTaskName(taskInput, taskNameParam);
    taskToSchedule.setName(taskName);
    TaskDef taskDefinition = getDynamicTaskDefinition(taskToSchedule);
    taskToSchedule.setTaskDefinition(taskDefinition);
    Map<String, Object> input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflowInstance, taskDefinition, taskMapperContext.getTaskId());
    Task dynamicTask = new Task();
    dynamicTask.setStartDelayInSeconds(taskToSchedule.getStartDelay());
    dynamicTask.setTaskId(taskMapperContext.getTaskId());
    dynamicTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    dynamicTask.setInputData(input);
    dynamicTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    dynamicTask.setWorkflowType(workflowInstance.getWorkflowName());
    dynamicTask.setStatus(Task.Status.SCHEDULED);
    dynamicTask.setTaskType(taskToSchedule.getType());
    dynamicTask.setTaskDefName(taskToSchedule.getName());
    dynamicTask.setCorrelationId(workflowInstance.getCorrelationId());
    dynamicTask.setScheduledTime(System.currentTimeMillis());
    dynamicTask.setRetryCount(retryCount);
    dynamicTask.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
    dynamicTask.setResponseTimeoutSeconds(taskDefinition.getResponseTimeoutSeconds());
    dynamicTask.setWorkflowTask(taskToSchedule);
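    // Override the generic DYNAMIC task type set above with the resolved task name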
    dynamicTask.setTaskType(taskName);
    dynamicTask.setRetriedTaskId(retriedTaskId);
    dynamicTask.setWorkflowPriority(workflowInstance.getPriority());
    return Collections.singletonList(dynamicTask);
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) TaskDef(com.netflix.conductor.common.metadata.tasks.TaskDef) Workflow(com.netflix.conductor.common.run.Workflow)
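For reference, a rough sketch of a DYNAMIC WorkflowTask as this mapper expects it: dynamicTaskNameParam names the input parameter whose value supplies the concrete task to run at runtime. The "taskToExecute" parameter and the task names are illustrative assumptions.

import java.util.HashMap;
import java.util.Map;

import com.netflix.conductor.common.metadata.workflow.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;

public class DynamicTaskSketch {

    static WorkflowTask buildDynamicTask() {
        WorkflowTask dynamic = new WorkflowTask();
        dynamic.setName("dynamic_placeholder");
        dynamic.setTaskReferenceName("dynamic_ref");
        dynamic.setType(TaskType.DYNAMIC.name());
        // Name of the input parameter that carries the task name to execute at runtime
        dynamic.setDynamicTaskNameParam("taskToExecute");

        Map<String, Object> input = new HashMap<>();
        input.put("taskToExecute", "${workflow.input.taskToExecute}");
        dynamic.setInputParameters(input);
        return dynamic;
    }
}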

Example 60 with WorkflowTask

Use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.

The class ForkJoinDynamicTaskMapper, method getMappedTasks.

/**
 * This method gets the list of tasks that need to be scheduled when the task to be scheduled is of type {@link TaskType#FORK_JOIN_DYNAMIC}.
 * Creates a Fork Task, followed by the Dynamic tasks and a final JOIN task.
 * <p>The definitions of the dynamic forks that need to be scheduled are available in the {@link WorkflowTask#getInputParameters()},
 * which are accessed using the {@link TaskMapperContext#getTaskToSchedule()}. The dynamic fork task definitions are referenced by a key,
 * either {@link WorkflowTask#getDynamicForkTasksParam()} or {@link WorkflowTask#getDynamicForkJoinTasksParam()}.
 * </p>
 * When creating the list of tasks to be scheduled, a set of preconditions is validated:
 * <ul>
 * <li>If the input parameter representing the dynamic fork tasks is available via {@link WorkflowTask#getDynamicForkTasksParam()}, then
 * the input for the dynamic tasks is validated to be a map by using {@link WorkflowTask#getDynamicForkTasksInputParamName()}</li>
 * <li>If the input parameter representing the dynamic fork tasks is available via {@link WorkflowTask#getDynamicForkJoinTasksParam()}, then
 * the input for the dynamic tasks is available in the payload of the task definition.
 * </li>
 * <li>A check is performed that the next task in the {@link WorkflowDef} is a {@link TaskType#JOIN}</li>
 * </ul>
 *
 * @param taskMapperContext A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId
 * @throws TerminateWorkflowException In case of:
 *                                    <ul>
 *                                    <li>
 *                                    The task after {@link TaskType#FORK_JOIN_DYNAMIC} not being a {@link TaskType#JOIN}
 *                                    </li>
 *                                    <li>
 *                                    The input parameters for the dynamic tasks not being of type {@link Map}
 *                                    </li>
 *                                    </ul>
 * @return List of tasks in the following order:
 * <ul>
 * <li>
 * {@link SystemTaskType#FORK} with {@link Task.Status#COMPLETED}
 * </li>
 * <li>
 * The forked tasks themselves, which may be of any type but in most cases are user-defined tasks with {@link Task.Status#SCHEDULED}
 * </li>
 * <li>
 * {@link SystemTaskType#JOIN} with {@link Task.Status#IN_PROGRESS}
 * </li>
 * </ul>
 */
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException {
    logger.debug("TaskMapperContext {} in ForkJoinDynamicTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    String taskId = taskMapperContext.getTaskId();
    int retryCount = taskMapperContext.getRetryCount();
    List<Task> mappedTasks = new LinkedList<>();
    // Get the list of dynamic tasks and the input for the tasks
    Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> workflowTasksAndInputPair = Optional.ofNullable(taskToSchedule.getDynamicForkTasksParam()).map(dynamicForkTaskParam -> getDynamicForkTasksAndInput(taskToSchedule, workflowInstance, dynamicForkTaskParam)).orElseGet(() -> getDynamicForkJoinTasksAndInput(taskToSchedule, workflowInstance));
    List<WorkflowTask> dynForkTasks = workflowTasksAndInputPair.getLeft();
    Map<String, Map<String, Object>> tasksInput = workflowTasksAndInputPair.getRight();
    // Create Fork Task which needs to be followed by the dynamic tasks
    Task forkDynamicTask = createDynamicForkTask(taskToSchedule, workflowInstance, taskId, dynForkTasks);
    mappedTasks.add(forkDynamicTask);
    List<String> joinOnTaskRefs = new LinkedList<>();
    // Schedule each dynamic fork task, tracking the last task of each fork so the JOIN task can wait on it
    for (WorkflowTask dynForkTask : dynForkTasks) {
        // TODO this is a cyclic dependency, break it out using function composition
        List<Task> forkedTasks = taskMapperContext.getDeciderService().getTasksToBeScheduled(workflowInstance, dynForkTask, retryCount);
        // getTasksToBeScheduled returns an empty list if a task with the same reference name has already been created in the Workflow
        if (forkedTasks == null || forkedTasks.isEmpty()) {
            Optional<String> existingTaskRefName = workflowInstance.getTasks().stream().filter(runningTask -> runningTask.getStatus().equals(Task.Status.IN_PROGRESS) || runningTask.getStatus().isTerminal()).map(Task::getReferenceTaskName).filter(refTaskName -> refTaskName.equals(dynForkTask.getTaskReferenceName())).findAny();
            // Construct an informative error message
            String terminateMessage = "No dynamic tasks could be created for the Workflow: " + workflowInstance.toShortString() + ", Dynamic Fork Task: " + dynForkTask;
            if (existingTaskRefName.isPresent()) {
                terminateMessage += "Attempted to create a duplicate task reference name: " + existingTaskRefName.get();
            }
            throw new TerminateWorkflowException(terminateMessage);
        }
        for (Task forkedTask : forkedTasks) {
            Map<String, Object> forkedTaskInput = tasksInput.get(forkedTask.getReferenceTaskName());
            forkedTask.getInputData().putAll(forkedTaskInput);
        }
        mappedTasks.addAll(forkedTasks);
        // Get the last of the dynamic tasks so that the join can be performed once this task is done
        Task last = forkedTasks.get(forkedTasks.size() - 1);
        joinOnTaskRefs.add(last.getReferenceTaskName());
    }
    // From the workflow definition get the next task and make sure that it is a JOIN task.
    // The dynamic fork tasks need to be followed by a join task
    WorkflowTask joinWorkflowTask = workflowInstance.getWorkflowDefinition().getNextTask(taskToSchedule.getTaskReferenceName());
    if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(TaskType.JOIN.name())) {
        throw new TerminateWorkflowException("Dynamic join definition is not followed by a join task.  Check the blueprint");
    }
    // Create Join task
    HashMap<String, Object> joinInput = new HashMap<>();
    joinInput.put("joinOn", joinOnTaskRefs);
    Task joinTask = createJoinTask(workflowInstance, joinWorkflowTask, joinInput);
    mappedTasks.add(joinTask);
    return mappedTasks;
}
Also used : IDGenerator(com.netflix.conductor.core.utils.IDGenerator) LoggerFactory(org.slf4j.LoggerFactory) HashMap(java.util.HashMap) MetadataDAO(com.netflix.conductor.dao.MetadataDAO) Task(com.netflix.conductor.common.metadata.tasks.Task) StringUtils(org.apache.commons.lang3.StringUtils) TerminateWorkflowException(com.netflix.conductor.core.execution.TerminateWorkflowException) ArrayList(java.util.ArrayList) ParametersUtils(com.netflix.conductor.core.execution.ParametersUtils) Pair(org.apache.commons.lang3.tuple.Pair) Workflow(com.netflix.conductor.common.run.Workflow) Map(java.util.Map) LinkedList(java.util.LinkedList) TypeReference(com.fasterxml.jackson.core.type.TypeReference) Logger(org.slf4j.Logger) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) WorkflowDef(com.netflix.conductor.common.metadata.workflow.WorkflowDef) Collectors(java.util.stream.Collectors) ImmutablePair(org.apache.commons.lang3.tuple.ImmutablePair) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) List(java.util.List) DynamicForkJoinTaskList(com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList) SystemTaskType(com.netflix.conductor.core.execution.SystemTaskType) Optional(java.util.Optional) TaskType(com.netflix.conductor.common.metadata.workflow.TaskType) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
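For reference, a rough sketch of the FORK_JOIN_DYNAMIC / JOIN pair this mapper expects to find in the WorkflowDef. The fork task's input parameters are expected to supply the forked task definitions and their inputs at runtime; the parameter names, task names and the upstream "prepare_fork_ref" task are illustrative assumptions.

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import com.netflix.conductor.common.metadata.workflow.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;

public class DynamicForkJoinSketch {

    static WorkflowDef buildDynamicForkWorkflow() {
        WorkflowTask dynamicFork = new WorkflowTask();
        dynamicFork.setName("dynamic_fork");
        dynamicFork.setTaskReferenceName("dynamic_fork_ref");
        dynamicFork.setType(TaskType.FORK_JOIN_DYNAMIC.name());
        // Input parameter names holding the list of forked WorkflowTasks and their per-task inputs
        dynamicFork.setDynamicForkTasksParam("dynamicTasks");
        dynamicFork.setDynamicForkTasksInputParamName("dynamicTasksInput");

        // "prepare_fork_ref" is a hypothetical upstream task that produces the fork definitions
        Map<String, Object> forkInput = new HashMap<>();
        forkInput.put("dynamicTasks", "${prepare_fork_ref.output.dynamicTasks}");
        forkInput.put("dynamicTasksInput", "${prepare_fork_ref.output.dynamicTasksInput}");
        dynamicFork.setInputParameters(forkInput);

        // The mapper requires the task immediately after the dynamic fork to be a JOIN
        WorkflowTask join = new WorkflowTask();
        join.setName("dynamic_join");
        join.setTaskReferenceName("dynamic_join_ref");
        join.setType(TaskType.JOIN.name());

        WorkflowDef def = new WorkflowDef();
        def.setName("dynamic_fork_workflow");
        def.setVersion(1);
        def.setSchemaVersion(2);
        def.setTasks(Arrays.asList(dynamicFork, join));
        return def;
    }
}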

Aggregations

WorkflowTask (com.netflix.conductor.common.metadata.workflow.WorkflowTask): 244
Test (org.junit.Test): 166
WorkflowDef (com.netflix.conductor.common.metadata.workflow.WorkflowDef): 148
TaskDef (com.netflix.conductor.common.metadata.tasks.TaskDef): 106
Workflow (com.netflix.conductor.common.run.Workflow): 106
HashMap (java.util.HashMap): 103
Task (com.netflix.conductor.common.metadata.tasks.Task): 98
ArrayList (java.util.ArrayList): 52
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString): 44
ConstraintViolation (javax.validation.ConstraintViolation): 41
LinkedList (java.util.LinkedList): 40
List (java.util.List): 36
SubWorkflow (com.netflix.conductor.core.execution.tasks.SubWorkflow): 32
Map (java.util.Map): 26
SubWorkflowParams (com.netflix.conductor.common.metadata.workflow.SubWorkflowParams): 25
DynamicForkJoinTaskList (com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList): 18
WorkflowSystemTask (com.netflix.conductor.core.execution.tasks.WorkflowSystemTask): 14
UserTask (com.netflix.conductor.tests.utils.UserTask): 14
ValidatorFactory (javax.validation.ValidatorFactory): 13
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 11