Use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
From the class ForkJoinDynamicTaskMapperTest, method getMappedTasks.
@Test
public void getMappedTasks() {
    WorkflowDef def = new WorkflowDef();
    def.setName("DYNAMIC_FORK_JOIN_WF");
    def.setDescription(def.getName());
    def.setVersion(1);
    def.setInputParameters(Arrays.asList("param1", "param2"));
    Workflow workflowInstance = new Workflow();
    workflowInstance.setWorkflowDefinition(def);

    // A FORK_JOIN_DYNAMIC task whose branches and branch inputs are resolved at runtime
    // from the "dynamicTasks" and "dynamicTasksInput" parameters.
    WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
    dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
    dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
    dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
    dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
    dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks");
    dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput");

    WorkflowTask join = new WorkflowTask();
    join.setType(TaskType.JOIN.name());
    join.setTaskReferenceName("dynamictask_join");

    def.getTasks().add(dynamicForkJoinToSchedule);
    def.getTasks().add(join);

    // Two dynamic fork branches, xdt1 and xdt2, each with its own input map.
    Map<String, Object> input1 = new HashMap<>();
    input1.put("k1", "v1");
    WorkflowTask wt2 = new WorkflowTask();
    wt2.setName("junit_task_2");
    wt2.setTaskReferenceName("xdt1");

    Map<String, Object> input2 = new HashMap<>();
    input2.put("k2", "v2");
    WorkflowTask wt3 = new WorkflowTask();
    wt3.setName("junit_task_3");
    wt3.setTaskReferenceName("xdt2");

    HashMap<String, Object> dynamicTasksInput = new HashMap<>();
    dynamicTasksInput.put("xdt1", input1);
    dynamicTasksInput.put("xdt2", input2);
    dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
    dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);

    // given: stub the collaborators that resolve the dynamic fork tasks and their inputs
    when(parametersUtils.getTaskInput(anyMap(), any(Workflow.class), any(), any())).thenReturn(dynamicTasksInput);
    when(objectMapper.convertValue(any(), any(TypeReference.class))).thenReturn(Arrays.asList(wt2, wt3));

    Task simpleTask1 = new Task();
    simpleTask1.setReferenceTaskName("xdt1");
    Task simpleTask2 = new Task();
    simpleTask2.setReferenceTaskName("xdt2");
    when(deciderService.getTasksToBeScheduled(workflowInstance, wt2, 0)).thenReturn(Arrays.asList(simpleTask1));
    when(deciderService.getTasksToBeScheduled(workflowInstance, wt3, 0)).thenReturn(Arrays.asList(simpleTask2));

    String taskId = IDGenerator.generate();
    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
            .withWorkflowDefinition(def)
            .withWorkflowInstance(workflowInstance)
            .withTaskToSchedule(dynamicForkJoinToSchedule)
            .withRetryCount(0)
            .withTaskId(taskId)
            .withDeciderService(deciderService)
            .build();

    // when
    List<Task> mappedTasks = forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext);

    // then: a FORK, the two forked tasks, and a JOIN are scheduled
    assertEquals(4, mappedTasks.size());
    assertEquals(SystemTaskType.FORK.name(), mappedTasks.get(0).getTaskType());
    assertEquals(SystemTaskType.JOIN.name(), mappedTasks.get(3).getTaskType());
    List<String> joinTaskNames = (List<String>) mappedTasks.get(3).getInputData().get("joinOn");
    assertEquals("xdt1, xdt2", joinTaskNames.stream().collect(Collectors.joining(", ")));
}
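For readers unfamiliar with the "joinOn" convention checked in the last two assertions, the following is a minimal, self-contained sketch, not taken from the Conductor tests: the class name JoinOnSketch is illustrative, and the branch names simply mirror those used above. It builds a stand-alone JOIN Task carrying a joinOn list and reproduces the same joined string.

import com.netflix.conductor.common.metadata.tasks.Task;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class JoinOnSketch {
    public static void main(String[] args) {
        // Stand-in for the JOIN task that the mapper schedules last.
        Task join = new Task();
        join.setTaskType("JOIN");
        join.setReferenceTaskName("dynamictask_join");
        // The mapper records the forked branches to wait on under the "joinOn" input key.
        join.getInputData().put("joinOn", Arrays.asList("xdt1", "xdt2"));

        @SuppressWarnings("unchecked")
        List<String> joinOn = (List<String>) join.getInputData().get("joinOn");
        // Prints "xdt1, xdt2", the same string asserted in the test above.
        System.out.println(joinOn.stream().collect(Collectors.joining(", ")));
    }
}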
Use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
From the class HTTPTaskMapperTest, method getMappedTasks.
@Test
public void getMappedTasks() {
    // given
    WorkflowTask taskToSchedule = new WorkflowTask();
    taskToSchedule.setName("http_task");
    taskToSchedule.setType(TaskType.HTTP.name());
    taskToSchedule.setTaskDefinition(new TaskDef("http_task"));
    String taskId = IDGenerator.generate();
    String retriedTaskId = IDGenerator.generate();
    Workflow workflow = new Workflow();
    WorkflowDef workflowDef = new WorkflowDef();
    workflow.setWorkflowDefinition(workflowDef);
    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
            .withWorkflowDefinition(workflowDef)
            .withWorkflowInstance(workflow)
            .withTaskDefinition(new TaskDef())
            .withTaskToSchedule(taskToSchedule)
            .withTaskInput(new HashMap<>())
            .withRetryCount(0)
            .withRetryTaskId(retriedTaskId)
            .withTaskId(taskId)
            .build();

    // when
    List<Task> mappedTasks = httpTaskMapper.getMappedTasks(taskMapperContext);

    // then
    assertEquals(1, mappedTasks.size());
    assertEquals(TaskType.HTTP.name(), mappedTasks.get(0).getTaskType());
}
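The test above only verifies that the mapper emits a single HTTP task; it never sets any request details. As a hedged illustration, an HTTP task in a real workflow definition usually carries its request under an "http_request" input parameter. The snippet below is a sketch built only from the WorkflowTask setters shown above; the endpoint, the method value, the reference name, and the HttpTaskConfigSketch class name are hypothetical, and the exact input keys expected by the HTTP system task should be confirmed against the Conductor documentation for the version in use.

import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import java.util.HashMap;
import java.util.Map;

public class HttpTaskConfigSketch {
    public static void main(String[] args) {
        // Hypothetical configuration of an HTTP task; not taken from the Conductor tests.
        WorkflowTask httpTask = new WorkflowTask();
        httpTask.setName("http_task");
        httpTask.setTaskReferenceName("http_task_ref");
        httpTask.setType("HTTP");

        Map<String, Object> httpRequest = new HashMap<>();
        httpRequest.put("uri", "http://example.com/api"); // illustrative endpoint
        httpRequest.put("method", "GET");                  // illustrative HTTP method
        // "http_request" is the conventional input key for the HTTP system task (assumption).
        httpTask.getInputParameters().put("http_request", httpRequest);

        System.out.println(httpTask.getInputParameters());
    }
}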
Use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
From the class TestWorkflowDef, method createWorkflowTask.
private WorkflowTask createWorkflowTask(String name) {
    WorkflowTask task = new WorkflowTask();
    task.setName(name);
    task.setTaskReferenceName(name);
    return task;
}
Use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
From the class TestWorkflowDef, method testGetNextTask_Decision.
@Test
public void testGetNextTask_Decision() {
    WorkflowDef def = new WorkflowDef();
    def.setName("test_workflow");
    def.setVersion(1);
    def.setSchemaVersion(2);
    def.getTasks().add(createWorkflowTask("simple_task_1"));
    def.getTasks().add(createWorkflowTask("simple_task_2"));

    WorkflowTask task3 = createWorkflowTask("decision_task_1");
    def.getTasks().add(task3);
    task3.setType(TaskType.DECISION.name());
    task3.getDecisionCases().put("Case1",
            Arrays.asList(createWorkflowTask("case_1_task_1"), createWorkflowTask("case_1_task_2")));
    task3.getDecisionCases().put("Case2",
            Arrays.asList(createWorkflowTask("case_2_task_1"), createWorkflowTask("case_2_task_2")));
    task3.getDecisionCases().put("Case3",
            Collections.singletonList(deciderTask("decision_task_2",
                    toMap("Case31", "case31_task_1", "case_31_task_2"),
                    Collections.singletonList("case3_def_task"))));
    def.getTasks().add(createWorkflowTask("simple_task_3"));

    // Assertions
    WorkflowTask next = def.getNextTask("simple_task_1");
    assertNotNull(next);
    assertEquals("simple_task_2", next.getTaskReferenceName());

    next = def.getNextTask("simple_task_2");
    assertNotNull(next);
    assertEquals(task3.getTaskReferenceName(), next.getTaskReferenceName());

    next = def.getNextTask("decision_task_1");
    assertNotNull(next);
    assertEquals("simple_task_3", next.getTaskReferenceName());

    next = def.getNextTask("case_1_task_1");
    assertNotNull(next);
    assertEquals("case_1_task_2", next.getTaskReferenceName());

    next = def.getNextTask("case_1_task_2");
    assertNotNull(next);
    assertEquals("simple_task_3", next.getTaskReferenceName());

    next = def.getNextTask("case3_def_task");
    assertNotNull(next);
    assertEquals("simple_task_3", next.getTaskReferenceName());

    next = def.getNextTask("case31_task_1");
    assertNotNull(next);
    assertEquals("case_31_task_2", next.getTaskReferenceName());
}
Use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
From the class CassandraDAOTest, method testTaskDefLimitCRUD.
@Test
public void testTaskDefLimitCRUD() {
    String taskDefName = "test_task_def";
    String taskId = IDGenerator.generate();

    // Task definition that allows at most one concurrent execution.
    TaskDef taskDef = new TaskDef();
    taskDef.setConcurrentExecLimit(1);
    WorkflowTask workflowTask = new WorkflowTask();
    workflowTask.setTaskDefinition(taskDef);

    Task task = new Task();
    task.setTaskDefName(taskDefName);
    task.setTaskId(taskId);
    task.setWorkflowInstanceId(IDGenerator.generate());
    task.setWorkflowTask(workflowTask);
    task.setTaskType("test_task");
    task.setWorkflowType("test_workflow");
    task.setStatus(Task.Status.SCHEDULED);

    Task newTask = new Task();
    newTask.setTaskDefName(taskDefName);
    newTask.setTaskId(IDGenerator.generate());
    newTask.setWorkflowInstanceId(IDGenerator.generate());
    newTask.setWorkflowTask(workflowTask);
    newTask.setTaskType("test_task");
    newTask.setWorkflowType("test_workflow");
    newTask.setStatus(Task.Status.SCHEDULED);

    // no tasks are IN_PROGRESS yet, so the limit is not exceeded
    executionDAO.updateTaskDefLimit(task, false);
    assertFalse(executionDAO.exceedsInProgressLimit(task));

    // set the first task to IN_PROGRESS
    task.setStatus(Task.Status.IN_PROGRESS);
    executionDAO.updateTaskDefLimit(task, false);

    // checking the same task again: it does not exceed its own limit
    assertFalse(executionDAO.exceedsInProgressLimit(task));

    // a second task for the same task definition now exceeds the limit of 1
    assertTrue(executionDAO.exceedsInProgressLimit(newTask));

    // complete the IN_PROGRESS task, freeing the slot
    task.setStatus(Task.Status.COMPLETED);
    executionDAO.updateTaskDefLimit(task, false);

    // the new task no longer exceeds the limit
    assertFalse(executionDAO.exceedsInProgressLimit(newTask));

    // set the new task to IN_PROGRESS
    newTask.setStatus(Task.Status.IN_PROGRESS);
    executionDAO.updateTaskDefLimit(newTask, false);

    // again, a task does not exceed the limit it itself occupies
    assertFalse(executionDAO.exceedsInProgressLimit(newTask));

    // force remove from the task definition limit records
    executionDAO.updateTaskDefLimit(newTask, true);
    assertFalse(executionDAO.exceedsInProgressLimit(task));
}
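The pattern exercised above, asking the DAO whether a task exceeds its in-progress limit and then recording the status change with updateTaskDefLimit, is what a scheduler-side caller would typically do before letting a task run. The class below is a hypothetical helper, not part of Conductor: it wraps the two calls used in the test behind functional interfaces (e.g. executionDAO::exceedsInProgressLimit and executionDAO::updateTaskDefLimit) so the sketch does not depend on any concrete DAO type.

import com.netflix.conductor.common.metadata.tasks.Task;
import java.util.function.BiConsumer;
import java.util.function.Predicate;

public class TaskLimitGuard {

    private final Predicate<Task> exceedsInProgressLimit;       // e.g. executionDAO::exceedsInProgressLimit
    private final BiConsumer<Task, Boolean> updateTaskDefLimit; // e.g. executionDAO::updateTaskDefLimit

    public TaskLimitGuard(Predicate<Task> exceedsInProgressLimit,
                          BiConsumer<Task, Boolean> updateTaskDefLimit) {
        this.exceedsInProgressLimit = exceedsInProgressLimit;
        this.updateTaskDefLimit = updateTaskDefLimit;
    }

    // Marks the task IN_PROGRESS and records it against its TaskDef limit,
    // but only if the limit (concurrentExecLimit = 1 in the test above) is not already exceeded.
    public boolean tryAcquireSlot(Task task) {
        if (exceedsInProgressLimit.test(task)) {
            return false;
        }
        task.setStatus(Task.Status.IN_PROGRESS);
        updateTaskDefLimit.accept(task, false);
        return true;
    }
}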