Use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix: class ForkJoinDynamicTaskMapperTest, method getMappedTasksException.
@Test
public void getMappedTasksException() {
    WorkflowDef def = new WorkflowDef();
    def.setName("DYNAMIC_FORK_JOIN_WF");
    def.setDescription(def.getName());
    def.setVersion(1);
    def.setInputParameters(Arrays.asList("param1", "param2"));
    Workflow workflowInstance = new Workflow();
    workflowInstance.setWorkflowDefinition(def);
    WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
    dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
    dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
    dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
    dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
    dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks");
    dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
    WorkflowTask join = new WorkflowTask();
    join.setType(TaskType.JOIN.name());
    join.setTaskReferenceName("dynamictask_join");
    // Only the dynamic fork is added to the definition; the JOIN is created but
    // deliberately never added, which is what makes the mapping fail below.
    def.getTasks().add(dynamicForkJoinToSchedule);
    Map<String, Object> input1 = new HashMap<>();
    input1.put("k1", "v1");
    WorkflowTask wt2 = new WorkflowTask();
    wt2.setName("junit_task_2");
    wt2.setTaskReferenceName("xdt1");
    Map<String, Object> input2 = new HashMap<>();
    input2.put("k2", "v2");
    WorkflowTask wt3 = new WorkflowTask();
    wt3.setName("junit_task_3");
    wt3.setTaskReferenceName("xdt2");
    HashMap<String, Object> dynamicTasksInput = new HashMap<>();
    dynamicTasksInput.put("xdt1", input1);
    dynamicTasksInput.put("xdt2", input2);
    dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
    dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);
    // when
    when(parametersUtils.getTaskInput(anyMap(), any(Workflow.class), any(), any())).thenReturn(dynamicTasksInput);
    when(objectMapper.convertValue(any(), any(TypeReference.class))).thenReturn(Arrays.asList(wt2, wt3));
    Task simpleTask1 = new Task();
    simpleTask1.setReferenceTaskName("xdt1");
    Task simpleTask2 = new Task();
    simpleTask2.setReferenceTaskName("xdt2");
    when(deciderService.getTasksToBeScheduled(workflowInstance, wt2, 0)).thenReturn(Arrays.asList(simpleTask1));
    when(deciderService.getTasksToBeScheduled(workflowInstance, wt3, 0)).thenReturn(Arrays.asList(simpleTask2));
    String taskId = IDGenerator.generate();
    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
            .withWorkflowDefinition(def)
            .withWorkflowInstance(workflowInstance)
            .withTaskToSchedule(dynamicForkJoinToSchedule)
            .withRetryCount(0)
            .withTaskId(taskId)
            .withDeciderService(deciderService)
            .build();
    // then: with no JOIN task following the dynamic fork in the definition,
    // mapping must terminate the workflow
    expectedException.expect(TerminateWorkflowException.class);
    forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext);
}
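The method above references several fields that are not shown in the snippet: parametersUtils, objectMapper, deciderService, forkJoinDynamicTaskMapper and the expectedException rule are initialized in the test class itself. The following is a minimal, assumed sketch of that fixture; it is not part of the aggregated source, imports are omitted to match the snippets above, and the ForkJoinDynamicTaskMapper constructor arguments are left as a comment because they differ between Conductor versions.

// Assumed fixture sketch (not from the aggregated source): mocks for the collaborators
// referenced in the test body plus the JUnit 4 ExpectedException rule.
public class ForkJoinDynamicTaskMapperTest {

    private ParametersUtils parametersUtils;
    private ObjectMapper objectMapper;
    private DeciderService deciderService;
    private ForkJoinDynamicTaskMapper forkJoinDynamicTaskMapper;

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    @Before
    public void setUp() {
        parametersUtils = Mockito.mock(ParametersUtils.class);
        objectMapper = Mockito.mock(ObjectMapper.class);
        deciderService = Mockito.mock(DeciderService.class);
        // forkJoinDynamicTaskMapper = new ForkJoinDynamicTaskMapper(...);
        // The constructor parameters vary between Conductor versions, so the real
        // instantiation is left to the version of the project under test.
    }
}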
Use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix: class ForkJoinDynamicTaskMapperTest, method getMappedTasks.
@Test
public void getMappedTasks() {
    WorkflowDef def = new WorkflowDef();
    def.setName("DYNAMIC_FORK_JOIN_WF");
    def.setDescription(def.getName());
    def.setVersion(1);
    def.setInputParameters(Arrays.asList("param1", "param2"));
    Workflow workflowInstance = new Workflow();
    workflowInstance.setWorkflowDefinition(def);
    WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
    dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
    dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
    dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks");
    dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput");
    dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks");
    dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput");
    WorkflowTask join = new WorkflowTask();
    join.setType(TaskType.JOIN.name());
    join.setTaskReferenceName("dynamictask_join");
    def.getTasks().add(dynamicForkJoinToSchedule);
    def.getTasks().add(join);
    Map<String, Object> input1 = new HashMap<>();
    input1.put("k1", "v1");
    WorkflowTask wt2 = new WorkflowTask();
    wt2.setName("junit_task_2");
    wt2.setTaskReferenceName("xdt1");
    Map<String, Object> input2 = new HashMap<>();
    input2.put("k2", "v2");
    WorkflowTask wt3 = new WorkflowTask();
    wt3.setName("junit_task_3");
    wt3.setTaskReferenceName("xdt2");
    HashMap<String, Object> dynamicTasksInput = new HashMap<>();
    dynamicTasksInput.put("xdt1", input1);
    dynamicTasksInput.put("xdt2", input2);
    dynamicTasksInput.put("dynamicTasks", Arrays.asList(wt2, wt3));
    dynamicTasksInput.put("dynamicTasksInput", dynamicTasksInput);
    // when
    when(parametersUtils.getTaskInput(anyMap(), any(Workflow.class), any(), any())).thenReturn(dynamicTasksInput);
    when(objectMapper.convertValue(any(), any(TypeReference.class))).thenReturn(Arrays.asList(wt2, wt3));
    Task simpleTask1 = new Task();
    simpleTask1.setReferenceTaskName("xdt1");
    Task simpleTask2 = new Task();
    simpleTask2.setReferenceTaskName("xdt2");
    when(deciderService.getTasksToBeScheduled(workflowInstance, wt2, 0)).thenReturn(Arrays.asList(simpleTask1));
    when(deciderService.getTasksToBeScheduled(workflowInstance, wt3, 0)).thenReturn(Arrays.asList(simpleTask2));
    String taskId = IDGenerator.generate();
    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
            .withWorkflowDefinition(def)
            .withWorkflowInstance(workflowInstance)
            .withTaskToSchedule(dynamicForkJoinToSchedule)
            .withRetryCount(0)
            .withTaskId(taskId)
            .withDeciderService(deciderService)
            .build();
    // then
    List<Task> mappedTasks = forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext);
    assertEquals(4, mappedTasks.size());
    assertEquals(SystemTaskType.FORK.name(), mappedTasks.get(0).getTaskType());
    assertEquals(SystemTaskType.JOIN.name(), mappedTasks.get(3).getTaskType());
    List<String> joinTaskNames = (List<String>) mappedTasks.get(3).getInputData().get("joinOn");
    assertEquals("xdt1, xdt2", joinTaskNames.stream().collect(Collectors.joining(", ")));
}
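For readers tracing the ordering of the four mapped tasks, the following hedged, illustrative assertions spell out what the fixture above implies. They are not part of the original test: the reference names at indices 1 and 2 come directly from the stubbed decider results, while the FORK and JOIN names at indices 0 and 3 are assumptions based on the reference names configured in the workflow definition.

// Illustrative only: expected scheduling order of the mapped tasks (assumed reference names).
assertEquals("dynamicfanouttask", mappedTasks.get(0).getReferenceTaskName());
assertEquals("xdt1", mappedTasks.get(1).getReferenceTaskName());
assertEquals("xdt2", mappedTasks.get(2).getReferenceTaskName());
assertEquals("dynamictask_join", mappedTasks.get(3).getReferenceTaskName());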
Use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix: class HTTPTaskMapperTest, method getMappedTasks.
@Test
public void getMappedTasks() {
    // Given
    WorkflowTask taskToSchedule = new WorkflowTask();
    taskToSchedule.setName("http_task");
    taskToSchedule.setType(TaskType.HTTP.name());
    taskToSchedule.setTaskDefinition(new TaskDef("http_task"));
    String taskId = IDGenerator.generate();
    String retriedTaskId = IDGenerator.generate();
    Workflow workflow = new Workflow();
    WorkflowDef workflowDef = new WorkflowDef();
    workflow.setWorkflowDefinition(workflowDef);
    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
            .withWorkflowDefinition(workflowDef)
            .withWorkflowInstance(workflow)
            .withTaskDefinition(new TaskDef())
            .withTaskToSchedule(taskToSchedule)
            .withTaskInput(new HashMap<>())
            .withRetryCount(0)
            .withRetryTaskId(retriedTaskId)
            .withTaskId(taskId)
            .build();
    // When
    List<Task> mappedTasks = httpTaskMapper.getMappedTasks(taskMapperContext);
    // Then
    assertEquals(1, mappedTasks.size());
    assertEquals(TaskType.HTTP.name(), mappedTasks.get(0).getTaskType());
}
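The test passes an empty map as the task input. In a real workflow an HTTP task's input carries an http_request object; the sketch below shows what such an input could look like. The uri and method values are placeholders, and the snippet is only an illustration of how .withTaskInput(...) would be populated in a non-trivial case.

// Illustrative http_request input for an HTTP task (placeholder values).
Map<String, Object> httpRequest = new HashMap<>();
httpRequest.put("uri", "http://example.com/api/resource");
httpRequest.put("method", "GET");
Map<String, Object> taskInput = new HashMap<>();
taskInput.put("http_request", httpRequest);
// Pass it to the builder via .withTaskInput(taskInput) instead of new HashMap<>().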
Use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix: class TestWorkflowExecutor, method testTerminateWorkflow.
@Test
@SuppressWarnings("unchecked")
public void testTerminateWorkflow() {
    WorkflowDef def = new WorkflowDef();
    def.setName("test");
    Workflow workflow = new Workflow();
    workflow.setWorkflowDefinition(def);
    workflow.setWorkflowId("1");
    workflow.setStatus(Workflow.WorkflowStatus.RUNNING);
    workflow.setOwnerApp("junit_test");
    workflow.setStartTime(10L);
    workflow.setEndTime(100L);
    workflow.setOutput(Collections.EMPTY_MAP);
    when(executionDAOFacade.getWorkflowById(anyString(), anyBoolean())).thenReturn(workflow);
    AtomicInteger updateWorkflowCalledCounter = new AtomicInteger(0);
    doAnswer(invocation -> {
        updateWorkflowCalledCounter.incrementAndGet();
        return null;
    }).when(executionDAOFacade).updateWorkflow(any());
    AtomicInteger updateTasksCalledCounter = new AtomicInteger(0);
    doAnswer(invocation -> {
        updateTasksCalledCounter.incrementAndGet();
        return null;
    }).when(executionDAOFacade).updateTasks(any());
    AtomicInteger removeQueueEntryCalledCounter = new AtomicInteger(0);
    doAnswer(invocation -> {
        removeQueueEntryCalledCounter.incrementAndGet();
        return null;
    }).when(queueDAO).remove(anyString(), anyString());
    // Terminate the running workflow and check persistence, queue cleanup and listener callbacks
    workflowExecutor.terminateWorkflow("workflowId", "reason");
    assertEquals(Workflow.WorkflowStatus.TERMINATED, workflow.getStatus());
    assertEquals(1, updateWorkflowCalledCounter.get());
    assertEquals(1, removeQueueEntryCalledCounter.get());
    verify(workflowStatusListener, times(1)).onWorkflowTerminatedIfEnabled(any(Workflow.class));
    verify(workflowStatusListener, times(1)).onWorkflowFinalizedIfEnabled(any(Workflow.class));
    // Re-run with the status listener enabled and complete the workflow to exercise the completion callback
    def.setWorkflowStatusListenerEnabled(true);
    workflow.setStatus(Workflow.WorkflowStatus.RUNNING);
    workflowExecutor.completeWorkflow(workflow);
    verify(workflowStatusListener, times(1)).onWorkflowCompletedIfEnabled(any(Workflow.class));
    verify(workflowStatusListener, times(1)).onWorkflowFinalizedIfEnabled(any(Workflow.class));
}
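The AtomicInteger counters above count invocations by hand through doAnswer stubs. A hedged alternative, placed where the counter assertions sit (before completeWorkflow is called), would use Mockito verification directly and drop the counters; this assumes no other interactions with these mocks up to that point.

// Equivalent checks using Mockito verification instead of hand-rolled counters.
verify(executionDAOFacade, times(1)).updateWorkflow(any());
verify(queueDAO, times(1)).remove(anyString(), anyString());
// Note: updateTasksCalledCounter is incremented in the original test but never asserted.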
Use of com.netflix.conductor.common.run.Workflow in project conductor by Netflix: class TestWorkflowExecutor, method testRetryNonTerminalWorkflow.
@Test(expected = ApplicationException.class)
public void testRetryNonTerminalWorkflow() {
    Workflow workflow = new Workflow();
    workflow.setWorkflowId("testRetryNonTerminalWorkflow");
    workflow.setStatus(Workflow.WorkflowStatus.RUNNING);
    when(executionDAOFacade.getWorkflowById(anyString(), anyBoolean())).thenReturn(workflow);
    // Retrying a workflow that is still RUNNING is rejected with an ApplicationException
    workflowExecutor.retry(workflow.getWorkflowId(), false);
}
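The expected failure is declared through the @Test(expected = ...) attribute, which accepts the exception from anywhere in the method. If the project is on JUnit 4.13 or later, the same expectation can be scoped to the retry call with assertThrows; a hedged sketch:

// Same expectation, scoped to the retry call (requires JUnit 4.13+).
assertThrows(ApplicationException.class,
        () -> workflowExecutor.retry(workflow.getWorkflowId(), false));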