Use of com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList in project conductor by Netflix.
In the class AbstractProtoMapper, method fromProto:
public DynamicForkJoinTaskList fromProto(DynamicForkJoinTaskListPb.DynamicForkJoinTaskList from) {
    DynamicForkJoinTaskList to = new DynamicForkJoinTaskList();
    to.setDynamicTasks(from.getDynamicTasksList().stream()
            .map(this::fromProto)
            .collect(Collectors.toCollection(ArrayList::new)));
    return to;
}
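For context, the inverse mapping in the same mapper converts the domain object back into the protobuf message. The sketch below is not copied from the project; it assumes the usual protobuf-generated builder methods (newBuilder, addDynamicTasks, build) and an element-level toProto overload for DynamicForkJoinTask in the same mapper.

public DynamicForkJoinTaskListPb.DynamicForkJoinTaskList toProto(DynamicForkJoinTaskList from) {
    DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.Builder to =
            DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.newBuilder();
    for (DynamicForkJoinTask elem : from.getDynamicTasks()) {
        // assumes a toProto(DynamicForkJoinTask) overload exists in this mapper
        to.addDynamicTasks(toProto(elem));
    }
    return to.build();
}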
Use of com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList in project conductor by Netflix.
In the class AbstractWorkflowServiceTest, method testDynamicForkJoinLegacy:
@SuppressWarnings("unchecked")
@Test
public void testDynamicForkJoinLegacy() {
    try {
        createDynamicForkJoinWorkflowDefsLegacy(1);
    } catch (Exception e) {
    }
    Map<String, Object> input = new HashMap<String, Object>();
    String wfid = startOrLoadWorkflowExecution(DYNAMIC_FORK_JOIN_WF_LEGACY, 1, "dynfanouttest1", input, null, null);
    System.out.println("testDynamicForkJoinLegacy.wfid=" + wfid);

    Task t1 = workflowExecutionService.poll("junit_task_1", "test");
    // assertTrue(ess.ackTaskRecieved(t1.getTaskId(), "test"));
    DynamicForkJoinTaskList dynamicForkJoinTasks = new DynamicForkJoinTaskList();
    input = new HashMap<String, Object>();
    input.put("k1", "v1");
    dynamicForkJoinTasks.add("junit_task_2", null, "xdt1", input);
    HashMap<String, Object> input2 = new HashMap<String, Object>();
    input2.put("k2", "v2");
    dynamicForkJoinTasks.add("junit_task_3", null, "xdt2", input2);
    t1.getOutputData().put("dynamicTasks", dynamicForkJoinTasks);
    t1.setStatus(COMPLETED);
    workflowExecutionService.updateTask(t1);

    Task t2 = workflowExecutionService.poll("junit_task_2", "test");
    assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId()));
    assertEquals("xdt1", t2.getReferenceTaskName());
    assertTrue(t2.getInputData().containsKey("k1"));
    assertEquals("v1", t2.getInputData().get("k1"));
    Map<String, Object> output = new HashMap<String, Object>();
    output.put("ok1", "ov1");
    t2.setOutputData(output);
    t2.setStatus(COMPLETED);
    workflowExecutionService.updateTask(t2);

    Task t3 = workflowExecutionService.poll("junit_task_3", "test");
    assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId()));
    assertEquals("xdt2", t3.getReferenceTaskName());
    assertTrue(t3.getInputData().containsKey("k2"));
    assertEquals("v2", t3.getInputData().get("k2"));
    output = new HashMap<>();
    output.put("ok1", "ov1");
    t3.setOutputData(output);
    t3.setStatus(COMPLETED);
    workflowExecutionService.updateTask(t3);

    Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true);
    assertNotNull(wf);
    assertEquals(WorkflowStatus.COMPLETED, wf.getStatus());

    // Check the output
    Task joinTask = wf.getTaskByRefName("dynamicfanouttask_join");
    assertEquals("Found:" + joinTask.getOutputData(), 2, joinTask.getOutputData().keySet().size());
    Set<String> joinTaskOutput = joinTask.getOutputData().keySet();
    System.out.println("joinTaskOutput=" + joinTaskOutput);
    for (String key : joinTask.getOutputData().keySet()) {
        assertTrue(key.equals("xdt1") || key.equals("xdt2"));
        assertEquals("ov1", ((Map<String, Object>) joinTask.getOutputData().get(key)).get("ok1"));
    }
}
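Outside of a test, the same legacy payload is produced by a worker that puts a DynamicForkJoinTaskList under the key configured as the dynamicForkJoinTasksParam ("dynamicTasks" above). The sketch below is a hypothetical worker built on the conductor-client Worker interface; the class name DynamicFanOutWorker and the use of Collections.singletonMap are illustrative assumptions, not project code.

import com.netflix.conductor.client.worker.Worker;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList;
import java.util.Collections;

// Hypothetical worker that emits the legacy DynamicForkJoinTaskList payload
// consumed by a FORK_JOIN_DYNAMIC task, mirroring the test above.
public class DynamicFanOutWorker implements Worker {

    @Override
    public String getTaskDefName() {
        return "junit_task_1"; // the task polled before the dynamic fork
    }

    @Override
    public TaskResult execute(Task task) {
        DynamicForkJoinTaskList dynamicTasks = new DynamicForkJoinTaskList();
        // add(taskName, workflowName, referenceName, input); workflowName may be null for simple tasks
        dynamicTasks.add("junit_task_2", null, "xdt1", Collections.singletonMap("k1", "v1"));
        dynamicTasks.add("junit_task_3", null, "xdt2", Collections.singletonMap("k2", "v2"));

        TaskResult result = new TaskResult(task);
        result.getOutputData().put("dynamicTasks", dynamicTasks);
        result.setStatus(TaskResult.Status.COMPLETED);
        return result;
    }
}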
Use of com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList in project conductor by Netflix.
In the class ForkJoinDynamicTaskMapperTest, method getDynamicForkJoinTasksAndInputException:
@Test
public void getDynamicForkJoinTasksAndInputException() {
    // Given
    WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
    dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
    dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
    dynamicForkJoinToSchedule.setDynamicForkJoinTasksParam("dynamicTasks");
    dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks");
    dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput");

    DynamicForkJoinTaskList dtasks = new DynamicForkJoinTaskList();
    Map<String, Object> input = new HashMap<>();
    input.put("k1", "v1");
    dtasks.add("junit_task_2", null, "xdt1", input);
    HashMap<String, Object> input2 = new HashMap<>();
    input2.put("k2", "v2");
    dtasks.add("junit_task_3", null, "xdt2", input2);
    Map<String, Object> dynamicTasksInput = new HashMap<>();
    dynamicTasksInput.put("dynamicTasks", dtasks);

    // when
    when(parametersUtils.getTaskInput(anyMap(), any(Workflow.class), any(TaskDef.class), anyString())).thenReturn(dynamicTasksInput);
    when(objectMapper.convertValue(any(), any(Class.class))).thenReturn(null);

    // then
    expectedException.expect(TerminateWorkflowException.class);
    forkJoinDynamicTaskMapper.getDynamicForkJoinTasksAndInput(dynamicForkJoinToSchedule, new Workflow());
}
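The test above relies on the JUnit 4 ExpectedException rule. If the project is on JUnit 4.13+ or JUnit 5 (an assumption here), the same expectation can be written more compactly; the fragment below is only a sketch of that alternative, not the project's code.

// Alternative assertion style, assuming JUnit 4.13+ (import static org.junit.Assert.assertThrows)
// or JUnit 5 (import static org.junit.jupiter.api.Assertions.assertThrows):
assertThrows(TerminateWorkflowException.class,
        () -> forkJoinDynamicTaskMapper.getDynamicForkJoinTasksAndInput(dynamicForkJoinToSchedule, new Workflow()));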
Use of com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList in project conductor by Netflix.
In the class ForkJoinDynamicTaskMapperTest, method getDynamicForkJoinTasksAndInput:
@Test
public void getDynamicForkJoinTasksAndInput() {
    // Given
    WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
    dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name());
    dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask");
    dynamicForkJoinToSchedule.setDynamicForkJoinTasksParam("dynamicTasks");
    dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks");
    dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput");

    DynamicForkJoinTaskList dtasks = new DynamicForkJoinTaskList();
    Map<String, Object> input = new HashMap<>();
    input.put("k1", "v1");
    dtasks.add("junit_task_2", null, "xdt1", input);
    HashMap<String, Object> input2 = new HashMap<>();
    input2.put("k2", "v2");
    dtasks.add("junit_task_3", null, "xdt2", input2);
    Map<String, Object> dynamicTasksInput = new HashMap<>();
    dynamicTasksInput.put("dynamicTasks", dtasks);

    // when
    when(parametersUtils.getTaskInput(anyMap(), any(Workflow.class), any(TaskDef.class), anyString())).thenReturn(dynamicTasksInput);
    when(objectMapper.convertValue(any(), any(Class.class))).thenReturn(dtasks);
    Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> dynamicForkJoinTasksAndInput = forkJoinDynamicTaskMapper.getDynamicForkJoinTasksAndInput(dynamicForkJoinToSchedule, new Workflow());

    // then
    assertNotNull(dynamicForkJoinTasksAndInput.getLeft());
    assertEquals(2, dynamicForkJoinTasksAndInput.getLeft().size());
    assertEquals(2, dynamicForkJoinTasksAndInput.getRight().size());
}
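The assertions above only check the sizes of the two sides of the returned Pair. The mapping itself can also be verified; the extra assertions below are a sketch that assumes the mapper preserves the insertion order of the dynamic tasks (it collects into a LinkedList in the implementation shown later on this page), and they are not part of the project's test.

// Optional, stricter checks on the mapped tasks and their inputs:
List<WorkflowTask> mappedTasks = dynamicForkJoinTasksAndInput.getLeft();
assertEquals("xdt1", mappedTasks.get(0).getTaskReferenceName());
assertEquals("xdt2", mappedTasks.get(1).getTaskReferenceName());
assertEquals("v1", dynamicForkJoinTasksAndInput.getRight().get("xdt1").get("k1"));
assertEquals("v2", dynamicForkJoinTasksAndInput.getRight().get("xdt2").get("k2"));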
Use of com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList in project conductor by Netflix.
In the class ForkJoinDynamicTaskMapper, method getDynamicForkJoinTasksAndInput:
/**
 * Gets the list of dynamic workflow tasks and their input based on the {@link WorkflowTask#getDynamicForkJoinTasksParam()}.
 * <p><b>NOTE:</b> This method is kept for legacy reasons; new workflows should use {@link #getDynamicForkTasksAndInput}.</p>
 *
 * @param taskToSchedule   The task of type FORK_JOIN_DYNAMIC that needs to be scheduled and carries the input parameters
 * @param workflowInstance The instance of the {@link Workflow} which represents the workflow being executed
 * @return {@link Pair} with the list of dynamic fork tasks in {@link Pair#getLeft()} and the input for the dynamic fork tasks in {@link Pair#getRight()}
 * @throws TerminateWorkflowException if the payload referenced by {@link WorkflowTask#getInputParameters()} does not contain the list of dynamic tasks
 */
@VisibleForTesting
Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> getDynamicForkJoinTasksAndInput(WorkflowTask taskToSchedule, Workflow workflowInstance) throws TerminateWorkflowException {
    String dynamicForkJoinTaskParam = taskToSchedule.getDynamicForkJoinTasksParam();
    Map<String, Object> input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflowInstance, null, null);
    Object paramValue = input.get(dynamicForkJoinTaskParam);
    DynamicForkJoinTaskList dynamicForkJoinTaskList = objectMapper.convertValue(paramValue, DynamicForkJoinTaskList.class);
    if (dynamicForkJoinTaskList == null) {
        String reason = String.format("Dynamic tasks could not be created. The value of %s from task's input %s has no dynamic tasks to be scheduled", dynamicForkJoinTaskParam, input);
        logger.error(reason);
        throw new TerminateWorkflowException(reason);
    }
    Map<String, Map<String, Object>> dynamicForkJoinTasksInput = new HashMap<>();
    // TODO create a custom pair collector
    List<WorkflowTask> dynamicForkJoinWorkflowTasks = dynamicForkJoinTaskList.getDynamicTasks().stream()
            .peek(dynamicForkJoinTask -> dynamicForkJoinTasksInput.put(dynamicForkJoinTask.getReferenceName(), dynamicForkJoinTask.getInput()))
            .map(dynamicForkJoinTask -> {
                WorkflowTask dynamicForkJoinWorkflowTask = new WorkflowTask();
                dynamicForkJoinWorkflowTask.setTaskReferenceName(dynamicForkJoinTask.getReferenceName());
                dynamicForkJoinWorkflowTask.setName(dynamicForkJoinTask.getTaskName());
                dynamicForkJoinWorkflowTask.setType(dynamicForkJoinTask.getType());
                if (dynamicForkJoinWorkflowTask.getTaskDefinition() == null && StringUtils.isNotBlank(dynamicForkJoinWorkflowTask.getName())) {
                    dynamicForkJoinWorkflowTask.setTaskDefinition(metadataDAO.getTaskDef(dynamicForkJoinTask.getTaskName()));
                }
                return dynamicForkJoinWorkflowTask;
            })
            .collect(Collectors.toCollection(LinkedList::new));
    return new ImmutablePair<>(dynamicForkJoinWorkflowTasks, dynamicForkJoinTasksInput);
}
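For completeness, the legacy dynamic fork is declared in a workflow definition as a FORK_JOIN_DYNAMIC task followed by a JOIN, using the same setters as the tests above. The sketch below is an assumption about how such a definition might be assembled in code, not the project's actual test fixture; the join reference name and the workflow name constant are taken from testDynamicForkJoinLegacy, and the preceding SIMPLE task with reference name dt1 (which produces the dynamicTasks output) is omitted.

// Minimal sketch of the legacy dynamic fork/join wiring.
WorkflowTask fork = new WorkflowTask();
fork.setName("dynamicfanouttask");
fork.setTaskReferenceName("dynamicfanouttask");
fork.setType(TaskType.FORK_JOIN_DYNAMIC.name());
fork.setDynamicForkJoinTasksParam("dynamicTasks");
fork.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks");

WorkflowTask join = new WorkflowTask();
join.setName("dynamicfanouttask_join");
join.setTaskReferenceName("dynamicfanouttask_join");
join.setType(TaskType.JOIN.name());

WorkflowDef def = new WorkflowDef();
def.setName(DYNAMIC_FORK_JOIN_WF_LEGACY); // same constant as in testDynamicForkJoinLegacy
def.getTasks().add(fork);
def.getTasks().add(join);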