Usage example of com.netflix.conductor.common.metadata.workflow.WorkflowTask from the Netflix Conductor project: class AbstractWorkflowServiceTest, method createWFWithResponseTimeout.
/**
 * Registers the "RTOWF" workflow used by the response-timeout tests: a task
 * definition ("task_rt") whose response timeout (10s) is far shorter than its
 * overall timeout (120s), followed by a two-task linear workflow
 * (task_rt_t1 -> t2) referencing it.
 */
private void createWFWithResponseTimeout() {
    // Task definition with an aggressive response timeout and no retry delay.
    TaskDef responseTimeoutTask = new TaskDef();
    responseTimeoutTask.setName("task_rt");
    responseTimeoutTask.setTimeoutSeconds(120);
    responseTimeoutTask.setRetryCount(RETRY_COUNT);
    responseTimeoutTask.setRetryDelaySeconds(0);
    responseTimeoutTask.setResponseTimeoutSeconds(10);
    metadataService.registerTaskDef(Collections.singletonList(responseTimeoutTask));

    WorkflowDef workflowDef = new WorkflowDef();
    workflowDef.setName("RTOWF");
    workflowDef.setDescription(workflowDef.getName());
    workflowDef.setVersion(1);
    workflowDef.setInputParameters(Arrays.asList("param1", "param2"));

    // Workflow outputs are wired from the workflow input and task outputs.
    Map<String, Object> outputParameters = new HashMap<>();
    outputParameters.put("o1", "${workflow.input.param1}");
    outputParameters.put("o2", "${t2.output.uuid}");
    outputParameters.put("o3", "${t1.output.op}");
    workflowDef.setOutputParameters(outputParameters);
    workflowDef.setFailureWorkflow("$workflow.input.failureWfName");
    workflowDef.setSchemaVersion(2);

    // First task: the response-timeout-prone task.
    WorkflowTask firstTask = new WorkflowTask();
    firstTask.setName("task_rt");
    Map<String, Object> firstTaskInput = new HashMap<>();
    firstTaskInput.put("p1", "${workflow.input.param1}");
    firstTaskInput.put("p2", "${workflow.input.param2}");
    firstTask.setInputParameters(firstTaskInput);
    firstTask.setTaskReferenceName("task_rt_t1");

    // Second task: consumes the first task's output.
    WorkflowTask secondTask = new WorkflowTask();
    secondTask.setName("junit_task_2");
    Map<String, Object> secondTaskInput = new HashMap<>();
    secondTaskInput.put("tp1", "${workflow.input.param1}");
    secondTaskInput.put("tp2", "${t1.output.op}");
    secondTask.setInputParameters(secondTaskInput);
    secondTask.setTaskReferenceName("t2");

    LinkedList<WorkflowTask> workflowTasks = new LinkedList<>();
    workflowTasks.add(firstTask);
    workflowTasks.add(secondTask);
    workflowDef.setTasks(workflowTasks);
    metadataService.updateWorkflowDef(workflowDef);
}
Usage example of com.netflix.conductor.common.metadata.workflow.WorkflowTask from the Netflix Conductor project: class AbstractWorkflowServiceTest, method init.
/**
 * One-time test fixture setup (guarded by the {@code registered} flag):
 * registers the junit task definitions and the shared workflow definitions
 * used across this test class.
 */
@Before
public void init() {
    System.setProperty("EC2_REGION", "us-east-1");
    System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c");
    // Definitions are registered once per JVM; later invocations are no-ops.
    if (registered) {
        return;
    }
    WorkflowContext.set(new WorkflowContext("junit_app"));
    // Register junit_task_0 .. junit_task_20 with the standard retry count,
    // skipping any that already exist.
    for (int i = 0; i < 21; i++) {
        String name = "junit_task_" + i;
        if (notFoundSafeGetTaskDef(name) != null) {
            continue;
        }
        TaskDef task = new TaskDef();
        task.setName(name);
        task.setTimeoutSeconds(120);
        task.setRetryCount(RETRY_COUNT);
        metadataService.registerTaskDef(Collections.singletonList(task));
    }
    // Register junit_task_0_RT_0 .. junit_task_0_RT_4 with retries disabled.
    for (int i = 0; i < 5; i++) {
        String name = "junit_task_0_RT_" + i;
        if (notFoundSafeGetTaskDef(name) != null) {
            continue;
        }
        TaskDef task = new TaskDef();
        task.setName(name);
        task.setTimeoutSeconds(120);
        task.setRetryCount(0);
        metadataService.registerTaskDef(Collections.singletonList(task));
    }
    // A deliberately short-timeout task for timeout-handling tests.
    TaskDef task = new TaskDef();
    task.setName("short_time_out");
    task.setTimeoutSeconds(5);
    task.setRetryCount(RETRY_COUNT);
    metadataService.registerTaskDef(Collections.singletonList(task));
    // LINEAR_WORKFLOW_T1_T2: two-task linear workflow t1 -> t2.
    WorkflowDef def = new WorkflowDef();
    def.setName(LINEAR_WORKFLOW_T1_T2);
    def.setDescription(def.getName());
    def.setVersion(1);
    def.setInputParameters(Arrays.asList("param1", "param2"));
    Map<String, Object> outputParameters = new HashMap<>();
    outputParameters.put("o1", "${workflow.input.param1}");
    outputParameters.put("o2", "${t2.output.uuid}");
    outputParameters.put("o3", "${t1.output.op}");
    def.setOutputParameters(outputParameters);
    def.setFailureWorkflow("$workflow.input.failureWfName");
    def.setSchemaVersion(2);
    LinkedList<WorkflowTask> wftasks = new LinkedList<>();
    WorkflowTask wft1 = new WorkflowTask();
    wft1.setName("junit_task_1");
    Map<String, Object> ip1 = new HashMap<>();
    ip1.put("p1", "${workflow.input.param1}");
    ip1.put("p2", "${workflow.input.param2}");
    // Exercises null-valued input parameter handling downstream.
    ip1.put("someNullKey", null);
    wft1.setInputParameters(ip1);
    wft1.setTaskReferenceName("t1");
    WorkflowTask wft2 = new WorkflowTask();
    wft2.setName("junit_task_2");
    Map<String, Object> ip2 = new HashMap<>();
    ip2.put("tp1", "${workflow.input.param1}");
    ip2.put("tp2", "${t1.output.op}");
    wft2.setInputParameters(ip2);
    wft2.setTaskReferenceName("t2");
    wftasks.add(wft1);
    wftasks.add(wft2);
    def.setTasks(wftasks);
    // TEST_WORKFLOW: reuses t1 and t2 and appends a third task t3.
    WorkflowTask wft3 = new WorkflowTask();
    wft3.setName("junit_task_3");
    Map<String, Object> ip3 = new HashMap<>();
    ip3.put("tp1", "${workflow.input.param1}");
    ip3.put("tp2", "${t1.output.op}");
    wft3.setInputParameters(ip3);
    wft3.setTaskReferenceName("t3");
    WorkflowDef def2 = new WorkflowDef();
    def2.setName(TEST_WORKFLOW);
    def2.setDescription(def2.getName());
    def2.setVersion(1);
    def2.setInputParameters(Arrays.asList("param1", "param2"));
    LinkedList<WorkflowTask> wftasks2 = new LinkedList<>();
    wftasks2.add(wft1);
    wftasks2.add(wft2);
    wftasks2.add(wft3);
    def2.setSchemaVersion(2);
    def2.setTasks(wftasks2);
    try {
        WorkflowDef[] workflowDefs = new WorkflowDef[] { def, def2 };
        for (WorkflowDef wd : workflowDefs) {
            metadataService.updateWorkflowDef(wd);
        }
        createForkJoinWorkflow();
        // Re-register the linear definition under a second name (LONG_RUNNING).
        def.setName(LONG_RUNNING);
        metadataService.updateWorkflowDef(def);
    } catch (Exception ignored) {
        // Best-effort: definitions may already exist from a prior run; any real
        // problem surfaces later when a test actually starts one of these workflows.
    }
    taskDefs = metadataService.getTaskDefs();
    registered = true;
}
Usage example of com.netflix.conductor.common.metadata.workflow.WorkflowTask from the Netflix Conductor project: class AbstractWorkflowServiceTest, method createForkJoinNestedWorkflow.
/**
 * Registers FORK_JOIN_NESTED_WF: an outer fork (fork1/join1) whose second
 * branch contains a nested fork (fork2/join2), which in turn contains a
 * DECISION task (d1) branching over tasks t16..t20.
 *
 * @param schemaVersion workflow schema version; v1 uses bare expression
 *     strings for input parameters, v2+ uses ${...} placeholders
 */
private void createForkJoinNestedWorkflow(int schemaVersion) {
    WorkflowDef def = new WorkflowDef();
    def.setName(FORK_JOIN_NESTED_WF);
    def.setDescription(def.getName());
    def.setVersion(1);
    def.setSchemaVersion(schemaVersion);
    def.setInputParameters(Arrays.asList("param1", "param2"));
    // Slots 10..20 are filled by the loop below; slots 0..9 remain null and
    // are never read.
    WorkflowTask[] tasks = new WorkflowTask[21];
    // Single input map shared (by reference) across every task and the
    // decision below.
    Map<String, Object> ip1 = new HashMap<>();
    if (schemaVersion <= 1) {
        ip1.put("p1", "workflow.input.param1");
        ip1.put("p2", "workflow.input.param2");
        ip1.put("case", "workflow.input.case");
    } else {
        ip1.put("p1", "${workflow.input.param1}");
        ip1.put("p2", "${workflow.input.param2}");
        ip1.put("case", "${workflow.input.case}");
    }
    // Create junit_task_10 .. junit_task_20 with reference names t10 .. t20.
    // NOTE(review): tasks[10] is created here but never wired into the graph
    // below — confirm whether t10 is intentionally unused.
    for (int i = 10; i < 21; i++) {
        WorkflowTask wft = new WorkflowTask();
        wft.setName("junit_task_" + i);
        wft.setInputParameters(ip1);
        wft.setTaskReferenceName("t" + i);
        tasks[i] = wft;
    }
    // Decision d1: switches on the "case" input; every branch ends in t20,
    // which is what join2 waits on.
    WorkflowTask d1 = new WorkflowTask();
    d1.setType(TaskType.DECISION.name());
    d1.setName("Decision");
    d1.setTaskReferenceName("d1");
    d1.setInputParameters(ip1);
    d1.setDefaultCase(Arrays.asList(tasks[18], tasks[20]));
    d1.setCaseValueParam("case");
    Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
    decisionCases.put("a", Arrays.asList(tasks[16], tasks[19], tasks[20]));
    decisionCases.put("b", Arrays.asList(tasks[17], tasks[20]));
    d1.setDecisionCases(decisionCases);
    // Inner fork: branch 1 = t12 -> t14, branch 2 = t13 -> d1.
    WorkflowTask fork2 = new WorkflowTask();
    fork2.setType(TaskType.FORK_JOIN.name());
    fork2.setName("fork2");
    fork2.setTaskReferenceName("fork2");
    fork2.getForkTasks().add(Arrays.asList(tasks[12], tasks[14]));
    fork2.getForkTasks().add(Arrays.asList(tasks[13], d1));
    // join2 waits on the last task of each inner branch (t14 and t20).
    WorkflowTask join2 = new WorkflowTask();
    join2.setType(TaskType.JOIN.name());
    join2.setTaskReferenceName("join2");
    join2.setJoinOn(Arrays.asList("t14", "t20"));
    // Outer fork: branch 1 = t11, branch 2 = the nested fork2/join2 pair.
    WorkflowTask fork1 = new WorkflowTask();
    fork1.setType(TaskType.FORK_JOIN.name());
    fork1.setTaskReferenceName("fork1");
    fork1.getForkTasks().add(Arrays.asList(tasks[11]));
    fork1.getForkTasks().add(Arrays.asList(fork2, join2));
    WorkflowTask join1 = new WorkflowTask();
    join1.setType(TaskType.JOIN.name());
    join1.setTaskReferenceName("join1");
    join1.setJoinOn(Arrays.asList("t11", "join2"));
    // Top-level sequence: fork1 -> join1 -> t15.
    def.getTasks().add(fork1);
    def.getTasks().add(join1);
    def.getTasks().add(tasks[15]);
    metadataService.updateWorkflowDef(def);
}
Usage example of com.netflix.conductor.common.metadata.workflow.WorkflowTask from the Netflix Conductor project: class AbstractWorkflowServiceTest, method createDynamicForkJoinWorkflowDefs.
/**
 * Registers DYNAMIC_FORK_JOIN_WF: a seed task (dt1) whose output drives a
 * FORK_JOIN_DYNAMIC task, followed by its JOIN and a final task (task4).
 */
private void createDynamicForkJoinWorkflowDefs() {
    WorkflowDef workflowDef = new WorkflowDef();
    workflowDef.setName(DYNAMIC_FORK_JOIN_WF);
    workflowDef.setDescription(workflowDef.getName());
    workflowDef.setVersion(1);
    workflowDef.setSchemaVersion(2);
    workflowDef.setInputParameters(Arrays.asList("param1", "param2"));

    // Seed task: its output (dynamicTasks / dynamicTasksInput) feeds the fork.
    WorkflowTask seedTask = new WorkflowTask();
    seedTask.setName("junit_task_1");
    Map<String, Object> seedInput = new HashMap<>();
    seedInput.put("p1", "${workflow.input.param1}");
    seedInput.put("p2", "${workflow.input.param2}");
    seedTask.setInputParameters(seedInput);
    seedTask.setTaskReferenceName("dt1");

    // Dynamic fork: the task list and per-task inputs come from dt1's output.
    WorkflowTask dynamicFork = new WorkflowTask();
    dynamicFork.setType(TaskType.FORK_JOIN_DYNAMIC.name());
    dynamicFork.setTaskReferenceName("dynamicfanouttask");
    dynamicFork.setDynamicForkTasksParam("dynamicTasks");
    dynamicFork.setDynamicForkTasksInputParamName("dynamicTasksInput");
    dynamicFork.getInputParameters().put("dynamicTasks", "${dt1.output.dynamicTasks}");
    dynamicFork.getInputParameters().put("dynamicTasksInput", "${dt1.output.dynamicTasksInput}");

    // Join for the dynamic fork (joinOn is resolved at runtime).
    WorkflowTask dynamicJoin = new WorkflowTask();
    dynamicJoin.setType(TaskType.JOIN.name());
    dynamicJoin.setTaskReferenceName("dynamicfanouttask_join");

    // Final task after the join completes.
    WorkflowTask finalTask = new WorkflowTask();
    finalTask.setName("junit_task_4");
    finalTask.setTaskReferenceName("task4");

    workflowDef.getTasks().add(seedTask);
    workflowDef.getTasks().add(dynamicFork);
    workflowDef.getTasks().add(dynamicJoin);
    workflowDef.getTasks().add(finalTask);
    metadataMapperService.populateTaskDefinitions(workflowDef);
    metadataService.updateWorkflowDef(workflowDef);
}
Usage example of com.netflix.conductor.common.metadata.workflow.WorkflowTask from the Netflix Conductor project: class AbstractWorkflowServiceTest, method createDoWhileWorkflowWithIteration.
/**
 * Registers a DO_WHILE test workflow whose loop body is http0 followed by a
 * fork/join over http1 and http2.
 *
 * @param iteration loop count baked into the loop condition (and the workflow
 *     name) when {@code isInputParameter} is false; when it equals 2 a second
 *     single-iteration DO_WHILE (loopTask2) is appended
 * @param isInputParameter when true, the loop count is read from the workflow
 *     input ({@code $.value}) and the workflow is named DO_WHILE_WF + "_3"
 * @param useTaskDef when true, also registers TaskDefs for the loop tasks so
 *     retry/timeout policies apply
 */
private void createDoWhileWorkflowWithIteration(int iteration, boolean isInputParameter, boolean useTaskDef) {
    WorkflowDef workflowDef = new WorkflowDef();
    if (isInputParameter) {
        workflowDef.setName(DO_WHILE_WF + "_3");
    } else {
        workflowDef.setName(DO_WHILE_WF + "_" + iteration);
    }
    workflowDef.setDescription(workflowDef.getName());
    workflowDef.setVersion(1);
    workflowDef.setInputParameters(Arrays.asList("param1", "param2"));
    WorkflowTask loopTask = new WorkflowTask();
    loopTask.setType(TaskType.DO_WHILE.name());
    loopTask.setTaskReferenceName("loopTask");
    loopTask.setName("loopTask");
    loopTask.setWorkflowTaskType(TaskType.DO_WHILE);
    Map<String, Object> input = new HashMap<>();
    // "value" is only consulted by the loop condition when isInputParameter is true.
    input.put("value", "${workflow.input.loop}");
    loopTask.setInputParameters(input);
    if (useTaskDef) {
        // Retry-on-timeout policy for the loop task itself.
        TaskDef taskDef = new TaskDef();
        taskDef.setName("loopTask");
        taskDef.setTimeoutSeconds(200);
        taskDef.setRetryCount(1);
        taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY);
        taskDef.setRetryDelaySeconds(10);
        metadataService.registerTaskDef(Collections.singletonList(taskDef));
    }
    Map<String, Object> inputParams1 = new HashMap<>();
    inputParams1.put("p1", "workflow.input.param1");
    inputParams1.put("p2", "workflow.input.param2");
    // Loop body: http0, then a fork over http1/http2, then the join.
    WorkflowTask http1 = new WorkflowTask();
    http1.setName("http1");
    http1.setInputParameters(inputParams1);
    http1.setTaskReferenceName("http1");
    http1.setWorkflowTaskType(TaskType.HTTP);
    WorkflowTask http2 = new WorkflowTask();
    http2.setName("http2");
    http2.setInputParameters(inputParams1);
    http2.setTaskReferenceName("http2");
    http2.setWorkflowTaskType(TaskType.HTTP);
    WorkflowTask fork = new WorkflowTask();
    fork.setName("fork");
    fork.setInputParameters(inputParams1);
    fork.setTaskReferenceName("fork");
    fork.setWorkflowTaskType(TaskType.FORK_JOIN);
    fork.setForkTasks(Arrays.asList(Arrays.asList(http1), Arrays.asList(http2)));
    WorkflowTask join = new WorkflowTask();
    join.setName("join");
    join.setInputParameters(inputParams1);
    join.setTaskReferenceName("join");
    join.setWorkflowTaskType(TaskType.JOIN);
    WorkflowTask http0 = new WorkflowTask();
    http0.setName("http0");
    http0.setInputParameters(inputParams1);
    http0.setTaskReferenceName("http0");
    http0.setWorkflowTaskType(TaskType.HTTP);
    loopTask.getLoopOver().add(http0);
    loopTask.getLoopOver().add(fork);
    loopTask.getLoopOver().add(join);
    if (isInputParameter) {
        loopTask.setLoopCondition("if ($.loopTask['iteration'] < $.value) { true; } else { false; }");
    } else {
        loopTask.setLoopCondition("if ($.loopTask['iteration'] < " + iteration + " ) { true;} else {false;} ");
    }
    workflowDef.getTasks().add(loopTask);
    // For the fixed two-iteration variant, append a second, single-iteration loop.
    if (iteration == 2 && !isInputParameter) {
        if (useTaskDef) {
            TaskDef taskDef2 = new TaskDef();
            taskDef2.setName("loopTask2");
            taskDef2.setTimeoutSeconds(200);
            taskDef2.setRetryCount(3);
            taskDef2.setTimeoutPolicy(TimeoutPolicy.RETRY);
            taskDef2.setRetryDelaySeconds(10);
            metadataService.registerTaskDef(Collections.singletonList(taskDef2));
        }
        WorkflowTask loopTask2 = new WorkflowTask();
        loopTask2.setType(TaskType.DO_WHILE.name());
        loopTask2.setTaskReferenceName("loopTask2");
        loopTask2.setName("loopTask2");
        loopTask2.setWorkflowTaskType(TaskType.DO_WHILE);
        loopTask2.setInputParameters(input);
        WorkflowTask http3 = new WorkflowTask();
        http3.setName("http3");
        http3.setInputParameters(inputParams1);
        http3.setTaskReferenceName("http3");
        http3.setWorkflowTaskType(TaskType.HTTP);
        loopTask2.setLoopCondition("if ($.loopTask2['iteration'] < 1) { true; } else { false; }");
        loopTask2.getLoopOver().add(http3);
        workflowDef.getTasks().add(loopTask2);
    }
    metadataService.registerWorkflowDef(workflowDef);
}
Aggregations