Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix: the class AbstractWorkflowServiceTest, method testTaskDefTemplate.
@Test
public void testTaskDefTemplate() throws Exception {
    System.setProperty("STACK2", "test_stack");
    TaskDef templatedTask = new TaskDef();
    templatedTask.setName("templated_task");
    Map<String, Object> httpRequest = new HashMap<>();
    httpRequest.put("method", "GET");
    httpRequest.put("vipStack", "${STACK2}");
    httpRequest.put("uri", "/get/something");
    Map<String, Object> body = new HashMap<>();
    body.put("inputPaths", Arrays.asList("${workflow.input.path1}", "${workflow.input.path2}"));
    body.put("requestDetails", "${workflow.input.requestDetails}");
    body.put("outputPath", "${workflow.input.outputPath}");
    httpRequest.put("body", body);
    templatedTask.getInputTemplate().put("http_request", httpRequest);
    metadataService.registerTaskDef(Arrays.asList(templatedTask));
    WorkflowDef templateWf = new WorkflowDef();
    templateWf.setName("template_workflow");
    WorkflowTask wft = new WorkflowTask();
    wft.setName(templatedTask.getName());
    wft.setWorkflowTaskType(TaskType.SIMPLE);
    wft.setTaskReferenceName("t0");
    templateWf.getTasks().add(wft);
    templateWf.setSchemaVersion(2);
    metadataService.registerWorkflowDef(templateWf);
    Map<String, Object> requestDetails = new HashMap<>();
    requestDetails.put("key1", "value1");
    requestDetails.put("key2", 42);
    Map<String, Object> input = new HashMap<>();
    input.put("path1", "file://path1");
    input.put("path2", "file://path2");
    input.put("outputPath", "s3://bucket/outputPath");
    input.put("requestDetails", requestDetails);
    String id = startOrLoadWorkflowExecution(templateWf.getName(), 1, "testTaskDefTemplate", input, null, null);
    assertNotNull(id);
    Workflow workflow = workflowExecutionService.getExecutionStatus(id, true);
    assertNotNull(workflow);
    assertTrue(workflow.getReasonForIncompletion(), !workflow.getStatus().isTerminal());
    assertEquals(1, workflow.getTasks().size());
    Task task = workflow.getTasks().get(0);
    Map<String, Object> taskInput = task.getInputData();
    assertNotNull(taskInput);
    assertTrue(taskInput.containsKey("http_request"));
    assertTrue(taskInput.get("http_request") instanceof Map);
    // Uncomment the sysout below to regenerate the expected string value
    // System.out.println(objectMapper.writeValueAsString(objectMapper.writeValueAsString(taskInput)));
    String expected = "{\"http_request\":{\"method\":\"GET\",\"vipStack\":\"test_stack\",\"body\":{\"requestDetails\":{\"key1\":\"value1\",\"key2\":42},\"outputPath\":\"s3://bucket/outputPath\",\"inputPaths\":[\"file://path1\",\"file://path2\"]},\"uri\":\"/get/something\"}}";
    assertEquals(expected, objectMapper.writeValueAsString(taskInput));
}
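The assertion above works because the task definition's inputTemplate is merged into the task's input when the task is scheduled, with ${...} expressions resolved against system properties and workflow input. As a minimal standalone sketch of what the template itself looks like before resolution (assuming only conductor-common and jackson-databind on the classpath; the class name is illustrative, not from the project):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.conductor.common.metadata.tasks.TaskDef;

import java.util.HashMap;
import java.util.Map;

public class TaskDefTemplateSketch {

    public static void main(String[] args) throws Exception {
        // Build a TaskDef whose inputTemplate carries placeholder expressions,
        // mirroring the templated_task definition in the test above.
        TaskDef templatedTask = new TaskDef();
        templatedTask.setName("templated_task");

        Map<String, Object> httpRequest = new HashMap<>();
        httpRequest.put("method", "GET");
        httpRequest.put("vipStack", "${STACK2}");
        httpRequest.put("uri", "/get/something");
        templatedTask.getInputTemplate().put("http_request", httpRequest);

        // Serialize the raw template: these ${...} expressions are what the
        // executor later resolves against system properties and workflow input.
        ObjectMapper mapper = new ObjectMapper();
        System.out.println(mapper.writerWithDefaultPrettyPrinter()
                .writeValueAsString(templatedTask.getInputTemplate()));
    }
}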
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix: the class AbstractWorkflowServiceTest, method createWFWithResponseTimeout.
private void createWFWithResponseTimeout() {
    TaskDef task = new TaskDef();
    task.setName("task_rt");
    task.setTimeoutSeconds(120);
    task.setRetryCount(RETRY_COUNT);
    task.setRetryDelaySeconds(0);
    task.setResponseTimeoutSeconds(10);
    metadataService.registerTaskDef(Collections.singletonList(task));
    WorkflowDef def = new WorkflowDef();
    def.setName("RTOWF");
    def.setDescription(def.getName());
    def.setVersion(1);
    def.setInputParameters(Arrays.asList("param1", "param2"));
    Map<String, Object> outputParameters = new HashMap<>();
    outputParameters.put("o1", "${workflow.input.param1}");
    outputParameters.put("o2", "${t2.output.uuid}");
    outputParameters.put("o3", "${t1.output.op}");
    def.setOutputParameters(outputParameters);
    def.setFailureWorkflow("$workflow.input.failureWfName");
    def.setSchemaVersion(2);
    LinkedList<WorkflowTask> wftasks = new LinkedList<>();
    WorkflowTask wft1 = new WorkflowTask();
    wft1.setName("task_rt");
    Map<String, Object> ip1 = new HashMap<>();
    ip1.put("p1", "${workflow.input.param1}");
    ip1.put("p2", "${workflow.input.param2}");
    wft1.setInputParameters(ip1);
    wft1.setTaskReferenceName("task_rt_t1");
    WorkflowTask wft2 = new WorkflowTask();
    wft2.setName("junit_task_2");
    Map<String, Object> ip2 = new HashMap<>();
    ip2.put("tp1", "${workflow.input.param1}");
    ip2.put("tp2", "${t1.output.op}");
    wft2.setInputParameters(ip2);
    wft2.setTaskReferenceName("t2");
    wftasks.add(wft1);
    wftasks.add(wft2);
    def.setTasks(wftasks);
    metadataService.updateWorkflowDef(def);
}
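Here responseTimeoutSeconds (10) is deliberately kept well below timeoutSeconds (120): the response timeout governs how long a polled task may sit without a status update before it is re-queued, while timeoutSeconds bounds the whole execution, and recent Conductor versions reject definitions where the former exceeds the latter. A defensive pre-registration check along those lines might look like this (a sketch; checkTimeouts is a hypothetical helper, not Conductor's own validator):

import com.netflix.conductor.common.metadata.tasks.TaskDef;

public class TaskDefTimeoutCheck {

    // Hypothetical helper: rejects definitions where the per-poll response
    // timeout exceeds the overall task timeout (0 timeoutSeconds = unlimited).
    static void checkTimeouts(TaskDef taskDef) {
        long total = taskDef.getTimeoutSeconds();
        long response = taskDef.getResponseTimeoutSeconds();
        if (total > 0 && response > total) {
            throw new IllegalArgumentException(
                "responseTimeoutSeconds (" + response
                    + ") must not exceed timeoutSeconds (" + total
                    + ") for task " + taskDef.getName());
        }
    }

    public static void main(String[] args) {
        TaskDef task = new TaskDef();
        task.setName("task_rt");
        task.setTimeoutSeconds(120);
        task.setResponseTimeoutSeconds(10); // well within the 120s budget
        checkTimeouts(task); // passes silently
    }
}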
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix: the class AbstractWorkflowServiceTest, method init.
@Before
public void init() {
    System.setProperty("EC2_REGION", "us-east-1");
    System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c");
    if (registered) {
        return;
    }
    WorkflowContext.set(new WorkflowContext("junit_app"));
    for (int i = 0; i < 21; i++) {
        String name = "junit_task_" + i;
        if (notFoundSafeGetTaskDef(name) != null) {
            continue;
        }
        TaskDef task = new TaskDef();
        task.setName(name);
        task.setTimeoutSeconds(120);
        task.setRetryCount(RETRY_COUNT);
        metadataService.registerTaskDef(Collections.singletonList(task));
    }
    for (int i = 0; i < 5; i++) {
        String name = "junit_task_0_RT_" + i;
        if (notFoundSafeGetTaskDef(name) != null) {
            continue;
        }
        TaskDef task = new TaskDef();
        task.setName(name);
        task.setTimeoutSeconds(120);
        task.setRetryCount(0);
        metadataService.registerTaskDef(Collections.singletonList(task));
    }
    TaskDef task = new TaskDef();
    task.setName("short_time_out");
    task.setTimeoutSeconds(5);
    task.setRetryCount(RETRY_COUNT);
    metadataService.registerTaskDef(Collections.singletonList(task));
    WorkflowDef def = new WorkflowDef();
    def.setName(LINEAR_WORKFLOW_T1_T2);
    def.setDescription(def.getName());
    def.setVersion(1);
    def.setInputParameters(Arrays.asList("param1", "param2"));
    Map<String, Object> outputParameters = new HashMap<>();
    outputParameters.put("o1", "${workflow.input.param1}");
    outputParameters.put("o2", "${t2.output.uuid}");
    outputParameters.put("o3", "${t1.output.op}");
    def.setOutputParameters(outputParameters);
    def.setFailureWorkflow("$workflow.input.failureWfName");
    def.setSchemaVersion(2);
    LinkedList<WorkflowTask> wftasks = new LinkedList<>();
    WorkflowTask wft1 = new WorkflowTask();
    wft1.setName("junit_task_1");
    Map<String, Object> ip1 = new HashMap<>();
    ip1.put("p1", "${workflow.input.param1}");
    ip1.put("p2", "${workflow.input.param2}");
    ip1.put("someNullKey", null);
    wft1.setInputParameters(ip1);
    wft1.setTaskReferenceName("t1");
    WorkflowTask wft2 = new WorkflowTask();
    wft2.setName("junit_task_2");
    Map<String, Object> ip2 = new HashMap<>();
    ip2.put("tp1", "${workflow.input.param1}");
    ip2.put("tp2", "${t1.output.op}");
    wft2.setInputParameters(ip2);
    wft2.setTaskReferenceName("t2");
    wftasks.add(wft1);
    wftasks.add(wft2);
    def.setTasks(wftasks);
    WorkflowTask wft3 = new WorkflowTask();
    wft3.setName("junit_task_3");
    Map<String, Object> ip3 = new HashMap<>();
    ip3.put("tp1", "${workflow.input.param1}");
    ip3.put("tp2", "${t1.output.op}");
    wft3.setInputParameters(ip3);
    wft3.setTaskReferenceName("t3");
    WorkflowDef def2 = new WorkflowDef();
    def2.setName(TEST_WORKFLOW);
    def2.setDescription(def2.getName());
    def2.setVersion(1);
    def2.setInputParameters(Arrays.asList("param1", "param2"));
    LinkedList<WorkflowTask> wftasks2 = new LinkedList<>();
    wftasks2.add(wft1);
    wftasks2.add(wft2);
    wftasks2.add(wft3);
    def2.setSchemaVersion(2);
    def2.setTasks(wftasks2);
    try {
        WorkflowDef[] wdsf = new WorkflowDef[] { def, def2 };
        for (WorkflowDef wd : wdsf) {
            metadataService.updateWorkflowDef(wd);
        }
        createForkJoinWorkflow();
        def.setName(LONG_RUNNING);
        metadataService.updateWorkflowDef(def);
    } catch (Exception e) {
    }
    taskDefs = metadataService.getTaskDefs();
    registered = true;
}
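init() leans on a notFoundSafeGetTaskDef helper that is not part of this excerpt. One plausible shape for it, sketched here as a method of the same test class (the real helper, and the exact not-found exception type thrown by metadataService.getTaskDef, depend on the Conductor version):

// Sketch of the helper used above: returns null instead of failing
// when the task definition has not been registered yet.
private TaskDef notFoundSafeGetTaskDef(String name) {
    try {
        return metadataService.getTaskDef(name);
    } catch (RuntimeException notFound) {
        // Treat "no such task definition" as null so init() can register it.
        return null;
    }
}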
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix: the class AbstractWorkflowServiceTest, method testTaskTimeout.
@Test
public void testTaskTimeout() throws Exception {
    String taskName = "junit_task_1";
    TaskDef taskDef = notFoundSafeGetTaskDef(taskName);
    taskDef.setRetryCount(1);
    taskDef.setTimeoutSeconds(1);
    taskDef.setRetryDelaySeconds(0);
    taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY);
    metadataService.updateTaskDef(taskDef);
    WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1);
    assertNotNull(found.getFailureWorkflow());
    assertFalse(StringUtils.isBlank(found.getFailureWorkflow()));
    String correlationId = "unit_test_1" + UUID.randomUUID().toString();
    Map<String, Object> input = new HashMap<>();
    input.put("param1", "p1 value");
    input.put("param2", "p2 value");
    input.put("failureWfName", "FanInOutTest");
    String workflowId = startOrLoadWorkflowExecution("timeout", LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null);
    assertNotNull(workflowId);
    // Ensure that we have a workflow queued up for evaluation here...
    long size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE);
    assertEquals(1, size);
    // If we get the full workflow here, the last task should be completed and the next task should be scheduled
    Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
    assertNotNull(workflow);
    assertEquals(RUNNING, workflow.getStatus());
    assertEquals("found: " + workflow.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 1, workflow.getTasks().size());
    Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
    assertNotNull(task);
    assertEquals(workflowId, task.getWorkflowInstanceId());
    assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
    // Ensure that we have a workflow queued up for evaluation here...
    size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE);
    assertEquals(1, size);
    Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS);
    workflowSweeper.sweep(Collections.singletonList(workflowId), workflowExecutor, workflowRepairService);
    workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
    assertNotNull(workflow);
    assertEquals("found: " + workflow.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 2, workflow.getTasks().size());
    Task task1 = workflow.getTasks().get(0);
    assertEquals(TIMED_OUT, task1.getStatus());
    Task task2 = workflow.getTasks().get(1);
    assertEquals(SCHEDULED, task2.getStatus());
    task = workflowExecutionService.poll(task2.getTaskDefName(), "task1.junit.worker");
    assertNotNull(task);
    assertEquals(workflowId, task.getWorkflowInstanceId());
    assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
    Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS);
    workflowExecutor.decide(workflowId);
    workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
    assertNotNull(workflow);
    assertEquals(2, workflow.getTasks().size());
    assertEquals(TIMED_OUT, workflow.getTasks().get(0).getStatus());
    assertEquals(TIMED_OUT, workflow.getTasks().get(1).getStatus());
    assertEquals(WorkflowStatus.TIMED_OUT, workflow.getStatus());
    assertEquals(1, queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE));
    taskDef.setTimeoutSeconds(0);
    taskDef.setRetryCount(RETRY_COUNT);
    metadataService.updateTaskDef(taskDef);
}
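The test flips the definition to TimeoutPolicy.RETRY so that the first timeout marks the task TIMED_OUT and schedules a retry; once the single retry also times out, the workflow itself goes TIMED_OUT. TaskDef exposes the other policies as well; a small sketch of the three options (enum values as in conductor-common, comments paraphrasing their documented behavior, class name illustrative):

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy;

public class TimeoutPolicySketch {

    public static void main(String[] args) {
        TaskDef task = new TaskDef();
        task.setName("junit_task_1");
        task.setTimeoutSeconds(1);

        // RETRY: on timeout, mark the task TIMED_OUT and schedule a retry
        // (the behavior exercised by testTaskTimeout above).
        task.setTimeoutPolicy(TimeoutPolicy.RETRY);

        // TIME_OUT_WF: on timeout, mark the task TIMED_OUT and, once retries
        // are exhausted, move the whole workflow to TIMED_OUT.
        task.setTimeoutPolicy(TimeoutPolicy.TIME_OUT_WF);

        // ALERT_ONLY: only emit a metric on timeout; execution continues.
        task.setTimeoutPolicy(TimeoutPolicy.ALERT_ONLY);

        System.out.println("final policy: " + task.getTimeoutPolicy());
    }
}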
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix: the class AbstractWorkflowServiceTest, method createDoWhileWorkflowWithIteration.
private void createDoWhileWorkflowWithIteration(int iteration, boolean isInputParameter, boolean useTaskDef) {
    WorkflowDef workflowDef = new WorkflowDef();
    if (isInputParameter) {
        workflowDef.setName(DO_WHILE_WF + "_3");
    } else {
        workflowDef.setName(DO_WHILE_WF + "_" + iteration);
    }
    workflowDef.setDescription(workflowDef.getName());
    workflowDef.setVersion(1);
    workflowDef.setInputParameters(Arrays.asList("param1", "param2"));
    WorkflowTask loopTask = new WorkflowTask();
    loopTask.setType(TaskType.DO_WHILE.name());
    loopTask.setTaskReferenceName("loopTask");
    loopTask.setName("loopTask");
    loopTask.setWorkflowTaskType(TaskType.DO_WHILE);
    Map<String, Object> input = new HashMap<>();
    input.put("value", "${workflow.input.loop}");
    loopTask.setInputParameters(input);
    if (useTaskDef) {
        TaskDef taskDef = new TaskDef();
        taskDef.setName("loopTask");
        taskDef.setTimeoutSeconds(200);
        taskDef.setRetryCount(1);
        taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY);
        taskDef.setRetryDelaySeconds(10);
        metadataService.registerTaskDef(Arrays.asList(taskDef));
    }
    Map<String, Object> inputParams1 = new HashMap<>();
    inputParams1.put("p1", "workflow.input.param1");
    inputParams1.put("p2", "workflow.input.param2");
    WorkflowTask http1 = new WorkflowTask();
    http1.setName("http1");
    http1.setInputParameters(inputParams1);
    http1.setTaskReferenceName("http1");
    http1.setWorkflowTaskType(TaskType.HTTP);
    WorkflowTask http2 = new WorkflowTask();
    http2.setName("http2");
    http2.setInputParameters(inputParams1);
    http2.setTaskReferenceName("http2");
    http2.setWorkflowTaskType(TaskType.HTTP);
    WorkflowTask fork = new WorkflowTask();
    fork.setName("fork");
    fork.setInputParameters(inputParams1);
    fork.setTaskReferenceName("fork");
    fork.setWorkflowTaskType(TaskType.FORK_JOIN);
    fork.setForkTasks(Arrays.asList(Arrays.asList(http1), Arrays.asList(http2)));
    WorkflowTask join = new WorkflowTask();
    join.setName("join");
    join.setInputParameters(inputParams1);
    join.setTaskReferenceName("join");
    join.setWorkflowTaskType(TaskType.JOIN);
    WorkflowTask http0 = new WorkflowTask();
    http0.setName("http0");
    http0.setInputParameters(inputParams1);
    http0.setTaskReferenceName("http0");
    http0.setWorkflowTaskType(TaskType.HTTP);
    loopTask.getLoopOver().add(http0);
    loopTask.getLoopOver().add(fork);
    loopTask.getLoopOver().add(join);
    if (isInputParameter) {
        loopTask.setLoopCondition("if ($.loopTask['iteration'] < $.value) { true; } else { false; }");
    } else {
        loopTask.setLoopCondition("if ($.loopTask['iteration'] < " + iteration + " ) { true;} else {false;} ");
    }
    workflowDef.getTasks().add(loopTask);
    if (iteration == 2 && !isInputParameter) {
        if (useTaskDef) {
            TaskDef taskDef2 = new TaskDef();
            taskDef2.setName("loopTask2");
            taskDef2.setTimeoutSeconds(200);
            taskDef2.setRetryCount(3);
            taskDef2.setTimeoutPolicy(TimeoutPolicy.RETRY);
            taskDef2.setRetryDelaySeconds(10);
            metadataService.registerTaskDef(Arrays.asList(taskDef2));
        }
        WorkflowTask loopTask2 = new WorkflowTask();
        loopTask2.setType(TaskType.DO_WHILE.name());
        loopTask2.setTaskReferenceName("loopTask2");
        loopTask2.setName("loopTask2");
        loopTask2.setWorkflowTaskType(TaskType.DO_WHILE);
        loopTask2.setInputParameters(input);
        WorkflowTask http3 = new WorkflowTask();
        http3.setName("http3");
        http3.setInputParameters(inputParams1);
        http3.setTaskReferenceName("http3");
        http3.setWorkflowTaskType(TaskType.HTTP);
        loopTask2.setLoopCondition("if ($.loopTask2['iteration'] < 1) { true; } else { false; }");
        loopTask2.getLoopOver().add(http3);
        workflowDef.getTasks().add(loopTask2);
    }
    metadataService.registerWorkflowDef(workflowDef);
}
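The loopCondition strings above are JavaScript, evaluated against a binding where $ exposes the loop task's state, so $.loopTask['iteration'] is the current iteration count and $.value comes from the task's input. As a rough standalone illustration of those semantics (using the JDK's Nashorn engine, available on Java 8 through 14; Conductor's own evaluator differs in how it builds the $ document, and the class name is illustrative):

import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;

public class LoopConditionSketch {

    public static void main(String[] args) throws Exception {
        ScriptEngine js = new ScriptEngineManager().getEngineByName("nashorn");

        // Simulate the $ binding: loopTask has completed 1 iteration,
        // and the workflow input asked for 2.
        String script =
            "var $ = { loopTask: { iteration: 1 }, value: 2 };"
            + "if ($.loopTask['iteration'] < $.value) { true; } else { false; }";

        Object shouldContinue = js.eval(script);
        System.out.println("continue looping? " + shouldContinue); // true
    }
}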