Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix.
In the class AbstractWorkflowServiceTest, the method testRateLimiting:
@Test
public void testRateLimiting() {
// Create a dynamic workflow definition with one simple task
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setName("test_concurrency_limits");
workflowDef.setVersion(1);
TaskDef taskDef = new TaskDef();
taskDef.setName("test_task_with_ratelimits");
taskDef.setRateLimitFrequencyInSeconds(600);
taskDef.setRateLimitPerFrequency(1);
WorkflowTask workflowTask = new WorkflowTask();
workflowTask.setTaskReferenceName("test_task_with_ratelimits");
workflowTask.setName("test_task_with_ratelimits");
workflowTask.setType(UserTask.NAME);
workflowTask.setTaskDefinition(taskDef);
workflowDef.setTasks(Arrays.asList(workflowTask));
String workflowInstanceId1 = workflowExecutor.startWorkflow(workflowDef, new HashMap<>(), "", "", 0, "", "", "", new HashMap<>());
assertNotNull(workflowInstanceId1);
Workflow workflow1 = workflowExecutionService.getExecutionStatus(workflowInstanceId1, true);
assertNotNull(workflow1);
assertEquals(RUNNING, workflow1.getStatus());
// The very first task is the one that should be scheduled.
assertEquals(1, workflow1.getTasks().size());
UserTask userTask = new UserTask();
Task task = workflow1.getTasks().get(0);
workflowExecutor.executeSystemTask(userTask, task.getTaskId(), 30);
workflow1 = workflowExecutionService.getExecutionStatus(workflowInstanceId1, true);
String workflowInstanceId2 = workflowExecutor.startWorkflow(workflowDef, new HashMap<>(), "", "", 0, "", "", "", new HashMap<>());
assertNotNull(workflowInstanceId2);
Workflow workflow2 = workflowExecutionService.getExecutionStatus(workflowInstanceId2, true);
assertNotNull(workflow2);
assertEquals(RUNNING, workflow2.getStatus());
// The very first task is the one that should be scheduled.
assertEquals(1, workflow2.getTasks().size());
// Try to execute second task
Task task2 = workflow2.getTasks().get(0);
workflowExecutor.executeSystemTask(userTask, task2.getTaskId(), 30);
workflow2 = workflowExecutionService.getExecutionStatus(workflowInstanceId2, true);
task2 = workflow2.getTasks().get(0);
assertEquals(SCHEDULED, task2.getStatus());
}
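The final assertion holds because the inline TaskDef allows at most rateLimitPerFrequency = 1 execution of test_task_with_ratelimits per rateLimitFrequencyInSeconds = 600 seconds, so the second workflow's task remains in SCHEDULED until the window elapses. Below is a minimal sketch of registering such a rate-limited task definition up front through the metadata service; the task name, owner email, and the 60-second window are illustrative and not taken from the test above.
// Sketch: a standalone rate-limited task definition registered via the metadata service.
// "payment_task", the owner email, and the 60-second window are illustrative values.
TaskDef rateLimitedDef = new TaskDef();
rateLimitedDef.setName("payment_task");
rateLimitedDef.setRateLimitPerFrequency(1);
rateLimitedDef.setRateLimitFrequencyInSeconds(60);
rateLimitedDef.setOwnerEmail("team@example.com");
metadataService.registerTaskDef(Collections.singletonList(rateLimitedDef));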
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix.
In the class AbstractWorkflowServiceTest, the method testDoWhileTwoIteration:
@Test
public void testDoWhileTwoIteration() throws Exception {
try {
createDoWhileWorkflowWithIteration(2, false, true);
} catch (Exception e) {
// ignore: the DO_WHILE workflow definition may already have been registered by an earlier run
}
TaskDef taskDef = new TaskDef();
taskDef.setName("http1");
taskDef.setTimeoutSeconds(5);
taskDef.setRetryCount(1);
taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY);
taskDef.setRetryDelaySeconds(10);
metadataService.registerTaskDef(Arrays.asList(taskDef));
TaskDef taskDef2 = new TaskDef();
taskDef2.setName("http0");
taskDef2.setTimeoutSeconds(5);
taskDef2.setRetryCount(1);
taskDef2.setTimeoutPolicy(TimeoutPolicy.RETRY);
taskDef2.setRetryDelaySeconds(10);
metadataService.registerTaskDef(Arrays.asList(taskDef2));
TaskDef taskDef1 = new TaskDef();
taskDef1.setName("http2");
taskDef1.setTimeoutSeconds(5);
taskDef1.setRetryCount(1);
taskDef1.setTimeoutPolicy(TimeoutPolicy.RETRY);
taskDef1.setRetryDelaySeconds(10);
metadataService.registerTaskDef(Arrays.asList(taskDef1));
TaskDef taskDef3 = new TaskDef();
taskDef3.setName("http3");
taskDef3.setTimeoutSeconds(5);
taskDef3.setRetryCount(1);
taskDef3.setTimeoutPolicy(TimeoutPolicy.RETRY);
taskDef3.setRetryDelaySeconds(10);
metadataService.registerTaskDef(Arrays.asList(taskDef3));
Map<String, Object> input = new HashMap<>();
String workflowId = startOrLoadWorkflowExecution(DO_WHILE_WF + "_2", 1, "looptest", input, null, null);
System.out.println("testDoWhile.wfid=" + workflowId);
printTaskStatuses(workflowId, "initiated");
Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus());
Task task = workflowExecutionService.poll("HTTP", "test");
assertNotNull(task);
assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration())));
assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
task = workflowExecutionService.poll("FORK_JOIN", "test");
// fork task is completed
assertNull(task);
task = workflowExecutionService.poll("HTTP", "test");
assertNotNull(task);
assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration())));
assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
task = workflowExecutionService.poll("HTTP", "test");
assertNotNull(task);
assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration())));
assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
task = workflowExecutionService.poll("JOIN", "test");
// Both HTTP tasks are completed.
assertNull(task);
workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus());
task = workflowExecutionService.poll("HTTP", "test");
assertNotNull(task);
assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration())));
assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
task = workflowExecutionService.poll("FORK_JOIN", "test");
// fork task is completed.
assertNull(task);
task = workflowExecutionService.poll("HTTP", "test");
assertNotNull(task);
assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration())));
assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
task = workflowExecutionService.poll("HTTP", "test");
assertNotNull(task);
assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration())));
assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
task = workflowExecutionService.poll("JOIN", "test");
assertNull(task);
workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus());
task = workflowExecutionService.poll("HTTP", "test");
assertNotNull(task);
assertTrue(task.getReferenceTaskName().endsWith(TaskUtils.getLoopOverTaskRefNameSuffix(task.getIteration())));
assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
assertEquals("Found " + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus());
}
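For reference, a DO_WHILE loop like the one driven above is declared by a WorkflowTask carrying a loopCondition and a loopOver list. The sketch below assumes those standard WorkflowTask fields; the task names, reference names, and loop condition are illustrative and do not reproduce the createDoWhileWorkflowWithIteration helper used by the test.
// Sketch: a DO_WHILE task that loops an HTTP task for two iterations.
// Names and the loop condition are illustrative, not taken from the helper above.
WorkflowTask httpTask = new WorkflowTask();
httpTask.setName("http1");
httpTask.setTaskReferenceName("http1");
httpTask.setWorkflowTaskType(TaskType.HTTP);
WorkflowTask doWhileTask = new WorkflowTask();
doWhileTask.setName("loopTask");
doWhileTask.setTaskReferenceName("loopTask");
doWhileTask.setWorkflowTaskType(TaskType.DO_WHILE);
// Continue while fewer than two iterations have completed.
doWhileTask.setLoopCondition("if ($.loopTask['iteration'] < 2) { true; } else { false; }");
doWhileTask.setLoopOver(Arrays.asList(httpTask));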
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix.
In the class WorkflowStatusPublisherIntegrationTest, the method setUp:
@Before
public void setUp() {
TaskDef task = new TaskDef();
task.setName("junit_task_1");
task.setTimeoutSeconds(120);
task.setRetryCount(1);
metadataService.registerTaskDef(Collections.singletonList(task));
}
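The snippet does not show a matching teardown; a minimal sketch of one is below, assuming MetadataService exposes unregisterTaskDef(String) as in current Conductor versions. It is purely illustrative and not part of the test above.
// Sketch: optional cleanup between runs, assuming MetadataService#unregisterTaskDef(String) is available.
@After
public void tearDown() {
metadataService.unregisterTaskDef("junit_task_1");
}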
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix.
In the class AbstractWorkflowServiceTest, the method testSubWorkflowRetry:
@Test
public void testSubWorkflowRetry() {
String taskName = "junit_task_1";
TaskDef taskDef = notFoundSafeGetTaskDef(taskName);
int retryCount = taskDef.getRetryCount();
taskDef.setRetryCount(0);
metadataService.updateTaskDef(taskDef);
// create a workflow with sub-workflow
createSubWorkflow();
WorkflowDef found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1);
WorkflowTask workflowTask = found.getTasks().stream().filter(t -> t.getType().equals(SUB_WORKFLOW.name())).findAny().orElse(null);
// Set subworkflow task retry count to 1.
TaskDef subWorkflowTaskDef = new TaskDef();
subWorkflowTaskDef.setRetryCount(1);
subWorkflowTaskDef.setName("test_subworkflow_task");
subWorkflowTaskDef.setOwnerEmail("test@qbc.com");
workflowTask.setTaskDefinition(subWorkflowTaskDef);
metadataService.updateWorkflowDef(found);
// start the workflow
Map<String, Object> workflowInputParams = new HashMap<>();
workflowInputParams.put("param1", "param 1");
workflowInputParams.put("param3", "param 2");
workflowInputParams.put("wfName", LINEAR_WORKFLOW_T1_T2);
String workflowId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", workflowInputParams, null, null);
assertNotNull(workflowId);
Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
// poll and complete first task
Task task = workflowExecutionService.poll("junit_task_5", "test");
assertNotNull(task);
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
// Simulating SystemTaskWorkerCoordinator to execute async system tasks
String subWorkflowTaskId = workflow.getTaskByRefName("a2").getTaskId();
workflowExecutor.executeSystemTask(dummySubWorkflowSystemTask, subWorkflowTaskId, 1);
workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
assertNotNull(workflow.getTasks());
assertEquals(2, workflow.getTasks().size());
task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).findAny().orElse(null);
assertNotNull(task);
assertNotNull(task.getOutputData());
assertNotNull("Output: " + task.getOutputData().toString() + ", status: " + task.getStatus(), task.getSubWorkflowId());
String subWorkflowId = task.getSubWorkflowId();
workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
assertNotNull(workflow);
assertNotNull(workflow.getTasks());
assertEquals(workflowId, workflow.getParentWorkflowId());
assertEquals(RUNNING, workflow.getStatus());
// poll and fail the first task in sub-workflow
task = workflowExecutionService.poll("junit_task_1", "test");
task.setStatus(FAILED);
workflowExecutionService.updateTask(task);
Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
assertNotNull(subWorkflow);
assertEquals(WorkflowStatus.FAILED, subWorkflow.getStatus());
subWorkflowTaskId = subWorkflow.getParentWorkflowTaskId();
workflowExecutor.executeSystemTask(subworkflow, subWorkflowTaskId, 1);
// Ensure failed Subworkflow task is rescheduled.
workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
assertEquals(RUNNING, workflow.getStatus());
task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).filter(t -> t.getStatus().equals(SCHEDULED)).findAny().orElse(null);
assertNotNull(task);
subWorkflowTaskId = task.getTaskId();
workflowExecutor.executeSystemTask(subworkflow, task.getTaskId(), 1);
// Get the latest workflow and task, and then acquire latest subWorkflowId
workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(SUB_WORKFLOW.name())).filter(t -> t.getStatus().equals(IN_PROGRESS)).findAny().orElse(null);
assertNotNull(task);
assertNotNull("Retried task in scheduled state shouldn't have a SubworkflowId yet", task.getSubWorkflowId());
subWorkflowId = task.getSubWorkflowId();
// poll and fail the first task in sub-workflow
task = workflowExecutionService.poll("junit_task_1", "test");
task.setStatus(FAILED);
workflowExecutionService.updateTask(task);
workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
assertEquals(WorkflowStatus.FAILED, workflow.getStatus());
// Retry the failed sub workflow
workflowExecutor.retry(subWorkflowId, false);
task = workflowExecutionService.poll("junit_task_1", "test");
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
assertNotNull(subWorkflow);
assertEquals(RUNNING, subWorkflow.getStatus());
task = workflowExecutionService.poll("junit_task_2", "test");
assertEquals(subWorkflowId, task.getWorkflowInstanceId());
String uuid = UUID.randomUUID().toString();
task.getOutputData().put("uuid", uuid);
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
assertNotNull(subWorkflow);
assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus());
assertNotNull(subWorkflow.getOutput());
assertTrue(subWorkflow.getOutput().containsKey("o1"));
assertTrue(subWorkflow.getOutput().containsKey("o2"));
assertEquals("sub workflow input param1", subWorkflow.getOutput().get("o1"));
assertEquals(uuid, subWorkflow.getOutput().get("o2"));
// Simulating SystemTaskWorkerCoordinator
workflowExecutor.executeSystemTask(subworkflow, subWorkflow.getParentWorkflowTaskId(), 1);
workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
assertEquals(RUNNING, workflow.getStatus());
task = workflowExecutionService.poll("junit_task_6", "test");
assertNotNull(task);
task.setStatus(COMPLETED);
workflowExecutionService.updateTask(task);
workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
assertNotNull(workflow);
assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
// reset retry count
taskDef = notFoundSafeGetTaskDef(taskName);
taskDef.setRetryCount(retryCount);
metadataService.updateTaskDef(taskDef);
workflowTask = found.getTasks().stream().filter(t -> t.getType().equals(SUB_WORKFLOW.name())).findAny().orElse(null);
workflowTask.setTaskDefinition(null);
metadataService.updateWorkflowDef(found);
}
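The retry behaviour above hinges on the inline TaskDef with retryCount = 1 attached to the SUB_WORKFLOW task. A sketch of how such a task is typically wired up follows, assuming the standard SubWorkflowParams API; it does not reproduce the createSubWorkflow() helper, and the sub-workflow name and reference name are borrowed from the test only for illustration.
// Sketch: a SUB_WORKFLOW task with an inline TaskDef so a failed sub-workflow task is retried once.
// Assumes the standard SubWorkflowParams API; the structure is illustrative, not the createSubWorkflow() helper.
SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
subWorkflowParams.setName(LINEAR_WORKFLOW_T1_T2);
subWorkflowParams.setVersion(1);
WorkflowTask subWorkflowTask = new WorkflowTask();
subWorkflowTask.setName("a2");
subWorkflowTask.setTaskReferenceName("a2");
subWorkflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW);
subWorkflowTask.setSubWorkflowParam(subWorkflowParams);
TaskDef subWorkflowTaskDef = new TaskDef();
subWorkflowTaskDef.setName("test_subworkflow_task");
subWorkflowTaskDef.setRetryCount(1);
subWorkflowTaskDef.setOwnerEmail("test@qbc.com");
subWorkflowTask.setTaskDefinition(subWorkflowTaskDef);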
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix.
In the class AbstractWorkflowServiceTest, the method registerKafkaWorkflow:
private void registerKafkaWorkflow() {
System.setProperty("STACK_KAFKA", "test_kafka_topic");
TaskDef templatedTask = new TaskDef();
templatedTask.setName("templated_kafka_task");
templatedTask.setRetryCount(0);
Map<String, Object> kafkaRequest = new HashMap<>();
kafkaRequest.put("topic", "${STACK_KAFKA}");
kafkaRequest.put("bootStrapServers", "localhost:9092");
Map<String, Object> value = new HashMap<>();
value.put("inputPaths", Arrays.asList("${workflow.input.path1}", "${workflow.input.path2}"));
value.put("requestDetails", "${workflow.input.requestDetails}");
value.put("outputPath", "${workflow.input.outputPath}");
kafkaRequest.put("value", value);
templatedTask.getInputTemplate().put("kafka_request", kafkaRequest);
metadataService.registerTaskDef(Arrays.asList(templatedTask));
WorkflowDef templateWf = new WorkflowDef();
templateWf.setName("template_kafka_workflow");
WorkflowTask wft = new WorkflowTask();
wft.setName(templatedTask.getName());
wft.setWorkflowTaskType(TaskType.KAFKA_PUBLISH);
wft.setTaskReferenceName("t0");
templateWf.getTasks().add(wft);
templateWf.setSchemaVersion(2);
metadataService.registerWorkflowDef(templateWf);
}
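The kafka_request input template above is resolved at start time: ${STACK_KAFKA} comes from the system property set at the top of the method, and the ${workflow.input.*} expressions are filled from the workflow's input map. A sketch of starting the registered workflow with matching inputs follows; only the input keys come from the template, the concrete values and the correlation id are illustrative.
// Sketch: starting template_kafka_workflow with the inputs the template references.
// The paths, request details, and correlation id are illustrative values.
Map<String, Object> input = new HashMap<>();
input.put("path1", "file://path1");
input.put("path2", "file://path2");
input.put("outputPath", "s3://bucket/outputPath");
input.put("requestDetails", Collections.singletonMap("key", "value"));
String workflowId = startOrLoadWorkflowExecution("template_kafka_workflow", 1, "testKafkaTemplate", input, null, null);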