Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
From the class AbstractWorkflowServiceTest, method testWorkflowWithNoTasks:
@Test
public void testWorkflowWithNoTasks() {
    WorkflowDef empty = new WorkflowDef();
    empty.setName("empty_workflow");
    empty.setSchemaVersion(2);
    metadataService.registerWorkflowDef(empty);

    String id = startOrLoadWorkflowExecution(empty.getName(), 1, "testWorkflowWithNoTasks", new HashMap<>(), null, null);
    assertNotNull(id);

    Workflow workflow = workflowExecutionService.getExecutionStatus(id, true);
    assertNotNull(workflow);
    assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
    assertEquals(0, workflow.getTasks().size());
}
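For context, startOrLoadWorkflowExecution is the test harness's wrapper around starting a workflow. A rough client-side equivalent, sketched under the assumption that the conductor-client module's WorkflowClient and the StartWorkflowRequest class from the same common package are available (the server URL and correlation id are illustrative):

// Sketch (assumption): starting the same empty workflow through the Java client.
WorkflowClient workflowClient = new WorkflowClient();
workflowClient.setRootURI("http://localhost:8080/api/"); // illustrative server URL

StartWorkflowRequest request = new StartWorkflowRequest();
request.setName("empty_workflow");
request.setVersion(1);
request.setCorrelationId("testWorkflowWithNoTasks");
request.setInput(new HashMap<>());

String workflowId = workflowClient.startWorkflow(request);

Because the definition has no tasks, the executor completes the workflow as soon as it starts, which is what the assertions above verify.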
From the class AbstractWorkflowServiceTest, method init:
@Before
public void init() {
    System.setProperty("EC2_REGION", "us-east-1");
    System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c");
    if (registered) {
        return;
    }
    WorkflowContext.set(new WorkflowContext("junit_app"));

    for (int i = 0; i < 21; i++) {
        String name = "junit_task_" + i;
        if (notFoundSafeGetTaskDef(name) != null) {
            continue;
        }
        TaskDef task = new TaskDef();
        task.setName(name);
        task.setTimeoutSeconds(120);
        task.setRetryCount(RETRY_COUNT);
        metadataService.registerTaskDef(Collections.singletonList(task));
    }

    for (int i = 0; i < 5; i++) {
        String name = "junit_task_0_RT_" + i;
        if (notFoundSafeGetTaskDef(name) != null) {
            continue;
        }
        TaskDef task = new TaskDef();
        task.setName(name);
        task.setTimeoutSeconds(120);
        task.setRetryCount(0);
        metadataService.registerTaskDef(Collections.singletonList(task));
    }

    TaskDef task = new TaskDef();
    task.setName("short_time_out");
    task.setTimeoutSeconds(5);
    task.setRetryCount(RETRY_COUNT);
    metadataService.registerTaskDef(Collections.singletonList(task));

    WorkflowDef def = new WorkflowDef();
    def.setName(LINEAR_WORKFLOW_T1_T2);
    def.setDescription(def.getName());
    def.setVersion(1);
    def.setInputParameters(Arrays.asList("param1", "param2"));
    Map<String, Object> outputParameters = new HashMap<>();
    outputParameters.put("o1", "${workflow.input.param1}");
    outputParameters.put("o2", "${t2.output.uuid}");
    outputParameters.put("o3", "${t1.output.op}");
    def.setOutputParameters(outputParameters);
    def.setFailureWorkflow("$workflow.input.failureWfName");
    def.setSchemaVersion(2);

    LinkedList<WorkflowTask> wftasks = new LinkedList<>();
    WorkflowTask wft1 = new WorkflowTask();
    wft1.setName("junit_task_1");
    Map<String, Object> ip1 = new HashMap<>();
    ip1.put("p1", "${workflow.input.param1}");
    ip1.put("p2", "${workflow.input.param2}");
    ip1.put("someNullKey", null);
    wft1.setInputParameters(ip1);
    wft1.setTaskReferenceName("t1");

    WorkflowTask wft2 = new WorkflowTask();
    wft2.setName("junit_task_2");
    Map<String, Object> ip2 = new HashMap<>();
    ip2.put("tp1", "${workflow.input.param1}");
    ip2.put("tp2", "${t1.output.op}");
    wft2.setInputParameters(ip2);
    wft2.setTaskReferenceName("t2");

    wftasks.add(wft1);
    wftasks.add(wft2);
    def.setTasks(wftasks);

    WorkflowTask wft3 = new WorkflowTask();
    wft3.setName("junit_task_3");
    Map<String, Object> ip3 = new HashMap<>();
    ip3.put("tp1", "${workflow.input.param1}");
    ip3.put("tp2", "${t1.output.op}");
    wft3.setInputParameters(ip3);
    wft3.setTaskReferenceName("t3");

    WorkflowDef def2 = new WorkflowDef();
    def2.setName(TEST_WORKFLOW);
    def2.setDescription(def2.getName());
    def2.setVersion(1);
    def2.setInputParameters(Arrays.asList("param1", "param2"));
    LinkedList<WorkflowTask> wftasks2 = new LinkedList<>();
    wftasks2.add(wft1);
    wftasks2.add(wft2);
    wftasks2.add(wft3);
    def2.setSchemaVersion(2);
    def2.setTasks(wftasks2);

    try {
        WorkflowDef[] wdsf = new WorkflowDef[] { def, def2 };
        for (WorkflowDef wd : wdsf) {
            metadataService.updateWorkflowDef(wd);
        }
        createForkJoinWorkflow();
        def.setName(LONG_RUNNING);
        metadataService.updateWorkflowDef(def);
    } catch (Exception e) {
        // exceptions during registration are swallowed; definitions may already exist from a previous run
    }
    taskDefs = metadataService.getTaskDefs();
    registered = true;
}
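The two registration loops above differ only in the task-name prefix and retry count. A minimal sketch of the same step factored into a helper (registerTaskIfAbsent is hypothetical and not part of the test class; it reuses only the calls shown above):

// Hypothetical helper: register a TaskDef only if it is not already known to the metadata service.
private void registerTaskIfAbsent(String name, int timeoutSeconds, int retryCount) {
    // mirror the notFoundSafeGetTaskDef check used in init()
    if (notFoundSafeGetTaskDef(name) != null) {
        return;
    }
    TaskDef task = new TaskDef();
    task.setName(name);
    task.setTimeoutSeconds(timeoutSeconds);
    task.setRetryCount(retryCount);
    metadataService.registerTaskDef(Collections.singletonList(task));
}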
From the class AbstractWorkflowServiceTest, method createForkJoinNestedWorkflow:
private void createForkJoinNestedWorkflow(int schemaVersion) {
    WorkflowDef def = new WorkflowDef();
    def.setName(FORK_JOIN_NESTED_WF);
    def.setDescription(def.getName());
    def.setVersion(1);
    def.setSchemaVersion(schemaVersion);
    def.setInputParameters(Arrays.asList("param1", "param2"));

    WorkflowTask[] tasks = new WorkflowTask[21];
    Map<String, Object> ip1 = new HashMap<>();
    if (schemaVersion <= 1) {
        ip1.put("p1", "workflow.input.param1");
        ip1.put("p2", "workflow.input.param2");
        ip1.put("case", "workflow.input.case");
    } else {
        ip1.put("p1", "${workflow.input.param1}");
        ip1.put("p2", "${workflow.input.param2}");
        ip1.put("case", "${workflow.input.case}");
    }
    for (int i = 10; i < 21; i++) {
        WorkflowTask wft = new WorkflowTask();
        wft.setName("junit_task_" + i);
        wft.setInputParameters(ip1);
        wft.setTaskReferenceName("t" + i);
        tasks[i] = wft;
    }

    WorkflowTask d1 = new WorkflowTask();
    d1.setType(TaskType.DECISION.name());
    d1.setName("Decision");
    d1.setTaskReferenceName("d1");
    d1.setInputParameters(ip1);
    d1.setDefaultCase(Arrays.asList(tasks[18], tasks[20]));
    d1.setCaseValueParam("case");
    Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
    decisionCases.put("a", Arrays.asList(tasks[16], tasks[19], tasks[20]));
    decisionCases.put("b", Arrays.asList(tasks[17], tasks[20]));
    d1.setDecisionCases(decisionCases);

    WorkflowTask fork2 = new WorkflowTask();
    fork2.setType(TaskType.FORK_JOIN.name());
    fork2.setName("fork2");
    fork2.setTaskReferenceName("fork2");
    fork2.getForkTasks().add(Arrays.asList(tasks[12], tasks[14]));
    fork2.getForkTasks().add(Arrays.asList(tasks[13], d1));

    WorkflowTask join2 = new WorkflowTask();
    join2.setType(TaskType.JOIN.name());
    join2.setTaskReferenceName("join2");
    join2.setJoinOn(Arrays.asList("t14", "t20"));

    WorkflowTask fork1 = new WorkflowTask();
    fork1.setType(TaskType.FORK_JOIN.name());
    fork1.setTaskReferenceName("fork1");
    fork1.getForkTasks().add(Arrays.asList(tasks[11]));
    fork1.getForkTasks().add(Arrays.asList(fork2, join2));

    WorkflowTask join1 = new WorkflowTask();
    join1.setType(TaskType.JOIN.name());
    join1.setTaskReferenceName("join1");
    join1.setJoinOn(Arrays.asList("t11", "join2"));

    def.getTasks().add(fork1);
    def.getTasks().add(join1);
    def.getTasks().add(tasks[15]);
    metadataService.updateWorkflowDef(def);
}
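For comparison with the nested definition above, a minimal non-nested FORK_JOIN/JOIN pair could be built as in the sketch below. It reuses the tasks array and the same setters; the reference names ("fork_simple", "join_simple") are illustrative only.

// Sketch: a single fork with two branches, joined on the last task of each branch.
WorkflowTask fork = new WorkflowTask();
fork.setType(TaskType.FORK_JOIN.name());
fork.setName("fork_simple");
fork.setTaskReferenceName("fork_simple");
fork.getForkTasks().add(Arrays.asList(tasks[10]));            // branch 1: t10
fork.getForkTasks().add(Arrays.asList(tasks[11], tasks[12])); // branch 2: t11 -> t12

WorkflowTask join = new WorkflowTask();
join.setType(TaskType.JOIN.name());
join.setTaskReferenceName("join_simple");
join.setJoinOn(Arrays.asList("t10", "t12")); // wait for the tail of each branch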
From the class AbstractWorkflowServiceTest, method testSimpleWorkflowNullInputOutputs:
@Test
public void testSimpleWorkflowNullInputOutputs() throws Exception {
    clearWorkflows();
    WorkflowDef workflowDefinition = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1);
    // Assert null keys are preserved in task definition's input parameters.
    assertTrue(workflowDefinition.getTasks().get(0).getInputParameters().containsKey("someNullKey"));

    String correlationId = "unit_test_1";
    Map<String, Object> input = new HashMap<>();
    String inputParam1 = "p1 value";
    input.put("param1", inputParam1);
    input.put("param2", null);
    String workflowInstanceId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null);
    logger.info("testSimpleWorkflow.wfid= {}", workflowInstanceId);
    assertNotNull(workflowInstanceId);

    Workflow workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
    assertNotNull(workflow);
    assertEquals(RUNNING, workflow.getStatus());
    // The very first task is the one that should be scheduled.
    assertEquals(1, workflow.getTasks().size());
    // Assert null values are passed through workflow input.
    assertNull(workflow.getInput().get("param2"));
    // Assert null values are carried from task definition to task execution.
    assertNull(workflow.getTasks().get(0).getInputData().get("someNullKey"));

    // Polling for the first task
    Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
    assertNotNull(task);
    assertEquals("junit_task_1", task.getTaskType());
    assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
    assertEquals(workflowInstanceId, task.getWorkflowInstanceId());

    task.setStatus(COMPLETED);
    task.getOutputData().put("someKey", null);
    Map<String, Object> someOtherKey = new HashMap<>();
    someOtherKey.put("a", 1);
    someOtherKey.put("A", null);
    task.getOutputData().put("someOtherKey", someOtherKey);
    workflowExecutionService.updateTask(task);

    workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
    assertNotNull(workflow);
    task = workflow.getTasks().get(0);
    // Assert null keys are preserved in task outputs.
    assertTrue(task.getOutputData().containsKey("someKey"));
    assertNull(task.getOutputData().get("someKey"));
    someOtherKey = (Map<String, Object>) task.getOutputData().get("someOtherKey");
    assertTrue(someOtherKey.containsKey("A"));
    assertNull(someOtherKey.get("A"));
}
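The poll/ack/update sequence above is the same cycle a worker runs for any task type. A condensed sketch for the second task of this workflow follows (the worker id is illustrative; the calls are the ones used in the test, and the "uuid" output feeds the o2 output parameter defined in init()):

// Sketch: one poll-ack-complete cycle, as a worker for junit_task_2 would perform it.
Task polled = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
if (polled != null) {
    workflowExecutionService.ackTaskReceived(polled.getTaskId());
    polled.setStatus(COMPLETED);
    polled.getOutputData().put("uuid", UUID.randomUUID().toString()); // consumed by ${t2.output.uuid}
    workflowExecutionService.updateTask(polled);
}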
From the class AbstractWorkflowServiceTest, method testTaskTimeout:
@Test
public void testTaskTimeout() throws Exception {
    String taskName = "junit_task_1";
    TaskDef taskDef = notFoundSafeGetTaskDef(taskName);
    taskDef.setRetryCount(1);
    taskDef.setTimeoutSeconds(1);
    taskDef.setRetryDelaySeconds(0);
    taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY);
    metadataService.updateTaskDef(taskDef);

    WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1);
    assertNotNull(found.getFailureWorkflow());
    assertFalse(StringUtils.isBlank(found.getFailureWorkflow()));

    String correlationId = "unit_test_1" + UUID.randomUUID().toString();
    Map<String, Object> input = new HashMap<>();
    input.put("param1", "p1 value");
    input.put("param2", "p2 value");
    input.put("failureWfName", "FanInOutTest");
    String workflowId = startOrLoadWorkflowExecution("timeout", LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null);
    assertNotNull(workflowId);

    // Ensure that we have a workflow queued up for evaluation here...
    long size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE);
    assertEquals(1, size);

    // If we get the full workflow here, the last task should be completed and the next task should be scheduled.
    Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
    assertNotNull(workflow);
    assertEquals(RUNNING, workflow.getStatus());
    assertEquals("found: " + workflow.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 1, workflow.getTasks().size());

    Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
    assertNotNull(task);
    assertEquals(workflowId, task.getWorkflowInstanceId());
    assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));

    // Ensure that we have a workflow queued up for evaluation here...
    size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE);
    assertEquals(1, size);

    Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS);
    workflowSweeper.sweep(Collections.singletonList(workflowId), workflowExecutor, workflowRepairService);
    workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
    assertNotNull(workflow);
    assertEquals("found: " + workflow.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 2, workflow.getTasks().size());

    Task task1 = workflow.getTasks().get(0);
    assertEquals(TIMED_OUT, task1.getStatus());
    Task task2 = workflow.getTasks().get(1);
    assertEquals(SCHEDULED, task2.getStatus());

    task = workflowExecutionService.poll(task2.getTaskDefName(), "task1.junit.worker");
    assertNotNull(task);
    assertEquals(workflowId, task.getWorkflowInstanceId());
    assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));

    Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS);
    workflowExecutor.decide(workflowId);

    workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
    assertNotNull(workflow);
    assertEquals(2, workflow.getTasks().size());
    assertEquals(TIMED_OUT, workflow.getTasks().get(0).getStatus());
    assertEquals(TIMED_OUT, workflow.getTasks().get(1).getStatus());
    assertEquals(WorkflowStatus.TIMED_OUT, workflow.getStatus());
    assertEquals(1, queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE));

    taskDef.setTimeoutSeconds(0);
    taskDef.setRetryCount(RETRY_COUNT);
    metadataService.updateTaskDef(taskDef);
}
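The test exercises TimeoutPolicy.RETRY; a task definition can also be configured to fail the whole workflow on timeout. A sketch of that stricter configuration follows, assuming the TIME_OUT_WF constant of the same TimeoutPolicy enum (the values are illustrative, and the definition should be restored afterwards just as the test does):

// Sketch: a task definition that times out the whole workflow instead of retrying.
TaskDef strict = notFoundSafeGetTaskDef("junit_task_1");
strict.setTimeoutSeconds(1);
strict.setRetryCount(0);
strict.setRetryDelaySeconds(0);
strict.setTimeoutPolicy(TimeoutPolicy.TIME_OUT_WF); // assumed enum constant
metadataService.updateTaskDef(strict);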