Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
The class MySQLMetadataDAOTest, method testDuplicateWorkflowDef.
@Test
public void testDuplicateWorkflowDef() throws Exception {
    thrown.expect(ApplicationException.class);
    thrown.expectMessage("Workflow with testDuplicate.1 already exists!");
    thrown.expect(hasProperty("code", is(CONFLICT)));
    WorkflowDef def = new WorkflowDef();
    def.setName("testDuplicate");
    def.setVersion(1);
    dao.createWorkflowDef(def);
    dao.createWorkflowDef(def);
}
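The snippet relies on JUnit 4's ExpectedException rule (the thrown field) and on Hamcrest matchers. A minimal sketch of the declarations it assumes follows; the exact field name and the ApplicationException package path are assumptions, not taken from the snippet itself:

import org.junit.Rule;
import org.junit.rules.ExpectedException;
import static org.hamcrest.Matchers.hasProperty;
import static org.hamcrest.Matchers.is;
// Assumed location of the exception and its Code enum:
import static com.netflix.conductor.core.execution.ApplicationException.Code.CONFLICT;

// Rule declaration the test above relies on; the field name matches the snippet.
@Rule
public ExpectedException thrown = ExpectedException.none();

Uniqueness here is on the name/version pair, which is why the second createWorkflowDef call with the same "testDuplicate" version 1 definition is expected to fail with a CONFLICT code.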
Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
The class AbstractWorkflowServiceTest, method createDecisionWorkflow.
private void createDecisionWorkflow() {
    WorkflowDef workflowDef = new WorkflowDef();
    workflowDef.setName(DECISION_WF);
    workflowDef.setDescription(workflowDef.getName());
    workflowDef.setVersion(1);
    Map<String, Object> inputParams = new HashMap<>();
    inputParams.put("p1", "${workflow.input.param1}");
    inputParams.put("p2", "${workflow.input.param2}");
    // left decision
    WorkflowTask leftCaseTask1 = new WorkflowTask();
    leftCaseTask1.setName("junit_task_1");
    leftCaseTask1.setInputParameters(inputParams);
    leftCaseTask1.setTaskReferenceName("t1");
    WorkflowTask leftCaseTask2 = new WorkflowTask();
    leftCaseTask2.setName("junit_task_2");
    leftCaseTask2.setInputParameters(inputParams);
    leftCaseTask2.setTaskReferenceName("t2");
    // default decision
    WorkflowTask defaultCaseTask5 = new WorkflowTask();
    defaultCaseTask5.setName("junit_task_5");
    defaultCaseTask5.setInputParameters(inputParams);
    defaultCaseTask5.setTaskReferenceName("t5");
    // decision task
    Map<String, Object> decisionInput = new HashMap<>();
    decisionInput.put("case", "${workflow.input.case}");
    WorkflowTask decisionTask = new WorkflowTask();
    decisionTask.setType(TaskType.DECISION.name());
    decisionTask.setCaseValueParam("case");
    decisionTask.setName("decisionTask");
    decisionTask.setTaskReferenceName("decisionTask");
    decisionTask.setInputParameters(decisionInput);
    Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
    decisionCases.put("c", Arrays.asList(leftCaseTask1, leftCaseTask2));
    decisionTask.setDefaultCase(Collections.singletonList(defaultCaseTask5));
    decisionTask.setDecisionCases(decisionCases);
    WorkflowTask workflowTask20 = new WorkflowTask();
    workflowTask20.setName("junit_task_20");
    workflowTask20.setInputParameters(inputParams);
    workflowTask20.setTaskReferenceName("t20");
    workflowDef.getTasks().add(decisionTask);
    workflowDef.getTasks().add(workflowTask20);
    metadataService.updateWorkflowDef(workflowDef);
}
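As a rough usage sketch (not part of the original class), the workflow registered above can be started with the test class's startOrLoadWorkflowExecution helper, which appears in the testTaskDefTemplate example further down; the input values and correlation id below are illustrative:

// Sketch: start the decision workflow so the "c" case (t1 -> t2) is taken.
// Any other value for "case" falls through to the default case task (t5),
// and t20 always runs after the DECISION block.
Map<String, Object> input = new HashMap<>();
input.put("param1", "p1 value");
input.put("param2", "p2 value");
input.put("case", "c");
String workflowId = startOrLoadWorkflowExecution(DECISION_WF, 1, "decision_wf_junit", input, null, null);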
Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
The class AbstractWorkflowServiceTest, method createForkJoinDecisionWorkflow.
private void createForkJoinDecisionWorkflow() {
    WorkflowDef workflowDef = new WorkflowDef();
    workflowDef.setName(FORK_JOIN_DECISION_WF);
    workflowDef.setDescription(workflowDef.getName());
    workflowDef.setVersion(1);
    Map<String, Object> inputParams = new HashMap<>();
    inputParams.put("p1", "${workflow.input.param1}");
    inputParams.put("p2", "${workflow.input.param2}");
    // left decision
    WorkflowTask leftCaseTask1 = new WorkflowTask();
    leftCaseTask1.setName("junit_task_1");
    leftCaseTask1.setInputParameters(inputParams);
    leftCaseTask1.setTaskReferenceName("t1");
    WorkflowTask leftCaseTask2 = new WorkflowTask();
    leftCaseTask2.setName("junit_task_2");
    leftCaseTask2.setInputParameters(inputParams);
    leftCaseTask2.setTaskReferenceName("t2");
    // default decision
    WorkflowTask defaultCaseTask5 = new WorkflowTask();
    defaultCaseTask5.setName("junit_task_5");
    defaultCaseTask5.setInputParameters(inputParams);
    defaultCaseTask5.setTaskReferenceName("t5");
    // left fork
    // decision task
    Map<String, Object> decisionInput = new HashMap<>();
    decisionInput.put("case", "${workflow.input.case}");
    WorkflowTask decisionTask = new WorkflowTask();
    decisionTask.setType(TaskType.DECISION.name());
    decisionTask.setCaseValueParam("case");
    decisionTask.setName("decisionTask");
    decisionTask.setTaskReferenceName("decisionTask");
    decisionTask.setInputParameters(decisionInput);
    Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
    decisionCases.put("c", Arrays.asList(leftCaseTask1, leftCaseTask2));
    decisionTask.setDefaultCase(Collections.singletonList(defaultCaseTask5));
    decisionTask.setDecisionCases(decisionCases);
    WorkflowTask workflowTask20 = new WorkflowTask();
    workflowTask20.setName("junit_task_20");
    workflowTask20.setInputParameters(inputParams);
    workflowTask20.setTaskReferenceName("t20");
    // right fork
    WorkflowTask rightForkTask10 = new WorkflowTask();
    rightForkTask10.setName("junit_task_10");
    rightForkTask10.setInputParameters(inputParams);
    rightForkTask10.setTaskReferenceName("t10");
    // fork task
    WorkflowTask forkTask = new WorkflowTask();
    forkTask.setName("forkTask");
    forkTask.setType(TaskType.FORK_JOIN.name());
    forkTask.setTaskReferenceName("forkTask");
    forkTask.getForkTasks().add(Arrays.asList(decisionTask, workflowTask20));
    forkTask.getForkTasks().add(Collections.singletonList(rightForkTask10));
    // join task
    WorkflowTask joinTask = new WorkflowTask();
    joinTask.setName("joinTask");
    joinTask.setType(TaskType.JOIN.name());
    joinTask.setTaskReferenceName("joinTask");
    joinTask.setJoinOn(Arrays.asList("t20", "t10"));
    workflowDef.getTasks().add(forkTask);
    workflowDef.getTasks().add(joinTask);
    metadataService.updateWorkflowDef(workflowDef);
}
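The resulting graph forks into two branches: the left branch runs the decision block followed by t20, the right branch runs t10, and the JOIN waits on t20 and t10. A start sketch along the lines of the previous example (illustrative values, not from the original class):

// Sketch: start the fork/join workflow; the JOIN completes only after both
// the left branch tail (t20) and the right branch task (t10) have finished.
Map<String, Object> input = new HashMap<>();
input.put("param1", "p1 value");
input.put("param2", "p2 value");
input.put("case", "c");
String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_DECISION_WF, 1, "fork_join_decision_junit", input, null, null);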
Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
The class AbstractWorkflowServiceTest, method testTaskDefTemplate.
@Test
public void testTaskDefTemplate() throws Exception {
    System.setProperty("STACK2", "test_stack");
    TaskDef templatedTask = new TaskDef();
    templatedTask.setName("templated_task");
    Map<String, Object> httpRequest = new HashMap<>();
    httpRequest.put("method", "GET");
    httpRequest.put("vipStack", "${STACK2}");
    httpRequest.put("uri", "/get/something");
    Map<String, Object> body = new HashMap<>();
    body.put("inputPaths", Arrays.asList("${workflow.input.path1}", "${workflow.input.path2}"));
    body.put("requestDetails", "${workflow.input.requestDetails}");
    body.put("outputPath", "${workflow.input.outputPath}");
    httpRequest.put("body", body);
    templatedTask.getInputTemplate().put("http_request", httpRequest);
    metadataService.registerTaskDef(Arrays.asList(templatedTask));
    WorkflowDef templateWf = new WorkflowDef();
    templateWf.setName("template_workflow");
    WorkflowTask wft = new WorkflowTask();
    wft.setName(templatedTask.getName());
    wft.setWorkflowTaskType(TaskType.SIMPLE);
    wft.setTaskReferenceName("t0");
    templateWf.getTasks().add(wft);
    templateWf.setSchemaVersion(2);
    metadataService.registerWorkflowDef(templateWf);
    Map<String, Object> requestDetails = new HashMap<>();
    requestDetails.put("key1", "value1");
    requestDetails.put("key2", 42);
    Map<String, Object> input = new HashMap<>();
    input.put("path1", "file://path1");
    input.put("path2", "file://path2");
    input.put("outputPath", "s3://bucket/outputPath");
    input.put("requestDetails", requestDetails);
    String id = startOrLoadWorkflowExecution(templateWf.getName(), 1, "testTaskDefTemplate", input, null, null);
    assertNotNull(id);
    Workflow workflow = workflowExecutionService.getExecutionStatus(id, true);
    assertNotNull(workflow);
    assertTrue(workflow.getReasonForIncompletion(), !workflow.getStatus().isTerminal());
    assertEquals(1, workflow.getTasks().size());
    Task task = workflow.getTasks().get(0);
    Map<String, Object> taskInput = task.getInputData();
    assertNotNull(taskInput);
    assertTrue(taskInput.containsKey("http_request"));
    assertTrue(taskInput.get("http_request") instanceof Map);
    // Use the commented sysout to get the string value
    // System.out.println(om.writeValueAsString(om.writeValueAsString(taskInput)));
    String expected = "{\"http_request\":{\"method\":\"GET\",\"vipStack\":\"test_stack\",\"body\":{\"requestDetails\":{\"key1\":\"value1\",\"key2\":42},\"outputPath\":\"s3://bucket/outputPath\",\"inputPaths\":[\"file://path1\",\"file://path2\"]},\"uri\":\"/get/something\"}}";
    assertEquals(expected, objectMapper.writeValueAsString(taskInput));
}
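The final assertion compares the serialized map against a fixed string, which depends on the map's key ordering at serialization time. A more order-tolerant variant is sketched below; it is not part of the original test and assumes Jackson's ObjectMapper and JsonNode are on the classpath:

// Sketch: compare parsed JSON trees instead of raw strings, so key ordering
// in the resolved task input does not affect the assertion.
JsonNode expectedTree = objectMapper.readTree(expected);
JsonNode actualTree = objectMapper.readTree(objectMapper.writeValueAsString(taskInput));
assertEquals(expectedTree, actualTree);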
Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
The class AbstractWorkflowServiceTest, method createWFWithResponseTimeout.
private void createWFWithResponseTimeout() {
    TaskDef task = new TaskDef();
    task.setName("task_rt");
    task.setTimeoutSeconds(120);
    task.setRetryCount(RETRY_COUNT);
    task.setRetryDelaySeconds(0);
    task.setResponseTimeoutSeconds(10);
    metadataService.registerTaskDef(Collections.singletonList(task));
    WorkflowDef def = new WorkflowDef();
    def.setName("RTOWF");
    def.setDescription(def.getName());
    def.setVersion(1);
    def.setInputParameters(Arrays.asList("param1", "param2"));
    Map<String, Object> outputParameters = new HashMap<>();
    outputParameters.put("o1", "${workflow.input.param1}");
    outputParameters.put("o2", "${t2.output.uuid}");
    outputParameters.put("o3", "${t1.output.op}");
    def.setOutputParameters(outputParameters);
    def.setFailureWorkflow("$workflow.input.failureWfName");
    def.setSchemaVersion(2);
    LinkedList<WorkflowTask> wftasks = new LinkedList<>();
    WorkflowTask wft1 = new WorkflowTask();
    wft1.setName("task_rt");
    Map<String, Object> ip1 = new HashMap<>();
    ip1.put("p1", "${workflow.input.param1}");
    ip1.put("p2", "${workflow.input.param2}");
    wft1.setInputParameters(ip1);
    wft1.setTaskReferenceName("task_rt_t1");
    WorkflowTask wft2 = new WorkflowTask();
    wft2.setName("junit_task_2");
    Map<String, Object> ip2 = new HashMap<>();
    ip2.put("tp1", "${workflow.input.param1}");
    ip2.put("tp2", "${t1.output.op}");
    wft2.setInputParameters(ip2);
    wft2.setTaskReferenceName("t2");
    wftasks.add(wft1);
    wftasks.add(wft2);
    def.setTasks(wftasks);
    metadataService.updateWorkflowDef(def);
}
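The task_rt definition sets responseTimeoutSeconds to 10, so a polled task_rt instance that is not updated within roughly 10 seconds is expected to be timed out and rescheduled according to its retry settings. A start sketch using the same helper as in the earlier examples (illustrative values only, not from the original class):

// Sketch: start the response-timeout workflow registered above.
Map<String, Object> input = new HashMap<>();
input.put("param1", "p1 value");
input.put("param2", "p2 value");
String workflowId = startOrLoadWorkflowExecution("RTOWF", 1, "response_timeout_junit", input, null, null);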