Use of com.netflix.conductor.common.run.Workflow in the Netflix Conductor project: class CassandraDAOTest, method testWorkflowCRUD.
@Test
public void testWorkflowCRUD() {
final String workflowId = IDGenerator.generate();
// Build a minimal RUNNING workflow instance to persist.
final Workflow workflow = new Workflow();
workflow.setWorkflowId(workflowId);
workflow.setInput(new HashMap<>());
workflow.setStatus(Workflow.WorkflowStatus.RUNNING);
workflow.setCreateTime(System.currentTimeMillis());
// Create: the DAO must echo back the id we supplied.
final String returnedId = executionDAO.createWorkflow(workflow);
assertEquals(workflowId, returnedId);
// Read: a round-trip through the datastore yields an equal object.
Workflow fetched = executionDAO.getWorkflow(workflowId);
assertEquals(workflow, fetched);
// Update: change the status, persist, and verify the stored copy.
workflow.setStatus(Workflow.WorkflowStatus.COMPLETED);
executionDAO.updateWorkflow(workflow);
fetched = executionDAO.getWorkflow(workflowId);
assertEquals(workflow, fetched);
// Delete: removal reports success and subsequent reads find nothing.
final boolean wasRemoved = executionDAO.removeWorkflow(workflowId);
assertTrue(wasRemoved);
assertNull(executionDAO.getWorkflow(workflowId, true));
}
Use of com.netflix.conductor.common.run.Workflow in the Netflix Conductor project: class CassandraExecutionDAO, method getWorkflow.
/**
 * Fetches a workflow by id, optionally including its tasks.
 *
 * @param workflowId   UUID string of the workflow to fetch
 * @param includeTasks when true, reads the workflow row and all task rows from the
 *                     workflows table and attaches the tasks (sorted by seq) to the workflow
 * @return the workflow, or {@code null} if no rows exist for the id
 * @throws ApplicationException with {@code INVALID_INPUT} if {@code workflowId} is not a
 *                              valid UUID, or {@code BACKEND_ERROR} on any datastore failure
 */
@Override
public Workflow getWorkflow(String workflowId, boolean includeTasks) {
Workflow workflow = null;
try {
ResultSet resultSet;
if (includeTasks) {
// Workflow and task payloads live in the same partition; one query returns both
// kinds of rows, distinguished by their entity key column.
resultSet = session.execute(selectWorkflowWithTasksStatement.bind(UUID.fromString(workflowId), DEFAULT_SHARD_ID));
List<Task> tasks = new ArrayList<>();
List<Row> rows = resultSet.all();
if (rows.isEmpty()) {
LOGGER.info("Workflow {} not found in datastore", workflowId);
return null;
}
for (Row row : rows) {
String entityKey = row.getString(ENTITY_KEY);
if (ENTITY_TYPE_WORKFLOW.equals(entityKey)) {
workflow = readValue(row.getString(PAYLOAD_KEY), Workflow.class);
} else if (ENTITY_TYPE_TASK.equals(entityKey)) {
Task task = readValue(row.getString(PAYLOAD_KEY), Task.class);
tasks.add(task);
} else {
// Any other entity key indicates datastore corruption; surface it loudly.
throw new ApplicationException(ApplicationException.Code.INTERNAL_ERROR, String.format("Invalid row with entityKey: %s found in datastore for workflow: %s", entityKey, workflowId));
}
}
if (workflow != null) {
recordCassandraDaoRequests("getWorkflow", "n/a", workflow.getWorkflowName());
// Cassandra row order is not the execution order; restore it via the task seq.
tasks.sort(Comparator.comparingInt(Task::getSeq));
workflow.setTasks(tasks);
}
} else {
resultSet = session.execute(selectWorkflowStatement.bind(UUID.fromString(workflowId)));
workflow = Optional.ofNullable(resultSet.one()).map(row -> {
Workflow wf = readValue(row.getString(PAYLOAD_KEY), Workflow.class);
recordCassandraDaoRequests("getWorkflow", "n/a", wf.getWorkflowName());
return wf;
}).orElse(null);
}
return workflow;
} catch (ApplicationException e) {
// Already classified (e.g. INTERNAL_ERROR above); propagate unchanged.
throw e;
} catch (IllegalArgumentException e) {
// UUID.fromString rejects malformed ids; report as caller error.
Monitors.error(CLASS_NAME, "getWorkflow");
String errorMsg = String.format("Invalid workflow id: %s", workflowId);
LOGGER.error(errorMsg, e);
throw new ApplicationException(Code.INVALID_INPUT, errorMsg, e);
} catch (Exception e) {
Monitors.error(CLASS_NAME, "getWorkflow");
String errorMsg = String.format("Failed to get workflow: %s", workflowId);
LOGGER.error(errorMsg, e);
// Preserve the cause: previously the exception was dropped here, losing the
// original stack trace (the other catch branches already chain the cause).
throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e);
}
}
Use of com.netflix.conductor.common.run.Workflow in the Netflix Conductor project: class TestDeciderOutcomes, method testOptional.
@Test
public void testOptional() {
// Verifies decider handling of an optional task: repeated failures of t0 are
// retried (retryCount increments each pass), and once retries are exhausted the
// task ends as COMPLETED_WITH_ERRORS and the decider schedules the next task t1
// instead of failing the workflow.
WorkflowDef def = new WorkflowDef();
def.setName("test");
// t0: optional SIMPLE task; its input echoes its own task id via ${CPEWF_TASK_ID}.
WorkflowTask task1 = new WorkflowTask();
task1.setName("task0");
task1.setType("SIMPLE");
task1.setTaskReferenceName("t0");
task1.getInputParameters().put("taskId", "${CPEWF_TASK_ID}");
task1.setOptional(true);
task1.setTaskDefinition(new TaskDef("task0"));
// t1: required SIMPLE task that should run after t0 resolves.
WorkflowTask task2 = new WorkflowTask();
task2.setName("task1");
task2.setType("SIMPLE");
task2.setTaskReferenceName("t1");
task2.setTaskDefinition(new TaskDef("task1"));
def.getTasks().add(task1);
def.getTasks().add(task2);
def.setSchemaVersion(2);
Workflow workflow = new Workflow();
workflow.setWorkflowDefinition(def);
workflow.setStartTime(System.currentTimeMillis());
// First decide: only t0 should be scheduled.
DeciderOutcome outcome = deciderService.decide(workflow);
assertNotNull(outcome);
System.out.println("Schedule after starting: " + outcome.tasksToBeScheduled);
assertEquals(1, outcome.tasksToBeScheduled.size());
assertEquals(task1.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
System.out.println("TaskId of the scheduled task in input: " + outcome.tasksToBeScheduled.get(0).getInputData());
// Fail t0 three times; each decide should reschedule it with an incremented
// retry count (3 presumably matches the TaskDef default retry count — confirm).
for (int i = 0; i < 3; i++) {
String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId();
// ${CPEWF_TASK_ID} must have been resolved to the scheduled task's own id.
assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getInputData().get("taskId"));
workflow.getTasks().clear();
workflow.getTasks().addAll(outcome.tasksToBeScheduled);
workflow.getTasks().get(0).setStatus(Status.FAILED);
outcome = deciderService.decide(workflow);
assertNotNull(outcome);
System.out.println("Schedule: " + outcome.tasksToBeScheduled);
System.out.println("Update: " + outcome.tasksToBeUpdated);
assertEquals(1, outcome.tasksToBeUpdated.size());
assertEquals(1, outcome.tasksToBeScheduled.size());
// The failed attempt stays FAILED (not workflow-fatal, since t0 is optional)...
assertEquals(Task.Status.FAILED, workflow.getTasks().get(0).getStatus());
assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId());
// ...and a fresh retry of t0 is scheduled with retryCount bumped.
assertEquals(task1.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
assertEquals(i + 1, outcome.tasksToBeScheduled.get(0).getRetryCount());
}
// One more failure after retries are exhausted: the optional task is finalized
// as COMPLETED_WITH_ERRORS and the decider moves on to schedule t1.
String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId();
workflow.getTasks().clear();
workflow.getTasks().addAll(outcome.tasksToBeScheduled);
workflow.getTasks().get(0).setStatus(Status.FAILED);
outcome = deciderService.decide(workflow);
assertNotNull(outcome);
System.out.println("Schedule: " + outcome.tasksToBeScheduled);
System.out.println("Update: " + outcome.tasksToBeUpdated);
assertEquals(1, outcome.tasksToBeUpdated.size());
assertEquals(1, outcome.tasksToBeScheduled.size());
assertEquals(Task.Status.COMPLETED_WITH_ERRORS, workflow.getTasks().get(0).getStatus());
assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId());
assertEquals(task2.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
}
Use of com.netflix.conductor.common.run.Workflow in the Netflix Conductor project: class TestDeciderOutcomes, method testOptionalWithDynamicFork.
@Test
public void testOptionalWithDynamicFork() {
// Verifies that optional tasks inside a dynamic fork behave like testOptional:
// each forked branch (f0..f2) fails repeatedly, ends COMPLETED_WITH_ERRORS, and
// the JOIN still completes rather than failing the workflow.
WorkflowDef def = new WorkflowDef();
def.setName("test");
// fork0: dynamic fork whose branch list and per-branch inputs come from
// workflow input ("forks" / "forkedInputs").
WorkflowTask task1 = new WorkflowTask();
task1.setName("fork0");
task1.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC);
task1.setTaskReferenceName("fork0");
task1.setDynamicForkTasksInputParamName("forkedInputs");
task1.setDynamicForkTasksParam("forks");
task1.getInputParameters().put("forks", "${workflow.input.forks}");
task1.getInputParameters().put("forkedInputs", "${workflow.input.forkedInputs}");
// join0: joins the dynamically forked branches.
WorkflowTask task2 = new WorkflowTask();
task2.setName("join0");
task2.setType("JOIN");
task2.setTaskReferenceName("join0");
def.getTasks().add(task1);
def.getTasks().add(task2);
def.setSchemaVersion(2);
Workflow workflow = new Workflow();
workflow.setWorkflowDefinition(def);
// Build three optional SIMPLE branches f0..f2 with empty inputs.
List<WorkflowTask> forks = new LinkedList<>();
Map<String, Map<String, Object>> forkedInputs = new HashMap<>();
for (int i = 0; i < 3; i++) {
WorkflowTask workflowTask = new WorkflowTask();
workflowTask.setName("f" + i);
workflowTask.setTaskReferenceName("f" + i);
workflowTask.setWorkflowTaskType(TaskType.SIMPLE);
workflowTask.setOptional(true);
workflowTask.setTaskDefinition(new TaskDef("f" + i));
forks.add(workflowTask);
forkedInputs.put(workflowTask.getTaskReferenceName(), new HashMap<>());
}
workflow.getInput().put("forks", forks);
workflow.getInput().put("forkedInputs", forkedInputs);
workflow.setStartTime(System.currentTimeMillis());
// First decide: expect FORK + f0..f2 + JOIN = 5 tasks; the fork itself is
// a system task that completes immediately.
DeciderOutcome outcome = deciderService.decide(workflow);
assertNotNull(outcome);
assertEquals(5, outcome.tasksToBeScheduled.size());
assertEquals(0, outcome.tasksToBeUpdated.size());
assertEquals(SystemTaskType.FORK.name(), outcome.tasksToBeScheduled.get(0).getTaskType());
assertEquals(Task.Status.COMPLETED, outcome.tasksToBeScheduled.get(0).getStatus());
// Fail every forked branch on each pass; decide should keep retrying the
// optional branches (4 passes presumably exhausts the retries — confirm
// against the TaskDef default retry count).
for (int retryCount = 0; retryCount < 4; retryCount++) {
for (Task taskToBeScheduled : outcome.tasksToBeScheduled) {
if (taskToBeScheduled.getTaskDefName().equals("join0")) {
assertEquals(Task.Status.IN_PROGRESS, taskToBeScheduled.getStatus());
} else if (taskToBeScheduled.getTaskType().matches("(f0|f1|f2)")) {
assertEquals(Task.Status.SCHEDULED, taskToBeScheduled.getStatus());
taskToBeScheduled.setStatus(Status.FAILED);
}
taskToBeScheduled.setUpdateTime(System.currentTimeMillis());
}
workflow.getTasks().addAll(outcome.tasksToBeScheduled);
outcome = deciderService.decide(workflow);
assertNotNull(outcome);
}
// After retries: only the JOIN remains to schedule, and each branch was
// finalized as COMPLETED_WITH_ERRORS (optional-task terminal state).
assertEquals(SystemTaskType.JOIN.name(), outcome.tasksToBeScheduled.get(0).getTaskType());
for (int i = 0; i < 3; i++) {
assertEquals(Task.Status.COMPLETED_WITH_ERRORS, outcome.tasksToBeUpdated.get(i).getStatus());
assertEquals("f" + (i), outcome.tasksToBeUpdated.get(i).getTaskDefName());
}
// Executing the JOIN completes it even though the branches errored.
assertEquals(Task.Status.IN_PROGRESS, outcome.tasksToBeScheduled.get(0).getStatus());
new Join().execute(workflow, outcome.tasksToBeScheduled.get(0), null);
assertEquals(Task.Status.COMPLETED, outcome.tasksToBeScheduled.get(0).getStatus());
outcome.tasksToBeScheduled.stream().map(task -> task.getStatus() + ":" + task.getTaskType() + ":").forEach(System.out::println);
outcome.tasksToBeUpdated.stream().map(task -> task.getStatus() + ":" + task.getTaskType() + ":").forEach(System.out::println);
}
Use of com.netflix.conductor.common.run.Workflow in the Netflix Conductor project: class TestDeciderService, method testGetTaskInput.
@SuppressWarnings("unchecked")
@Test
public void testGetTaskInput() {
// Template mixing workflow-input and task-output expressions, including a
// nested JSON structure whose inner values must also be resolved.
Map<String, Object> inputTemplate = new HashMap<>();
inputTemplate.put("workflowInputParam", "${workflow.input.requestId}");
inputTemplate.put("taskOutputParam", "${task2.output.location}");
Map<String, Object> person = new HashMap<>();
person.put("name", "person name");
person.put("city", "New York");
person.put("phone", 2120001234);
person.put("status", "${task2.output.isPersonActive}");
Map<String, Object> employment = new HashMap<>();
employment.put("employer", "City Of New York");
employment.put("color", "purple");
employment.put("requestId", "${workflow.input.requestId}");
List<Map<String, Object>> nestedJson = new LinkedList<>();
nestedJson.add(person);
nestedJson.add(employment);
inputTemplate.put("complexJson", nestedJson);
// Workflow carrying the referenced input value and a completed "task2" whose
// output feeds the ${task2.output.*} expressions.
WorkflowDef def = new WorkflowDef();
def.setName("testGetTaskInput");
def.setSchemaVersion(2);
Workflow workflow = new Workflow();
workflow.setWorkflowDefinition(def);
workflow.getInput().put("requestId", "request id 001");
Task priorTask = new Task();
priorTask.setReferenceTaskName("task2");
priorTask.getOutputData().put("location", "http://location");
priorTask.getOutputData().put("isPersonActive", true);
workflow.getTasks().add(priorTask);
// Resolve the template and verify both top-level and nested substitutions.
Map<String, Object> resolved = parametersUtils.getTaskInput(inputTemplate, workflow, null, null);
assertNotNull(resolved);
assertTrue(resolved.containsKey("workflowInputParam"));
assertTrue(resolved.containsKey("taskOutputParam"));
assertEquals("request id 001", resolved.get("workflowInputParam"));
assertEquals("http://location", resolved.get("taskOutputParam"));
Object complexValue = resolved.get("complexJson");
assertNotNull(complexValue);
assertTrue(complexValue instanceof List);
List<Map<String, Object>> resolvedList = (List<Map<String, Object>>) complexValue;
assertEquals(2, resolvedList.size());
}
Aggregations