Example use of com.netflix.conductor.common.metadata.tasks.Task in the Netflix Conductor project: the testBatchPoll method of the TaskResourceTest class.
/**
 * Verifies that TaskResource.batchPoll delegates to TaskService.batchPoll and
 * returns the service's task list unmodified.
 */
@Test
public void testBatchPoll() throws Exception {
    Task polledTask = new Task();
    polledTask.setTaskType("SIMPLE");
    polledTask.setWorkerId("123");
    polledTask.setDomain("test");
    List<Task> expectedTasks = new ArrayList<>();
    expectedTasks.add(polledTask);
    // Stub the service layer; the resource should pass its result straight through.
    when(mockTaskService.batchPoll(anyString(), anyString(), anyString(), anyInt(), anyInt())).thenReturn(expectedTasks);
    assertEquals(expectedTasks, taskResource.batchPoll("SIMPLE", "123", "test", 1, 100));
}
Example use of com.netflix.conductor.common.metadata.tasks.Task in the Netflix Conductor project: the testTaskDefLimitCRUD method of the CassandraDAOTest class.
/**
 * Exercises the task-definition concurrency-limit bookkeeping in the Cassandra
 * execution DAO: with concurrentExecLimit=1, one IN_PROGRESS task must block a
 * second task of the same task def, and completing or force-removing a task
 * must free the slot again. Assertions are order-dependent on DAO state, so the
 * statement sequence must not be reordered.
 */
@Test
public void testTaskDefLimitCRUD() {
String taskDefName = "test_task_def";
String taskId = IDGenerator.generate();
// Task def allows at most one concurrent execution.
TaskDef taskDef = new TaskDef();
taskDef.setConcurrentExecLimit(1);
WorkflowTask workflowTask = new WorkflowTask();
workflowTask.setTaskDefinition(taskDef);
// First task instance of the limited task def.
Task task = new Task();
task.setTaskDefName(taskDefName);
task.setTaskId(taskId);
task.setWorkflowInstanceId(IDGenerator.generate());
task.setWorkflowTask(workflowTask);
task.setTaskType("test_task");
task.setWorkflowType("test_workflow");
task.setStatus(Task.Status.SCHEDULED);
// Second task instance of the same task def (competes for the same slot).
Task newTask = new Task();
newTask.setTaskDefName(taskDefName);
newTask.setTaskId(IDGenerator.generate());
newTask.setWorkflowInstanceId(IDGenerator.generate());
newTask.setWorkflowTask(workflowTask);
newTask.setTaskType("test_task");
newTask.setWorkflowType("test_workflow");
newTask.setStatus(Task.Status.SCHEDULED);
// No tasks are IN_PROGRESS yet, so the limit cannot be exceeded.
executionDAO.updateTaskDefLimit(task, false);
assertFalse(executionDAO.exceedsInProgressLimit(task));
// Move the first task to IN_PROGRESS, consuming the single slot.
task.setStatus(Status.IN_PROGRESS);
executionDAO.updateTaskDefLimit(task, false);
// The task holding the slot does not exceed its own limit.
assertFalse(executionDAO.exceedsInProgressLimit(task));
// A different task of the same def would exceed the limit of 1.
assertTrue(executionDAO.exceedsInProgressLimit(newTask));
// Completing the first task should release the slot.
task.setStatus(Status.COMPLETED);
executionDAO.updateTaskDefLimit(task, false);
// With the slot free, the new task no longer exceeds the limit.
assertFalse(executionDAO.exceedsInProgressLimit(newTask));
// The new task now takes the slot.
newTask.setStatus(Status.IN_PROGRESS);
executionDAO.updateTaskDefLimit(newTask, false);
// Again, the slot holder does not exceed its own limit.
assertFalse(executionDAO.exceedsInProgressLimit(newTask));
// Force-remove the new task from the limit bookkeeping (forceRemove=true).
executionDAO.updateTaskDefLimit(newTask, true);
// With the slot freed by force-removal, the first task would fit again.
assertFalse(executionDAO.exceedsInProgressLimit(task));
}
Example use of com.netflix.conductor.common.metadata.tasks.Task in the Netflix Conductor project: the getWorkflow method of the CassandraExecutionDAO class.
/**
 * Reads a workflow from the datastore by id.
 *
 * @param workflowId   workflow id; must be a valid UUID string
 * @param includeTasks when true, also loads the workflow's tasks (sorted by
 *                     sequence number) from the same shard
 * @return the workflow, or null if no rows exist for the id
 * @throws ApplicationException with Code.INVALID_INPUT for a malformed id,
 *                              Code.INTERNAL_ERROR for an unrecognized row
 *                              entity type, Code.BACKEND_ERROR for any other
 *                              datastore failure
 */
@Override
public Workflow getWorkflow(String workflowId, boolean includeTasks) {
    Workflow workflow = null;
    try {
        ResultSet resultSet;
        if (includeTasks) {
            // One query returns the workflow row plus one row per task for the shard;
            // rows are discriminated by their entity-type column.
            resultSet = session.execute(selectWorkflowWithTasksStatement.bind(UUID.fromString(workflowId), DEFAULT_SHARD_ID));
            List<Task> tasks = new ArrayList<>();
            List<Row> rows = resultSet.all();
            if (rows.isEmpty()) {
                LOGGER.info("Workflow {} not found in datastore", workflowId);
                return null;
            }
            for (Row row : rows) {
                String entityKey = row.getString(ENTITY_KEY);
                if (ENTITY_TYPE_WORKFLOW.equals(entityKey)) {
                    workflow = readValue(row.getString(PAYLOAD_KEY), Workflow.class);
                } else if (ENTITY_TYPE_TASK.equals(entityKey)) {
                    Task task = readValue(row.getString(PAYLOAD_KEY), Task.class);
                    tasks.add(task);
                } else {
                    throw new ApplicationException(ApplicationException.Code.INTERNAL_ERROR, String.format("Invalid row with entityKey: %s found in datastore for workflow: %s", entityKey, workflowId));
                }
            }
            if (workflow != null) {
                recordCassandraDaoRequests("getWorkflow", "n/a", workflow.getWorkflowName());
                // Row order is not guaranteed; restore execution order by sequence number.
                tasks.sort(Comparator.comparingInt(Task::getSeq));
                workflow.setTasks(tasks);
            }
        } else {
            resultSet = session.execute(selectWorkflowStatement.bind(UUID.fromString(workflowId)));
            workflow = Optional.ofNullable(resultSet.one()).map(row -> {
                Workflow wf = readValue(row.getString(PAYLOAD_KEY), Workflow.class);
                recordCassandraDaoRequests("getWorkflow", "n/a", wf.getWorkflowName());
                return wf;
            }).orElse(null);
        }
        return workflow;
    } catch (ApplicationException e) {
        // Already classified (e.g. INTERNAL_ERROR above); rethrow untouched.
        throw e;
    } catch (IllegalArgumentException e) {
        // UUID.fromString rejects malformed workflow ids.
        Monitors.error(CLASS_NAME, "getWorkflow");
        String errorMsg = String.format("Invalid workflow id: %s", workflowId);
        LOGGER.error(errorMsg, e);
        throw new ApplicationException(Code.INVALID_INPUT, errorMsg, e);
    } catch (Exception e) {
        Monitors.error(CLASS_NAME, "getWorkflow");
        String errorMsg = String.format("Failed to get workflow: %s", workflowId);
        LOGGER.error(errorMsg, e);
        // Preserve the root cause so callers and logs see the underlying failure.
        throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e);
    }
}
Example use of com.netflix.conductor.common.metadata.tasks.Task in the Netflix Conductor project: the updateWorkflow method of the CassandraExecutionDAO class.
/**
 * Persists the workflow document (without its tasks, which are stored in
 * separate rows) and returns the workflow id.
 *
 * @param workflow workflow to persist; its in-memory task list is untouched on
 *                 return, even when persistence fails
 * @return the workflow id
 * @throws ApplicationException with Code.BACKEND_ERROR on any datastore failure
 */
@Override
public String updateWorkflow(Workflow workflow) {
    try {
        // Tasks are persisted separately; strip them so the workflow payload stays small.
        List<Task> tasks = workflow.getTasks();
        workflow.setTasks(new LinkedList<>());
        try {
            String payload = toJson(workflow);
            recordCassandraDaoRequests("updateWorkflow", "n/a", workflow.getWorkflowName());
            recordCassandraDaoPayloadSize("updateWorkflow", payload.length(), "n/a", workflow.getWorkflowName());
            session.execute(updateWorkflowStatement.bind(payload, UUID.fromString(workflow.getWorkflowId())));
        } finally {
            // Restore the caller's task list even if serialization or the write fails;
            // otherwise a failed update would leave the in-memory workflow with no tasks.
            workflow.setTasks(tasks);
        }
        return workflow.getWorkflowId();
    } catch (Exception e) {
        Monitors.error(CLASS_NAME, "updateWorkflow");
        String errorMsg = String.format("Failed to update workflow: %s", workflow.getWorkflowId());
        LOGGER.error(errorMsg, e);
        // Preserve the root cause so callers and logs see the underlying failure.
        throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e);
    }
}
Example use of com.netflix.conductor.common.metadata.tasks.Task in the Netflix Conductor project: the getTask method of the CassandraExecutionDAO class.
/**
 * Reads a task by id, first resolving the owning workflow id from the
 * task-to-workflow lookup table.
 *
 * @param taskId task id to look up
 * @return the task, or null if the task id is unknown or the task row is gone
 * @throws ApplicationException with Code.BACKEND_ERROR on any datastore failure
 */
@Override
public Task getTask(String taskId) {
    try {
        String workflowId = lookupWorkflowIdFromTaskId(taskId);
        if (workflowId == null) {
            return null;
        }
        // TODO: implement for query against multiple shards
        ResultSet resultSet = session.execute(selectTaskStatement.bind(UUID.fromString(workflowId), DEFAULT_SHARD_ID, taskId));
        return Optional.ofNullable(resultSet.one()).map(row -> {
            Task task = readValue(row.getString(PAYLOAD_KEY), Task.class);
            recordCassandraDaoRequests("getTask", task.getTaskType(), task.getWorkflowType());
            recordCassandraDaoPayloadSize("getTask", toJson(task).length(), task.getTaskType(), task.getWorkflowType());
            return task;
        }).orElse(null);
    } catch (ApplicationException ae) {
        // Already classified upstream (e.g. by lookupWorkflowIdFromTaskId); rethrow untouched.
        throw ae;
    } catch (Exception e) {
        Monitors.error(CLASS_NAME, "getTask");
        String errorMsg = String.format("Error getting task by id: %s", taskId);
        LOGGER.error(errorMsg, e);
        // Preserve the root cause so callers and logs see the underlying failure.
        throw new ApplicationException(Code.BACKEND_ERROR, errorMsg, e);
    }
}
Aggregations