
Example 56 with Task

Use of com.netflix.conductor.common.metadata.tasks.Task in project conductor by Netflix.

The class TaskResourceTest, method testBatchPoll.

@Test
public void testBatchPoll() throws Exception {
    Task task = new Task();
    task.setTaskType("SIMPLE");
    task.setWorkerId("123");
    task.setDomain("test");
    List<Task> listOfTasks = new ArrayList<>();
    listOfTasks.add(task);
    when(mockTaskService.batchPoll(anyString(), anyString(), anyString(), anyInt(), anyInt())).thenReturn(listOfTasks);
    assertEquals(listOfTasks, taskResource.batchPoll("SIMPLE", "123", "test", 1, 100));
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) ArrayList(java.util.ArrayList) Test(org.junit.Test)
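Beyond asserting on the returned list, a natural extension of this test is to check that the resource forwards its parameters to the service unchanged. The fragment below is a hedged sketch of such an addition (it is not part of the snippet above) and reuses the same mock; verify and eq are the standard org.mockito.Mockito and org.mockito.ArgumentMatchers helpers already in use here.

    // Hypothetical follow-up assertion, reusing the mock configured above:
    verify(mockTaskService).batchPoll(eq("SIMPLE"), eq("123"), eq("test"), eq(1), eq(100));
    // Requires the usual static imports: org.mockito.Mockito.verify and org.mockito.ArgumentMatchers.eq.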

Example 57 with Task

Use of com.netflix.conductor.common.metadata.tasks.Task in project conductor by Netflix.

The class CassandraDAOTest, method testTaskDefLimitCRUD.

@Test
public void testTaskDefLimitCRUD() {
    String taskDefName = "test_task_def";
    String taskId = IDGenerator.generate();
    TaskDef taskDef = new TaskDef();
    taskDef.setConcurrentExecLimit(1);
    WorkflowTask workflowTask = new WorkflowTask();
    workflowTask.setTaskDefinition(taskDef);
    Task task = new Task();
    task.setTaskDefName(taskDefName);
    task.setTaskId(taskId);
    task.setWorkflowInstanceId(IDGenerator.generate());
    task.setWorkflowTask(workflowTask);
    task.setTaskType("test_task");
    task.setWorkflowType("test_workflow");
    task.setStatus(Task.Status.SCHEDULED);
    Task newTask = new Task();
    newTask.setTaskDefName(taskDefName);
    newTask.setTaskId(IDGenerator.generate());
    newTask.setWorkflowInstanceId(IDGenerator.generate());
    newTask.setWorkflowTask(workflowTask);
    newTask.setTaskType("test_task");
    newTask.setWorkflowType("test_workflow");
    newTask.setStatus(Task.Status.SCHEDULED);
    // no tasks are IN_PROGRESS
    executionDAO.updateTaskDefLimit(task, false);
    assertFalse(executionDAO.exceedsInProgressLimit(task));
    // set a task to IN_PROGRESS
    task.setStatus(Status.IN_PROGRESS);
    executionDAO.updateTaskDefLimit(task, false);
    // when same task is checked
    assertFalse(executionDAO.exceedsInProgressLimit(task));
    // check if new task can be added
    assertTrue(executionDAO.exceedsInProgressLimit(newTask));
    // set IN_PROGRESS task to COMPLETED
    task.setStatus(Status.COMPLETED);
    executionDAO.updateTaskDefLimit(task, false);
    // check new task again
    assertFalse(executionDAO.exceedsInProgressLimit(newTask));
    // set new task to IN_PROGRESS
    newTask.setStatus(Status.IN_PROGRESS);
    executionDAO.updateTaskDefLimit(newTask, false);
    // check new task again
    assertFalse(executionDAO.exceedsInProgressLimit(newTask));
    // force remove from task def limit
    executionDAO.updateTaskDefLimit(newTask, true);
    assertFalse(executionDAO.exceedsInProgressLimit(task));
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) WorkflowTask(com.netflix.conductor.common.metadata.workflow.WorkflowTask) TaskDef(com.netflix.conductor.common.metadata.tasks.TaskDef) Test(org.junit.Test)
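The test exercises the bookkeeping behind concurrentExecLimit: updateTaskDefLimit records a task against its definition, and exceedsInProgressLimit answers whether another task may start. As a rough illustration of where that check sits, the sketch below shows how a scheduler could gate a SCHEDULED task on the limit before promoting it. This is only an assumed usage pattern, not the actual Conductor decider logic, and it uses only the DAO methods shown in the test.

    // Hedged sketch: gate a SCHEDULED task on its task definition's concurrency limit.
    if (executionDAO.exceedsInProgressLimit(task)) {
        // Limit reached: leave the task SCHEDULED and re-evaluate it on a later sweep.
    } else {
        task.setStatus(Task.Status.IN_PROGRESS);
        executionDAO.updateTaskDefLimit(task, false); // record the task against its definition's limit
    }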

Example 58 with Task

Use of com.netflix.conductor.common.metadata.tasks.Task in project conductor by Netflix.

The class CassandraExecutionDAO, method getWorkflow.

@Override
public Workflow getWorkflow(String workflowId, boolean includeTasks) {
    Workflow workflow = null;
    try {
        ResultSet resultSet;
        if (includeTasks) {
            resultSet = session.execute(selectWorkflowWithTasksStatement.bind(UUID.fromString(workflowId), DEFAULT_SHARD_ID));
            List<Task> tasks = new ArrayList<>();
            List<Row> rows = resultSet.all();
            if (rows.size() == 0) {
                LOGGER.info("Workflow {} not found in datastore", workflowId);
                return null;
            }
            for (Row row : rows) {
                String entityKey = row.getString(ENTITY_KEY);
                if (ENTITY_TYPE_WORKFLOW.equals(entityKey)) {
                    workflow = readValue(row.getString(PAYLOAD_KEY), Workflow.class);
                } else if (ENTITY_TYPE_TASK.equals(entityKey)) {
                    Task task = readValue(row.getString(PAYLOAD_KEY), Task.class);
                    tasks.add(task);
                } else {
                    throw new ApplicationException(ApplicationException.Code.INTERNAL_ERROR, String.format("Invalid row with entityKey: %s found in datastore for workflow: %s", entityKey, workflowId));
                }
            }
            if (workflow != null) {
                recordCassandraDaoRequests("getWorkflow", "n/a", workflow.getWorkflowName());
                tasks.sort(Comparator.comparingInt(Task::getSeq));
                workflow.setTasks(tasks);
            }
        } else {
            resultSet = session.execute(selectWorkflowStatement.bind(UUID.fromString(workflowId)));
            workflow = Optional.ofNullable(resultSet.one()).map(row -> {
                Workflow wf = readValue(row.getString(PAYLOAD_KEY), Workflow.class);
                recordCassandraDaoRequests("getWorkflow", "n/a", wf.getWorkflowName());
                return wf;
            }).orElse(null);
        }
        return workflow;
    } catch (ApplicationException e) {
        throw e;
    } catch (IllegalArgumentException e) {
        Monitors.error(CLASS_NAME, "getWorkflow");
        String errorMsg = String.format("Invalid workflow id: %s", workflowId);
        LOGGER.error(errorMsg, e);
        throw new ApplicationException(Code.INVALID_INPUT, errorMsg, e);
    } catch (Exception e) {
        Monitors.error(CLASS_NAME, "getWorkflow");
        String errorMsg = String.format("Failed to get workflow: %s", workflowId);
        LOGGER.error(errorMsg, e);
        throw new ApplicationException(Code.BACKEND_ERROR, errorMsg);
    }
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) ApplicationException(com.netflix.conductor.core.execution.ApplicationException) ResultSet(com.datastax.driver.core.ResultSet) ArrayList(java.util.ArrayList) Workflow(com.netflix.conductor.common.run.Workflow) Row(com.datastax.driver.core.Row)
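A caller of this DAO method sees either a fully hydrated workflow, with tasks sorted by sequence number, or null when the id is unknown. The fragment below is a hypothetical usage sketch, not code from the Conductor repository, and it relies only on accessors that appear in the method above.

    // Hypothetical caller: fetch a workflow together with its tasks and log them.
    Workflow workflow = executionDAO.getWorkflow(workflowId, true);
    if (workflow != null) {
        for (Task t : workflow.getTasks()) {
            LOGGER.info("workflow {} task {} seq {} status {}", workflowId, t.getTaskId(), t.getSeq(), t.getStatus());
        }
    }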

Example 59 with Task

Use of com.netflix.conductor.common.metadata.tasks.Task in project conductor by Netflix.

The class CassandraExecutionDAO, method updateWorkflow.

@Override
public String updateWorkflow(Workflow workflow) {
    try {
        List<Task> tasks = workflow.getTasks();
        workflow.setTasks(new LinkedList<>());
        String payload = toJson(workflow);
        recordCassandraDaoRequests("updateWorkflow", "n/a", workflow.getWorkflowName());
        recordCassandraDaoPayloadSize("updateWorkflow", payload.length(), "n/a", workflow.getWorkflowName());
        session.execute(updateWorkflowStatement.bind(payload, UUID.fromString(workflow.getWorkflowId())));
        workflow.setTasks(tasks);
        return workflow.getWorkflowId();
    } catch (Exception e) {
        Monitors.error(CLASS_NAME, "updateWorkflow");
        String errorMsg = String.format("Failed to update workflow: %s", workflow.getWorkflowId());
        LOGGER.error(errorMsg, e);
        throw new ApplicationException(Code.BACKEND_ERROR, errorMsg);
    }
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) ApplicationException(com.netflix.conductor.core.execution.ApplicationException)
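Note the design choice here: tasks are persisted as separate rows, so the task list is detached before the workflow payload is serialized and reattached afterwards, which keeps the workflow row small. A call site might look like the sketch below; it is an assumption about usage rather than repository code, and the COMPLETED constant is assumed to be the standard Workflow.WorkflowStatus enum value.

    // Hypothetical caller: mutate the in-memory workflow, then persist it through the DAO.
    workflow.setStatus(Workflow.WorkflowStatus.COMPLETED); // assumes the standard WorkflowStatus enum
    String persistedId = executionDAO.updateWorkflow(workflow);
    LOGGER.debug("persisted workflow {}", persistedId);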

Example 60 with Task

Use of com.netflix.conductor.common.metadata.tasks.Task in project conductor by Netflix.

The class CassandraExecutionDAO, method getTask.

@Override
public Task getTask(String taskId) {
    try {
        String workflowId = lookupWorkflowIdFromTaskId(taskId);
        if (workflowId == null) {
            return null;
        }
        // TODO: implement for query against multiple shards
        ResultSet resultSet = session.execute(selectTaskStatement.bind(UUID.fromString(workflowId), DEFAULT_SHARD_ID, taskId));
        return Optional.ofNullable(resultSet.one()).map(row -> {
            Task task = readValue(row.getString(PAYLOAD_KEY), Task.class);
            recordCassandraDaoRequests("getTask", task.getTaskType(), task.getWorkflowType());
            recordCassandraDaoPayloadSize("getTask", toJson(task).length(), task.getTaskType(), task.getWorkflowType());
            return task;
        }).orElse(null);
    } catch (ApplicationException ae) {
        throw ae;
    } catch (Exception e) {
        Monitors.error(CLASS_NAME, "getTask");
        String errorMsg = String.format("Error getting task by id: %s", taskId);
        LOGGER.error(errorMsg, e);
        throw new ApplicationException(Code.BACKEND_ERROR, errorMsg);
    }
}
Also used : Task(com.netflix.conductor.common.metadata.tasks.Task) ApplicationException(com.netflix.conductor.core.execution.ApplicationException) ResultSet(com.datastax.driver.core.ResultSet)
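Because a task id is first resolved to its owning workflow, a missing mapping short-circuits to null rather than raising an error. The lines below are a hedged caller sketch, not code from the repository, illustrating that contract.

    // Hypothetical caller: a null return means the task id is unknown, not that the lookup failed.
    Task task = executionDAO.getTask(taskId);
    if (task == null) {
        LOGGER.warn("No task found for id: {}", taskId);
    }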

Aggregations

Task (com.netflix.conductor.common.metadata.tasks.Task): 357
Workflow (com.netflix.conductor.common.run.Workflow): 249
Test (org.junit.Test): 248
WorkflowTask (com.netflix.conductor.common.metadata.workflow.WorkflowTask): 227
HashMap (java.util.HashMap): 147
WorkflowDef (com.netflix.conductor.common.metadata.workflow.WorkflowDef): 121
SubWorkflow (com.netflix.conductor.core.execution.tasks.SubWorkflow): 110
TaskDef (com.netflix.conductor.common.metadata.tasks.TaskDef): 95
UserTask (com.netflix.conductor.tests.utils.UserTask): 73
Map (java.util.Map): 53
LinkedList (java.util.LinkedList): 51
WorkflowSystemTask (com.netflix.conductor.core.execution.tasks.WorkflowSystemTask): 45
List (java.util.List): 45
ApplicationException (com.netflix.conductor.core.execution.ApplicationException): 41
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString): 39
TaskResult (com.netflix.conductor.common.metadata.tasks.TaskResult): 38
Status (com.netflix.conductor.common.metadata.tasks.Task.Status): 32
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 29
Collectors (java.util.stream.Collectors): 29
TaskType (com.netflix.conductor.common.metadata.workflow.TaskType): 28