Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix.
The class IsolatedTaskQueueProducer, method addTaskQueues.
@VisibleForTesting
void addTaskQueues() {
    Set<TaskDef> isolationTaskDefs = getIsolationExecutionNameSpaces();
    logger.debug("Retrieved queues {}", isolationTaskDefs);
    Set<String> taskTypes = SystemTaskWorkerCoordinator.taskNameWorkflowTaskMapping.keySet();
    for (TaskDef isolatedTaskDef : isolationTaskDefs) {
        for (String taskType : taskTypes) {
            // Build one queue name per (system task type, isolation group / execution namespace) pair.
            String taskQueue = QueueUtils.getQueueName(taskType, null,
                    isolatedTaskDef.getIsolationGroupId(), isolatedTaskDef.getExecutionNameSpace());
            logger.debug("Adding taskQueue:'{}' to system task worker coordinator", taskQueue);
            SystemTaskWorkerCoordinator.queue.add(taskQueue);
        }
    }
}
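For the loop above to register anything, the metadata store must contain at least one TaskDef that declares an isolation group or execution namespace. A minimal sketch of such a definition follows; the setter names mirror the getters used in the loop and are assumed to exist alongside them, and the registration call (MetadataService or MetadataDAO) is left out because it depends on the deployment.

// Sketch only: a TaskDef that addTaskQueues() could pick up. Setter names are
// assumed from the corresponding getters used in the loop above.
TaskDef paymentTaskDef = new TaskDef();
paymentTaskDef.setName("process_payment");          // hypothetical task name
paymentTaskDef.setIsolationGroupId("payments");     // hypothetical isolation group
paymentTaskDef.setExecutionNameSpace("billing-ns"); // hypothetical execution namespace
// getIsolationExecutionNameSpaces() is expected to return definitions like this one,
// producing one extra queue per registered system task type.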
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix.
The class KafkaPublishTaskMapper, method getMappedTasks.
/**
 * This method maps a {@link WorkflowTask} of type {@link TaskType#KAFKA_PUBLISH}
 * to a {@link Task} in a {@link Task.Status#SCHEDULED} state.
 *
 * @param taskMapperContext A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef},
 *                          {@link Workflow} and a string representation of the TaskId
 * @return a List with just one Kafka task
 * @throws TerminateWorkflowException if the task definition does not exist
 */
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException {
    logger.debug("TaskMapperContext {} in KafkaPublishTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    String taskId = taskMapperContext.getTaskId();
    int retryCount = taskMapperContext.getRetryCount();
    // Prefer the definition carried by the mapper context; fall back to the metadata store.
    TaskDef taskDefinition = Optional.ofNullable(taskMapperContext.getTaskDefinition())
            .orElseGet(() -> Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName()))
                    .orElse(null));
    Map<String, Object> input = parametersUtils.getTaskInputV2(taskToSchedule.getInputParameters(),
            workflowInstance, taskId, taskDefinition);
    Task kafkaPublishTask = new Task();
    kafkaPublishTask.setTaskType(taskToSchedule.getType());
    kafkaPublishTask.setTaskDefName(taskToSchedule.getName());
    kafkaPublishTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    kafkaPublishTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    kafkaPublishTask.setWorkflowType(workflowInstance.getWorkflowName());
    kafkaPublishTask.setCorrelationId(workflowInstance.getCorrelationId());
    kafkaPublishTask.setScheduledTime(System.currentTimeMillis());
    kafkaPublishTask.setTaskId(taskId);
    kafkaPublishTask.setInputData(input);
    kafkaPublishTask.setStatus(Task.Status.SCHEDULED);
    kafkaPublishTask.setRetryCount(retryCount);
    kafkaPublishTask.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
    kafkaPublishTask.setWorkflowTask(taskToSchedule);
    kafkaPublishTask.setWorkflowPriority(workflowInstance.getPriority());
    if (Objects.nonNull(taskDefinition)) {
        // Isolation and rate-limit settings only apply when a definition was found.
        kafkaPublishTask.setExecutionNameSpace(taskDefinition.getExecutionNameSpace());
        kafkaPublishTask.setIsolationGroupId(taskDefinition.getIsolationGroupId());
        kafkaPublishTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
        kafkaPublishTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds());
    }
    return Collections.singletonList(kafkaPublishTask);
}
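A minimal unit-test sketch for this mapper is shown below. The TaskMapperContext builder methods, the mapper's constructor arguments, and the task/id names are assumptions inferred from the fields referenced in the snippet above; verify them against the Conductor version in use.

// Sketch only: exercise the mapper and check the SCHEDULED outcome.
ParametersUtils parametersUtils = Mockito.mock(ParametersUtils.class);
MetadataDAO metadataDAO = Mockito.mock(MetadataDAO.class);
KafkaPublishTaskMapper mapper = new KafkaPublishTaskMapper(parametersUtils, metadataDAO);

WorkflowTask taskToSchedule = new WorkflowTask();
taskToSchedule.setName("kafka_task");                   // hypothetical task name
taskToSchedule.setType(TaskType.KAFKA_PUBLISH.name());

Workflow workflow = new Workflow();
workflow.setWorkflowDefinition(new WorkflowDef());

TaskMapperContext context = TaskMapperContext.newBuilder() // builder method names assumed
        .withWorkflowDefinition(workflow.getWorkflowDefinition())
        .withWorkflowInstance(workflow)
        .withTaskToSchedule(taskToSchedule)
        .withTaskId("task-id-1")                            // any unique id
        .withRetryCount(0)
        .build();

List<Task> mapped = mapper.getMappedTasks(context);
assertEquals(1, mapped.size());
assertEquals(Task.Status.SCHEDULED, mapped.get(0).getStatus());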
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix.
The class LambdaTaskMapper, method getMappedTasks.
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) {
    LOGGER.debug("TaskMapperContext {} in LambdaTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    String taskId = taskMapperContext.getTaskId();
    TaskDef taskDefinition = Optional.ofNullable(taskMapperContext.getTaskDefinition())
            .orElseGet(() -> Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName()))
                    .orElse(null));
    Map<String, Object> taskInput = parametersUtils.getTaskInputV2(
            taskMapperContext.getTaskToSchedule().getInputParameters(), workflowInstance, taskId, taskDefinition);
    Task lambdaTask = new Task();
    lambdaTask.setTaskType(Lambda.TASK_NAME);
    lambdaTask.setTaskDefName(taskMapperContext.getTaskToSchedule().getName());
    lambdaTask.setReferenceTaskName(taskMapperContext.getTaskToSchedule().getTaskReferenceName());
    lambdaTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    lambdaTask.setWorkflowType(workflowInstance.getWorkflowName());
    lambdaTask.setCorrelationId(workflowInstance.getCorrelationId());
    lambdaTask.setStartTime(System.currentTimeMillis());
    lambdaTask.setScheduledTime(System.currentTimeMillis());
    lambdaTask.setInputData(taskInput);
    lambdaTask.setTaskId(taskId);
    lambdaTask.setStatus(Task.Status.IN_PROGRESS);
    lambdaTask.setWorkflowTask(taskToSchedule);
    lambdaTask.setWorkflowPriority(workflowInstance.getPriority());
    return Collections.singletonList(lambdaTask);
}
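The notable difference from the Kafka mapper is that the Lambda task is created directly in the IN_PROGRESS state and its type is forced to Lambda.TASK_NAME rather than taken from the workflow task. Reusing a context built as in the earlier sketch (with the workflow task's type switched to LAMBDA and a LambdaTaskMapper constructed the same way, both assumptions), the expected outcome would be:

// Assumes `lambdaMapper` and `context` are set up as in the previous sketch.
Task lambdaTask = lambdaMapper.getMappedTasks(context).get(0);
assertEquals(Lambda.TASK_NAME, lambdaTask.getTaskType());      // the constant used by the mapper above
assertEquals(Task.Status.IN_PROGRESS, lambdaTask.getStatus()); // not SCHEDULED, unlike the Kafka/simple tasks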
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix.
The class SimpleTaskMapper, method getMappedTasks.
/**
 * This method maps a {@link WorkflowTask} of type {@link TaskType#SIMPLE}
 * to a {@link Task}.
 *
 * @param taskMapperContext A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef},
 *                          {@link Workflow} and a string representation of the TaskId
 * @return a List with just one simple task
 * @throws TerminateWorkflowException if the task definition does not exist
 */
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException {
    logger.debug("TaskMapperContext {} in SimpleTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    int retryCount = taskMapperContext.getRetryCount();
    String retriedTaskId = taskMapperContext.getRetryTaskId();
    TaskDef taskDefinition = Optional.ofNullable(taskToSchedule.getTaskDefinition())
            .orElseThrow(() -> {
                String reason = String.format("Invalid task. Task %s does not have a definition", taskToSchedule.getName());
                return new TerminateWorkflowException(reason);
            });
    Map<String, Object> input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(),
            workflowInstance, taskDefinition, taskMapperContext.getTaskId());
    Task simpleTask = new Task();
    simpleTask.setStartDelayInSeconds(taskToSchedule.getStartDelay());
    simpleTask.setTaskId(taskMapperContext.getTaskId());
    simpleTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    simpleTask.setInputData(input);
    simpleTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    simpleTask.setWorkflowType(workflowInstance.getWorkflowName());
    simpleTask.setStatus(Task.Status.SCHEDULED);
    simpleTask.setTaskType(taskToSchedule.getName());
    simpleTask.setTaskDefName(taskToSchedule.getName());
    simpleTask.setCorrelationId(workflowInstance.getCorrelationId());
    simpleTask.setScheduledTime(System.currentTimeMillis());
    simpleTask.setRetryCount(retryCount);
    simpleTask.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
    simpleTask.setResponseTimeoutSeconds(taskDefinition.getResponseTimeoutSeconds());
    simpleTask.setWorkflowTask(taskToSchedule);
    simpleTask.setRetriedTaskId(retriedTaskId);
    simpleTask.setWorkflowPriority(workflowInstance.getPriority());
    simpleTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
    simpleTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds());
    return Collections.singletonList(simpleTask);
}
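Unlike the two mappers above, SimpleTaskMapper does not fall back to the metadata store: the definition must already be attached to the WorkflowTask, otherwise the workflow is terminated. A minimal sketch of that failure path follows, under the same builder assumptions as the earlier sketch and using JUnit's assertThrows; the task name is hypothetical.

// A SIMPLE workflow task with no TaskDef attached should terminate the workflow.
WorkflowTask undefinedTask = new WorkflowTask();
undefinedTask.setName("no_such_task"); // hypothetical: no definition attached or registered

TaskMapperContext badContext = TaskMapperContext.newBuilder() // builder method names assumed
        .withWorkflowInstance(workflow)
        .withTaskToSchedule(undefinedTask)
        .withTaskId("task-id-2")
        .withRetryCount(0)
        .build();

// Expected message: "Invalid task. Task no_such_task does not have a definition"
assertThrows(TerminateWorkflowException.class, () -> simpleTaskMapper.getMappedTasks(badContext));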
Use of com.netflix.conductor.common.metadata.tasks.TaskDef in project conductor by Netflix.
The class ExecutionDAOTest, method testTaskExceedsLimit.
@Test
public void testTaskExceedsLimit() {
    // A definition that allows at most one task of this type to be IN_PROGRESS at a time.
    TaskDef taskDefinition = new TaskDef();
    taskDefinition.setName("task1");
    taskDefinition.setConcurrentExecLimit(1);

    WorkflowTask workflowTask = new WorkflowTask();
    workflowTask.setName("task1");
    workflowTask.setTaskDefinition(taskDefinition);

    List<Task> tasks = new LinkedList<>();
    for (int i = 0; i < 15; i++) {
        Task task = new Task();
        task.setScheduledTime(1L);
        task.setSeq(i + 1);
        task.setTaskId("t_" + i);
        task.setWorkflowInstanceId("workflow_" + i);
        task.setReferenceTaskName("task1");
        task.setTaskDefName("task1");
        task.setStatus(Task.Status.SCHEDULED);
        task.setWorkflowTask(workflowTask);
        tasks.add(task);
    }
    getExecutionDAO().createTasks(tasks);

    // Nothing is IN_PROGRESS yet, so the limit of 1 is not exceeded.
    assertFalse(getExecutionDAO().exceedsInProgressLimit(tasks.get(0)));

    // Once a single task is IN_PROGRESS, every task of this definition is over the limit.
    tasks.get(0).setStatus(Task.Status.IN_PROGRESS);
    getExecutionDAO().updateTask(tasks.get(0));
    for (Task task : tasks) {
        assertTrue(getExecutionDAO().exceedsInProgressLimit(task));
    }
}
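The second half of the test relies on the fact that once one task is IN_PROGRESS against a concurrentExecLimit of 1, every remaining pending task for that definition is over the limit. Callers typically use the same check to postpone work rather than hand it to a worker; a hypothetical guard is sketched below, where postpone and dispatchToWorker are placeholders, not Conductor APIs.

// Hypothetical dispatch guard built on the ExecutionDAO call exercised above.
if (executionDAO.exceedsInProgressLimit(task)) {
    postpone(task);          // e.g. push the task back onto its queue with a delay
} else {
    dispatchToWorker(task);  // placeholder for whatever hands the task to a worker
}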