use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
the class ForkJoinDynamicTaskMapper method getDynamicForkTasksAndInput.
/**
* This method is used to get the List of dynamic workflow tasks and their input based on the {@link WorkflowTask#getDynamicForkTasksParam()}
*
* @param taskToSchedule: The Task of type FORK_JOIN_DYNAMIC that needs to be scheduled, which has the input parameters
* @param workflowInstance: The instance of the {@link Workflow} which represents the workflow being executed.
* @param dynamicForkTaskParam: The key representing the dynamic fork join json payload which is available in {@link WorkflowTask#getInputParameters()}
* @return a {@link Pair} representing the list of dynamic fork tasks in {@link Pair#getLeft()} and the input for the dynamic fork tasks in {@link Pair#getRight()}
* @throws TerminateWorkflowException In case the input for the dynamic fork tasks is not represented as a {@link Map}
*/
@SuppressWarnings("unchecked")
@VisibleForTesting
Pair<List<WorkflowTask>, Map<String, Map<String, Object>>> getDynamicForkTasksAndInput(WorkflowTask taskToSchedule, Workflow workflowInstance, String dynamicForkTaskParam) throws TerminateWorkflowException {
    Map<String, Object> input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflowInstance, null, null);
    Object dynamicForkTasksJson = input.get(dynamicForkTaskParam);
    // ListOfWorkflowTasks is a Jackson TypeReference<List<WorkflowTask>> field used to convert the JSON payload
    List<WorkflowTask> dynamicForkWorkflowTasks = objectMapper.convertValue(dynamicForkTasksJson, ListOfWorkflowTasks);
    if (dynamicForkWorkflowTasks == null) {
        dynamicForkWorkflowTasks = new ArrayList<>();
    }
    for (WorkflowTask workflowTask : dynamicForkWorkflowTasks) {
        // Look up the task definition from the metadata store if it is not embedded in the workflow task
        if (workflowTask.getTaskDefinition() == null && StringUtils.isNotBlank(workflowTask.getName())) {
            workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName()));
        }
    }
    Object dynamicForkTasksInput = input.get(taskToSchedule.getDynamicForkTasksInputParamName());
    if (!(dynamicForkTasksInput instanceof Map)) {
        throw new TerminateWorkflowException("Input to the dynamically forked tasks is not a map -> expecting a map of K,V but found " + dynamicForkTasksInput);
    }
    return new ImmutablePair<>(dynamicForkWorkflowTasks, (Map<String, Map<String, Object>>) dynamicForkTasksInput);
}
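For orientation, a minimal sketch of the shape the resolved input is expected to have. The parameter names ("dynamicTasks", "dynamicTasksInput") and the task names are purely illustrative here; the real keys come from the workflow definition's dynamicForkTasksParam and dynamicForkTasksInputParamName, and the payload is normally produced by an upstream task.

// Hypothetical example of the resolved input this method reads (illustrative names only).
Map<String, Object> input = new HashMap<>();

WorkflowTask forked = new WorkflowTask();
forked.setName("encode_video");                // illustrative task name
forked.setTaskReferenceName("encode_video_0");
input.put("dynamicTasks", Collections.singletonList(forked));   // value looked up via dynamicForkTaskParam

Map<String, Map<String, Object>> forkedInputs = new HashMap<>();
forkedInputs.put("encode_video_0", Collections.singletonMap("bitrate", "4000k"));
input.put("dynamicTasksInput", forkedInputs);  // value looked up via getDynamicForkTasksInputParamName()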
use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
the class ForkJoinDynamicTaskMapper method createDynamicForkTask.
/**
* This method creates a FORK task and adds the list of dynamic fork tasks keyed by "forkedTaskDefs" and
* their task reference names keyed by "forkedTasks" into {@link Task#getInputData()}
*
* @param taskToSchedule A {@link WorkflowTask} representing {@link TaskType#FORK_JOIN_DYNAMIC}
* @param workflowInstance: An instance of the {@link Workflow} which represents the workflow being executed.
* @param taskId: The string representation of {@link java.util.UUID} which will be set as the taskId.
* @param dynForkTasks: The list of dynamic forked tasks, the reference names of these tasks will be added to the forkDynamicTask
* @return A new instance of {@link Task} representing a {@link SystemTaskType#FORK}
*/
@VisibleForTesting
Task createDynamicForkTask(WorkflowTask taskToSchedule, Workflow workflowInstance, String taskId, List<WorkflowTask> dynForkTasks) {
    Task forkDynamicTask = new Task();
    forkDynamicTask.setTaskType(SystemTaskType.FORK.name());
    forkDynamicTask.setTaskDefName(SystemTaskType.FORK.name());
    forkDynamicTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    forkDynamicTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    forkDynamicTask.setCorrelationId(workflowInstance.getCorrelationId());
    forkDynamicTask.setScheduledTime(System.currentTimeMillis());
    forkDynamicTask.setEndTime(System.currentTimeMillis());
    List<String> forkedTaskNames = dynForkTasks.stream().map(WorkflowTask::getTaskReferenceName).collect(Collectors.toList());
    forkDynamicTask.getInputData().put("forkedTasks", forkedTaskNames);
    // TODO: Remove this parameter in the later releases
    forkDynamicTask.getInputData().put("forkedTaskDefs", dynForkTasks);
    forkDynamicTask.setTaskId(taskId);
    // The FORK task itself does no work, so it is created in a COMPLETED state
    forkDynamicTask.setStatus(Task.Status.COMPLETED);
    forkDynamicTask.setWorkflowTask(taskToSchedule);
    forkDynamicTask.setWorkflowPriority(workflowInstance.getPriority());
    return forkDynamicTask;
}
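As a test-style sketch (assuming access to this package-private method; the mapper and the other variables below are assumed fixtures), the resulting FORK task carries the forked reference names in its input data:

// Sketch only: mapper, taskToSchedule, workflowInstance, taskId and dynForkTasks are assumed to exist.
Task fork = mapper.createDynamicForkTask(taskToSchedule, workflowInstance, taskId, dynForkTasks);

@SuppressWarnings("unchecked")
List<String> forkedRefNames = (List<String>) fork.getInputData().get("forkedTasks");
// e.g. ["encode_video_0", "encode_video_1"] -- one reference name per dynamically forked task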
use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
the class KafkaPublishTaskMapper method getMappedTasks.
/**
* This method maps a {@link WorkflowTask} of type {@link TaskType#KAFKA_PUBLISH}
* to a {@link Task} in a {@link Task.Status#SCHEDULED} state
*
* @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId
* @return a List with just one Kafka task
* @throws TerminateWorkflowException In case the task definition does not exist
*/
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException {
    logger.debug("TaskMapperContext {} in KafkaPublishTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    String taskId = taskMapperContext.getTaskId();
    int retryCount = taskMapperContext.getRetryCount();
    // Prefer the task definition from the context; otherwise fall back to the metadata store (may be null)
    TaskDef taskDefinition = Optional.ofNullable(taskMapperContext.getTaskDefinition())
            .orElseGet(() -> Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())).orElse(null));
    Map<String, Object> input = parametersUtils.getTaskInputV2(taskToSchedule.getInputParameters(), workflowInstance, taskId, taskDefinition);
    Task kafkaPublishTask = new Task();
    kafkaPublishTask.setTaskType(taskToSchedule.getType());
    kafkaPublishTask.setTaskDefName(taskToSchedule.getName());
    kafkaPublishTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    kafkaPublishTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    kafkaPublishTask.setWorkflowType(workflowInstance.getWorkflowName());
    kafkaPublishTask.setCorrelationId(workflowInstance.getCorrelationId());
    kafkaPublishTask.setScheduledTime(System.currentTimeMillis());
    kafkaPublishTask.setTaskId(taskId);
    kafkaPublishTask.setInputData(input);
    kafkaPublishTask.setStatus(Task.Status.SCHEDULED);
    kafkaPublishTask.setRetryCount(retryCount);
    kafkaPublishTask.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
    kafkaPublishTask.setWorkflowTask(taskToSchedule);
    kafkaPublishTask.setWorkflowPriority(workflowInstance.getPriority());
    if (Objects.nonNull(taskDefinition)) {
        kafkaPublishTask.setExecutionNameSpace(taskDefinition.getExecutionNameSpace());
        kafkaPublishTask.setIsolationGroupId(taskDefinition.getIsolationGroupId());
        kafkaPublishTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
        kafkaPublishTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds());
    }
    return Collections.singletonList(kafkaPublishTask);
}
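A test-style sketch of the mapping (the mapper and the TaskMapperContext below are assumed fixtures built elsewhere, e.g. in a unit test):

// Sketch only: kafkaPublishTaskMapper and taskMapperContext are assumed fixtures.
List<Task> mapped = kafkaPublishTaskMapper.getMappedTasks(taskMapperContext);
Task kafkaTask = mapped.get(0);                               // exactly one task is returned
assert kafkaTask.getStatus() == Task.Status.SCHEDULED;        // KAFKA_PUBLISH starts out SCHEDULED
assert kafkaTask.getTaskId().equals(taskMapperContext.getTaskId());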
use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
the class LambdaTaskMapper method getMappedTasks.
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) {
    LOGGER.debug("TaskMapperContext {} in LambdaTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    String taskId = taskMapperContext.getTaskId();
    // Prefer the task definition from the context; otherwise fall back to the metadata store (may be null)
    TaskDef taskDefinition = Optional.ofNullable(taskMapperContext.getTaskDefinition())
            .orElseGet(() -> Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())).orElse(null));
    Map<String, Object> taskInput = parametersUtils.getTaskInputV2(taskToSchedule.getInputParameters(), workflowInstance, taskId, taskDefinition);
    Task lambdaTask = new Task();
    lambdaTask.setTaskType(Lambda.TASK_NAME);
    lambdaTask.setTaskDefName(taskToSchedule.getName());
    lambdaTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    lambdaTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    lambdaTask.setWorkflowType(workflowInstance.getWorkflowName());
    lambdaTask.setCorrelationId(workflowInstance.getCorrelationId());
    lambdaTask.setStartTime(System.currentTimeMillis());
    lambdaTask.setScheduledTime(System.currentTimeMillis());
    lambdaTask.setInputData(taskInput);
    lambdaTask.setTaskId(taskId);
    // LAMBDA is evaluated by the workflow engine itself, so it is created IN_PROGRESS rather than SCHEDULED
    lambdaTask.setStatus(Task.Status.IN_PROGRESS);
    lambdaTask.setWorkflowTask(taskToSchedule);
    lambdaTask.setWorkflowPriority(workflowInstance.getPriority());
    return Collections.singletonList(lambdaTask);
}
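For comparison with the other mappers, a minimal sketch (the mapper and context below are assumed fixtures):

// Sketch only: lambdaTaskMapper and taskMapperContext are assumed fixtures.
Task lambda = lambdaTaskMapper.getMappedTasks(taskMapperContext).get(0);
assert Lambda.TASK_NAME.equals(lambda.getTaskType());
assert lambda.getStatus() == Task.Status.IN_PROGRESS;   // unlike SIMPLE or KAFKA_PUBLISH, which start SCHEDULED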
use of com.netflix.conductor.common.metadata.workflow.WorkflowTask in project conductor by Netflix.
the class SimpleTaskMapper method getMappedTasks.
/**
* This method maps a {@link WorkflowTask} of type {@link TaskType#SIMPLE}
* to a {@link Task}
*
* @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId
* @return a List with just one simple task
* @throws TerminateWorkflowException In case the task definition does not exist
*/
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException {
    logger.debug("TaskMapperContext {} in SimpleTaskMapper", taskMapperContext);
    WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
    Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
    int retryCount = taskMapperContext.getRetryCount();
    String retriedTaskId = taskMapperContext.getRetryTaskId();
    // A SIMPLE task must have a registered task definition; otherwise the workflow is terminated
    TaskDef taskDefinition = Optional.ofNullable(taskToSchedule.getTaskDefinition()).orElseThrow(() -> {
        String reason = String.format("Invalid task. Task %s does not have a definition", taskToSchedule.getName());
        return new TerminateWorkflowException(reason);
    });
    Map<String, Object> input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflowInstance, taskDefinition, taskMapperContext.getTaskId());
    Task simpleTask = new Task();
    simpleTask.setStartDelayInSeconds(taskToSchedule.getStartDelay());
    simpleTask.setTaskId(taskMapperContext.getTaskId());
    simpleTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    simpleTask.setInputData(input);
    simpleTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
    simpleTask.setWorkflowType(workflowInstance.getWorkflowName());
    simpleTask.setStatus(Task.Status.SCHEDULED);
    simpleTask.setTaskType(taskToSchedule.getName());
    simpleTask.setTaskDefName(taskToSchedule.getName());
    simpleTask.setCorrelationId(workflowInstance.getCorrelationId());
    simpleTask.setScheduledTime(System.currentTimeMillis());
    simpleTask.setRetryCount(retryCount);
    simpleTask.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
    simpleTask.setResponseTimeoutSeconds(taskDefinition.getResponseTimeoutSeconds());
    simpleTask.setWorkflowTask(taskToSchedule);
    simpleTask.setRetriedTaskId(retriedTaskId);
    simpleTask.setWorkflowPriority(workflowInstance.getPriority());
    simpleTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
    simpleTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds());
    return Collections.singletonList(simpleTask);
}
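A test-style sketch of the failure path (the mapper and the context below are assumed fixtures; the message matches the format string above):

// Sketch only: contextWithoutTaskDef is an assumed fixture whose WorkflowTask has no TaskDef.
try {
    simpleTaskMapper.getMappedTasks(contextWithoutTaskDef);
    // in a real test, fail() here if no exception is thrown
} catch (TerminateWorkflowException e) {
    // expected: "Invalid task. Task <name> does not have a definition"
}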