Example usage of com.netflix.conductor.common.metadata.tasks.TaskDef in the Netflix Conductor project.
From class TestWorkflowExecutor, method testScheduleNextIteration:
/**
 * Verifies that scheduling the next iteration of a loop task resolves the
 * poll-data domain for the loop-over SIMPLE task from the workflow's
 * task-to-domain mapping ("TEST" -> "domain1").
 */
@Test
public void testScheduleNextIteration() {
    Workflow workflow = generateSampleWorkflow();
    // Plain HashMap instead of double-brace initialization: the anonymous
    // subclass it creates pins the enclosing test instance and breaks
    // equals() symmetry against a regular HashMap.
    Map<String, String> taskToDomain = new HashMap<>();
    taskToDomain.put("TEST", "domain1");
    workflow.setTaskToDomain(taskToDomain);

    // A mocked loop task whose WorkflowTask declares one SIMPLE task to loop over.
    Task loopTask = mock(Task.class);
    WorkflowTask loopWfTask = mock(WorkflowTask.class);
    when(loopTask.getWorkflowTask()).thenReturn(loopWfTask);

    WorkflowTask loopOverTask = new WorkflowTask();
    loopOverTask.setType(TaskType.TASK_TYPE_SIMPLE);
    loopOverTask.setName("TEST");
    loopOverTask.setTaskDefinition(new TaskDef());
    List<WorkflowTask> loopOver = new ArrayList<>();
    loopOver.add(loopOverTask);
    when(loopWfTask.getLoopOver()).thenReturn(loopOver);

    workflowExecutor.scheduleNextIteration(loopTask, workflow);

    // The executor must look up poll data using the domain mapped for "TEST".
    verify(executionDAOFacade).getTaskPollDataByDomain("TEST", "domain1");
}
Example usage of com.netflix.conductor.common.metadata.tasks.TaskDef in the Netflix Conductor project.
From class JsonJQTransformTaskMapperTest, method getMappedTasks:
/**
 * Verifies that JsonJQTransformTaskMapper maps a JSON_JQ_TRANSFORM workflow
 * task to exactly one scheduled task of the same type.
 */
@Test
public void getMappedTasks() throws Exception {
    WorkflowTask taskToSchedule = new WorkflowTask();
    taskToSchedule.setName("json_jq_transform_task");
    taskToSchedule.setType(TaskType.JSON_JQ_TRANSFORM.name());
    taskToSchedule.setTaskDefinition(new TaskDef("json_jq_transform_task"));

    // Input arrays plus the jq expression that concatenates them.
    Map<String, Object> taskInput = new HashMap<>();
    taskInput.put("in1", new String[] { "a", "b" });
    taskInput.put("in2", new String[] { "c", "d" });
    taskInput.put("queryExpression", "{ out: (.in1 + .in2) }");
    taskToSchedule.setInputParameters(taskInput);

    String taskId = IDGenerator.generate();
    WorkflowDef wd = new WorkflowDef();
    Workflow w = new Workflow();
    w.setWorkflowDefinition(wd);

    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
            .withWorkflowDefinition(wd)
            .withWorkflowInstance(w)
            .withTaskDefinition(new TaskDef())
            .withTaskToSchedule(taskToSchedule)
            .withTaskInput(taskInput)
            .withRetryCount(0)
            .withTaskId(taskId)
            .build();

    List<Task> mappedTasks =
            new JsonJQTransformTaskMapper(parametersUtils, metadataDAO).getMappedTasks(taskMapperContext);

    // Null-check must run before the result is dereferenced; the original
    // asserted size() first, which would NPE before assertNotNull could fire.
    assertNotNull(mappedTasks);
    assertEquals(1, mappedTasks.size());
    assertEquals(TaskType.JSON_JQ_TRANSFORM.name(), mappedTasks.get(0).getTaskType());
}
Example usage of com.netflix.conductor.common.metadata.tasks.TaskDef in the Netflix Conductor project.
From class KafkaPublishTaskMapperTest, method getMappedTasks:
/**
 * Verifies that the Kafka task mapper produces exactly one scheduled task
 * of type KAFKA_PUBLISH for a workflow task carrying an inline TaskDef.
 */
@Test
public void getMappedTasks() {
    // Given: a KAFKA_PUBLISH workflow task with an inline task definition.
    WorkflowTask kafkaWorkflowTask = new WorkflowTask();
    kafkaWorkflowTask.setName("kafka_task");
    kafkaWorkflowTask.setType(TaskType.KAFKA_PUBLISH.name());
    kafkaWorkflowTask.setTaskDefinition(new TaskDef("kafka_task"));

    String newTaskId = IDGenerator.generate();
    String previousAttemptTaskId = IDGenerator.generate();

    Workflow workflowInstance = new Workflow();
    WorkflowDef definition = new WorkflowDef();
    workflowInstance.setWorkflowDefinition(definition);

    TaskMapperContext context = TaskMapperContext.newBuilder()
            .withWorkflowDefinition(definition)
            .withWorkflowInstance(workflowInstance)
            .withTaskDefinition(new TaskDef())
            .withTaskToSchedule(kafkaWorkflowTask)
            .withTaskInput(new HashMap<>())
            .withRetryCount(0)
            .withRetryTaskId(previousAttemptTaskId)
            .withTaskId(newTaskId)
            .build();

    // When: the mapper translates the context into schedulable tasks.
    List<Task> result = kafkaTaskMapper.getMappedTasks(context);

    // Then: exactly one task, typed KAFKA_PUBLISH.
    assertEquals(1, result.size());
    assertEquals(TaskType.KAFKA_PUBLISH.name(), result.get(0).getTaskType());
}
Example usage of com.netflix.conductor.common.metadata.tasks.TaskDef in the Netflix Conductor project.
From class KafkaPublishTaskMapperTest, method getMappedTasks_WithoutTaskDef:
/**
 * Verifies mapping when the WorkflowTask itself carries no inline TaskDef
 * ("WithoutTaskDef" presumably refers to that — the TaskMapperContext still
 * supplies a definition, whose executionNameSpace and isolationGroupId must
 * propagate onto the mapped task).
 */
@Test
public void getMappedTasks_WithoutTaskDef() {
    // Given: a KAFKA_PUBLISH workflow task with no setTaskDefinition call.
    WorkflowTask kafkaWorkflowTask = new WorkflowTask();
    kafkaWorkflowTask.setName("kafka_task");
    kafkaWorkflowTask.setType(TaskType.KAFKA_PUBLISH.name());

    String newTaskId = IDGenerator.generate();
    String previousAttemptTaskId = IDGenerator.generate();

    Workflow workflowInstance = new Workflow();
    WorkflowDef definition = new WorkflowDef();
    workflowInstance.setWorkflowDefinition(definition);

    // The context-level TaskDef carries the isolation attributes under test.
    String testExecutionNameSpace = "testExecutionNameSpace";
    String testIsolationGroupId = "testIsolationGroupId";
    TaskDef contextTaskDef = new TaskDef();
    contextTaskDef.setExecutionNameSpace(testExecutionNameSpace);
    contextTaskDef.setIsolationGroupId(testIsolationGroupId);

    TaskMapperContext context = TaskMapperContext.newBuilder()
            .withWorkflowDefinition(definition)
            .withWorkflowInstance(workflowInstance)
            .withTaskDefinition(contextTaskDef)
            .withTaskToSchedule(kafkaWorkflowTask)
            .withTaskInput(new HashMap<>())
            .withRetryCount(0)
            .withRetryTaskId(previousAttemptTaskId)
            .withTaskId(newTaskId)
            .build();

    // When
    List<Task> result = kafkaTaskMapper.getMappedTasks(context);

    // Then: one KAFKA_PUBLISH task inheriting the context TaskDef's attributes.
    assertEquals(1, result.size());
    assertEquals(TaskType.KAFKA_PUBLISH.name(), result.get(0).getTaskType());
    assertEquals(testExecutionNameSpace, result.get(0).getExecutionNameSpace());
    assertEquals(testIsolationGroupId, result.get(0).getIsolationGroupId());
}
Example usage of com.netflix.conductor.common.metadata.tasks.TaskDef in the Netflix Conductor project.
From class SetVariableTaskMapperTest, method getMappedTasks:
/**
 * Verifies that SetVariableTaskMapper maps a SET_VARIABLE workflow task to
 * exactly one scheduled task of the same type.
 */
@Test
public void getMappedTasks() throws Exception {
    WorkflowTask setVariableTask = new WorkflowTask();
    setVariableTask.setType(TaskType.TASK_TYPE_SET_VARIABLE);

    String generatedTaskId = IDGenerator.generate();

    WorkflowDef definition = new WorkflowDef();
    Workflow workflowInstance = new Workflow();
    workflowInstance.setWorkflowDefinition(definition);

    TaskMapperContext context = TaskMapperContext.newBuilder()
            .withWorkflowDefinition(definition)
            .withWorkflowInstance(workflowInstance)
            .withTaskDefinition(new TaskDef())
            .withTaskToSchedule(setVariableTask)
            .withRetryCount(0)
            .withTaskId(generatedTaskId)
            .build();

    List<Task> result = new SetVariableTaskMapper().getMappedTasks(context);

    Assert.assertNotNull(result);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(TaskType.TASK_TYPE_SET_VARIABLE, result.get(0).getTaskType());
}
Aggregations