Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
From the class TestSubWorkflow, method testStartSubWorkflowWithWorkflowInput:
@Test
public void testStartSubWorkflowWithWorkflowInput() {
    WorkflowDef workflowDef = new WorkflowDef();
    Workflow workflowInstance = new Workflow();
    workflowInstance.setWorkflowDefinition(workflowDef);
    Task task = new Task();
    task.setOutputData(new HashMap<>());
    Map<String, Object> inputData = new HashMap<>();
    inputData.put("subWorkflowName", "UnitWorkFlow");
    inputData.put("subWorkflowVersion", 3);
    Map<String, Object> workflowInput = new HashMap<>();
    workflowInput.put("test", "value");
    inputData.put("workflowInput", workflowInput);
    task.setInputData(inputData);
    when(workflowExecutor.startWorkflow(
            eq("UnitWorkFlow"), eq(3), eq(workflowInput), eq(null), any(), any(), any(), eq(null), any()))
        .thenReturn("workflow_1");
    subWorkflow.start(workflowInstance, task, workflowExecutor);
    assertEquals("workflow_1", task.getSubWorkflowId());
}
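The workflowExecutor mock and the subWorkflow system task used above are created in the test fixture, which the snippet omits. A minimal sketch of that setup, assuming JUnit 4 and Mockito (field names match the snippet; the SubWorkflow constructor differs between Conductor versions):

private WorkflowExecutor workflowExecutor;
private SubWorkflow subWorkflow;

@Before
public void setup() {
    // Mock the executor so startWorkflow(...) can be stubbed with when(...).thenReturn(...).
    workflowExecutor = Mockito.mock(WorkflowExecutor.class);
    // Recent Conductor versions pass an ObjectMapper to the SubWorkflow system task;
    // older versions expose a no-arg constructor.
    subWorkflow = new SubWorkflow(new ObjectMapper());
}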
Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
From the class JsonJQTransformTaskMapperTest, method getMappedTasks:
@Test
public void getMappedTasks() throws Exception {
    WorkflowTask taskToSchedule = new WorkflowTask();
    taskToSchedule.setName("json_jq_transform_task");
    taskToSchedule.setType(TaskType.JSON_JQ_TRANSFORM.name());
    taskToSchedule.setTaskDefinition(new TaskDef("json_jq_transform_task"));
    Map<String, Object> taskInput = new HashMap<>();
    taskInput.put("in1", new String[] { "a", "b" });
    taskInput.put("in2", new String[] { "c", "d" });
    taskInput.put("queryExpression", "{ out: (.in1 + .in2) }");
    taskToSchedule.setInputParameters(taskInput);
    String taskId = IDGenerator.generate();
    WorkflowDef wd = new WorkflowDef();
    Workflow w = new Workflow();
    w.setWorkflowDefinition(wd);
    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
        .withWorkflowDefinition(wd)
        .withWorkflowInstance(w)
        .withTaskDefinition(new TaskDef())
        .withTaskToSchedule(taskToSchedule)
        .withTaskInput(taskInput)
        .withRetryCount(0)
        .withTaskId(taskId)
        .build();
    List<Task> mappedTasks = new JsonJQTransformTaskMapper(parametersUtils, metadataDAO).getMappedTasks(taskMapperContext);
    assertNotNull(mappedTasks);
    assertEquals(1, mappedTasks.size());
    assertEquals(TaskType.JSON_JQ_TRANSFORM.name(), mappedTasks.get(0).getTaskType());
}
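For context, the mapper only schedules the JSON_JQ_TRANSFORM task; it does not run the query. When a worker later evaluates the queryExpression, the jq expression .in1 + .in2 concatenates the two arrays, so the task output would be { "out": ["a", "b", "c", "d"] } (standard jq semantics, not asserted by this test).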
Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
From the class JsonJQTransformTaskMapperTest, method getMappedTasks_WithoutTaskDef:
@Test
public void getMappedTasks_WithoutTaskDef() throws Exception {
    WorkflowTask taskToSchedule = new WorkflowTask();
    taskToSchedule.setName("json_jq_transform_task");
    taskToSchedule.setType(TaskType.JSON_JQ_TRANSFORM.name());
    Map<String, Object> taskInput = new HashMap<>();
    taskInput.put("in1", new String[] { "a", "b" });
    taskInput.put("in2", new String[] { "c", "d" });
    taskInput.put("queryExpression", "{ out: (.in1 + .in2) }");
    taskToSchedule.setInputParameters(taskInput);
    String taskId = IDGenerator.generate();
    WorkflowDef wd = new WorkflowDef();
    Workflow w = new Workflow();
    w.setWorkflowDefinition(wd);
    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
        .withWorkflowDefinition(wd)
        .withWorkflowInstance(w)
        .withTaskDefinition(null)
        .withTaskToSchedule(taskToSchedule)
        .withTaskInput(taskInput)
        .withRetryCount(0)
        .withTaskId(taskId)
        .build();
    List<Task> mappedTasks = new JsonJQTransformTaskMapper(parametersUtils, metadataDAO).getMappedTasks(taskMapperContext);
    assertNotNull(mappedTasks);
    assertEquals(1, mappedTasks.size());
    assertEquals(TaskType.JSON_JQ_TRANSFORM.name(), mappedTasks.get(0).getTaskType());
}
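Both JsonJQTransformTaskMapperTest methods reference parametersUtils and metadataDAO fields that the snippets do not show; passing a null task definition in the second test presumably exercises the mapper's fallback to a metadata lookup, which is why a MetadataDAO is supplied at all. A plausible fixture, assuming Mockito mocks (the real test may construct these differently):

private ParametersUtils parametersUtils;
private MetadataDAO metadataDAO;

@Before
public void setup() {
    // Mocks suffice here because the assertions only inspect the mapped Task, not evaluated parameters.
    parametersUtils = Mockito.mock(ParametersUtils.class);
    metadataDAO = Mockito.mock(MetadataDAO.class);
}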
Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
From the class KafkaPublishTaskMapperTest, method getMappedTasks:
@Test
public void getMappedTasks() {
    // Given
    WorkflowTask taskToSchedule = new WorkflowTask();
    taskToSchedule.setName("kafka_task");
    taskToSchedule.setType(TaskType.KAFKA_PUBLISH.name());
    taskToSchedule.setTaskDefinition(new TaskDef("kafka_task"));
    String taskId = IDGenerator.generate();
    String retriedTaskId = IDGenerator.generate();
    Workflow workflow = new Workflow();
    WorkflowDef workflowDef = new WorkflowDef();
    workflow.setWorkflowDefinition(workflowDef);
    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
        .withWorkflowDefinition(workflowDef)
        .withWorkflowInstance(workflow)
        .withTaskDefinition(new TaskDef())
        .withTaskToSchedule(taskToSchedule)
        .withTaskInput(new HashMap<>())
        .withRetryCount(0)
        .withRetryTaskId(retriedTaskId)
        .withTaskId(taskId)
        .build();
    // When
    List<Task> mappedTasks = kafkaTaskMapper.getMappedTasks(taskMapperContext);
    // Then
    assertEquals(1, mappedTasks.size());
    assertEquals(TaskType.KAFKA_PUBLISH.name(), mappedTasks.get(0).getTaskType());
}
Use of com.netflix.conductor.common.metadata.workflow.WorkflowDef in project conductor by Netflix.
From the class KafkaPublishTaskMapperTest, method getMappedTasks_WithoutTaskDef:
@Test
public void getMappedTasks_WithoutTaskDef() {
    // Given
    WorkflowTask taskToSchedule = new WorkflowTask();
    taskToSchedule.setName("kafka_task");
    taskToSchedule.setType(TaskType.KAFKA_PUBLISH.name());
    String taskId = IDGenerator.generate();
    String retriedTaskId = IDGenerator.generate();
    Workflow workflow = new Workflow();
    WorkflowDef workflowDef = new WorkflowDef();
    workflow.setWorkflowDefinition(workflowDef);
    TaskDef taskDefinition = new TaskDef();
    String testExecutionNameSpace = "testExecutionNameSpace";
    taskDefinition.setExecutionNameSpace(testExecutionNameSpace);
    String testIsolationGroupId = "testIsolationGroupId";
    taskDefinition.setIsolationGroupId(testIsolationGroupId);
    TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
        .withWorkflowDefinition(workflowDef)
        .withWorkflowInstance(workflow)
        .withTaskDefinition(taskDefinition)
        .withTaskToSchedule(taskToSchedule)
        .withTaskInput(new HashMap<>())
        .withRetryCount(0)
        .withRetryTaskId(retriedTaskId)
        .withTaskId(taskId)
        .build();
    // When
    List<Task> mappedTasks = kafkaTaskMapper.getMappedTasks(taskMapperContext);
    // Then
    assertEquals(1, mappedTasks.size());
    assertEquals(TaskType.KAFKA_PUBLISH.name(), mappedTasks.get(0).getTaskType());
    assertEquals(testExecutionNameSpace, mappedTasks.get(0).getExecutionNameSpace());
    assertEquals(testIsolationGroupId, mappedTasks.get(0).getIsolationGroupId());
}
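The kafkaTaskMapper field used by both KafkaPublishTaskMapperTest methods is likewise built in the omitted fixture. Assuming its constructor mirrors the JsonJQTransformTaskMapper above (ParametersUtils plus MetadataDAO, an assumption about this Conductor version), the setup would resemble:

private KafkaPublishTaskMapper kafkaTaskMapper;

@Before
public void setup() {
    // Hypothetical wiring; constructor arguments are assumed to match the JQ mapper shown earlier.
    kafkaTaskMapper = new KafkaPublishTaskMapper(Mockito.mock(ParametersUtils.class), Mockito.mock(MetadataDAO.class));
}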