Usage of com.netflix.conductor.core.orchestration.ExecutionDAOFacade in the Netflix Conductor project.
Example 1: the DoWhileTest class, setup method:
@Before
public void setup() {
    // Every collaborator of the WorkflowExecutor under test is a Mockito mock;
    // the executor itself is wrapped in a spy so individual methods can be stubbed.
    workflow = Mockito.mock(Workflow.class);
    deciderService = Mockito.mock(DeciderService.class);
    metadataDAO = Mockito.mock(MetadataDAO.class);
    queueDAO = Mockito.mock(QueueDAO.class);
    parametersUtils = Mockito.mock(ParametersUtils.class);
    metadataMapperService = Mockito.mock(MetadataMapperService.class);
    workflowStatusListener = Mockito.mock(WorkflowStatusListener.class);
    executionDAOFacade = Mockito.mock(ExecutionDAOFacade.class);
    executionLockService = Mockito.mock(ExecutionLockService.class);
    config = Mockito.mock(Configuration.class);
    provider = spy(new WorkflowExecutor(deciderService, metadataDAO, queueDAO, metadataMapperService,
            workflowStatusListener, executionDAOFacade, config, executionLockService, parametersUtils));

    // Two COMPLETED HTTP tasks that the DO_WHILE loop iterates over.
    loopWorkflowTask1 = namedWorkflowTask("task1");
    loopWorkflowTask2 = namedWorkflowTask("task2");
    task1 = completedHttpTask(loopWorkflowTask1);
    task2 = completedHttpTask(loopWorkflowTask2);

    // The DO_WHILE task itself, observed on its first iteration.
    loopTask = new Task();
    loopTask.setReferenceTaskName("loopTask");
    loopTask.setTaskType(TaskType.DO_WHILE.name());
    loopTask.setInputData(new HashMap<>());
    loopTask.setIteration(1);

    loopWorkflowTask = new WorkflowTask();
    loopWorkflowTask.setTaskReferenceName("loopTask");
    loopWorkflowTask.setType(TaskType.DO_WHILE.name());
    loopWorkflowTask.setName("loopTask");
    loopWorkflowTask.setLoopCondition("if ($.loopTask['iteration'] < 1) { false; } else { true; }");
    loopWorkflowTask.setLoopOver(Arrays.asList(task1.getWorkflowTask(), task2.getWorkflowTask()));
    loopTask.setWorkflowTask(loopWorkflowTask);

    doWhile = new DoWhile();
    loopTaskDef = Mockito.mock(TaskDef.class);

    // doReturn(..).when(..) is used throughout so that stubbing never invokes the
    // spy's real methods; the "__2" names stub the second-iteration lookups.
    Mockito.doReturn(loopTaskDef).when(provider).getTaskDefinition(loopTask);
    Mockito.doReturn(task1).when(workflow).getTaskByRefName(task1.getReferenceTaskName());
    Mockito.doReturn(task2).when(workflow).getTaskByRefName(task2.getReferenceTaskName());
    Mockito.doReturn(task1).when(workflow).getTaskByRefName("task1__2");
    Mockito.doReturn(task2).when(workflow).getTaskByRefName("task2__2");
    Mockito.doReturn(new HashMap<>()).when(parametersUtils)
            .getTaskInputV2(isA(Map.class), isA(Workflow.class), isA(String.class), isA(TaskDef.class));
}

/** Builds a WorkflowTask whose name and task reference name are both {@code name}. */
private WorkflowTask namedWorkflowTask(String name) {
    WorkflowTask workflowTask = new WorkflowTask();
    workflowTask.setTaskReferenceName(name);
    workflowTask.setName(name);
    return workflowTask;
}

/** Builds a COMPLETED HTTP task on iteration 1, bound to the given task definition. */
private Task completedHttpTask(WorkflowTask definition) {
    Task task = new Task();
    task.setWorkflowTask(definition);
    task.setReferenceTaskName(definition.getTaskReferenceName());
    task.setStatus(Task.Status.COMPLETED);
    task.setTaskType(TaskType.HTTP.name());
    task.setInputData(new HashMap<>());
    task.setIteration(1);
    return task;
}
Usage of com.netflix.conductor.core.orchestration.ExecutionDAOFacade in the Netflix Conductor project.
Example 2: the TestWorkflowExecutor class, init method:
@Before
public void init() {
    // Builds a real WorkflowExecutor backed by a real DeciderService, with the
    // persistence/queue/listener collaborators mocked out.
    TestConfiguration config = new TestConfiguration();
    executionDAOFacade = mock(ExecutionDAOFacade.class);
    metadataDAO = mock(MetadataDAO.class);
    queueDAO = mock(QueueDAO.class);
    workflowStatusListener = mock(WorkflowStatusListener.class);
    ExternalPayloadStorageUtils externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class);
    executionLockService = mock(ExecutionLockService.class);

    // Fix: a second JsonMapperProvider().get() was previously built just for
    // SubWorkflow below; one shared ObjectMapper is sufficient.
    ObjectMapper objectMapper = new JsonMapperProvider().get();
    ParametersUtils parametersUtils = new ParametersUtils();

    // One TaskMapper per supported task type, keyed by the type name the decider looks up.
    Map<String, TaskMapper> taskMappers = new HashMap<>();
    taskMappers.put("DECISION", new DecisionTaskMapper());
    taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO));
    taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper());
    taskMappers.put("JOIN", new JoinTaskMapper());
    taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO));
    taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO));
    taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils));
    taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO));
    taskMappers.put("EVENT", new EventTaskMapper(parametersUtils));
    taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils));
    taskMappers.put("HTTP", new HTTPTaskMapper(parametersUtils, metadataDAO));
    taskMappers.put("LAMBDA", new LambdaTaskMapper(parametersUtils, metadataDAO));

    // Instantiated solely for their constructor side effects; the instances are
    // deliberately discarded. NOTE(review): presumably WorkflowSystemTask
    // implementations self-register by type name in a global registry — confirm.
    new SubWorkflow(objectMapper);
    new Lambda();

    DeciderService deciderService = new DeciderService(parametersUtils, metadataDAO, externalPayloadStorageUtils, taskMappers, config);
    MetadataMapperService metadataMapperService = new MetadataMapperService(metadataDAO);
    workflowExecutor = new WorkflowExecutor(deciderService, metadataDAO, queueDAO, metadataMapperService,
            workflowStatusListener, executionDAOFacade, config, executionLockService, parametersUtils);
}
Usage of com.netflix.conductor.core.orchestration.ExecutionDAOFacade in the Netflix Conductor project.
Example 3: the ArchivingWorkflowStatusListenerTest class, before method:
@Before
public void before() {
    // A mocked facade lets the test verify archiving calls without a real execution store.
    executionDAOFacade = Mockito.mock(ExecutionDAOFacade.class);
    cut = new ArchivingWorkflowStatusListener(executionDAOFacade);

    // The workflow under test only needs a unique id.
    wf = new Workflow();
    wf.setWorkflowId(UUID.randomUUID().toString());
}
Usage of com.netflix.conductor.core.orchestration.ExecutionDAOFacade in the Netflix Conductor project.
Example 4: the TestWorkflowExecutor class, testScheduleTask method:
@Test
public void testScheduleTask() {
    // Flags flipped by the anonymous system tasks below, so the test can observe
    // which ones were actually started by scheduleTask.
    AtomicBoolean httpTaskExecuted = new AtomicBoolean(false);
    AtomicBoolean http2TaskExecuted = new AtomicBoolean(false);

    // Instantiated for constructor side effects only; the instances are discarded.
    // NOTE(review): presumably WorkflowSystemTask constructors self-register the
    // task by its type name — confirm against WorkflowSystemTask.
    new Wait();
    // "HTTP" is declared async: scheduleTask is expected to push it onto a queue
    // instead of starting it inline (hence the assertFalse on httpTaskExecuted below).
    new WorkflowSystemTask("HTTP") {
        @Override
        public boolean isAsync() {
            return true;
        }
        @Override
        public void start(Workflow workflow, Task task, WorkflowExecutor executor) {
            httpTaskExecuted.set(true);
            task.setStatus(Status.COMPLETED);
            super.start(workflow, task, executor);
        }
    };
    // "HTTP2" keeps the default isAsync() and is expected to be started inline.
    new WorkflowSystemTask("HTTP2") {
        @Override
        public void start(Workflow workflow, Task task, WorkflowExecutor executor) {
            http2TaskExecuted.set(true);
            task.setStatus(Status.COMPLETED);
            super.start(workflow, task, executor);
        }
    };

    Workflow workflow = new Workflow();
    workflow.setWorkflowId("1");
    List<Task> tasks = new LinkedList<>();

    // Task definitions. Note neither taskToSchedule nor taskToSchedule2 gets a
    // name or task reference name — getName()/getTaskReferenceName() return
    // whatever the unset default is (presumably null).
    WorkflowTask taskToSchedule = new WorkflowTask();
    taskToSchedule.setWorkflowTaskType(TaskType.USER_DEFINED);
    taskToSchedule.setType("HTTP");
    WorkflowTask taskToSchedule2 = new WorkflowTask();
    taskToSchedule2.setWorkflowTaskType(TaskType.USER_DEFINED);
    taskToSchedule2.setType("HTTP2");
    WorkflowTask wait = new WorkflowTask();
    wait.setWorkflowTaskType(TaskType.WAIT);
    wait.setType("WAIT");
    wait.setTaskReferenceName("wait");

    // task1: SCHEDULED task of type "HTTP" (the async system task above).
    Task task1 = new Task();
    task1.setTaskType(taskToSchedule.getType());
    task1.setTaskDefName(taskToSchedule.getName());
    task1.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    task1.setWorkflowInstanceId(workflow.getWorkflowId());
    task1.setCorrelationId(workflow.getCorrelationId());
    task1.setScheduledTime(System.currentTimeMillis());
    task1.setTaskId(IDGenerator.generate());
    task1.setInputData(new HashMap<>());
    task1.setStatus(Status.SCHEDULED);
    task1.setRetryCount(0);
    task1.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
    task1.setWorkflowTask(taskToSchedule);

    // task2: WAIT task already IN_PROGRESS.
    // NOTE(review): defName/refName/workflowTask come from taskToSchedule rather
    // than the `wait` definition — looks like copy-paste from task1; the `wait`
    // WorkflowTask built above is otherwise unused. Confirm intent.
    Task task2 = new Task();
    task2.setTaskType(Wait.NAME);
    task2.setTaskDefName(taskToSchedule.getName());
    task2.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    task2.setWorkflowInstanceId(workflow.getWorkflowId());
    task2.setCorrelationId(workflow.getCorrelationId());
    task2.setScheduledTime(System.currentTimeMillis());
    task2.setInputData(new HashMap<>());
    task2.setTaskId(IDGenerator.generate());
    task2.setStatus(Status.IN_PROGRESS);
    task2.setWorkflowTask(taskToSchedule);

    // task3: SCHEDULED task of type "HTTP2" (the synchronous system task above).
    // NOTE(review): defName/refName/setWorkflowTask again use taskToSchedule even
    // though the type comes from taskToSchedule2 — likely the same copy-paste.
    Task task3 = new Task();
    task3.setTaskType(taskToSchedule2.getType());
    task3.setTaskDefName(taskToSchedule.getName());
    task3.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
    task3.setWorkflowInstanceId(workflow.getWorkflowId());
    task3.setCorrelationId(workflow.getCorrelationId());
    task3.setScheduledTime(System.currentTimeMillis());
    task3.setTaskId(IDGenerator.generate());
    task3.setInputData(new HashMap<>());
    task3.setStatus(Status.SCHEDULED);
    task3.setRetryCount(0);
    task3.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
    task3.setWorkflowTask(taskToSchedule);

    tasks.add(task1);
    tasks.add(task2);
    tasks.add(task3);

    when(executionDAOFacade.createTasks(tasks)).thenReturn(tasks);

    // Count every updateTask(..) call as one "started" task.
    AtomicInteger startedTaskCount = new AtomicInteger(0);
    doAnswer(invocation -> {
        startedTaskCount.incrementAndGet();
        return null;
    }).when(executionDAOFacade).updateTask(any());

    // Count queue pushes on both push(..) overloads.
    AtomicInteger queuedTaskCount = new AtomicInteger(0);
    final Answer answer = invocation -> {
        String queueName = invocation.getArgument(0, String.class);
        System.out.println(queueName);
        queuedTaskCount.incrementAndGet();
        return null;
    };
    doAnswer(answer).when(queueDAO).push(any(), any(), anyLong());
    doAnswer(answer).when(queueDAO).push(any(), any(), anyInt(), anyLong());

    boolean stateChanged = workflowExecutor.scheduleTask(workflow, tasks);

    // Expected split: 2 tasks started (updateTask), 1 task queued (the async HTTP),
    // and only the synchronous HTTP2 system task's start(..) actually ran.
    assertEquals(2, startedTaskCount.get());
    assertEquals(1, queuedTaskCount.get());
    assertTrue(stateChanged);
    assertFalse(httpTaskExecuted.get());
    assertTrue(http2TaskExecuted.get());
}
Aggregations