Usage of org.springframework.cloud.dataflow.rest.client.dsl.task.Task in the spring-cloud/spring-cloud-dataflow project: class DataFlowIT, method testDeletePreviouslyUsedVersionShouldFailIfRelaunched.
@Test
public void testDeletePreviouslyUsedVersionShouldFailIfRelaunched() {
    // Scenario: deleting a previously used app version must cause a relaunch
    // that pins that version to fail.
    // Given a task app with 2 registered versions and a task definition,
    // a launch pinned to version 2 succeeds; after version 2 is unregistered,
    // relaunching with version 2 must be rejected by the server.
    minimumVersionCheck("testDeletePreviouslyUsedVersionShouldFailIfRelaunched");
    registerNewTimestampVersion();
    Task task = createTaskDefinition();
    // First launch, explicitly pinned to version 2 of the timestamp app.
    long firstExecutionId = task.launch(Collections.singletonMap("version.timestamp", TEST_VERSION_NUMBER), null);
    validateSuccessfulTaskLaunch(task, firstExecutionId);
    // Remove version 2; the same pinned launch must now raise a client error.
    resetTimestampVersion();
    assertThatThrownBy(() -> task.launch(Collections.singletonMap("version.timestamp", TEST_VERSION_NUMBER), null))
            .isInstanceOf(DataFlowClientException.class)
            .hasMessageContaining("Unknown task app: timestamp");
}
Usage of org.springframework.cloud.dataflow.rest.client.dsl.task.Task in the spring-cloud/spring-cloud-dataflow project: class DataFlowIT, method taskDefinitionDeleteWithCleanup.
@Test
public void taskDefinitionDeleteWithCleanup() {
    // Create a definition backed by the "scenario" batch app.
    Task task = Task.builder(dataFlowOperations)
            .name(randomTaskName())
            .definition("scenario")
            .description("Test scenario batch app that will fail on first pass")
            .build();
    String stepName = randomStepName();
    List<String> launchArgs = createNewJobandStepScenario(task.getTaskName(), stepName);
    // Launch once and verify the execution completed successfully.
    long executionId = task.launch(launchArgs);
    validateSuccessfulTaskLaunch(task, executionId);
    // Destroy with cleanup=true, then confirm both the definition and all of
    // its task executions have been removed.
    this.dataFlowOperations.taskOperations().destroy(task.getTaskName(), true);
    verifyTaskDefAndTaskExecutionCount(task.getTaskName(), 0, 0);
}
Usage of org.springframework.cloud.dataflow.rest.client.dsl.task.Task in the spring-cloud/spring-cloud-dataflow project: class DataFlowIT, method testRollingBackDefaultToPreviousVersionAndRunningShouldBeSuccessful.
@Test
public void testRollingBackDefaultToPreviousVersionAndRunningShouldBeSuccessful() {
    // Scenario: promote the default app version to version 2, then roll it
    // back to version 1; a fresh definition launched on the default must run
    // successfully with version 1.
    minimumVersionCheck("testRollingBackDefaultToPreviousVersionAndRunningShouldBeSuccessful");
    registerNewTimestampVersion();
    Task task = createTaskDefinition();
    // Launch on the current default and confirm the current version ran.
    long executionId = task.launch();
    validateSuccessfulTaskLaunch(task, executionId);
    validateSpecifiedVersion(task, CURRENT_VERSION_NUMBER);
    // Promote version 2 to default; the next launch must use version 2.
    setDefaultVersionForTimestamp(TEST_VERSION_NUMBER);
    executionId = task.launch();
    validateSuccessfulTaskLaunch(task, executionId, 2);
    validateSpecifiedVersion(task, TEST_VERSION_NUMBER);
    // Roll the default back to version 1 and launch a fresh definition.
    task = createTaskDefinition();
    setDefaultVersionForTimestamp(CURRENT_VERSION_NUMBER);
    executionId = task.launch();
    validateSuccessfulTaskLaunch(task, executionId);
    validateSpecifiedVersion(task, CURRENT_VERSION_NUMBER);
}
Usage of org.springframework.cloud.dataflow.rest.client.dsl.task.Task in the spring-cloud/spring-cloud-dataflow project: class DataFlowIT, method sequentialAndFailedSplitTest.
// Composed-task graph: t1 runs first, then a parallel split of {t2, b, t3},
// then t4. The "b" branch is a scenario app configured to fail its batch job
// on the first pass, so the split fails and t4 is skipped; the failed job is
// then restarted and the whole graph completes.
@Test
public void sequentialAndFailedSplitTest() {
logger.info("composed-task-sequentialAndFailedSplit-test");
TaskBuilder taskBuilder = Task.builder(dataFlowOperations);
try (Task task = taskBuilder.name(randomTaskName()).definition(String.format("t1: timestamp && <t2: timestamp ||b:scenario --io.spring.fail-batch=true --io.spring.jobName=%s --spring.cloud.task.batch.fail-on-job-failure=true || t3: timestamp> && t4: timestamp", randomJobName())).description("sequentialAndFailedSplitTest").build()) {
// The composed definition expands into 5 child tasks: t1, t2, b, t3, t4.
assertThat(task.composedTaskChildTasks().size()).isEqualTo(5);
assertThat(task.composedTaskChildTasks().stream().map(Task::getTaskName).collect(Collectors.toList())).hasSameElementsAs(fullTaskNames(task, "b", "t1", "t2", "t3", "t4"));
long launchId = task.launch(composedTaskLaunchArguments());
// Servers older than 2.8.0 report the failed split as COMPLETE; newer
// servers report ERROR for the parent execution.
if (runtimeApps.dataflowServerVersionLowerThan("2.8.0-SNAPSHOT")) {
Awaitility.await().until(() -> task.executionStatus(launchId) == TaskExecutionStatus.COMPLETE);
} else {
Awaitility.await().until(() -> task.executionStatus(launchId) == TaskExecutionStatus.ERROR);
}
// Parent Task: the composed-task runner itself exits with success even
// though a child branch failed.
assertThat(task.executions().size()).isEqualTo(1);
assertThat(task.execution(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
task.executions().forEach(execution -> assertThat(execution.getExitCode()).isEqualTo(EXIT_CODE_SUCCESS));
// Successful children: t1 ran before the split; t2 and t3 are the
// passing branches of the split.
childTasksBySuffix(task, "t1", "t2", "t3").forEach(childTask -> {
assertThat(childTask.executions().size()).isEqualTo(1);
assertThat(childTask.executionByParentExecutionId(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
});
// Failed tasks: "b" fails its batch job on the first pass by design.
childTasksBySuffix(task, "b").forEach(childTask -> {
assertThat(childTask.executions().size()).isEqualTo(1);
assertThat(childTask.executionByParentExecutionId(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_ERROR);
});
// Not run tasks: t4 is gated on the split succeeding, so it never ran.
childTasksBySuffix(task, "t4").forEach(childTask -> {
assertThat(childTask.executions().size()).isEqualTo(0);
});
// Parent Task plus all child tasks are registered as definitions.
assertThat(taskBuilder.allTasks().size()).isEqualTo(task.composedTaskChildTasks().size() + 1);
// restart job: re-run the single failed job execution; the restart shows
// up as a second parent task execution.
assertThat(task.executions().size()).isEqualTo(1);
List<Long> jobExecutionIds = task.executions().stream().findFirst().get().getJobExecutionIds();
assertThat(jobExecutionIds.size()).isEqualTo(1);
dataFlowOperations.jobOperations().executionRestart(jobExecutionIds.get(0));
// The restarted run gets the highest execution id; wait for it to finish.
long launchId2 = task.executions().stream().mapToLong(TaskExecutionResource::getExecutionId).max().getAsLong();
Awaitility.await().until(() -> task.executionStatus(launchId2) == TaskExecutionStatus.COMPLETE);
assertThat(task.executions().size()).isEqualTo(2);
assertThat(task.executionStatus(launchId2)).isEqualTo(TaskExecutionStatus.COMPLETE);
assertThat(task.execution(launchId2).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
// On the second pass "b" succeeds (scenario app fails only the first pass).
childTasksBySuffix(task, "b").forEach(childTask -> {
assertThat(childTask.executions().size()).isEqualTo(2);
assertThat(childTask.executionByParentExecutionId(launchId2).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
});
// t4 finally runs once the split completes successfully.
childTasksBySuffix(task, "t4").forEach(childTask -> {
assertThat(childTask.executions().size()).isEqualTo(1);
assertThat(childTask.executionByParentExecutionId(launchId2).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
});
// One job execution per pass of the failing batch branch.
assertThat(task.jobExecutionResources().size()).isEqualTo(2);
}
// Closing the Task (try-with-resources) destroys all definitions.
assertThat(taskBuilder.allTasks().size()).isEqualTo(0);
}
Usage of org.springframework.cloud.dataflow.rest.client.dsl.task.Task in the spring-cloud/spring-cloud-dataflow project: class DataFlowIT, method testDataFlowUsesAllPropertiesRegardlessIfPreviousExecutionWasDeleted.
@Test
public void testDataFlowUsesAllPropertiesRegardlessIfPreviousExecutionWasDeleted() {
    // Scenario: when the most recent task execution is deleted, the next
    // launch should source its properties from the last execution still
    // available — properties from the deleted execution must not reappear.
    minimumVersionCheck("testDataFlowUsesAllPropertiesRegardlessIfPreviousExecutionWasDeleted");
    try (Task task = createTaskDefinition()) {
        // Execution 1 carries "firstkey".
        List<Long> initialExecutionIds = createTaskExecutionsForDefinition(task, Collections.singletonMap("app.timestamp.firstkey", "firstvalue"), 1);
        verifyAllSpecifiedTaskExecutions(task, initialExecutionIds, true);
        // Execution 2 carries "secondkey"; delete it afterwards.
        long secondExecutionId = task.launch(Collections.singletonMap("app.timestamp.secondkey", "secondvalue"), Collections.emptyList());
        assertThat(task.execution(secondExecutionId).isPresent()).isTrue();
        validateSuccessfulTaskLaunch(task, secondExecutionId, 2);
        safeCleanupTaskExecution(task, secondExecutionId);
        assertThat(task.execution(secondExecutionId).isPresent()).isFalse();
        // Execution 3 carries "thirdkey" and should inherit from execution 1,
        // not from the deleted execution 2.
        long thirdExecutionId = task.launch(Collections.singletonMap("app.timestamp.thirdkey", "thirdvalue"), Collections.emptyList());
        assertThat(task.execution(thirdExecutionId).isPresent()).isTrue();
        validateSuccessfulTaskLaunch(task, thirdExecutionId, 2);
        Map<String, String> mergedAppProperties = task.execution(thirdExecutionId).get().getAppProperties();
        assertThat(mergedAppProperties.containsKey("firstkey")).isTrue();
        assertThat(mergedAppProperties.containsKey("secondkey")).isFalse();
        assertThat(mergedAppProperties.containsKey("thirdkey")).isTrue();
    }
}
Aggregations