
Example 1 with TaskBuilder

Use of org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder in project spring-cloud-dataflow by spring-cloud.

From the class DataFlowIT, method sequentialAndFailedSplitTest.

@Test
public void sequentialAndFailedSplitTest() {
    logger.info("composed-task-sequentialAndFailedSplit-test");
    TaskBuilder taskBuilder = Task.builder(dataFlowOperations);
    try (Task task = taskBuilder
            .name(randomTaskName())
            .definition(String.format("t1: timestamp && <t2: timestamp ||b:scenario --io.spring.fail-batch=true --io.spring.jobName=%s --spring.cloud.task.batch.fail-on-job-failure=true || t3: timestamp> && t4: timestamp", randomJobName()))
            .description("sequentialAndFailedSplitTest")
            .build()) {
        assertThat(task.composedTaskChildTasks().size()).isEqualTo(5);
        assertThat(task.composedTaskChildTasks().stream().map(Task::getTaskName).collect(Collectors.toList())).hasSameElementsAs(fullTaskNames(task, "b", "t1", "t2", "t3", "t4"));
        long launchId = task.launch(composedTaskLaunchArguments());
        if (runtimeApps.dataflowServerVersionLowerThan("2.8.0-SNAPSHOT")) {
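            // Servers older than 2.8.0 report the parent of a failed split as COMPLETE rather than ERROR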
            Awaitility.await().until(() -> task.executionStatus(launchId) == TaskExecutionStatus.COMPLETE);
        } else {
            Awaitility.await().until(() -> task.executionStatus(launchId) == TaskExecutionStatus.ERROR);
        }
        // Parent Task
        assertThat(task.executions().size()).isEqualTo(1);
        assertThat(task.execution(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
        task.executions().forEach(execution -> assertThat(execution.getExitCode()).isEqualTo(EXIT_CODE_SUCCESS));
        // Successful
        childTasksBySuffix(task, "t1", "t2", "t3").forEach(childTask -> {
            assertThat(childTask.executions().size()).isEqualTo(1);
            assertThat(childTask.executionByParentExecutionId(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
        });
        // Failed tasks
        childTasksBySuffix(task, "b").forEach(childTask -> {
            assertThat(childTask.executions().size()).isEqualTo(1);
            assertThat(childTask.executionByParentExecutionId(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_ERROR);
        });
        // Not run tasks
        childTasksBySuffix(task, "t4").forEach(childTask -> {
            assertThat(childTask.executions().size()).isEqualTo(0);
        });
        // All task definitions: the composed child tasks plus the parent
        assertThat(taskBuilder.allTasks().size()).isEqualTo(task.composedTaskChildTasks().size() + 1);
        // restart job
        assertThat(task.executions().size()).isEqualTo(1);
        List<Long> jobExecutionIds = task.executions().stream().findFirst().get().getJobExecutionIds();
        assertThat(jobExecutionIds.size()).isEqualTo(1);
        dataFlowOperations.jobOperations().executionRestart(jobExecutionIds.get(0));
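        // The restart creates a new task execution; the highest execution id is the restarted run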
        long launchId2 = task.executions().stream().mapToLong(TaskExecutionResource::getExecutionId).max().getAsLong();
        Awaitility.await().until(() -> task.executionStatus(launchId2) == TaskExecutionStatus.COMPLETE);
        assertThat(task.executions().size()).isEqualTo(2);
        assertThat(task.executionStatus(launchId2)).isEqualTo(TaskExecutionStatus.COMPLETE);
        assertThat(task.execution(launchId2).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
        childTasksBySuffix(task, "b").forEach(childTask -> {
            assertThat(childTask.executions().size()).isEqualTo(2);
            assertThat(childTask.executionByParentExecutionId(launchId2).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
        });
        childTasksBySuffix(task, "t4").forEach(childTask -> {
            assertThat(childTask.executions().size()).isEqualTo(1);
            assertThat(childTask.executionByParentExecutionId(launchId2).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
        });
        assertThat(task.jobExecutionResources().size()).isEqualTo(2);
    }
    assertThat(taskBuilder.allTasks().size()).isEqualTo(0);
}
Also used : TaskBuilder(org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder) Task(org.springframework.cloud.dataflow.rest.client.dsl.task.Task) AtomicLong(java.util.concurrent.atomic.AtomicLong) TaskExecutionResource(org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource) Test(org.junit.jupiter.api.Test)
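
All of the examples on this page follow the same Task DSL lifecycle: build a task definition with TaskBuilder, launch it, wait for a terminal TaskExecutionStatus, inspect the executions, and let the try-with-resources block destroy the definition on close. The snippet below is a minimal sketch of that lifecycle rather than code from the project: it assumes an already-configured DataFlowOperations client named dataFlowOperations, the task name and single-step "timestamp" definition are illustrative placeholders, and EXIT_CODE_SUCCESS is the same constant the tests above use.

TaskBuilder taskBuilder = Task.builder(dataFlowOperations);
try (Task task = taskBuilder
        .name("lifecycle-sketch")                 // illustrative name; the tests above use randomTaskName()
        .definition("timestamp")                  // single-step definition, no composed-task graph
        .description("Task DSL lifecycle sketch")
        .build()) {                               // build() registers the task definition on the server
    long launchId = task.launch(Collections.emptyList());
    Awaitility.await().until(() -> task.executionStatus(launchId) == TaskExecutionStatus.COMPLETE);
    assertThat(task.execution(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
}
// Leaving the try-with-resources block destroys the definition, which is why the tests
// assert taskBuilder.allTasks().size() == 0 after the block.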

Example 2 with TaskBuilder

Use of org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder in project spring-cloud-dataflow by spring-cloud.

From the class DataFlowIT, method ctrLaunchTest.

@Test
public void ctrLaunchTest() {
    logger.info("composed-task-ctrLaunch-test");
    TaskBuilder taskBuilder = Task.builder(dataFlowOperations);
    try (Task task = taskBuilder.name(randomTaskName()).definition("a: timestamp && b:timestamp").description("ctrLaunchTest").build()) {
        assertThat(task.composedTaskChildTasks().stream().map(Task::getTaskName).collect(Collectors.toList())).hasSameElementsAs(fullTaskNames(task, "a", "b"));
        long launchId = task.launch(composedTaskLaunchArguments());
        Awaitility.await().until(() -> task.executionStatus(launchId) == TaskExecutionStatus.COMPLETE);
        // Parent Task Successfully completed
        assertThat(task.executions().size()).isEqualTo(1);
        assertThat(task.executionStatus(launchId)).isEqualTo(TaskExecutionStatus.COMPLETE);
        assertThat(task.execution(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
        task.executions().forEach(execution -> assertThat(execution.getExitCode()).isEqualTo(EXIT_CODE_SUCCESS));
        // Child tasks successfully completed
        task.composedTaskChildTasks().forEach(childTask -> {
            assertThat(childTask.executions().size()).isEqualTo(1);
            assertThat(childTask.executionByParentExecutionId(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
        });
        // Attempt a job restart
        assertThat(task.executions().size()).isEqualTo(1);
        List<Long> jobExecutionIds = task.executions().stream().findFirst().get().getJobExecutionIds();
        assertThat(jobExecutionIds.size()).isEqualTo(1);
        // There is an error-deserialization issue related to backward compatibility with SCDF 2.6.x:
        // the exception thrown by 2.6.x servers cannot be deserialized by the
        // VndErrorResponseErrorHandler in 2.8+ clients.
        Assumptions.assumingThat(runtimeApps.dataflowServerVersionEqualOrGreaterThan("2.7.0"), () -> {
            Exception exception = assertThrows(DataFlowClientException.class, () -> {
                dataFlowOperations.jobOperations().executionRestart(jobExecutionIds.get(0));
            });
            assertTrue(exception.getMessage().contains(" and state 'COMPLETED' is not restartable"));
        });
    }
    assertThat(taskBuilder.allTasks().size()).isEqualTo(0);
}
Also used : TaskBuilder(org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder) Task(org.springframework.cloud.dataflow.rest.client.dsl.task.Task) AtomicLong(java.util.concurrent.atomic.AtomicLong) DataFlowClientException(org.springframework.cloud.dataflow.rest.client.DataFlowClientException) IOException(java.io.IOException) Test(org.junit.jupiter.api.Test)
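
As a side note, the assertThrows/assertTrue pair above could also be written with AssertJ's assertThatThrownBy, keeping the style consistent with the other assertions in these tests. A possible equivalent, assuming the same jobExecutionIds and dataFlowOperations as in the test above:

assertThatThrownBy(() -> dataFlowOperations.jobOperations().executionRestart(jobExecutionIds.get(0)))
        .isInstanceOf(DataFlowClientException.class)
        .hasMessageContaining("is not restartable");   // message detail as asserted in the test above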

Example 3 with TaskBuilder

Use of org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder in project spring-cloud-dataflow-acceptance-tests by spring-cloud.

From the class TaskUtils, method createTaskDefinitions.

/**
 * Create a specified number of task definitions using the taskNamePrefix.
 * If a task definition already exists, the app skips that entry and continues creating the rest of the task definitions.
 *
 * @param taskNamePrefix The task name prefix to use for each of the task definitions.
 * @param numberOfTaskDefinitions The number of task definitions to create.
 * @param dataFlowOperations The dataflowOperations instance used to connect to dataflow.
 */
public static void createTaskDefinitions(String taskNamePrefix, int numberOfTaskDefinitions, DataFlowOperations dataFlowOperations) {
    logger.info(String.format("Creating %s task definitions", numberOfTaskDefinitions));
    TaskBuilder taskBuilder = Task.builder(dataFlowOperations);
    String ctrTaskDefinition = "timestamp";
    final List<Task> taskDefinitions = new ArrayList<>();
    for (int i = 0; i < numberOfTaskDefinitions; i++) {
        try {
            taskDefinitions.add(taskBuilder
                    .name(taskNamePrefix + "-" + i)
                    .definition(ctrTaskDefinition)
                    .description("Task Definition created for perf test")
                    .build());
        } catch (DataFlowClientException dfce) {
            // Skip definitions that already exist and continue creating the rest
            if (dfce.getMessage().contains("because another one has already been registered")) {
                logger.info(dfce.getMessage());
            }
        }
    }
}
Also used : TaskBuilder(org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder) Task(org.springframework.cloud.dataflow.rest.client.dsl.task.Task) ArrayList(java.util.ArrayList) DataFlowClientException(org.springframework.cloud.dataflow.rest.client.DataFlowClientException)
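
A hypothetical call to the helper above, creating 100 definitions named perf-task-0 through perf-task-99; the prefix and count are illustrative, and dataFlowOperations is assumed to be an existing client connected to the server:

TaskUtils.createTaskDefinitions("perf-task", 100, dataFlowOperations);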

Example 4 with TaskBuilder

Use of org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder in project spring-cloud-dataflow-acceptance-tests by spring-cloud.

From the class BatchRemotePartitioningAT, method runBatchRemotePartitionJobCloudFoundry.

@Test
@EnabledIfSystemProperty(named = "PLATFORM_TYPE", matches = "cloudfoundry")
public void runBatchRemotePartitionJobCloudFoundry() {
    logger.info("runBatchRemotePartitionJob - cloudfoundry");
    final String prefix = CFConnectionProperties.CLOUDFOUNDRY_PROPERTIES;
    String taskDefinition = TASK_NAME
            + String.format(" --%s.%s=%s", prefix, "username", cfConnectionProperties.getUsername())
            + String.format(" --%s.%s=%s", prefix, "password", cfConnectionProperties.getPassword())
            + String.format(" --%s.%s=%s", prefix, "org", cfConnectionProperties.getOrg())
            + String.format(" --%s.%s=%s", prefix, "space", cfConnectionProperties.getSpace())
            + String.format(" --%s.%s=%s", prefix, "url", cfConnectionProperties.getUrl().toString())
            + String.format(" --%s.%s=%s", prefix, "skipSslValidation", cfConnectionProperties.isSkipSslValidation());
    TaskBuilder taskBuilder = Task.builder(dataFlowOperations);
    try (Task task = taskBuilder.name(randomName()).definition(taskDefinition).description("runBatchRemotePartitionJob - cloudfoundry").build()) {
        long launchId = task.launch(Collections.emptyMap(), Arrays.asList("--platform=cloudfoundry"));
        Awaitility.await().until(() -> task.executionStatus(launchId) == TaskExecutionStatus.COMPLETE);
        assertThat(task.executions().size()).isEqualTo(1);
        assertThat(task.execution(launchId).isPresent()).isTrue();
        assertThat(task.execution(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
    }
}
Also used : TaskBuilder(org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder) Task(org.springframework.cloud.dataflow.rest.client.dsl.task.Task) EnabledIfSystemProperty(org.junit.jupiter.api.condition.EnabledIfSystemProperty) Test(org.junit.jupiter.api.Test)

Example 5 with TaskBuilder

Use of org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder in project spring-cloud-dataflow-acceptance-tests by spring-cloud.

From the class BatchRemotePartitioningAT, method runBatchRemotePartitionJobKubernetes.

@Test
@EnabledIfSystemProperty(named = "PLATFORM_TYPE", matches = "kubernetes")
public void runBatchRemotePartitionJobKubernetes() {
    logger.info("runBatchRemotePartitionJob - kubernetes");
    TaskBuilder taskBuilder = Task.builder(dataFlowOperations);
    try (Task task = taskBuilder.name(randomName()).definition(TASK_NAME).description("runBatchRemotePartitionJob - kubernetes").build()) {
        long launchId = task.launch(
                Collections.singletonMap("deployer.*.kubernetes.deployment-service-account-name", testProperties.getPlatform().getConnection().getPlatformName()),
                Arrays.asList("--platform=kubernetes", "--artifact=docker://springcloud/batch-remote-partition:0.0.2-SNAPSHOT"));
        Awaitility.await().until(() -> task.executionStatus(launchId) == TaskExecutionStatus.COMPLETE);
        assertThat(task.executions().size()).isEqualTo(1);
        assertThat(task.execution(launchId).isPresent()).isTrue();
        assertThat(task.execution(launchId).get().getExitCode()).isEqualTo(EXIT_CODE_SUCCESS);
    }
}
Also used : TaskBuilder(org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder) Task(org.springframework.cloud.dataflow.rest.client.dsl.task.Task) EnabledIfSystemProperty(org.junit.jupiter.api.condition.EnabledIfSystemProperty) Test(org.junit.jupiter.api.Test)

Aggregations

Task (org.springframework.cloud.dataflow.rest.client.dsl.task.Task): 10 usages
TaskBuilder (org.springframework.cloud.dataflow.rest.client.dsl.task.TaskBuilder): 10 usages
Test (org.junit.jupiter.api.Test): 8 usages
AtomicLong (java.util.concurrent.atomic.AtomicLong): 3 usages
EnabledIfSystemProperty (org.junit.jupiter.api.condition.EnabledIfSystemProperty): 3 usages
ArrayList (java.util.ArrayList): 2 usages
DataFlowClientException (org.springframework.cloud.dataflow.rest.client.DataFlowClientException): 2 usages
TaskExecutionResource (org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource): 2 usages
IOException (java.io.IOException): 1 usage