Search in sources :

Example 31 with Job

use of io.fabric8.kubernetes.api.model.batch.v1.Job in project jkube by eclipse.

the class JobHandlerTest method jobHandlerTest.

@Test
public void jobHandlerTest() {
    // Given: a resource configuration exercising pull policy, controller name,
    // service account, restart policy and volume settings.
    ResourceConfig config = ResourceConfig.builder()
            .imagePullPolicy("IfNotPresent")
            .controllerName("testing")
            .serviceAccount("test-account")
            .restartPolicy("OnFailure")
            .volumes(volumes1)
            .build();
    // When: the handler materialises a Job from the config and images.
    Job job = jobHandler.get(config, images);
    // Then: metadata and spec carry the configured values through to the pod template.
    assertNotNull(job.getMetadata());
    assertNotNull(job.getSpec());
    assertNotNull(job.getSpec().getTemplate());
    assertNotNull(job.getSpec().getTemplate().getSpec().getContainers());
    assertEquals("testing", job.getMetadata().getName());
    assertEquals("test-account", job.getSpec().getTemplate().getSpec().getServiceAccountName());
    assertEquals("OnFailure", job.getSpec().getTemplate().getSpec().getRestartPolicy());
    assertFalse(job.getSpec().getTemplate().getSpec().getVolumes().isEmpty());
    assertEquals("test", job.getSpec().getTemplate().getSpec().getVolumes().get(0).getName());
    assertEquals("/test/path", job.getSpec().getTemplate().getSpec().getVolumes().get(0).getHostPath().getPath());
}
Also used : ResourceConfig(org.eclipse.jkube.kit.config.resource.ResourceConfig) Job(io.fabric8.kubernetes.api.model.batch.v1.Job) Test(org.junit.Test)

Example 32 with Job

use of io.fabric8.kubernetes.api.model.batch.v1.Job in project quick by bakdata.

the class JobCleanerTest method shouldDeleteSucceededJobs.

@Test
void shouldDeleteSucceededJobs() {
    final JobCleaner jobCleaner = new JobCleaner(this.client);
    final KubernetesResources resources = new KubernetesResources();
    // Build a deletion job whose status marks it as finished (0 active, 1 succeeded).
    final Job deletionJob = resources.createDeletionJob("test", "image", List.of("--key", "value"));
    final Job finishedJob = new JobBuilder(deletionJob)
            .withNewStatus()
            .withActive(0)
            .withSucceeded(1)
            .endStatus()
            .build();
    this.kubernetesServer.getClient().batch().v1().jobs().create(finishedJob);
    // Sanity check: exactly one job is present before cleaning.
    assertThat(this.kubernetesServer.getClient().batch().v1().jobs().list().getItems()).hasSize(1);
    // Cleaning must neither throw nor leave the succeeded job behind.
    assertThatNoException().isThrownBy(jobCleaner::deleteJobs);
    assertThat(this.kubernetesServer.getClient().batch().v1().jobs().list().getItems()).isEmpty();
}
Also used : JobBuilder(io.fabric8.kubernetes.api.model.batch.v1.JobBuilder) KubernetesResources(com.bakdata.quick.manager.k8s.KubernetesResources) Job(io.fabric8.kubernetes.api.model.batch.v1.Job) Test(org.junit.jupiter.api.Test) KubernetesTest(com.bakdata.quick.manager.k8s.KubernetesTest)

Example 33 with Job

use of io.fabric8.kubernetes.api.model.batch.v1.Job in project quick by bakdata.

the class JobCleanerTest method shouldRunWithoutErrorForJobsWithoutStatus.

@Test
void shouldRunWithoutErrorForJobsWithoutStatus() {
    // A deletion job created without any status sub-resource.
    final KubernetesResources resources = new KubernetesResources();
    final Job statuslessJob = resources.createDeletionJob("test", "image", List.of("--key", "value"));
    this.kubernetesServer.getClient().batch().v1().jobs().create(statuslessJob);
    // The cleaner must tolerate a job that has no status yet.
    final JobCleaner jobCleaner = new JobCleaner(this.client);
    assertThatNoException().isThrownBy(jobCleaner::deleteJobs);
}
Also used : KubernetesResources(com.bakdata.quick.manager.k8s.KubernetesResources) Job(io.fabric8.kubernetes.api.model.batch.v1.Job) Test(org.junit.jupiter.api.Test) KubernetesTest(com.bakdata.quick.manager.k8s.KubernetesTest)

Example 34 with Job

use of com.google.cloud.dataproc.v1.Job in project java-pubsublite-spark by googleapis.

the class SamplesIntegrationTest method testSimpleWrite.

@Test
public void testSimpleWrite() throws Exception {
    final UUID testId = UUID.randomUUID();
    setupDestinationWithTestId(testId);
    try {
        // Configure the Spark job: main class plus the destination topic path,
        // passed through the application-master environment; cluster deploy mode.
        final SparkJob.Builder sparkJobBuilder = SparkJob.newBuilder()
                .setMainClass("pubsublite.spark.SimpleWrite")
                .putProperties("spark.submit.deployMode", "cluster")
                .putProperties("spark.yarn.appMasterEnv.DESTINATION_TOPIC_PATH", destinationTopicPath.toString());
        // Run Dataproc job, block until it finishes
        runDataprocJob(sparkJobBuilder);
        // Verify write results in PSL
        verifySimpleWriteResultViaPSL();
    } finally {
        // Always tear down the destination resources, even when the test fails.
        deleteSubscriptionExample(cloudRegion.value(), destinationSubscriptionPath);
        deleteTopicExample(cloudRegion.value(), destinationTopicPath);
    }
}
Also used : SparkJob(com.google.cloud.dataproc.v1.SparkJob) UUID(java.util.UUID) Test(org.junit.Test)

Example 35 with Job

use of com.google.cloud.dataproc.v1.Job in project java-pubsublite-spark by googleapis.

the class SamplesIntegrationTest method testWordCount.

/**
 * Note that source single word messages have been published to a permanent topic.
 */
@Test
public void testWordCount() throws Exception {
    final UUID testId = UUID.randomUUID();
    setupSourceWithTestId(testId);
    setupDestinationWithTestId(testId);
    try {
        // Configure the word-count Spark job: the source subscription and the
        // destination topic are handed over via the application-master environment.
        final SparkJob.Builder sparkJobBuilder = SparkJob.newBuilder()
                .setMainClass("pubsublite.spark.WordCount")
                .putProperties("spark.submit.deployMode", "cluster")
                .putProperties("spark.yarn.appMasterEnv.SOURCE_SUBSCRIPTION_PATH", sourceSubscriptionPath.toString())
                .putProperties("spark.yarn.appMasterEnv.DESTINATION_TOPIC_PATH", destinationTopicPath.toString());
        // Run Dataproc job, block until it finishes
        runDataprocJob(sparkJobBuilder);
        // Verify final destination messages in Pub/Sub Lite
        verifyWordCountResultViaPSL();
    } finally {
        // Always tear down subscriptions and topic, even when the test fails.
        deleteSubscriptionExample(cloudRegion.value(), sourceSubscriptionPath);
        deleteSubscriptionExample(cloudRegion.value(), destinationSubscriptionPath);
        deleteTopicExample(cloudRegion.value(), destinationTopicPath);
    }
}
Also used : SparkJob(com.google.cloud.dataproc.v1.SparkJob) UUID(java.util.UUID) Test(org.junit.Test)

Aggregations

Job (org.pentaho.platform.api.scheduler2.Job)94 Test (org.junit.Test)91 Job (io.fabric8.kubernetes.api.model.batch.v1.Job)38 Serializable (java.io.Serializable)25 ArrayList (java.util.ArrayList)24 HashMap (java.util.HashMap)22 SimpleJobTrigger (org.pentaho.platform.api.scheduler2.SimpleJobTrigger)21 Job (com.google.cloud.talent.v4beta1.Job)20 JobScheduleRequest (org.pentaho.platform.web.http.api.resources.JobScheduleRequest)19 ComplexJobTrigger (org.pentaho.platform.api.scheduler2.ComplexJobTrigger)18 SchedulerException (org.pentaho.platform.api.scheduler2.SchedulerException)17 JobServiceClient (com.google.cloud.talent.v4beta1.JobServiceClient)16 Date (java.util.Date)14 IJobFilter (org.pentaho.platform.api.scheduler2.IJobFilter)14 Job (com.google.cloud.video.transcoder.v1.Job)13 TranscoderServiceClient (com.google.cloud.video.transcoder.v1.TranscoderServiceClient)13 JobBuilder (io.fabric8.kubernetes.api.model.batch.v1.JobBuilder)13 IJobTrigger (org.pentaho.platform.api.scheduler2.IJobTrigger)12 Map (java.util.Map)11 Test (org.junit.jupiter.api.Test)10