Use of com.google.cloud.dataproc.v1.Job in project jkube by eclipse.
The snippet below is from class JobHandlerTest, method jobHandlerTest.
@Test
public void jobHandlerTest() {
  // Given: a resource configuration covering name, service account,
  // restart policy, pull policy and volumes.
  ResourceConfig resourceConfig = ResourceConfig.builder()
      .imagePullPolicy("IfNotPresent")
      .controllerName("testing")
      .serviceAccount("test-account")
      .restartPolicy("OnFailure")
      .volumes(volumes1)
      .build();

  // When: the handler generates a Job from the configuration.
  Job generatedJob = jobHandler.get(resourceConfig, images);

  // Then: the top-level structure is present.
  assertNotNull(generatedJob.getSpec());
  assertNotNull(generatedJob.getMetadata());
  assertNotNull(generatedJob.getSpec().getTemplate());
  // And: every configured value is propagated into the generated Job.
  assertEquals("testing", generatedJob.getMetadata().getName());
  assertEquals("test-account", generatedJob.getSpec().getTemplate().getSpec().getServiceAccountName());
  assertFalse(generatedJob.getSpec().getTemplate().getSpec().getVolumes().isEmpty());
  assertEquals("OnFailure", generatedJob.getSpec().getTemplate().getSpec().getRestartPolicy());
  assertEquals("test", generatedJob.getSpec().getTemplate().getSpec().getVolumes().get(0).getName());
  assertEquals("/test/path", generatedJob.getSpec().getTemplate().getSpec().getVolumes().get(0).getHostPath().getPath());
  assertNotNull(generatedJob.getSpec().getTemplate().getSpec().getContainers());
}
Use of com.google.cloud.dataproc.v1.Job in project quick by bakdata.
The snippet below is from class JobCleanerTest, method shouldDeleteSucceededJobs.
@Test
void shouldDeleteSucceededJobs() {
  final JobCleaner cleaner = new JobCleaner(this.client);
  final KubernetesResources kubernetesResources = new KubernetesResources();
  // Build a deletion job and mark it finished: no active pods, one success.
  final Job baseJob = kubernetesResources.createDeletionJob("test", "image", List.of("--key", "value"));
  final Job succeededJob = new JobBuilder(baseJob)
      .withNewStatus()
      .withActive(0)
      .withSucceeded(1)
      .endStatus()
      .build();
  this.kubernetesServer.getClient().batch().v1().jobs().create(succeededJob);

  // Sanity check: exactly one job exists before cleaning.
  List<Job> jobs = this.kubernetesServer.getClient().batch().v1().jobs().list().getItems();
  assertThat(jobs).hasSize(1);

  // The cleaner must remove the succeeded job without throwing.
  assertThatNoException().isThrownBy(cleaner::deleteJobs);
  jobs = this.kubernetesServer.getClient().batch().v1().jobs().list().getItems();
  assertThat(jobs).isEmpty();
}
Use of com.google.cloud.dataproc.v1.Job in project quick by bakdata.
The snippet below is from class JobCleanerTest, method shouldRunWithoutErrorForJobsWithoutStatus.
@Test
void shouldRunWithoutErrorForJobsWithoutStatus() {
  final JobCleaner cleaner = new JobCleaner(this.client);
  final KubernetesResources kubernetesResources = new KubernetesResources();
  // Create a job that has never been given a status block at all.
  final Job statuslessJob = kubernetesResources.createDeletionJob("test", "image", List.of("--key", "value"));
  this.kubernetesServer.getClient().batch().v1().jobs().create(statuslessJob);

  // Cleaning must tolerate jobs whose status is absent, not throw.
  assertThatNoException().isThrownBy(cleaner::deleteJobs);
}
Use of com.google.cloud.dataproc.v1.Job in project java-pubsublite-spark by googleapis.
The snippet below is from class SamplesIntegrationTest, method testSimpleWrite.
@Test
public void testSimpleWrite() throws Exception {
  UUID testId = UUID.randomUUID();
  setupDestinationWithTestId(testId);
  try {
    // Submit the SimpleWrite Spark job to Dataproc and block until it finishes.
    SparkJob.Builder jobBuilder =
        SparkJob.newBuilder()
            .setMainClass("pubsublite.spark.SimpleWrite")
            .putProperties("spark.submit.deployMode", "cluster")
            .putProperties(
                "spark.yarn.appMasterEnv.DESTINATION_TOPIC_PATH", destinationTopicPath.toString());
    runDataprocJob(jobBuilder);
    // Confirm the written messages arrived in Pub/Sub Lite.
    verifySimpleWriteResultViaPSL();
  } finally {
    // Always tear down the per-test subscription and topic.
    deleteSubscriptionExample(cloudRegion.value(), destinationSubscriptionPath);
    deleteTopicExample(cloudRegion.value(), destinationTopicPath);
  }
}
Use of com.google.cloud.dataproc.v1.Job in project java-pubsublite-spark by googleapis.
The snippet below is from class SamplesIntegrationTest, method testWordCount.
/**
 * Runs the WordCount sample end to end. The source single-word messages have
 * already been published to a permanent topic; this test only wires up the
 * per-test subscription/destination, runs the job, and checks the output.
 */
@Test
public void testWordCount() throws Exception {
  UUID testId = UUID.randomUUID();
  setupSourceWithTestId(testId);
  setupDestinationWithTestId(testId);
  try {
    // Submit the WordCount Spark job to Dataproc and block until it finishes.
    SparkJob.Builder jobBuilder =
        SparkJob.newBuilder()
            .setMainClass("pubsublite.spark.WordCount")
            .putProperties("spark.submit.deployMode", "cluster")
            .putProperties(
                "spark.yarn.appMasterEnv.SOURCE_SUBSCRIPTION_PATH", sourceSubscriptionPath.toString())
            .putProperties(
                "spark.yarn.appMasterEnv.DESTINATION_TOPIC_PATH", destinationTopicPath.toString());
    runDataprocJob(jobBuilder);
    // Confirm the aggregated counts arrived in Pub/Sub Lite.
    verifyWordCountResultViaPSL();
  } finally {
    // Always tear down the per-test subscriptions and topic.
    deleteSubscriptionExample(cloudRegion.value(), sourceSubscriptionPath);
    deleteSubscriptionExample(cloudRegion.value(), destinationSubscriptionPath);
    deleteTopicExample(cloudRegion.value(), destinationTopicPath);
  }
}
Aggregations