use of com.google.cloud.scheduler.v1.Job in project cdap by cdapio.
the class DataprocRuntimeJobManager method launch.
@Override
public void launch(RuntimeJobInfo runtimeJobInfo) throws Exception {
  String bucket = DataprocUtils.getBucketName(this.bucket);
  ProgramRunInfo runInfo = runtimeJobInfo.getProgramRunInfo();
  LOG.debug("Launching run {} with following configurations: cluster {}, project {}, region {}, bucket {}.",
            runInfo.getRun(), clusterName, projectId, region, bucket);
  // TODO: CDAP-16408 use fixed directory for caching twill, application, artifact jars
  File tempDir = Files.createTempDirectory("dataproc.launcher").toFile();
  // On the dataproc bucket, the run root will be <bucket>/cdap-job/<runid>/. All the files for this run
  // will be copied under that base dir.
  String runRootPath = getPath(DataprocUtils.CDAP_GCS_ROOT, runInfo.getRun());
  try {
    // step 1: build twill.jar and launcher.jar and add them to the files to be copied to gcs
    List<LocalFile> localFiles = getRuntimeLocalFiles(runtimeJobInfo.getLocalizeFiles(), tempDir);
    // step 2: upload all the necessary files to gcs so that they are available to the dataproc job
    List<Future<LocalFile>> uploadFutures = new ArrayList<>();
    for (LocalFile fileToUpload : localFiles) {
      String targetFilePath = getPath(runRootPath, fileToUpload.getName());
      uploadFutures.add(
          provisionerContext.execute(() -> uploadFile(bucket, targetFilePath, fileToUpload))
              .toCompletableFuture());
    }
    List<LocalFile> uploadedFiles = new ArrayList<>();
    for (Future<LocalFile> uploadFuture : uploadFutures) {
      uploadedFiles.add(uploadFuture.get());
    }
    // step 3: build the hadoop job request to be submitted to dataproc
    SubmitJobRequest request = getSubmitJobRequest(runtimeJobInfo, uploadedFiles);
    // step 4: submit the hadoop job to dataproc
    try {
      Job job = getJobControllerClient().submitJob(request);
      LOG.debug("Successfully submitted hadoop job {} to cluster {}.",
                job.getReference().getJobId(), clusterName);
    } catch (AlreadyExistsException ex) {
      // the job id already exists; ignore the resubmission
      LOG.warn("The dataproc job {} already exists. Ignoring resubmission of the job.",
               request.getJob().getReference().getJobId());
    }
    DataprocUtils.emitMetric(provisionerContext, region, "provisioner.submitJob.response.count");
  } catch (Exception e) {
    // delete all uploaded gcs files in case of exception
    DataprocUtils.deleteGCSPath(getStorageClient(), bucket, runRootPath);
    DataprocUtils.emitMetric(provisionerContext, region, "provisioner.submitJob.response.count", e);
    throw new Exception(String.format("Error while launching job %s on cluster %s",
                                      getJobId(runInfo), clusterName), e);
  } finally {
    // delete the local temp directory
    deleteDirectoryContents(tempDir);
  }
}
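The uploadFile helper invoked in step 2 is not shown in this snippet. As a rough guide, a minimal GCS upload with the google-cloud-storage client might look like the sketch below; the method name and signature are assumptions for illustration, not the CDAP implementation.

  import com.google.cloud.storage.BlobId;
  import com.google.cloud.storage.BlobInfo;
  import com.google.cloud.storage.Storage;
  import com.google.cloud.storage.StorageOptions;
  import java.io.IOException;
  import java.nio.file.Files;
  import java.nio.file.Paths;

  // Hypothetical stand-in for the uploadFile helper:
  // copies a local file to gs://<bucket>/<targetPath>.
  static void uploadToGcs(String bucket, String targetPath, String localPath) throws IOException {
    Storage storage = StorageOptions.getDefaultInstance().getService();
    BlobInfo blobInfo = BlobInfo.newBuilder(BlobId.of(bucket, targetPath)).build();
    storage.create(blobInfo, Files.readAllBytes(Paths.get(localPath)));
  }

Because launch submits each upload through provisionerContext.execute and only then joins the futures, the uploads run concurrently while failures still propagate through Future.get, triggering the GCS cleanup in the catch block.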
use of com.google.cloud.scheduler.v1.Job in project pravega by pravega.
the class RemoteSequential method isTestRunning.
private boolean isTestRunning(final String jobId, final Metronome client) {
  Job jobStatus = client.getJob(jobId);
  boolean isRunning = false;
  if (jobStatus.getHistory() == null) {
    isRunning = true;
  } else if ((jobStatus.getHistory().getSuccessCount() == 0)
      && (jobStatus.getHistory().getFailureCount() == 0)) {
    isRunning = true;
  }
  return isRunning;
}
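isTestRunning is the predicate behind waitForJobCompletion, which the next snippet calls but does not show. A simplified polling sketch, assuming a fixed 10-second poll interval (the actual pravega implementation may loop differently):

  // Simplified sketch: poll until the job's history reports a success or failure.
  private CompletableFuture<Void> waitForJobCompletion(final String jobId, final Metronome client) {
    return CompletableFuture.runAsync(() -> {
      while (isTestRunning(jobId, client)) {
        // Assumed poll interval; the real value is not shown in these snippets.
        Exceptions.handleInterrupted(() -> TimeUnit.SECONDS.sleep(10));
      }
    });
  }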
use of com.google.cloud.scheduler.v1.Job in project pravega by pravega.
the class RemoteSequential method startTestExecution.
@Override
public CompletableFuture<Void> startTestExecution(Method testMethod) {
  // This sleep will be removed once issue https://github.com/pravega/pravega/issues/1665 is resolved.
  Exceptions.handleInterrupted(() -> TimeUnit.SECONDS.sleep(60));
  log.debug("Starting test execution for method: {}", testMethod);
  final Metronome client = AuthEnabledMetronomeClient.getClient();
  String className = testMethod.getDeclaringClass().getName();
  String methodName = testMethod.getName();
  // All jobIds must be lowercase for metronome.
  String jobId = (methodName + ".testJob").toLowerCase();
  return CompletableFuture.runAsync(() -> {
    client.createJob(newJob(jobId, className, methodName));
    Response response = client.triggerJobRun(jobId);
    if (response.status() != CREATED.getStatusCode()) {
      throw new TestFrameworkException(TestFrameworkException.Type.ConnectionFailed,
          "Error while starting test " + testMethod);
    } else {
      log.info("Created job succeeded with: " + response.toString());
    }
  }).thenCompose(v2 -> waitForJobCompletion(jobId, client)).<Void>thenApply(v1 -> {
    if (client.getJob(jobId).getHistory().getFailureCount() != 0) {
      throw new AssertionError("Test failed, detailed logs can be found at "
          + "https://MasterIP/mesos, under metronome framework tasks. MethodName: " + methodName);
    }
    return null;
  }).whenComplete((v, ex) -> {
    // delete the job once execution is complete
    deleteJob(jobId, client);
    if (ex != null) {
      log.error("Error while executing the test. ClassName: {}, MethodName: {}", className, methodName);
    }
  });
}
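For illustration, a caller could drive this chain and block only at the end. The test class name and the no-arg RemoteSequential constructor below are assumptions made for the sketch:

  // Hypothetical usage; class and method names are placeholders.
  Method testMethod = ReadWriteLongevityTest.class.getMethod("testReadWrite");
  new RemoteSequential().startTestExecution(testMethod)
      .join(); // surfaces the AssertionError if the metronome job reported a failure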
use of com.google.cloud.scheduler.v1.Job in project java-docs-samples by GoogleCloudPlatform.
the class JobSearchListJobs method listJobs.
// List jobs that match the given filter.
public static void listJobs(String projectId, String tenantId, String filter) throws IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
    TenantName parent = TenantName.of(projectId, tenantId);
    ListJobsRequest request =
        ListJobsRequest.newBuilder().setParent(parent.toString()).setFilter(filter).build();
    for (Job responseItem : jobServiceClient.listJobs(request).iterateAll()) {
      System.out.format("Job name: %s%n", responseItem.getName());
      System.out.format("Job requisition ID: %s%n", responseItem.getRequisitionId());
      System.out.format("Job title: %s%n", responseItem.getTitle());
      System.out.format("Job description: %s%n", responseItem.getDescription());
    }
  }
}
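Note that iterateAll() transparently pages through the results. A sample invocation might look like the following; every value is a placeholder, and the filter restricts results to jobs belonging to one company:

  listJobs(
      "your-project-id",
      "your-tenant-id",
      "companyName=\"projects/your-project-id/tenants/your-tenant-id/companies/your-company-id\"");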
use of com.google.cloud.scheduler.v1.Job in project java-docs-samples by GoogleCloudPlatform.
the class CreateJobFromPreset method createJobFromPreset.
// Creates a job from a preset.
public static void createJobFromPreset(String projectId, String location, String inputUri,
    String outputUri, String preset) throws IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests.
  try (TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.create()) {
    var createJobRequest =
        CreateJobRequest.newBuilder()
            .setJob(
                Job.newBuilder()
                    .setInputUri(inputUri)
                    .setOutputUri(outputUri)
                    .setTemplateId(preset)
                    .build())
            .setParent(LocationName.of(projectId, location).toString())
            .build();
    // Send the job creation request and process the response.
    Job job = transcoderServiceClient.createJob(createJobRequest);
    System.out.println("Job: " + job.getName());
  }
}
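For reference, an invocation could look like the following; the project ID and bucket paths are placeholders, while "preset/web-hd" is one of the Transcoder API's stock presets:

  createJobFromPreset(
      "your-project-id",
      "us-central1",
      "gs://your-bucket/input.mp4",
      "gs://your-bucket/output/",
      "preset/web-hd");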