use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.
the class JobAdapter method unmarshal.
public Job unmarshal( JaxbSafeJob jaxbSafeJob ) throws Exception {
  if ( jaxbSafeJob == null ) {
    return null;
  }
  Job job = new Job();
  try {
    job.setJobTrigger( jaxbSafeJob.jobTrigger );
    job.setJobParams( toProperMap( jaxbSafeJob.jobParams ) );
    job.setLastRun( jaxbSafeJob.lastRun );
    job.setNextRun( jaxbSafeJob.nextRun );
    job.setSchedulableClass( jaxbSafeJob.schedulableClass );
    job.setJobId( jaxbSafeJob.jobId );
    job.setUserName( jaxbSafeJob.userName );
    job.setJobName( jaxbSafeJob.jobName );
    job.setState( jaxbSafeJob.state );
  } catch ( Throwable t ) {
    // no message bundle since this is a development error case
    logger.error( "Error unmarshalling job", t ); //$NON-NLS-1$
    return null;
  }
  return job;
}
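For context, this adapter is the kind of class JAXB wires in through @XmlJavaTypeAdapter, so that Job instances pass through JaxbSafeJob during marshalling and unmarshalling. A minimal illustrative sketch; the wrapper class and field are hypothetical, only JobAdapter itself appears above:

import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

public class ScheduledJobDto {
  // JAXB routes conversion of this field through JobAdapter; note that a failed
  // conversion surfaces as a null Job rather than an exception (see unmarshal above)
  @XmlJavaTypeAdapter( JobAdapter.class )
  private Job job;
}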
use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.
the class SolutionImportHandlerIT method testImportSchedules.
@Test
public void testImportSchedules() throws PlatformImportException, SchedulerException {
  SolutionImportHandler importHandler = new SolutionImportHandler( Collections.emptyList() );
  importHandler = spy( importHandler );

  List<JobScheduleRequest> requests = new ArrayList<>( 4 );
  requests.add( createJobScheduleRequest( "NORMAL", JobState.NORMAL ) );
  requests.add( createJobScheduleRequest( "PAUSED", JobState.PAUSED ) );
  requests.add( createJobScheduleRequest( "PAUSED", JobState.COMPLETE ) );
  requests.add( createJobScheduleRequest( "PAUSED", JobState.ERROR ) );

  doReturn( new ArrayList<Job>() ).when( importHandler ).getAllJobs( any() );
  importHandler.importSchedules( requests );

  List<Job> jobs = scheduler.getJobs( job -> true );
  assertEquals( 4, jobs.size() );
  for ( Job job : jobs ) {
    // each request is named after the state its job is expected to end up in,
    // so COMPLETE and ERROR requests should be scheduled as PAUSED
    assertEquals( job.getJobName(), job.getState().toString() );
  }
}
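The createJobScheduleRequest helper is not shown in this excerpt. A plausible sketch, assuming JobScheduleRequest exposes plain setters for the name and state (the exact setter names are assumptions):

private JobScheduleRequest createJobScheduleRequest( String name, JobState jobState ) {
  JobScheduleRequest request = new JobScheduleRequest();
  request.setJobName( name );      // assumed setter
  request.setJobState( jobState ); // assumed setter
  return request;
}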
use of com.google.cloud.dataproc.v1.Job in project cdap by caskdata.
the class DataprocRuntimeJobManager method launch.
@Override
public void launch(RuntimeJobInfo runtimeJobInfo) throws Exception {
  String bucket = DataprocUtils.getBucketName(this.bucket);
  ProgramRunInfo runInfo = runtimeJobInfo.getProgramRunInfo();
  LOG.debug("Launching run {} with following configurations: cluster {}, project {}, region {}, bucket {}.",
            runInfo.getRun(), clusterName, projectId, region, bucket);
  // TODO: CDAP-16408 use fixed directory for caching twill, application, artifact jars
  File tempDir = Files.createTempDirectory("dataproc.launcher").toFile();
  // on the dataproc bucket the run root will be <bucket>/cdap-job/<runid>/;
  // all the files for this run will be copied under that base dir
  String runRootPath = getPath(DataprocUtils.CDAP_GCS_ROOT, runInfo.getRun());
  try {
    // step 1: build twill.jar and launcher.jar and add them to the files to be copied to gcs
    List<LocalFile> localFiles = getRuntimeLocalFiles(runtimeJobInfo.getLocalizeFiles(), tempDir);

    // step 2: upload all the necessary files to gcs so that they are available to the dataproc job
    List<Future<LocalFile>> uploadFutures = new ArrayList<>();
    for (LocalFile fileToUpload : localFiles) {
      String targetFilePath = getPath(runRootPath, fileToUpload.getName());
      uploadFutures.add(
        provisionerContext.execute(() -> uploadFile(bucket, targetFilePath, fileToUpload))
          .toCompletableFuture());
    }
    List<LocalFile> uploadedFiles = new ArrayList<>();
    for (Future<LocalFile> uploadFuture : uploadFutures) {
      uploadedFiles.add(uploadFuture.get());
    }

    // step 3: build the hadoop job request to be submitted to dataproc
    SubmitJobRequest request = getSubmitJobRequest(runtimeJobInfo, uploadedFiles);

    // step 4: submit the hadoop job to dataproc
    try {
      Job job = getJobControllerClient().submitJob(request);
      LOG.debug("Successfully submitted hadoop job {} to cluster {}.",
                job.getReference().getJobId(), clusterName);
    } catch (AlreadyExistsException ex) {
      // the job id already exists, ignore the job
      LOG.warn("The dataproc job {} already exists. Ignoring resubmission of the job.",
               request.getJob().getReference().getJobId());
    }
    DataprocUtils.emitMetric(provisionerContext, region, "provisioner.submitJob.response.count");
  } catch (Exception e) {
    // delete all uploaded gcs files in case of exception
    DataprocUtils.deleteGCSPath(getStorageClient(), bucket, runRootPath);
    DataprocUtils.emitMetric(provisionerContext, region, "provisioner.submitJob.response.count", e);
    throw new Exception(String.format("Error while launching job %s on cluster %s",
                                      getJobId(runInfo), clusterName), e);
  } finally {
    // delete local temp directory
    deleteDirectoryContents(tempDir);
  }
}
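getSubmitJobRequest is not shown here. A rough sketch of the shape such a request takes with the Dataproc v1 API; the job id, main class, and jar URI below are placeholders, not CDAP's actual values:

import com.google.cloud.dataproc.v1.HadoopJob;
import com.google.cloud.dataproc.v1.Job;
import com.google.cloud.dataproc.v1.JobPlacement;
import com.google.cloud.dataproc.v1.JobReference;
import com.google.cloud.dataproc.v1.SubmitJobRequest;

SubmitJobRequest request = SubmitJobRequest.newBuilder()
  .setProjectId(projectId)
  .setRegion(region)
  .setJob(Job.newBuilder()
    // a deterministic job id per run is what lets the AlreadyExistsException
    // catch above detect a resubmission of the same run
    .setReference(JobReference.newBuilder().setJobId("cdap-" + runInfo.getRun()))
    .setPlacement(JobPlacement.newBuilder().setClusterName(clusterName))
    .setHadoopJob(HadoopJob.newBuilder()
      .setMainClass("io.cdap.example.LauncherMain") // placeholder main class
      .addJarFileUris("gs://" + bucket + "/" + runRootPath + "/launcher.jar")))
  .build();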
use of com.google.cloud.talent.v4.Job in project java-docs-samples by GoogleCloudPlatform.
the class CustomRankingSearchJobs method searchCustomRankingJobs.
// Search Jobs using custom rankings.
public static void searchCustomRankingJobs(String projectId, String tenantId) throws IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
    TenantName parent = TenantName.of(projectId, tenantId);
    String domain = "www.example.com";
    String sessionId = "Hashed session identifier";
    String userId = "Hashed user identifier";
    RequestMetadata requestMetadata =
        RequestMetadata.newBuilder()
            .setDomain(domain)
            .setSessionId(sessionId)
            .setUserId(userId)
            .build();
    SearchJobsRequest.CustomRankingInfo.ImportanceLevel importanceLevel =
        SearchJobsRequest.CustomRankingInfo.ImportanceLevel.EXTREME;
    String rankingExpression = "(someFieldLong + 25) * 0.25";
    SearchJobsRequest.CustomRankingInfo customRankingInfo =
        SearchJobsRequest.CustomRankingInfo.newBuilder()
            .setImportanceLevel(importanceLevel)
            .setRankingExpression(rankingExpression)
            .build();
    String orderBy = "custom_ranking desc";
    SearchJobsRequest request =
        SearchJobsRequest.newBuilder()
            .setParent(parent.toString())
            .setRequestMetadata(requestMetadata)
            .setCustomRankingInfo(customRankingInfo)
            .setOrderBy(orderBy)
            .build();
    for (SearchJobsResponse.MatchingJob responseItem :
        jobServiceClient.searchJobs(request).iterateAll()) {
      System.out.format("Job summary: %s%n", responseItem.getJobSummary());
      System.out.format("Job title snippet: %s%n", responseItem.getJobTitleSnippet());
      Job job = responseItem.getJob();
      System.out.format("Job name: %s%n", job.getName());
      System.out.format("Job title: %s%n", job.getTitle());
    }
  }
}
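Note that the ranking expression references someFieldLong, which only evaluates against jobs that carry a numeric custom attribute of that name. A hedged sketch of attaching one when creating a job (the value is arbitrary, and job here stands for any Job under construction):

CustomAttribute someFieldLong =
    CustomAttribute.newBuilder()
        .addLongValues(100L) // numeric value the ranking expression can read
        .setFilterable(true)
        .build();
Job jobWithRankingField =
    job.toBuilder().putCustomAttributes("someFieldLong", someFieldLong).build();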
use of com.google.cloud.talent.v4.Job in project java-docs-samples by GoogleCloudPlatform.
the class JobSearchCreateJobCustomAttributes method createJob.
// Create Job with Custom Attributes.
public static void createJob(String projectId, String tenantId, String companyId, String requisitionId) throws IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (JobServiceClient jobServiceClient = JobServiceClient.create()) {
    TenantName parent = TenantName.of(projectId, tenantId);
    // A custom attribute can hold string or numeric values, and can be filtered on in search queries.
    // https://cloud.google.com/talent-solution/job-search/docs/custom-attributes
    CustomAttribute customAttribute =
        CustomAttribute.newBuilder()
            .addStringValues("Internship")
            .addStringValues("Apprenticeship")
            .setFilterable(true)
            .build();
    Job job =
        Job.newBuilder()
            .setCompany(companyId)
            .setTitle("Software Developer I")
            .setDescription("This is a description of this <i>wonderful</i> job!")
            .putCustomAttributes("FOR_STUDENTS", customAttribute)
            .setRequisitionId(requisitionId)
            .setLanguageCode("en-US")
            .build();
    CreateJobRequest request =
        CreateJobRequest.newBuilder().setParent(parent.toString()).setJob(job).build();
    Job response = jobServiceClient.createJob(request);
    System.out.printf("Created job: %s\n", response.getName());
  }
}
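Because the attribute is marked filterable, search requests can match on it via a custom attribute filter. A brief sketch following the pattern described in the docs linked above (parent and requestMetadata as in the search sample earlier; the filter string is illustrative):

JobQuery jobQuery =
    JobQuery.newBuilder()
        .setCustomAttributeFilter("(FOR_STUDENTS = \"Internship\")")
        .build();
SearchJobsRequest searchRequest =
    SearchJobsRequest.newBuilder()
        .setParent(parent.toString())
        .setRequestMetadata(requestMetadata)
        .setJobQuery(jobQuery)
        .build();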