Use of com.google.cloud.dataproc.v1beta2.Job in project pentaho-platform by pentaho.
The class SchedulerServiceTest, method testGetContentCleanerJobException.
/**
 * Verifies that {@code getContentCleanerJob()} propagates the {@link SchedulerException}
 * thrown by the underlying scheduler, and that the session/policy collaborators are
 * still consulted before the failure occurs.
 */
@Test
public void testGetContentCleanerJobException() throws Exception {
  // Arrange: stub the session, security policy, and job filter the service consults.
  IJobFilter jobFilter = mock( IJobFilter.class );
  IPentahoSession session = mock( IPentahoSession.class );
  doReturn( session ).when( schedulerService ).getSession();
  doReturn( "sessionName" ).when( session ).getName();
  doReturn( true ).when( schedulerService.policy ).isAllowed( AdministerSecurityAction.NAME );
  doReturn( jobFilter ).when( schedulerService ).getJobFilter( anyBoolean(), nullable( String.class ) );
  // Force the scheduler lookup itself to fail.
  doThrow( new SchedulerException( "" ) ).when( schedulerService.scheduler ).getJobs( any( IJobFilter.class ) );
  // Act + Assert: the exception must escape getContentCleanerJob() unswallowed.
  try {
    schedulerService.getContentCleanerJob();
    fail();
  } catch ( SchedulerException e ) {
    // Should catch the exception
  }
  // Verify the pre-failure collaborators were all exercised exactly once.
  verify( schedulerService ).getSession();
  verify( session ).getName();
  verify( schedulerService.policy ).isAllowed( AdministerSecurityAction.NAME );
  verify( schedulerService.scheduler ).getJobs( any( IJobFilter.class ) );
}
Use of com.google.cloud.dataproc.v1beta2.Job in project pentaho-platform by pentaho.
The class SolutionImportHandler, method importSchedules.
/**
 * Imports a list of schedule definitions into the scheduler.
 * <p>
 * The scheduler is paused for the duration of the import and restarted afterwards.
 * For each request: if a job with the same lineage id already exists it is either
 * removed and re-created (when {@code overwriteFile} is set) or skipped with an
 * informational log entry. Creation failures caused by spaces in the input/output
 * paths are retried once with the spaces replaced by underscores; any other failure
 * is logged and the import continues with the next schedule.
 *
 * @param scheduleList schedule requests to import; a null or empty list is a no-op
 * @throws PlatformImportException declared for callers; individual schedule failures
 *         are logged rather than thrown
 */
protected void importSchedules(List<JobScheduleRequest> scheduleList) throws PlatformImportException {
if (CollectionUtils.isNotEmpty(scheduleList)) {
SchedulerResource schedulerResource = new SchedulerResource();
// Pause so newly created jobs do not fire mid-import.
schedulerResource.pause();
for (JobScheduleRequest jobScheduleRequest : scheduleList) {
boolean jobExists = false;
List<Job> jobs = getAllJobs(schedulerResource);
if (jobs != null) {
// paramRequest to map<String, Serializable>
Map<String, Serializable> mapParamsRequest = new HashMap<>();
for (JobScheduleParam paramRequest : jobScheduleRequest.getJobParameters()) {
mapParamsRequest.put(paramRequest.getName(), paramRequest.getValue());
}
// A job "exists" when its lineage id matches the incoming request's lineage id.
for (Job job : jobs) {
if ((mapParamsRequest.get(RESERVEDMAPKEY_LINEAGE_ID) != null) && (mapParamsRequest.get(RESERVEDMAPKEY_LINEAGE_ID).equals(job.getJobParams().get(RESERVEDMAPKEY_LINEAGE_ID)))) {
jobExists = true;
}
if (overwriteFile && jobExists) {
// Overwrite mode: drop the existing job, then clear the flag so the
// request falls through to the creation branch below.
JobRequest jobRequest = new JobRequest();
jobRequest.setJobId(job.getJobId());
schedulerResource.removeJob(jobRequest);
jobExists = false;
break;
}
}
}
if (!jobExists) {
try {
Response response = createSchedulerJob(schedulerResource, jobScheduleRequest);
if (response.getStatus() == Response.Status.OK.getStatusCode()) {
if (response.getEntity() != null) {
// get the schedule job id from the response and add it to the import session
ImportSession.getSession().addImportedScheduleJobId(response.getEntity().toString());
}
}
} catch (Exception e) {
// Fallback: spaces in repository paths can make job creation fail,
// so retry once with the space(s) replaced by underscores.
if (jobScheduleRequest.getInputFile().contains(" ") || jobScheduleRequest.getOutputFile().contains(" ")) {
getLogger().info(Messages.getInstance().getString("SolutionImportHandler.SchedulesWithSpaces", jobScheduleRequest.getInputFile()));
File inFile = new File(jobScheduleRequest.getInputFile());
File outFile = new File(jobScheduleRequest.getOutputFile());
String inputFileName = inFile.getParent() + RepositoryFile.SEPARATOR + inFile.getName().replace(" ", "_");
String outputFileName = outFile.getParent() + RepositoryFile.SEPARATOR + outFile.getName().replace(" ", "_");
jobScheduleRequest.setInputFile(inputFileName);
jobScheduleRequest.setOutputFile(outputFileName);
try {
if (!File.separator.equals(RepositoryFile.SEPARATOR)) {
// on windows systems, the backslashes will result in the file not being found in the repository
jobScheduleRequest.setInputFile(inputFileName.replace(File.separator, RepositoryFile.SEPARATOR));
jobScheduleRequest.setOutputFile(outputFileName.replace(File.separator, RepositoryFile.SEPARATOR));
}
Response response = createSchedulerJob(schedulerResource, jobScheduleRequest);
if (response.getStatus() == Response.Status.OK.getStatusCode()) {
if (response.getEntity() != null) {
// get the schedule job id from the response and add it to the import session
ImportSession.getSession().addImportedScheduleJobId(response.getEntity().toString());
}
}
} catch (Exception ex) {
// log it and keep going. we should not stop processing all schedules just because one fails.
getLogger().error(Messages.getInstance().getString("SolutionImportHandler.ERROR_0001_ERROR_CREATING_SCHEDULE", e.getMessage()), ex);
}
} else {
// log it and keep going. we should not stop processing all schedules just because one fails.
getLogger().error(Messages.getInstance().getString("SolutionImportHandler.ERROR_0001_ERROR_CREATING_SCHEDULE", e.getMessage()));
}
}
} else {
// Job already exists and overwrite is off: skip it and log.
getLogger().info(Messages.getInstance().getString("DefaultImportHandler.ERROR_0009_OVERWRITE_CONTENT", jobScheduleRequest.toString()));
}
}
// Resume normal scheduling once every request has been processed.
schedulerResource.start();
}
}
Use of com.google.cloud.dataproc.v1beta2.Job in project pentaho-platform by pentaho.
The class SchedulerResourceTest, method testAddBlockout.
/**
 * Verifies that {@code addBlockout} delegates to the scheduler service and wraps
 * the resulting job id in a plain-text OK response.
 */
@Test
public void testAddBlockout() throws Exception {
  // Arrange
  String jobId = "jobId";
  JobScheduleRequest scheduleRequest = mock( JobScheduleRequest.class );
  Job blockoutJob = mock( Job.class );
  Response okResponse = mock( Response.class );
  doReturn( blockoutJob ).when( schedulerResource.schedulerService ).addBlockout( scheduleRequest );
  doReturn( jobId ).when( blockoutJob ).getJobId();
  doReturn( okResponse ).when( schedulerResource ).buildPlainTextOkResponse( jobId );
  // Act
  Response actual = schedulerResource.addBlockout( scheduleRequest );
  // Assert: the stubbed OK response is returned and each collaborator ran once.
  assertEquals( okResponse, actual );
  verify( schedulerResource.schedulerService, times( 1 ) ).addBlockout( scheduleRequest );
  verify( blockoutJob, times( 1 ) ).getJobId();
  verify( schedulerResource, times( 1 ) ).buildPlainTextOkResponse( jobId );
}
Use of com.google.cloud.dataproc.v1beta2.Job in project pentaho-platform by pentaho.
The class SchedulerResourceTest, method testUpdateBlockout.
/**
 * Verifies that {@code updateBlockout} delegates to the scheduler service and wraps
 * the updated job's id in a plain-text OK response.
 */
@Test
public void testUpdateBlockout() throws Exception {
  // Arrange: permission check passes and the service returns a job with a known id.
  String jobId = "jobId";
  JobScheduleRequest mockJobScheduleRequest = mock( JobScheduleRequest.class );
  doReturn( true ).when( schedulerResource.schedulerService ).isScheduleAllowed();
  JobRequest mockJobRequest = mock( JobRequest.class );
  doReturn( mockJobRequest ).when( schedulerResource ).getJobRequest();
  Job mockJob = mock( Job.class );
  doReturn( mockJob ).when( schedulerResource.schedulerService ).updateBlockout( jobId, mockJobScheduleRequest );
  doReturn( jobId ).when( mockJob ).getJobId();
  Response mockResponse = mock( Response.class );
  doReturn( mockResponse ).when( schedulerResource ).buildPlainTextOkResponse( jobId );
  // Act
  Response testResponse = schedulerResource.updateBlockout( jobId, mockJobScheduleRequest );
  // Assert
  assertEquals( mockResponse, testResponse );
  verify( schedulerResource.schedulerService, times( 1 ) ).updateBlockout( jobId, mockJobScheduleRequest );
  verify( mockJob, times( 1 ) ).getJobId();
  // Consistency with testAddBlockout/testCreateJob: the stubbed response builder
  // must actually be the source of the returned response.
  verify( schedulerResource, times( 1 ) ).buildPlainTextOkResponse( jobId );
}
Use of com.google.cloud.dataproc.v1beta2.Job in project pentaho-platform by pentaho.
The class SchedulerResourceTest, method testCreateJob.
/**
 * Verifies that {@code createJob} delegates to the scheduler service and wraps
 * the new job's id in a plain-text OK response.
 */
@Test
public void testCreateJob() throws Exception {
  // Arrange
  String jobId = "jobId";
  JobScheduleRequest scheduleRequest = mock( JobScheduleRequest.class );
  Job createdJob = mock( Job.class );
  Response okResponse = mock( Response.class );
  doReturn( createdJob ).when( schedulerResource.schedulerService ).createJob( scheduleRequest );
  doReturn( jobId ).when( createdJob ).getJobId();
  doReturn( okResponse ).when( schedulerResource ).buildPlainTextOkResponse( jobId );
  // Act
  Response actual = schedulerResource.createJob( scheduleRequest );
  // Assert: the stubbed OK response is returned and each collaborator ran once.
  assertEquals( okResponse, actual );
  verify( schedulerResource.schedulerService, times( 1 ) ).createJob( scheduleRequest );
  verify( createdJob, times( 1 ) ).getJobId();
  verify( schedulerResource, times( 1 ) ).buildPlainTextOkResponse( jobId );
}
Aggregations