Use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.
In the class SchedulerServiceTest, the method testRemoveJobException:
@Test
public void testRemoveJobException() throws SchedulerException {
  Job job = mock(Job.class);
  doReturn(job).when(schedulerService).getJob(nullable(String.class));
  doReturn(true).when(schedulerService).isScheduleAllowed();
  doThrow(new SchedulerException("pause-exception")).when(schedulerService.scheduler).removeJob(nullable(String.class));
  try {
    schedulerService.removeJob("job-id");
  } catch (SchedulerException e) {
    assertEquals("pause-exception", e.getMessage());
  }
}
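Read against the stubs above, the service method under test can be pictured with the following minimal sketch. It is inferred from the test, not taken from the pentaho-platform source; the real method may do more with the looked-up job (for example an ownership check):

// Minimal sketch inferred from the stubs above; not the actual SchedulerService source.
public boolean removeJob(String jobId) throws SchedulerException {
  Job job = getJob(jobId);     // stubbed in the test; the real method may use it for an ownership check
  if (isScheduleAllowed()) {   // stubbed to return true
    // The test configures this call to throw SchedulerException("pause-exception"),
    // which it expects to propagate out of removeJob unchanged.
    scheduler.removeJob(jobId);
    return true;
  }
  return false;
}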
Use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.
In the class SchedulerResourceTest, the method testGetJob:
@Test
public void testGetJob() throws Exception {
  String jobId = "jobId";
  String asCronString = "asCronString";
  Job mockJob = mock(Job.class);
  doReturn(mockJob).when(schedulerResource.schedulerService).getJobInfo(jobId);
  Response mockResponse = mock(Response.class);
  doReturn(mockResponse).when(schedulerResource).buildOkResponse(mockJob);
  Response testResponse = schedulerResource.getJob(jobId, asCronString);
  assertEquals(mockResponse, testResponse);
  verify(schedulerResource.schedulerService, times(1)).getJobInfo(jobId);
}
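The verifications above pin down only a thin delegation. A minimal sketch of a getJob resource method consistent with this test (not the actual SchedulerResource implementation; error handling and the asCronString parameter are ignored here, just as they are in the test):

// Sketch consistent with the test; not the actual pentaho-platform implementation.
public Response getJob(String jobId, String asCronString) throws SchedulerException {
  // Look the job up through the service layer and wrap it in a 200 OK response.
  Job job = schedulerService.getJobInfo(jobId);
  return buildOkResponse(job);
}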
Use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.
In the class SchedulerResourceTest, the method testTriggerNow:
@Test
public void testTriggerNow() throws Exception {
  JobRequest mockJobRequest = mock(JobRequest.class);
  String jobId = "jobId";
  doReturn(jobId).when(mockJobRequest).getJobId();
  Job mockJob = mock(Job.class);
  doReturn(mockJob).when(schedulerResource.schedulerService).triggerNow(jobId);
  Job.JobState mockJobState = Job.JobState.BLOCKED;
  doReturn(mockJobState).when(mockJob).getState();
  Response mockResponse = mock(Response.class);
  doReturn(mockResponse).when(schedulerResource).buildPlainTextOkResponse(mockJobState.name());
  Response testResponse = schedulerResource.triggerNow(mockJobRequest);
  assertEquals(mockResponse, testResponse);
  verify(mockJobRequest, times(1)).getJobId();
  verify(schedulerResource.schedulerService, times(1)).triggerNow(jobId);
  verify(mockJob, times(1)).getState();
  verify(schedulerResource, times(1)).buildPlainTextOkResponse(mockJobState.name());
}
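Again the test fixes the shape of the method: trigger the job through the service, then return its state name as plain text. A minimal sketch consistent with those verifications (not the actual SchedulerResource code, and without the permission and error handling a real endpoint would need):

// Sketch consistent with the test; not the actual pentaho-platform implementation.
public Response triggerNow(JobRequest jobRequest) throws SchedulerException {
  // Run the job immediately, then report the resulting job state ("BLOCKED" in the test) as plain text.
  Job triggeredJob = schedulerService.triggerNow(jobRequest.getJobId());
  return buildPlainTextOkResponse(triggeredJob.getState().name());
}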
Use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.
In the class SchedulerResourceTest, the method testRemoveJob:
@Test
public void testRemoveJob() throws Exception {
  JobRequest mockJobRequest = mock(JobRequest.class);
  String jobId = "jobId";
  doReturn(jobId).when(mockJobRequest).getJobId();
  Job mockJob = mock(Job.class);
  doReturn(mockJob).when(schedulerResource.schedulerService).getJob(jobId);
  Job.JobState mockJobState = Job.JobState.BLOCKED;
  doReturn(mockJobState).when(mockJob).getState();
  Response mockRemovedResponse = mock(Response.class);
  doReturn(mockRemovedResponse).when(schedulerResource).buildPlainTextOkResponse("REMOVED");
  Response mockJobStateResponse = mock(Response.class);
  doReturn(mockJobStateResponse).when(schedulerResource).buildPlainTextOkResponse(mockJobState.name());
  // Test 1
  doReturn(true).when(schedulerResource.schedulerService).removeJob(jobId);
  Response testResponse = schedulerResource.removeJob(mockJobRequest);
  assertEquals(mockRemovedResponse, testResponse);
  // Test 2
  doReturn(false).when(schedulerResource.schedulerService).removeJob(jobId);
  testResponse = schedulerResource.removeJob(mockJobRequest);
  assertEquals(mockJobStateResponse, testResponse);
  verify(mockJobRequest, times(3)).getJobId();
  verify(schedulerResource.schedulerService, times(1)).getJob(jobId);
  verify(mockJob, times(1)).getState();
  verify(schedulerResource, times(1)).buildPlainTextOkResponse("REMOVED");
  verify(schedulerResource, times(1)).buildPlainTextOkResponse(mockJobState.name());
}
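The two branches exercised above translate into a method of roughly this shape; a minimal sketch consistent with the test (not the actual SchedulerResource source, error handling omitted):

// Sketch consistent with the test; not the actual pentaho-platform implementation.
public Response removeJob(JobRequest jobRequest) throws SchedulerException {
  if (schedulerService.removeJob(jobRequest.getJobId())) {
    // Test 1: the service reports the job was removed.
    return buildPlainTextOkResponse("REMOVED");
  }
  // Test 2: removal failed, so report the job's current state instead.
  Job job = schedulerService.getJob(jobRequest.getJobId());
  return buildPlainTextOkResponse(job.getState().name());
}

This shape also accounts for the three getJobId() invocations the test verifies: one in the successful removal and two in the failed one.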
Use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.
In the class SchedulerService, the method createJob:
public Job createJob(JobScheduleRequest scheduleRequest) throws IOException, SchedulerException, IllegalAccessException {
  // Used to determine if created by a RunInBackgroundCommand
  boolean runInBackground = scheduleRequest.getSimpleJobTrigger() == null && scheduleRequest.getComplexJobTrigger() == null && scheduleRequest.getCronJobTrigger() == null;
  if (!runInBackground && !getPolicy().isAllowed(SchedulerAction.NAME)) {
    throw new SecurityException();
  }
  boolean hasInputFile = !StringUtils.isEmpty(scheduleRequest.getInputFile());
  RepositoryFile file = null;
  if (hasInputFile) {
    try {
      file = getRepository().getFile(scheduleRequest.getInputFile());
    } catch (UnifiedRepositoryException ure) {
      hasInputFile = false;
      logger.warn(ure.getMessage(), ure);
    }
  }
  // if we have an inputfile, generate job name based on that if the name is not passed in
  if (hasInputFile && StringUtils.isEmpty(scheduleRequest.getJobName())) {
    scheduleRequest.setJobName(file.getName().substring(0, file.getName().lastIndexOf("."))); // $NON-NLS-1$
  } else if (!StringUtils.isEmpty(scheduleRequest.getActionClass())) {
    String actionClass = scheduleRequest.getActionClass().substring(scheduleRequest.getActionClass().lastIndexOf(".") + 1);
    scheduleRequest.setJobName(actionClass); // $NON-NLS-1$
  } else if (!hasInputFile && StringUtils.isEmpty(scheduleRequest.getJobName())) {
    // just make up a name
    scheduleRequest.setJobName("" + System.currentTimeMillis()); // $NON-NLS-1$
  }
  if (hasInputFile) {
    if (file == null) {
      logger.error("Cannot find input source file " + scheduleRequest.getInputFile() + " Aborting schedule...");
      throw new SchedulerException(new ServiceException("Cannot find input source file " + scheduleRequest.getInputFile()));
    }
    Map<String, Serializable> metadata = getRepository().getFileMetadata(file.getId());
    if (metadata.containsKey(RepositoryFile.SCHEDULABLE_KEY)) {
      boolean schedulable = BooleanUtils.toBoolean((String) metadata.get(RepositoryFile.SCHEDULABLE_KEY));
      if (!schedulable) {
        throw new IllegalAccessException();
      }
    }
  }
  if (scheduleRequest.getTimeZone() != null) {
    updateStartDateForTimeZone(scheduleRequest);
  }
  Job job = null;
  IJobTrigger jobTrigger = SchedulerResourceUtil.convertScheduleRequestToJobTrigger(scheduleRequest, scheduler);
  HashMap<String, Serializable> parameterMap = new HashMap<>();
  for (JobScheduleParam param : scheduleRequest.getJobParameters()) {
    parameterMap.put(param.getName(), param.getValue());
  }
  if (isPdiFile(file)) {
    parameterMap = handlePDIScheduling(file, parameterMap, scheduleRequest.getPdiParameters());
  }
  parameterMap.put(LocaleHelper.USER_LOCALE_PARAM, LocaleHelper.getLocale());
  if (hasInputFile) {
    SchedulerOutputPathResolver outputPathResolver = getSchedulerOutputPathResolver(scheduleRequest);
    String outputFile = outputPathResolver.resolveOutputFilePath();
    String actionId = SchedulerResourceUtil.resolveActionId(scheduleRequest.getInputFile());
    final String inputFile = scheduleRequest.getInputFile();
    parameterMap.put(ActionUtil.QUARTZ_STREAMPROVIDER_INPUT_FILE, inputFile);
    job = getScheduler().createJob(scheduleRequest.getJobName(), actionId, parameterMap, jobTrigger, new RepositoryFileStreamProvider(inputFile, outputFile, getAutoCreateUniqueFilename(scheduleRequest), getAppendDateFormat(scheduleRequest)));
  } else {
    // need to locate actions from plugins if done this way too (but for now, we're just on main)
    String actionClass = scheduleRequest.getActionClass();
    try {
      @SuppressWarnings("unchecked") Class<IAction> iaction = getAction(actionClass);
      job = getScheduler().createJob(scheduleRequest.getJobName(), iaction, parameterMap, jobTrigger);
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
  }
  return job;
}
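As a usage note, a caller that schedules a repository file to run daily might look like the sketch below. The repository path, job name, and trigger values are illustrative placeholders; the SimpleJobTrigger setters and the seconds-based repeat interval are assumptions about the scheduler2 API rather than guarantees, and how SchedulerService is obtained is environment-specific:

// Hypothetical caller sketch; wiring, path, and trigger values are illustrative only.
public Job scheduleDailyReport(SchedulerService schedulerService)
    throws IOException, SchedulerException, IllegalAccessException {
  JobScheduleRequest request = new JobScheduleRequest();
  request.setInputFile("/public/Steel Wheels/Inventory.prpt"); // assumed repository path
  request.setJobName("inventory-report"); // optional; createJob derives a name from the file if omitted

  SimpleJobTrigger trigger = new SimpleJobTrigger();
  trigger.setStartTime(new Date());
  trigger.setRepeatInterval(24 * 60 * 60); // assumed to be seconds between runs
  trigger.setRepeatCount(-1);              // repeat indefinitely
  request.setSimpleJobTrigger(trigger);

  // A non-null trigger means this is a regular schedule, so scheduler permission
  // and a schedulable input file are required by createJob.
  return schedulerService.createJob(request);
}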