use of com.google.cloud.dataproc.v1.Job in project pravega by pravega.
the class RemoteSequential method newJob.
private Job newJob(String id, String className, String methodName) {
    Map<String, String> labels = new HashMap<>(1);
    labels.put("testMethodName", methodName);
    // This can be used to set environment variables while executing the job on Metronome.
    Map<String, String> env = new HashMap<>(2);
    env.put("masterIP", System.getProperty("masterIP"));
    env.put("env2", "value102");
    Artifact art = new Artifact();
    // It caches the artifacts, disabling it for now.
    art.setCache(false);
    // jar is not executable.
    art.setExecutable(false);
    art.setExtract(false);
    art.setUri(System.getProperty("testArtifactUrl", "InvalidTestArtifactURL"));
    Restart restart = new Restart();
    // the tests are expected to finish in 2 mins, this can be changed to a higher value if required.
    restart.setActiveDeadlineSeconds(120);
    restart.setPolicy("NEVER");
    Run run = new Run();
    run.setArtifacts(Collections.singletonList(art));
    run.setCmd("docker run --rm -v $(pwd):/data " + System.getProperty("dockerImageRegistry") + "/java:8 java"
            + " -DmasterIP=" + LoginClient.MESOS_MASTER
            + " -DskipServiceInstallation=" + Utils.isSkipServiceInstallationEnabled()
            + " -cp /data/pravega-test-system-" + System.getProperty("testVersion")
            + ".jar io.pravega.test.system.SingleJUnitTestRunner " + className + "#" + methodName
            + " > server.log 2>&1" + "; exit $?");
    // CPU shares.
    run.setCpus(0.5);
    // amount of memory required for running test in MB.
    run.setMem(512.0);
    run.setDisk(50.0);
    run.setEnv(env);
    run.setMaxLaunchDelay(3600);
    run.setRestart(restart);
    run.setUser("root");
    Job job = new Job();
    job.setId(id);
    job.setDescription(id);
    job.setLabels(labels);
    job.setRun(run);
    return job;
}
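For orientation, a minimal sketch of how newJob might be exercised; the job id, test class, and method name below are made-up examples, and getLabels() is assumed to be the getter counterpart of the setLabels() call above rather than confirmed API.

// Illustrative sketch only: the id, test class and method name are examples,
// and getLabels() is an assumed getter mirroring setLabels() above.
Job job = newJob("pravega-test-" + System.currentTimeMillis(),
        "io.pravega.test.system.ReadWriteTest", "readWriteTest");
// The resulting Job carries one label identifying the test method, plus the artifact,
// docker command, restart policy and resource limits configured above.
assert "readWriteTest".equals(job.getLabels().get("testMethodName"));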
use of com.google.cloud.dataproc.v1.Job in project pentaho-platform by pentaho.
the class EmbeddedVersionCheckSystemListener method deleteJobIfNecessary.
protected void deleteJobIfNecessary() throws SchedulerException {
    IScheduler scheduler = PentahoSystem.get(IScheduler.class, "IScheduler2", null); // $NON-NLS-1$
    IJobFilter filter = new IJobFilter() {
        public boolean accept(Job job) {
            return job.getJobName().contains(EmbeddedVersionCheckSystemListener.VERSION_CHECK_JOBNAME);
        }
    };
    // Like old code - remove the existing job and replace it
    List<Job> matchingJobs = scheduler.getJobs(filter);
    if ((matchingJobs != null) && (matchingJobs.size() > 0)) {
        for (Job verCkJob : matchingJobs) {
            scheduler.removeJob(verCkJob.getJobId());
        }
    }
}
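Since IJobFilter is used here as an anonymous class with a single accept method, the same lookup-and-remove logic can be written more compactly with a lambda; a sketch assuming IJobFilter declares only that one abstract method:

// Sketch: the same version-check cleanup with IJobFilter written as a lambda (Java 8+),
// assuming IJobFilter is a functional interface whose only abstract method is accept(Job).
IScheduler scheduler = PentahoSystem.get(IScheduler.class, "IScheduler2", null);
List<Job> matchingJobs = scheduler.getJobs(
        job -> job.getJobName().contains(EmbeddedVersionCheckSystemListener.VERSION_CHECK_JOBNAME));
if (matchingJobs != null) {
    for (Job verCkJob : matchingJobs) {
        scheduler.removeJob(verCkJob.getJobId());
    }
}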
use of com.google.cloud.dataproc.v1.Job in project pentaho-platform by pentaho.
the class SchedulerService method updateJob.
public Job updateJob(JobScheduleRequest scheduleRequest) throws IllegalAccessException, IOException, SchedulerException {
    Job job = getScheduler().getJob(scheduleRequest.getJobId());
    if (job != null) {
        scheduleRequest.getJobParameters().add(new JobScheduleParam(QuartzScheduler.RESERVEDMAPKEY_ACTIONUSER, job.getUserName()));
    }
    Job newJob = createJob(scheduleRequest);
    removeJob(scheduleRequest.getJobId());
    return newJob;
}
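A hypothetical caller of updateJob; the setJobId and setSimpleJobTrigger setters are assumed counterparts of the getters read above, and existingJobId and newTrigger are placeholders supplied by the caller.

// Sketch only: replace an existing schedule with a new trigger. setJobId and
// setSimpleJobTrigger are assumed setters mirroring the getters used in updateJob;
// existingJobId and newTrigger are placeholders, not values from the source.
JobScheduleRequest request = new JobScheduleRequest();
request.setJobId(existingJobId);
request.setSimpleJobTrigger(newTrigger);
// updateJob copies the original job's action user into the parameters, creates the
// replacement job, removes the old one, and returns the new Job.
Job rescheduled = schedulerService.updateJob(request);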
use of com.google.cloud.dataproc.v1.Job in project pentaho-platform by pentaho.
the class SchedulerService method createJob.
public Job createJob(JobScheduleRequest scheduleRequest) throws IOException, SchedulerException, IllegalAccessException {
    // Used to determine if created by a RunInBackgroundCommand
    boolean runInBackground = scheduleRequest.getSimpleJobTrigger() == null
            && scheduleRequest.getComplexJobTrigger() == null
            && scheduleRequest.getCronJobTrigger() == null;
    if (!runInBackground && !getPolicy().isAllowed(SchedulerAction.NAME)) {
        throw new SecurityException();
    }
    boolean hasInputFile = !StringUtils.isEmpty(scheduleRequest.getInputFile());
    RepositoryFile file = null;
    if (hasInputFile) {
        try {
            file = getRepository().getFile(scheduleRequest.getInputFile());
        } catch (UnifiedRepositoryException ure) {
            hasInputFile = false;
            logger.warn(ure.getMessage(), ure);
        }
    }
    // if we have an inputfile, generate job name based on that if the name is not passed in
    if (hasInputFile && StringUtils.isEmpty(scheduleRequest.getJobName())) {
        scheduleRequest.setJobName(file.getName().substring(0, file.getName().lastIndexOf("."))); // $NON-NLS-1$
    } else if (!StringUtils.isEmpty(scheduleRequest.getActionClass())) {
        String actionClass = scheduleRequest.getActionClass().substring(scheduleRequest.getActionClass().lastIndexOf(".") + 1);
        scheduleRequest.setJobName(actionClass); // $NON-NLS-1$
    } else if (!hasInputFile && StringUtils.isEmpty(scheduleRequest.getJobName())) {
        // just make up a name
        scheduleRequest.setJobName("" + System.currentTimeMillis()); // $NON-NLS-1$
    }
    if (hasInputFile) {
        if (file == null) {
            logger.error("Cannot find input source file " + scheduleRequest.getInputFile() + " Aborting schedule...");
            throw new SchedulerException(new ServiceException("Cannot find input source file " + scheduleRequest.getInputFile()));
        }
        Map<String, Serializable> metadata = getRepository().getFileMetadata(file.getId());
        if (metadata.containsKey(RepositoryFile.SCHEDULABLE_KEY)) {
            boolean schedulable = BooleanUtils.toBoolean((String) metadata.get(RepositoryFile.SCHEDULABLE_KEY));
            if (!schedulable) {
                throw new IllegalAccessException();
            }
        }
    }
    if (scheduleRequest.getTimeZone() != null) {
        updateStartDateForTimeZone(scheduleRequest);
    }
    Job job = null;
    IJobTrigger jobTrigger = SchedulerResourceUtil.convertScheduleRequestToJobTrigger(scheduleRequest, scheduler);
    HashMap<String, Serializable> parameterMap = new HashMap<>();
    for (JobScheduleParam param : scheduleRequest.getJobParameters()) {
        parameterMap.put(param.getName(), param.getValue());
    }
    if (isPdiFile(file)) {
        parameterMap = handlePDIScheduling(file, parameterMap, scheduleRequest.getPdiParameters());
    }
    parameterMap.put(LocaleHelper.USER_LOCALE_PARAM, LocaleHelper.getLocale());
    if (hasInputFile) {
        SchedulerOutputPathResolver outputPathResolver = getSchedulerOutputPathResolver(scheduleRequest);
        String outputFile = outputPathResolver.resolveOutputFilePath();
        String actionId = SchedulerResourceUtil.resolveActionId(scheduleRequest.getInputFile());
        final String inputFile = scheduleRequest.getInputFile();
        parameterMap.put(ActionUtil.QUARTZ_STREAMPROVIDER_INPUT_FILE, inputFile);
        job = getScheduler().createJob(scheduleRequest.getJobName(), actionId, parameterMap, jobTrigger,
                new RepositoryFileStreamProvider(inputFile, outputFile,
                        getAutoCreateUniqueFilename(scheduleRequest), getAppendDateFormat(scheduleRequest)));
    } else {
        // need to locate actions from plugins if done this way too (but for now, we're just on main)
        String actionClass = scheduleRequest.getActionClass();
        try {
            @SuppressWarnings("unchecked")
            Class<IAction> iaction = getAction(actionClass);
            job = getScheduler().createJob(scheduleRequest.getJobName(), iaction, parameterMap, jobTrigger);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
    }
    return job;
}
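An illustrative call into createJob; the repository path and trigger are examples, and the setInputFile/setSimpleJobTrigger setters are assumed to mirror the getters that createJob reads above.

// Sketch only: schedule a repository file. The path and trigger are examples, and
// setInputFile/setSimpleJobTrigger are assumed setters mirroring the getters above.
JobScheduleRequest request = new JobScheduleRequest();
request.setInputFile("/public/reports/example.prpt");
request.setSimpleJobTrigger(dailyTrigger); // non-null trigger, so SchedulerAction permission is required
// No job name is set, so createJob derives it from the file name ("example").
Job job = schedulerService.createJob(request);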
use of com.google.cloud.dataproc.v1.Job in project pentaho-platform by pentaho.
the class SchedulerService method pauseJob.
public JobState pauseJob(String jobId) throws SchedulerException {
    Job job = getJob(jobId);
    if (isScheduleAllowed() || PentahoSessionHolder.getSession().getName().equals(job.getUserName())) {
        getScheduler().pauseJob(jobId);
    }
    job = getJob(jobId);
    return job.getState();
}
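A short sketch of the caller's view of pauseJob; JobState.PAUSED is assumed to be the enum value reported for a paused job.

// Sketch only: pause a job and inspect the state pauseJob returns after re-reading the job.
// JobState.PAUSED is an assumed enum constant, not confirmed by the snippet above.
JobState state = schedulerService.pauseJob(jobId);
if (state != JobState.PAUSED) {
    // The pause was skipped: the caller is neither allowed to schedule nor the job's owner.
}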