Use of org.pentaho.platform.api.scheduler2.JobTrigger in the pentaho-platform project (by Pentaho).
Class GatherStatsListener, method scheduleJob:
/**
 * Schedules the recurring statistics-gathering job.
 *
 * @param intervalInSeconds seconds between successive firings
 * @throws Exception propagated from the scheduler if job creation fails
 */
private void scheduleJob(int intervalInSeconds) throws Exception {
// $NON-NLS-1$
IScheduler scheduler = PentahoSystem.get(IScheduler.class, "IScheduler2", null);
// Start now, no end date, repeat forever (-1), firing every intervalInSeconds.
JobTrigger trigger = new SimpleJobTrigger(new Date(), null, -1, intervalInSeconds);
jobMap.put("transFileName", getTransFileName());
scheduler.createJob(GatherStatsListener.JOB_NAME, GatherStatsAction.class, jobMap, trigger);
// Fixed message typo: "jop" -> "job".
logger.info("Statistics gathering job has been scheduled.");
}
Use of org.pentaho.platform.api.scheduler2.JobTrigger in the pentaho-platform project (by Pentaho).
Class BlockoutManagerUtil, method willFire:
/**
 * Determines whether a schedule will ever fire given the active block-out triggers.
 * Returns false when any block-out covers every one of the schedule's fire times
 * (or when two simple triggers share the same recurrence interval, which means
 * every firing collides); otherwise returns true.
 */
public static boolean willFire(IJobTrigger jobTrigger, List<IJobTrigger> blockOutTriggers, IScheduler scheduler) {
// No block-outs at all: the schedule trivially fires.
if (blockOutTriggers.isEmpty()) {
return true;
}
final List<Date> scheduledFireTimes = getFireTimes(jobTrigger, scheduler);
for (IJobTrigger blockOut : blockOutTriggers) {
// Skip block-outs that never intersect this schedule.
if (!willBlockSchedule(jobTrigger, blockOut, scheduler)) {
continue;
}
final boolean complexBlockOut = isComplexTrigger(blockOut);
// Two simple triggers with identical recurrence intervals collide on every fire.
if (!complexBlockOut && !isComplexTrigger(jobTrigger)
&& getRecurrenceInterval(blockOut) == getRecurrenceInterval(jobTrigger)) {
return false;
}
// Complex block-outs are evaluated against their own precomputed fire times.
final List<Date> blockOutFireTimes = complexBlockOut ? getFireTimes(blockOut, scheduler) : null;
// The schedule survives this block-out only if at least one fire time escapes it.
boolean everyFireTimeBlocked = true;
for (Date fireTime : scheduledFireTimes) {
boolean blocked = complexBlockOut
? willComplexBlockOutTriggerBlockDate(blockOut, blockOutFireTimes, fireTime)
: willBlockDate(blockOut, fireTime, scheduler);
if (!blocked) {
everyFireTimeBlocked = false;
break;
}
}
if (everyFireTimeBlocked) {
return false;
}
}
return true;
}
Use of org.pentaho.platform.api.scheduler2.JobTrigger in the pentaho-platform project (by Pentaho).
Class DefaultSchedulerService, method updateJob:
/**
 * Updates an existing scheduled job's parameters and trigger.
 *
 * @param jobId the identifier of the job to update
 * @param jobParams new job parameters (converted to a Serializable map before submission)
 * @param trigger the new trigger definition
 * @throws SchedulerException if the underlying scheduler rejects the update
 */
private void updateJob(String jobId, Map<String, ParamValue> jobParams, JobTrigger trigger) throws SchedulerException {
// $NON-NLS-1$
// Fixed message: this method updates (not creates) a job; implicit string
// concatenation also avoids an NPE that explicit trigger.toString() would throw
// on a null trigger.
logger.debug("Updating job " + jobId + " with schedule " + trigger);
try {
// $NON-NLS-1$
IScheduler scheduler = PentahoSystem.get(IScheduler.class, "IScheduler2", null);
Map<String, Serializable> properJobParams = toProperMap(jobParams);
scheduler.updateJob(jobId, properJobParams, trigger);
} catch (SchedulerException e) {
// temporary error logging.. this needs to become an aspect
logger.error(e.getMessage(), e);
throw e;
}
}
Use of org.pentaho.platform.api.scheduler2.JobTrigger in the pentaho-platform project (by Pentaho).
Class SchedulerService, method createJob:
/**
 * Creates and schedules a job from the given request and returns the created {@link Job}.
 * <p>
 * A request with no simple, complex, or cron trigger is treated as a one-off
 * "run in background" execution, which bypasses the scheduler permission check.
 *
 * @param scheduleRequest the schedule definition (trigger, input file or action class, parameters)
 * @return the job created by the scheduler
 * @throws SchedulerException if the input file cannot be found or the scheduler rejects the job
 * @throws IllegalAccessException if the input file's metadata marks it as not schedulable
 * @throws IOException propagated from output-path resolution
 */
public Job createJob(JobScheduleRequest scheduleRequest) throws IOException, SchedulerException, IllegalAccessException {
// Used to determine if created by a RunInBackgroundCommand
boolean runInBackground = scheduleRequest.getSimpleJobTrigger() == null && scheduleRequest.getComplexJobTrigger() == null && scheduleRequest.getCronJobTrigger() == null;
// Recurring schedules require the scheduler permission; background runs do not.
if (!runInBackground && !getPolicy().isAllowed(SchedulerAction.NAME)) {
throw new SecurityException();
}
boolean hasInputFile = !StringUtils.isEmpty(scheduleRequest.getInputFile());
RepositoryFile file = null;
if (hasInputFile) {
try {
file = getRepository().getFile(scheduleRequest.getInputFile());
} catch (UnifiedRepositoryException ure) {
// Repository failure degrades to "no input file"; the action-class path below is used instead.
hasInputFile = false;
logger.warn(ure.getMessage(), ure);
}
}
// if we have an inputfile, generate job name based on that if the name is not passed in
if (hasInputFile && StringUtils.isEmpty(scheduleRequest.getJobName())) {
// NOTE(review): lastIndexOf(".") returns -1 for an extensionless file name, which would
// make this substring call throw — confirm input files always carry an extension.
// $NON-NLS-1$
scheduleRequest.setJobName(file.getName().substring(0, file.getName().lastIndexOf(".")));
} else if (!StringUtils.isEmpty(scheduleRequest.getActionClass())) {
// NOTE(review): this branch replaces a caller-supplied job name whenever an action
// class is present — verify that overwrite is intentional.
String actionClass = scheduleRequest.getActionClass().substring(scheduleRequest.getActionClass().lastIndexOf(".") + 1);
// $NON-NLS-1$
scheduleRequest.setJobName(actionClass);
} else if (!hasInputFile && StringUtils.isEmpty(scheduleRequest.getJobName())) {
// just make up a name
// $NON-NLS-1$
scheduleRequest.setJobName("" + System.currentTimeMillis());
}
if (hasInputFile) {
// The file lookup above succeeded without throwing but may still have returned null.
if (file == null) {
logger.error("Cannot find input source file " + scheduleRequest.getInputFile() + " Aborting schedule...");
throw new SchedulerException(new ServiceException("Cannot find input source file " + scheduleRequest.getInputFile()));
}
// Respect the repository's per-file "schedulable" metadata flag, when present.
Map<String, Serializable> metadata = getRepository().getFileMetadata(file.getId());
if (metadata.containsKey(RepositoryFile.SCHEDULABLE_KEY)) {
boolean schedulable = BooleanUtils.toBoolean((String) metadata.get(RepositoryFile.SCHEDULABLE_KEY));
if (!schedulable) {
throw new IllegalAccessException();
}
}
}
// Normalize the start date into the requested time zone before building the trigger.
if (scheduleRequest.getTimeZone() != null) {
updateStartDateForTimeZone(scheduleRequest);
}
Job job = null;
IJobTrigger jobTrigger = SchedulerResourceUtil.convertScheduleRequestToJobTrigger(scheduleRequest, scheduler);
// Collect request parameters into the map handed to the scheduler.
HashMap<String, Serializable> parameterMap = new HashMap<>();
for (JobScheduleParam param : scheduleRequest.getJobParameters()) {
parameterMap.put(param.getName(), param.getValue());
}
// PDI (Kettle) content gets additional parameter handling.
if (isPdiFile(file)) {
parameterMap = handlePDIScheduling(file, parameterMap, scheduleRequest.getPdiParameters());
}
parameterMap.put(LocaleHelper.USER_LOCALE_PARAM, LocaleHelper.getLocale());
if (scheduleRequest.getUseWorkerNodes() != null && !scheduleRequest.getUseWorkerNodes().trim().isEmpty()) {
parameterMap.put("useWorkerNodes", scheduleRequest.getUseWorkerNodes().trim());
}
if (hasInputFile) {
// File-based job: resolve output path and schedule with a repository stream provider.
SchedulerOutputPathResolver outputPathResolver = getSchedulerOutputPathResolver(scheduleRequest);
String outputFile = outputPathResolver.resolveOutputFilePath();
String actionId = SchedulerResourceUtil.resolveActionId(scheduleRequest.getInputFile());
final String inputFile = scheduleRequest.getInputFile();
parameterMap.put(ActionUtil.QUARTZ_STREAMPROVIDER_INPUT_FILE, inputFile);
job = getScheduler().createJob(scheduleRequest.getJobName(), actionId, parameterMap, jobTrigger, new RepositoryFileStreamProvider(inputFile, outputFile, getAutoCreateUniqueFilename(scheduleRequest), getAppendDateFormat(scheduleRequest)));
} else {
// need to locate actions from plugins if done this way too (but for now, we're just on main)
// Class-based job: load the action class by name and schedule it directly.
String actionClass = scheduleRequest.getActionClass();
try {
@SuppressWarnings("unchecked") Class<IAction> iaction = getAction(actionClass);
job = getScheduler().createJob(scheduleRequest.getJobName(), iaction, parameterMap, jobTrigger);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
return job;
}
Use of org.pentaho.platform.api.scheduler2.JobTrigger in the pentaho-platform project (by Pentaho).
Class ScheduleExportUtil, method createJobScheduleRequest:
/**
 * Converts a scheduled {@link Job} into an exportable {@link JobScheduleRequest}.
 * <p>
 * Recovers the input/output file paths from the job's stream provider (object or
 * serialized string form), copies job parameters, and maps the job's trigger to
 * the matching request trigger type.
 *
 * @param job the scheduled job to export; must not be null
 * @return a request describing the job's schedule
 * @throws IllegalArgumentException if {@code job} is null or its trigger type is unsupported
 */
public static JobScheduleRequest createJobScheduleRequest(Job job) {
if (job == null) {
throw new IllegalArgumentException(Messages.getInstance().getString("ScheduleExportUtil.JOB_MUST_NOT_BE_NULL"));
}
JobScheduleRequest schedule = new JobScheduleRequest();
schedule.setJobName(job.getJobName());
schedule.setDuration(job.getJobTrigger().getDuration());
schedule.setJobState(job.getState());
Map<String, Serializable> jobParams = job.getJobParams();
// The stream provider may be stored as an object or as a serialized string.
Object streamProviderObj = jobParams.get(QuartzScheduler.RESERVEDMAPKEY_STREAMPROVIDER);
RepositoryFileStreamProvider streamProvider = null;
if (streamProviderObj instanceof RepositoryFileStreamProvider) {
streamProvider = (RepositoryFileStreamProvider) streamProviderObj;
} else if (streamProviderObj instanceof String) {
// Parse the "input=...:output=..." string form.
// NOTE(review): this split(":") parsing breaks if either path itself contains a
// colon, and split("=")[1] throws on a malformed token — confirm the serialized
// format is always exactly two '='-bearing segments.
String inputFilePath = null;
String outputFilePath = null;
String inputOutputString = (String) streamProviderObj;
String[] tokens = inputOutputString.split(":");
if (!ArrayUtils.isEmpty(tokens) && tokens.length == 2) {
inputFilePath = tokens[0].split("=")[1].trim();
outputFilePath = tokens[1].split("=")[1].trim();
streamProvider = new RepositoryFileStreamProvider(inputFilePath, outputFilePath, true);
}
}
if (streamProvider != null) {
schedule.setInputFile(streamProvider.getInputFilePath());
schedule.setOutputFile(streamProvider.getOutputFilePath());
} else {
// let's look to see if we can figure out the input and output file
// Fall back to PDI-style job params ("directory" plus "transformation" or "job").
String directory = (String) jobParams.get("directory");
String transName = (String) jobParams.get("transformation");
String jobName = (String) jobParams.get("job");
String artifact = transName == null ? jobName : transName;
if (directory != null && artifact != null) {
String outputFile = RepositoryFilenameUtils.concat(directory, artifact);
outputFile += "*";
// Append the Kettle extension matching the artifact kind (.kjb job / .ktr transformation).
if (artifact.equals(jobName)) {
artifact += ".kjb";
} else {
artifact += ".ktr";
}
String inputFile = RepositoryFilenameUtils.concat(directory, artifact);
schedule.setInputFile(inputFile);
schedule.setOutputFile(outputFile);
}
}
// Copy remaining job params, special-casing PDI run parameters and a few reserved keys.
for (String key : jobParams.keySet()) {
Serializable serializable = jobParams.get(key);
if (RUN_PARAMETERS_KEY.equals(key)) {
if (schedule.getPdiParameters() == null) {
schedule.setPdiParameters(new HashMap<String, String>());
}
// NOTE(review): unchecked cast — assumes the stored value is Map<String, String>; confirm.
schedule.getPdiParameters().putAll((Map<String, String>) serializable);
} else {
JobScheduleParam param = null;
if (serializable instanceof String) {
String value = (String) serializable;
// Reserved keys are lifted onto dedicated request fields but still exported as params.
if (QuartzScheduler.RESERVEDMAPKEY_ACTIONCLASS.equals(key)) {
schedule.setActionClass(value);
} else if (IBlockoutManager.TIME_ZONE_PARAM.equals(key)) {
schedule.setTimeZone(value);
}
param = new JobScheduleParam(key, (String) serializable);
} else if (serializable instanceof Number) {
param = new JobScheduleParam(key, (Number) serializable);
} else if (serializable instanceof Date) {
param = new JobScheduleParam(key, (Date) serializable);
} else if (serializable instanceof Boolean) {
param = new JobScheduleParam(key, (Boolean) serializable);
}
// Params of any other type are silently dropped from the export.
if (param != null) {
schedule.getJobParameters().add(param);
}
}
}
// Map the trigger onto the request's trigger slot by concrete type.
if (job.getJobTrigger() instanceof SimpleJobTrigger) {
SimpleJobTrigger jobTrigger = (SimpleJobTrigger) job.getJobTrigger();
schedule.setSimpleJobTrigger(jobTrigger);
} else if (job.getJobTrigger() instanceof ComplexJobTrigger) {
ComplexJobTrigger jobTrigger = (ComplexJobTrigger) job.getJobTrigger();
// force it to a cron trigger to get the auto-parsing of the complex trigger
CronJobTrigger cron = new CronJobTrigger();
cron.setCronString(jobTrigger.getCronString());
cron.setStartTime(jobTrigger.getStartTime());
cron.setEndTime(jobTrigger.getEndTime());
cron.setDuration(jobTrigger.getDuration());
cron.setUiPassParam(jobTrigger.getUiPassParam());
schedule.setCronJobTrigger(cron);
} else if (job.getJobTrigger() instanceof CronJobTrigger) {
CronJobTrigger jobTrigger = (CronJobTrigger) job.getJobTrigger();
schedule.setCronJobTrigger(jobTrigger);
} else {
// don't know what this is, can't export it
throw new IllegalArgumentException(Messages.getInstance().getString("PentahoPlatformExporter.UNSUPPORTED_JobTrigger", job.getJobTrigger().getClass().getName()));
}
return schedule;
}
Aggregations