use of org.springframework.scheduling.quartz.JobDetailBean in project OpenClinica by OpenClinica.
In the class ScheduledJobController, method cancelScheduledJob.
@RequestMapping("/cancelScheduledJob")
public String cancelScheduledJob(HttpServletRequest request, HttpServletResponse response, @RequestParam("theJobName") String theJobName, @RequestParam("theJobGroupName") String theJobGroupName, @RequestParam("theTriggerName") String triggerName, @RequestParam("theTriggerGroupName") String triggerGroupName, @RequestParam("redirection") String redirection, ModelMap model) throws SchedulerException {
scheduler.getJobDetail(theJobName, theJobGroupName);
logger.debug("About to pause the job-->" + theJobName + "Job Group Name -->" + theJobGroupName);
SimpleTrigger oldTrigger = (SimpleTrigger) scheduler.getTrigger(triggerName, triggerGroupName);
if (oldTrigger != null) {
Date startTime = new Date(oldTrigger.getStartTime().getTime() + oldTrigger.getRepeatInterval());
if (triggerGroupName.equals(ExtractController.TRIGGER_GROUP_NAME)) {
interruptQuartzJob(scheduler, theJobName, theJobGroupName);
}
scheduler.pauseJob(theJobName, theJobGroupName);
SimpleTrigger newTrigger = new SimpleTrigger(triggerName, triggerGroupName);
newTrigger.setJobName(theJobName);
newTrigger.setJobGroup(theJobGroupName);
newTrigger.setJobDataMap(oldTrigger.getJobDataMap());
newTrigger.setVolatility(false);
newTrigger.setRepeatCount(oldTrigger.getRepeatCount());
newTrigger.setRepeatInterval(oldTrigger.getRepeatInterval());
newTrigger.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_REMAINING_COUNT);
newTrigger.setStartTime(startTime);
newTrigger.setRepeatInterval(oldTrigger.getRepeatInterval());
// these are the jobs which are from extract data and are not required to be rescheduled.
scheduler.unscheduleJob(triggerName, triggerGroupName);
ArrayList<String> pageMessages = new ArrayList<String>();
if (triggerGroupName.equals(ExtractController.TRIGGER_GROUP_NAME)) {
scheduler.rescheduleJob(triggerName, triggerGroupName, newTrigger);
pageMessages.add("The Job " + theJobName + " has been cancelled");
} else if (triggerGroupName.equals(XsltTriggerService.TRIGGER_GROUP_NAME)) {
JobDetailBean jobDetailBean = new JobDetailBean();
jobDetailBean.setGroup(XsltTriggerService.TRIGGER_GROUP_NAME);
jobDetailBean.setName(newTrigger.getName());
jobDetailBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
jobDetailBean.setJobDataMap(newTrigger.getJobDataMap());
// need durability?
jobDetailBean.setDurability(true);
jobDetailBean.setVolatility(false);
scheduler.deleteJob(theJobName, theJobGroupName);
scheduler.scheduleJob(jobDetailBean, newTrigger);
pageMessages.add("The Job " + theJobName + " has been rescheduled");
}
request.setAttribute("pageMessages", pageMessages);
logger.debug("jobDetails>" + scheduler.getJobDetail(theJobName, theJobGroupName));
}
sdvUtil.forwardRequestFromController(request, response, "/pages/" + redirection);
return null;
}
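The core move in this handler is to copy an existing SimpleTrigger one repeat interval forward and swap it in for the old one. Below is a minimal sketch of that step in isolation, written against the same Quartz 1.x API used above; the TriggerRollover class and rollForward method are illustrative names, not OpenClinica code.

import java.util.Date;

import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleTrigger;

public class TriggerRollover {

    /** Replaces triggerName/groupName with a copy whose start time is one repeat interval later. */
    public static void rollForward(Scheduler scheduler, String triggerName, String groupName)
            throws SchedulerException {
        SimpleTrigger old = (SimpleTrigger) scheduler.getTrigger(triggerName, groupName);
        if (old == null) {
            return; // nothing scheduled under that name
        }
        SimpleTrigger replacement = new SimpleTrigger(triggerName, groupName);
        replacement.setJobName(old.getJobName());
        replacement.setJobGroup(old.getJobGroup());
        replacement.setJobDataMap(old.getJobDataMap());
        replacement.setRepeatCount(old.getRepeatCount());
        replacement.setRepeatInterval(old.getRepeatInterval());
        replacement.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_REMAINING_COUNT);
        replacement.setStartTime(new Date(old.getStartTime().getTime() + old.getRepeatInterval()));
        // removes the old trigger and installs the new one in a single call
        scheduler.rescheduleJob(triggerName, groupName, replacement);
    }
}

The controller above additionally pauses the job and, for XSLT jobs, deletes and recreates the JobDetailBean; the sketch only isolates the trigger copy-and-replace step.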
use of org.springframework.scheduling.quartz.JobDetailBean in project OpenClinica by OpenClinica.
In the class UpdateJobExportServlet, method processRequest.
@Override
protected void processRequest() throws Exception {
FormProcessor fp = new FormProcessor(request);
TriggerService triggerService = new TriggerService();
String action = fp.getString("action");
String triggerName = fp.getString("tname");
scheduler = getScheduler();
ExtractUtils extractUtils = new ExtractUtils();
Trigger updatingTrigger = scheduler.getTrigger(triggerName.trim(), XsltTriggerService.TRIGGER_GROUP_NAME);
if (StringUtil.isBlank(action)) {
setUpServlet(updatingTrigger);
forwardPage(Page.UPDATE_JOB_EXPORT);
} else if ("confirmall".equalsIgnoreCase(action)) {
// change and update trigger here
// validate first
// then update or send back
HashMap errors = validateForm(fp, request, scheduler.getTriggerNames(XsltTriggerService.TRIGGER_GROUP_NAME), updatingTrigger.getName());
if (!errors.isEmpty()) {
// send back
addPageMessage("Your modifications caused an error, please see the messages for more information.");
setUpServlet(updatingTrigger);
logger.error("errors : " + errors.toString());
forwardPage(Page.UPDATE_JOB_EXPORT);
} else {
// change trigger, update in database
StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
StudyBean study = (StudyBean) studyDAO.findByPK(sm.getUserBean().getActiveStudyId());
DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
CoreResources cr = new CoreResources();
UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
int datasetId = fp.getInt(DATASET_ID);
String period = fp.getString(PERIOD);
String email = fp.getString(EMAIL);
String jobName = fp.getString(JOB_NAME);
String jobDesc = fp.getString(JOB_DESC);
Date startDateTime = fp.getDateTime(DATE_START_JOB);
Integer exportFormatId = fp.getInt(FORMAT_ID);
ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(exportFormatId, "" + datasetId);
DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(new Integer(datasetId).intValue());
String[] files = epBean.getFileName();
String exportFileName;
int fileSize = files.length;
int cnt = 0;
dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
String[] exportFiles = epBean.getExportFileName();
String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
int i = 0;
String[] temp = new String[exportFiles.length];
// JN: The following logic is for comma-separated values, to avoid the second file being treated as an old file and deleted.
String datasetFilePath = SQLInitServlet.getField("filePath") + "datasets";
while (i < exportFiles.length) {
temp[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
i++;
}
epBean.setDoNotDelFiles(temp);
epBean.setExportFileName(temp);
XsltTriggerService xsltService = new XsltTriggerService();
String generalFileDir = SQLInitServlet.getField("filePath");
generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
exportFileName = epBean.getExportFileName()[cnt];
String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
String endFilePath = epBean.getFileLocation();
endFilePath = extractUtils.getEndFilePath(endFilePath, dsBean, sdfDir, datasetFilePath);
// exportFileName = resolveVars(exportFileName,dsBean,sdfDir);
if (epBean.getPostProcExportName() != null) {
String preProcExportPathName = extractUtils.resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, datasetFilePath);
epBean.setPostProcExportName(preProcExportPathName);
}
if (epBean.getPostProcLocation() != null) {
String prePocLoc = extractUtils.getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, datasetFilePath);
epBean.setPostProcLocation(prePocLoc);
}
extractUtils.setAllProps(epBean, dsBean, sdfDir, datasetFilePath);
SimpleTrigger trigger = null;
trigger = xsltService.generateXsltTrigger(xsltPath, // xml_file_path
generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean, LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", TRIGGER_GROUP_JOB);
// Updating the original trigger with user given inputs
trigger.setRepeatCount(64000);
trigger.setRepeatInterval(XsltTriggerService.getIntervalTime(period));
trigger.setDescription(jobDesc);
// set just the start date
trigger.setStartTime(startDateTime);
// + datasetId);
trigger.setName(jobName);
trigger.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT);
trigger.getJobDataMap().put(XsltTriggerService.EMAIL, email);
trigger.getJobDataMap().put(XsltTriggerService.PERIOD, period);
trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT, epBean.getFiledescription());
trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT_ID, exportFormatId);
trigger.getJobDataMap().put(XsltTriggerService.JOB_NAME, jobName);
JobDetailBean jobDetailBean = new JobDetailBean();
jobDetailBean.setGroup(xsltService.TRIGGER_GROUP_NAME);
jobDetailBean.setName(trigger.getName());
jobDetailBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
jobDetailBean.setJobDataMap(trigger.getJobDataMap());
// need durability?
jobDetailBean.setDurability(true);
jobDetailBean.setVolatility(false);
try {
// scheduler.unscheduleJob(triggerName, "DEFAULT");
scheduler.deleteJob(triggerName, XsltTriggerService.TRIGGER_GROUP_NAME);
Date dataStart = scheduler.scheduleJob(jobDetailBean, trigger);
// Date dateStart = scheduler.rescheduleJob(triggerName,
// "DEFAULT", trigger);
// scheduler.rescheduleJob(triggerName, groupName,
// newTrigger)
addPageMessage("Your job has been successfully modified.");
forwardPage(Page.VIEW_JOB_SERVLET);
} catch (SchedulerException se) {
se.printStackTrace();
// set a message here with the exception message
setUpServlet(trigger);
addPageMessage("There was an unspecified error with your creation, please contact an administrator.");
forwardPage(Page.UPDATE_JOB_EXPORT);
}
}
}
}
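Stripped of the form handling, the scheduling tail of this servlet follows one pattern: rebuild the trigger from user input, wrap it in a fresh JobDetailBean, delete the old job, and schedule the new pair. Here is a minimal sketch of that pattern, assuming the same Quartz 1.x and Spring APIs; the ExportJobUpdater helper is an illustrative name, not OpenClinica code, while XsltStatefulJob is the job class used above.

import java.util.Date;

import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleTrigger;
import org.springframework.scheduling.quartz.JobDetailBean;

public class ExportJobUpdater {

    /** Deletes the old job (and its triggers) and schedules the rebuilt trigger under a fresh JobDetailBean. */
    public static Date replaceExportJob(Scheduler scheduler, String triggerGroup,
                                        String oldJobName, SimpleTrigger rebuiltTrigger) throws SchedulerException {
        JobDetailBean jobDetail = new JobDetailBean();
        jobDetail.setGroup(triggerGroup);
        jobDetail.setName(rebuiltTrigger.getName());
        jobDetail.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
        jobDetail.setJobDataMap(rebuiltTrigger.getJobDataMap());
        jobDetail.setDurability(true);   // keep the JobDetail even if it temporarily has no trigger
        jobDetail.setVolatility(false);  // persist across scheduler restarts (Quartz 1.x API)

        // deleteJob removes the old JobDetail and all of its triggers in one call
        scheduler.deleteJob(oldJobName, triggerGroup);
        return scheduler.scheduleJob(jobDetail, rebuiltTrigger);
    }
}

deleteJob followed by scheduleJob is used here rather than rescheduleJob because the JobDetail itself (its data map and job class), not just the trigger, is being replaced.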
use of org.springframework.scheduling.quartz.JobDetailBean in project OpenClinica by OpenClinica.
In the class ExportDatasetServlet, method processRequest.
@Override
public void processRequest() throws Exception {
DatasetDAO dsdao = new DatasetDAO(sm.getDataSource());
ArchivedDatasetFileDAO asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
FormProcessor fp = new FormProcessor(request);
GenerateExtractFileService generateFileService = new GenerateExtractFileService(sm.getDataSource(), (CoreResources) SpringServletAccess.getApplicationContext(context).getBean("coreResources"), (RuleSetRuleDao) SpringServletAccess.getApplicationContext(context).getBean("ruleSetRuleDao"));
String action = fp.getString("action");
int datasetId = fp.getInt("datasetId");
int adfId = fp.getInt("adfId");
if (datasetId == 0) {
try {
DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
datasetId = dsb.getId();
logger.info("dataset id was zero, trying session: " + datasetId);
} catch (NullPointerException e) {
e.printStackTrace();
logger.info("tripped over null pointer exception");
}
}
DatasetBean db = (DatasetBean) dsdao.findByPK(datasetId);
StudyDAO sdao = new StudyDAO(sm.getDataSource());
StudyBean study = (StudyBean) sdao.findByPK(db.getStudyId());
checkRoleByUserAndStudy(ub, study.getParentStudyId(), study.getId());
// Checks if the study is current study or child of current study
if (study.getId() != currentStudy.getId() && study.getParentStudyId() != currentStudy.getId()) {
addPageMessage(respage.getString("no_have_correct_privilege_current_study") + " " + respage.getString("change_active_study_or_contact"));
forwardPage(Page.MENU_SERVLET);
return;
}
/**
 * @vbc 08/06/2008 NEW EXTRACT DATA IMPLEMENTATION
 * Get study_id and parentstudy_id:
 *   int currentstudyid = currentStudy.getId();
 *   int parentstudy = currentStudy.getParentStudyId();
 *   if (parentstudy > 0) { // is OK } else { parentstudy = currentstudyid; // same }
 */
int currentstudyid = currentStudy.getId();
// YW 11-09-2008 << modified logic here.
int parentstudy = currentstudyid;
// YW 11-09-2008 >>
StudyBean parentStudy = new StudyBean();
if (currentStudy.getParentStudyId() > 0) {
// StudyDAO sdao = new StudyDAO(sm.getDataSource());
parentStudy = (StudyBean) sdao.findByPK(currentStudy.getParentStudyId());
}
ExtractBean eb = generateFileService.generateExtractBean(db, currentStudy, parentStudy);
if (StringUtil.isBlank(action)) {
loadList(db, asdfdao, datasetId, fp, eb);
forwardPage(Page.EXPORT_DATASETS);
} else if ("delete".equalsIgnoreCase(action) && adfId > 0) {
boolean success = false;
ArchivedDatasetFileBean adfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(adfId);
File file = new File(adfBean.getFileReference());
if (!file.canWrite()) {
addPageMessage(respage.getString("write_protected"));
} else {
success = file.delete();
if (success) {
asdfdao.deleteArchiveDataset(adfBean);
addPageMessage(respage.getString("file_removed"));
} else {
addPageMessage(respage.getString("error_removing_file"));
}
}
loadList(db, asdfdao, datasetId, fp, eb);
forwardPage(Page.EXPORT_DATASETS);
} else {
logger.info("**** found action ****: " + action);
String generateReport = "";
// generate file, and show screen export
// String generalFileDir = DATASET_DIR + db.getId() +
// File.separator;
// change this up, so that we don't overwrite anything
String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
String generalFileDir = DATASET_DIR + db.getId() + File.separator + sdfDir.format(new java.util.Date());
String fileName = "";
db.setName(db.getName().replaceAll(" ", "_"));
Page finalTarget = Page.GENERATE_DATASET;
finalTarget = Page.EXPORT_DATA_CUSTOM;
// now display report according to format specified
// TODO revise final target to set to fileReference????
long sysTimeBegin = System.currentTimeMillis();
int fId = 0;
if ("sas".equalsIgnoreCase(action)) {
// generateReport =
// dsdao.generateDataset(db,
// ExtractBean.SAS_FORMAT,
// currentStudy,
// parentStudy);
long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
String SASFileName = db.getName() + "_sas.sas";
// logger.info("found data set: "+generateReport);
generateFileService.createFile(SASFileName, generalFileDir, generateReport, db, sysTimeEnd, ExportFormatBean.TXTFILE, true, ub);
logger.info("created sas file");
request.setAttribute("generate", generalFileDir + SASFileName);
finalTarget.setFileName(generalFileDir + SASFileName);
fileName = SASFileName;
// won't work since page creator is private
} else if ("odm".equalsIgnoreCase(action)) {
String odmVersion = fp.getString("odmVersion");
String ODMXMLFileName = "";
// DRY
// HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "");
HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "", eb, currentStudy.getId(), currentStudy.getParentStudyId(), "99", true, true, true, null, ub);
for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
Object key = entry.getKey();
Object value = entry.getValue();
ODMXMLFileName = (String) key;
Integer fileID = (Integer) value;
fId = fileID.intValue();
}
fileName = ODMXMLFileName;
request.setAttribute("generate", generalFileDir + ODMXMLFileName);
logger.debug("+++ set the following: " + generalFileDir + ODMXMLFileName);
// send a link with the SQL file? put the generated SQL file with the dataset?
if (fp.getString("xalan") != null) {
XalanTriggerService xts = new XalanTriggerService();
String propertiesPath = SQLInitServlet.getField("filePath");
// the trick there, we need to open up the zipped file and get at the XML
openZipFile(generalFileDir + ODMXMLFileName + ".zip");
// need to find out how to copy this xml file from /bin to the generalFileDir
SimpleTrigger simpleTrigger = xts.generateXalanTrigger(propertiesPath + File.separator + "ODMReportStylesheet.xsl", ODMXMLFileName, generalFileDir + "output.sql", db.getId());
scheduler = getScheduler();
JobDetailBean jobDetailBean = new JobDetailBean();
jobDetailBean.setGroup(xts.TRIGGER_GROUP_NAME);
jobDetailBean.setName(simpleTrigger.getName());
jobDetailBean.setJobClass(org.akaza.openclinica.web.job.XalanStatefulJob.class);
jobDetailBean.setJobDataMap(simpleTrigger.getJobDataMap());
// need durability?
jobDetailBean.setDurability(true);
jobDetailBean.setVolatility(false);
try {
Date dateStart = scheduler.scheduleJob(jobDetailBean, simpleTrigger);
logger.info("== found job date: " + dateStart.toString());
} catch (SchedulerException se) {
se.printStackTrace();
}
}
} else if ("txt".equalsIgnoreCase(action)) {
// generateReport =
// dsdao.generateDataset(db,
// ExtractBean.TXT_FORMAT,
// currentStudy,
// parentStudy);
// eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
String TXTFileName = "";
HashMap answerMap = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, db, currentstudyid, parentstudy, "", ub);
// and of course DRY
for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
Object key = entry.getKey();
Object value = entry.getValue();
TXTFileName = (String) key;
Integer fileID = (Integer) value;
fId = fileID.intValue();
}
fileName = TXTFileName;
request.setAttribute("generate", generalFileDir + TXTFileName);
// finalTarget.setFileName(generalFileDir+TXTFileName);
logger.debug("+++ set the following: " + generalFileDir + TXTFileName);
} else if ("html".equalsIgnoreCase(action)) {
// html based dataset browser
TabReportBean answer = new TabReportBean();
eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
eb.getMetadata();
eb.computeReport(answer);
request.setAttribute("dataset", db);
request.setAttribute("extractBean", eb);
finalTarget = Page.GENERATE_DATASET_HTML;
} else if ("spss".equalsIgnoreCase(action)) {
SPSSReportBean answer = new SPSSReportBean();
// removed three lines here and put them in generate file
// service, createSPSSFile method. tbh 01/2009
eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
eb.getMetadata();
eb.computeReport(answer);
// System.out.println("*** isShowCRFversion:
// "+db.isShowCRFversion());
// TODO in the spirit of DRY, if this works we need to remove
// lines 443-776 in this servlet, tbh 01/2009
String DDLFileName = "";
HashMap answerMap = generateFileService.createSPSSFile(db, eb, currentStudy, parentStudy, sysTimeBegin, generalFileDir, answer, "", ub);
// hmm, DRY?
for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
Object key = entry.getKey();
Object value = entry.getValue();
DDLFileName = (String) key;
Integer fileID = (Integer) value;
fId = fileID.intValue();
}
request.setAttribute("generate", generalFileDir + DDLFileName);
logger.debug("+++ set the following: " + generalFileDir + DDLFileName);
} else if ("csv".equalsIgnoreCase(action)) {
CommaReportBean answer = new CommaReportBean();
eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
eb.getMetadata();
eb.computeReport(answer);
long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
// logger.info("found data set: "+generateReport);
String CSVFileName = db.getName() + "_comma.txt";
fId = generateFileService.createFile(CSVFileName, generalFileDir, answer.toString(), db, sysTimeEnd, ExportFormatBean.CSVFILE, true, ub);
fileName = CSVFileName;
logger.info("just created csv file");
request.setAttribute("generate", generalFileDir + CSVFileName);
// finalTarget.setFileName(generalFileDir+CSVFileName);
} else if ("excel".equalsIgnoreCase(action)) {
// HSSFWorkbook excelReport = dsdao.generateExcelDataset(db,
// ExtractBean.XLS_FORMAT,
// currentStudy,
// parentStudy);
long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
// TODO this will change and point to a created excel
// spreadsheet, tbh
String excelFileName = db.getName() + "_excel.xls";
// fId = this.createFile(excelFileName,
// generalFileDir,
// excelReport,
// db, sysTimeEnd,
// ExportFormatBean.EXCELFILE);
// logger.info("just created csv file, for excel output");
// response.setHeader("Content-disposition","attachment;
// filename="+CSVFileName);
// logger.info("csv file name: "+CSVFileName);
finalTarget = Page.GENERATE_EXCEL_DATASET;
// response.setContentType("application/vnd.ms-excel");
response.setHeader("Content-Disposition", "attachment; filename=" + db.getName() + "_excel.xls");
request.setAttribute("generate", generalFileDir + excelFileName);
logger.info("set 'generate' to :" + generalFileDir + excelFileName);
fileName = excelFileName;
// excelReport.write(stream);
// stream.flush();
// stream.close();
// finalTarget.setFileName(WEB_DIR+db.getId()+"/"+excelFileName);
}
// <%@page contentType="application/vnd.ms-excel"%>
if (!finalTarget.equals(Page.GENERATE_EXCEL_DATASET) && !finalTarget.equals(Page.GENERATE_DATASET_HTML)) {
// to catch all the others and try to set a new path for file
// capture
// tbh, 4-18-05
// request.setAttribute("generate",finalTarget.getFileName());
// TODO changing path to show refresh page, then window with
// link to download file, tbh 06-08-05
// finalTarget.setFileName(
// "/WEB-INF/jsp/extract/generatedFileDataset.jsp");
finalTarget.setFileName("" + "/WEB-INF/jsp/extract/generateMetadataCore.jsp");
// also set up table here???
asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
ArchivedDatasetFileBean asdfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(fId);
// *** do we need this below? tbh
ArrayList newFileList = new ArrayList();
newFileList.add(asdfBean);
// request.setAttribute("filelist",newFileList);
ArrayList filterRows = ArchivedDatasetFileRow.generateRowsFromBeans(newFileList);
EntityBeanTable table = fp.getEntityBeanTable();
// sort by date
table.setSortingIfNotExplicitlySet(3, false);
String[] columns = { resword.getString("file_name"), resword.getString("run_time"), resword.getString("file_size"), resword.getString("created_date"), resword.getString("created_by") };
table.setColumns(new ArrayList(Arrays.asList(columns)));
table.hideColumnLink(0);
table.hideColumnLink(1);
table.hideColumnLink(2);
table.hideColumnLink(3);
table.hideColumnLink(4);
// table.setQuery("ExportDataset?datasetId=" +db.getId(), new
// HashMap());
// trying to continue...
// session.setAttribute("newDataset",db);
request.setAttribute("dataset", db);
request.setAttribute("file", asdfBean);
table.setRows(filterRows);
table.computeDisplay();
request.setAttribute("table", table);
// *** do we need this above? tbh
}
logger.info("set first part of 'generate' to :" + generalFileDir);
logger.info("found file name: " + finalTarget.getFileName());
// String del = CoreResources.getField("dataset_file_delete");
// if (del.equalsIgnoreCase("true") || del.equals("")) {
// File deleteFile = new File(generalFileDir + fileName);
// deleteFile.delete();
// }
forwardPage(finalTarget);
}
}
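The "xalan" branch above is a one-shot use of JobDetailBean: wrap a job class in a JobDetailBean, pair it with a fire-once SimpleTrigger, and hand both to the scheduler. The following is a minimal generic sketch of that pattern, assuming the Quartz 1.x API; OneShotTransformScheduler and its parameters are illustrative, not OpenClinica code.

import java.util.Date;

import org.quartz.JobDataMap;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleTrigger;
import org.springframework.scheduling.quartz.JobDetailBean;

public class OneShotTransformScheduler {

    /** Schedules jobClass to run exactly once, immediately, with the given data map. */
    public static Date scheduleOnce(Scheduler scheduler, String name, String group,
                                    Class jobClass, JobDataMap jobData) throws SchedulerException {
        // a SimpleTrigger with the default repeat count of 0 fires a single time
        SimpleTrigger trigger = new SimpleTrigger(name, group, new Date());

        JobDetailBean jobDetail = new JobDetailBean();
        jobDetail.setName(name);
        jobDetail.setGroup(group);
        jobDetail.setJobClass(jobClass);
        jobDetail.setJobDataMap(jobData);
        jobDetail.setDurability(true);
        jobDetail.setVolatility(false);

        return scheduler.scheduleJob(jobDetail, trigger); // returns the first fire time
    }
}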
use of org.springframework.scheduling.quartz.JobDetailBean in project head by mifos.
In the class MifosScheduler, method schedule.
@Deprecated
public void schedule(final String jobName, Date initialTime, long delay, JobRegistry jobRegistry, final JobRepository jobRepository, Map<String, Object> jobData, ResourcelessTransactionManager transactionManager) throws TaskSystemException {
try {
final TaskletStep step = new TaskletStep();
step.setName(jobName);
Tasklet tasklet = (Tasklet) Class.forName(BATCH_JOB_CLASS_PATH_PREFIX + getHelperName(jobName)).newInstance();
step.setTasklet(tasklet);
step.setJobRepository(jobRepository);
step.setTransactionManager(transactionManager);
step.afterPropertiesSet();
jobRegistry.register(new JobFactory() {
@Override
public Job createJob() {
SimpleJob job = new SimpleJob(jobName + "Job");
job.setJobRepository(jobRepository);
job.setRestartable(true);
job.registerJobExecutionListener(new BatchJobListener());
job.addStep(step);
return job;
}
@Override
public String getJobName() {
return jobName + "Job";
}
});
} catch (Exception e) {
throw new TaskSystemException(e);
}
JobDetailBean jobDetailBean = new JobDetailBean();
jobDetailBean.setJobDataAsMap(jobData);
try {
jobDetailBean.setJobClass(Class.forName(BATCH_JOB_CLASS_PATH_PREFIX + jobName));
} catch (ClassNotFoundException cnfe) {
throw new TaskSystemException(cnfe);
}
jobDetailBean.setName(jobName + "Job");
jobDetailBean.setGroup(Scheduler.DEFAULT_GROUP);
jobDetailBean.afterPropertiesSet();
SimpleTrigger trigger = new SimpleTrigger();
trigger.setName(jobName + "Job");
trigger.setGroup(Scheduler.DEFAULT_GROUP);
trigger.setStartTime(initialTime);
trigger.setRepeatInterval(delay);
trigger.setRepeatCount(SimpleTrigger.REPEAT_INDEFINITELY);
try {
scheduler.scheduleJob(jobDetailBean, trigger);
} catch (SchedulerException se) {
throw new TaskSystemException(se);
}
}
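The tail of this method is a reusable pattern: a JobDetailBean configured from a job class and a data map, paired with an indefinitely repeating SimpleTrigger in the default group. A minimal sketch follows, assuming the same Quartz 1.x and Spring APIs; the QuartzBatchScheduling helper is an illustrative name, not part of Mifos.

import java.util.Date;
import java.util.Map;

import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleTrigger;
import org.springframework.scheduling.quartz.JobDetailBean;

public class QuartzBatchScheduling {

    public static void scheduleRepeating(Scheduler scheduler, String jobName, Class jobClass,
                                         Map jobData, Date firstRun, long intervalMillis) throws SchedulerException {
        JobDetailBean jobDetail = new JobDetailBean();
        jobDetail.setName(jobName);
        jobDetail.setGroup(Scheduler.DEFAULT_GROUP);
        jobDetail.setJobClass(jobClass);
        jobDetail.setJobDataAsMap(jobData);
        jobDetail.afterPropertiesSet(); // applies the bean's defaulting logic, mirroring the Mifos code above

        SimpleTrigger trigger = new SimpleTrigger(jobName, Scheduler.DEFAULT_GROUP);
        trigger.setStartTime(firstRun);
        trigger.setRepeatInterval(intervalMillis);
        trigger.setRepeatCount(SimpleTrigger.REPEAT_INDEFINITELY);

        scheduler.scheduleJob(jobDetail, trigger);
    }
}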
use of org.springframework.scheduling.quartz.JobDetailBean in project head by mifos.
In the class MifosScheduler, method scheduleLoanArrearsAndPortfolioAtRisk.
@Deprecated
public void scheduleLoanArrearsAndPortfolioAtRisk(Date initialTime, long delay, JobRegistry jobRegistry, final JobRepository jobRepository, Map<String, Object> jobData, ResourcelessTransactionManager transactionManager) throws TaskSystemException {
final String jobName = "LoanArrearsAndPortfolioAtRiskTask";
try {
final TaskletStep step1 = new TaskletStep();
step1.setName("LoanArrearsAndPortfolioAtRiskTask-step-1");
step1.setTasklet((Tasklet) Class.forName(BATCH_JOB_CLASS_PATH_PREFIX + getHelperName("LoanArrearsTask")).newInstance());
step1.setJobRepository(jobRepository);
step1.setTransactionManager(transactionManager);
step1.afterPropertiesSet();
final TaskletStep step2 = new TaskletStep();
step2.setName("LoanArrearsAndPortfolioAtRiskTask-step-2");
step2.setTasklet((Tasklet) Class.forName(BATCH_JOB_CLASS_PATH_PREFIX + getHelperName("PortfolioAtRiskTask")).newInstance());
step2.setJobRepository(jobRepository);
step2.setTransactionManager(transactionManager);
step2.afterPropertiesSet();
jobRegistry.register(new JobFactory() {
@Override
public Job createJob() {
SimpleJob job = new SimpleJob(jobName + "Job");
job.setJobRepository(jobRepository);
job.setRestartable(true);
job.registerJobExecutionListener(new BatchJobListener());
job.addStep(step1);
job.addStep(step2);
return job;
}
@Override
public String getJobName() {
return jobName + "Job";
}
});
} catch (Exception e) {
throw new TaskSystemException(e);
}
JobDetailBean jobDetailBean = new JobDetailBean();
jobDetailBean.setJobDataAsMap(jobData);
try {
jobDetailBean.setJobClass(Class.forName(BATCH_JOB_CLASS_PATH_PREFIX + "PortfolioAtRiskTask"));
} catch (ClassNotFoundException cnfe) {
throw new TaskSystemException(cnfe);
}
jobDetailBean.setName(jobName + "Job");
jobDetailBean.setGroup(Scheduler.DEFAULT_GROUP);
jobDetailBean.afterPropertiesSet();
SimpleTrigger trigger = new SimpleTrigger();
trigger.setName(jobName + "Job");
trigger.setGroup(Scheduler.DEFAULT_GROUP);
trigger.setStartTime(initialTime);
trigger.setRepeatInterval(delay);
trigger.setRepeatCount(SimpleTrigger.REPEAT_INDEFINITELY);
try {
scheduler.scheduleJob(jobDetailBean, trigger);
} catch (SchedulerException se) {
throw new TaskSystemException(se);
}
}
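For context, here is a minimal sketch (a hypothetical class, not Mifos code) of how a Quartz job class named in setJobClass(...) receives the values supplied via setJobDataAsMap(...) above: at execution time the merged JobDataMap is available from the JobExecutionContext.

import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class ExampleBatchJob implements Job {

    public void execute(JobExecutionContext context) throws JobExecutionException {
        // merged view of the JobDetail's data map (from setJobDataAsMap) and the trigger's data map
        JobDataMap data = context.getMergedJobDataMap();
        Object setting = data.get("someKey"); // "someKey" is an illustrative key, not a real Mifos entry
        // ... run the batch work using the supplied settings ...
    }
}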