Use of org.quartz.JobDataMap in the OpenClinica project (by OpenClinica):
class ViewImportJobServlet, method processRequest.
@Override
protected void processRequest() throws Exception {
    FormProcessor fp = new FormProcessor(request);
    // First we must get a reference to a scheduler
    scheduler = getScheduler();
    // Pull all the triggers registered under the IMPORT_TRIGGER group.
    Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.groupEquals(IMPORT_TRIGGER));
    // Wrap each trigger in a TriggerBean for display on the page.
    ArrayList<TriggerBean> triggerBeans = new ArrayList<TriggerBean>();
    for (TriggerKey triggerKey : triggerKeys) {
        String triggerName = triggerKey.getName();
        Trigger trigger = scheduler.getTrigger(triggerKey);
        logger.debug("found trigger, full name: " + triggerName);
        // Fire times may legitimately be null (never fired / no end date),
        // so log them null-safely instead of catching NullPointerException.
        logger.debug("prev fire time " + trigger.getPreviousFireTime());
        logger.debug("next fire time " + trigger.getNextFireTime());
        logger.debug("final fire time: " + trigger.getFinalFireTime());
        TriggerBean triggerBean = new TriggerBean();
        triggerBean.setFullName(triggerName);
        triggerBean.setPreviousDate(trigger.getPreviousFireTime());
        triggerBean.setNextDate(trigger.getNextFireTime());
        if (trigger.getDescription() != null) {
            triggerBean.setDescription(trigger.getDescription());
        }
        // Pull display data (the study name) out of the job data map, if any
        // was stored when the trigger was created.
        JobDataMap dataMap = trigger.getJobDataMap();
        if (dataMap.size() > 0) {
            triggerBean.setStudyName(dataMap.getString(ExampleSpringJob.STUDY_NAME));
        }
        logger.debug("Trigger Priority: " + triggerName + " " + trigger.getPriority());
        // A paused trigger is shown as inactive in the UI; triggerKey already
        // identifies this trigger, no need to build a new key.
        boolean active = scheduler.getTriggerState(triggerKey) != Trigger.TriggerState.PAUSED;
        triggerBean.setActive(active);
        logger.debug("setting active to " + active + " for trigger: " + triggerName);
        triggerBeans.add(triggerBean);
    }
    // Set up the table here and get ready to send to the web page.
    ArrayList allRows = TriggerRow.generateRowsFromBeans(triggerBeans);
    EntityBeanTable table = fp.getEntityBeanTable();
    String[] columns = { resword.getString("name"), resword.getString("previous_fire_time"), resword.getString("next_fire_time"), resword.getString("description"), resword.getString("study"), resword.getString("actions") };
    table.setColumns(new ArrayList(Arrays.asList(columns)));
    // Description and actions columns are not sortable links.
    table.hideColumnLink(3);
    table.hideColumnLink(5);
    table.setQuery("ViewImportJob", new HashMap());
    table.setSortingColumnInd(0);
    table.setRows(allRows);
    table.computeDisplay();
    request.setAttribute("table", table);
    forwardPage(Page.VIEW_IMPORT_JOB);
}
Use of org.quartz.JobDataMap in the OpenClinica project (by OpenClinica):
class ExampleSpringJob, method executeInternal.
/**
 * Quartz entry point for the scheduled extract-data job.
 * <p>
 * Reads the job data map written by the job-creation servlet (dataset id,
 * export format flags, user/study ids, contact e-mail, locale), generates the
 * requested extract files (tab-delimited, ODM 1.2/1.3 with or without the
 * OpenClinica extension, SPSS), e-mails the requester links to the generated
 * files, and records an audit row for success or failure.
 * <p>
 * NOTE(review): this method reads and assigns several instance fields
 * (coreResources, ruleSetRuleDao, dataSource, mailSender, userBean,
 * generateFileService) that are wired from the Spring application context at
 * execution time -- assumes the scheduler context contains
 * "applicationContext"; confirm against the scheduler configuration.
 *
 * @param context Quartz execution context supplying the merged job data map
 *            and the trigger that fired this execution.
 * @throws JobExecutionException declared by the Quartz contract; in practice
 *             all failures are caught and logged below.
 */
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    // need to generate a Locale so that user beans and other things will
    // generate normally
    Locale locale = new Locale("en-US");
    ResourceBundleProvider.updateLocale(locale);
    ResourceBundle pageMessages = ResourceBundleProvider.getPageMessagesBundle();
    // logger.debug("--");
    // logger.debug("-- executing a job " + message + " at " + new
    // java.util.Date().toString());
    JobDataMap dataMap = context.getMergedJobDataMap();
    SimpleTrigger trigger = (SimpleTrigger) context.getTrigger();
    try {
        // Wire collaborators from the Spring context stored in the scheduler.
        ApplicationContext appContext = (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
        String studySubjectNumber = ((CoreResources) appContext.getBean("coreResources")).getField("extract.number");
        coreResources = (CoreResources) appContext.getBean("coreResources");
        ruleSetRuleDao = (RuleSetRuleDao) appContext.getBean("ruleSetRuleDao");
        dataSource = (DataSource) appContext.getBean("dataSource");
        mailSender = (OpenClinicaMailSender) appContext.getBean("openClinicaMailSender");
        AuditEventDAO auditEventDAO = new AuditEventDAO(dataSource);
        // Scheduler scheduler = context.getScheduler();
        // JobDetail detail = context.getJobDetail();
        // jobDetailBean = (JobDetailBean) detail;
        /*
         * data map here should coincide with the job data map found in
         * CreateJobExportServlet, with the following code: jobDataMap = new
         * JobDataMap(); jobDataMap.put(DATASET_ID, datasetId);
         * jobDataMap.put(PERIOD, period); jobDataMap.put(EMAIL, email);
         * jobDataMap.put(TAB, tab); jobDataMap.put(CDISC, cdisc);
         * jobDataMap.put(SPSS, spss);
         */
        String alertEmail = dataMap.getString(EMAIL);
        String localeStr = dataMap.getString(LOCALE);
        // Switch to the locale the job was created under, if one was stored.
        if (localeStr != null) {
            locale = new Locale(localeStr);
            ResourceBundleProvider.updateLocale(locale);
            pageMessages = ResourceBundleProvider.getPageMessagesBundle();
        }
        int dsId = dataMap.getInt(DATASET_ID);
        // Format flags are stored as strings; "1" means "generate this
        // format". Missing CDISC flags default to "0" (off) below.
        String tab = dataMap.getString(TAB);
        String cdisc = dataMap.getString(CDISC);
        String cdisc12 = dataMap.getString(CDISC12);
        if (cdisc12 == null) {
            cdisc12 = "0";
        }
        String cdisc13 = dataMap.getString(CDISC13);
        if (cdisc13 == null) {
            cdisc13 = "0";
        }
        String cdisc13oc = dataMap.getString(CDISC13OC);
        if (cdisc13oc == null) {
            cdisc13oc = "0";
        }
        String spss = dataMap.getString(SPSS);
        int userId = dataMap.getInt(USER_ID);
        int studyId = dataMap.getInt(STUDY_ID);
        // String datasetId = dataMap.getString(DATASET_ID);
        // int dsId = new Integer(datasetId).intValue();
        // String userAcctId = dataMap.getString(USER_ID);
        // int userId = new Integer(userAcctId).intValue();
        // why the flip-flop? if one property is set to 'true' we can
        // see jobs in another screen but all properties have to be
        // strings
        logger.debug("-- found the job: " + dsId + " dataset id");
        // for (Iterator it = dataMap.entrySet().iterator(); it.hasNext();)
        // {
        // java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
        // Object key = entry.getKey();
        // Object value = entry.getValue();
        // // logger.debug("-- found datamap property: " + key.toString() +
        // // " : " + value.toString());
        // }
        // Maps generated file name -> file id (used to build download links).
        HashMap fileName = new HashMap<String, Integer>();
        if (dsId > 0) {
            // trying to not throw an error if there's no dataset id
            DatasetDAO dsdao = new DatasetDAO(dataSource);
            DatasetBean datasetBean = (DatasetBean) dsdao.findByPK(dsId);
            StudyDAO studyDao = new StudyDAO(dataSource);
            UserAccountDAO userAccountDAO = new UserAccountDAO(dataSource);
            // hmm, three lines in the if block DRY?
            String generalFileDir = "";
            String generalFileDirCopy = "";
            String exportFilePath = SQLInitServlet.getField("exportFilePath");
            // Output goes into a per-run, timestamped subdirectory of the
            // dataset directory.
            String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            generalFileDir = DATASET_DIR + datasetBean.getId() + File.separator + sdfDir.format(new java.util.Date());
            // Optional second copy of each extract in a configured export dir.
            if (!"".equals(exportFilePath)) {
                generalFileDirCopy = SQLInitServlet.getField("filePath") + exportFilePath + File.separator;
            }
            // logger.debug("-- created the following dir: " +
            // generalFileDir);
            long sysTimeBegin = System.currentTimeMillis();
            // set up the user bean here, tbh
            // logger.debug("-- gen tab file 00");
            userBean = (UserAccountBean) userAccountDAO.findByPK(userId);
            // needs to also be captured by the servlet, tbh
            // logger.debug("-- gen tab file 00");
            generateFileService = new GenerateExtractFileService(dataSource, coreResources, ruleSetRuleDao);
            // logger.debug("-- gen tab file 00");
            // tbh #5796 - covers a bug when the user changes studies, 10/2010
            StudyBean activeStudy = (StudyBean) studyDao.findByPK(studyId);
            StudyBean parentStudy = new StudyBean();
            logger.debug("active study: " + studyId + " parent study: " + activeStudy.getParentStudyId());
            if (activeStudy.getParentStudyId() > 0) {
                // StudyDAO sdao = new StudyDAO(sm.getDataSource());
                parentStudy = (StudyBean) studyDao.findByPK(activeStudy.getParentStudyId());
            } else {
                parentStudy = activeStudy;
                // covers a bug in tab file creation, tbh 01/2009
            }
            logger.debug("-- found extract bean ");
            ExtractBean eb = generateFileService.generateExtractBean(datasetBean, activeStudy, parentStudy);
            // message: HTML e-mail body; auditMessage: text for the audit row.
            MessageFormat mf = new MessageFormat("");
            StringBuffer message = new StringBuffer();
            StringBuffer auditMessage = new StringBuffer();
            // use resource bundle page messages to generate the email, tbh
            // 02/2009
            // message.append(pageMessages.getString("html_email_header_1")
            // + " " + alertEmail +
            // pageMessages.getString("html_email_header_2") + "<br/>");
            message.append("<p>" + pageMessages.getString("email_header_1") + " " + EmailEngine.getAdminEmail() + " " + pageMessages.getString("email_header_2") + " Job Execution " + pageMessages.getString("email_header_3") + "</p>");
            message.append("<P>Dataset: " + datasetBean.getName() + "</P>");
            message.append("<P>Study: " + activeStudy.getName() + "</P>");
            message.append("<p>" + pageMessages.getString("html_email_body_1") + datasetBean.getName() + pageMessages.getString("html_email_body_2") + SQLInitServlet.getField("sysURL") + pageMessages.getString("html_email_body_3") + "</p>");
            // logger.debug("-- gen tab file 00");
            // One branch per requested export format; each generates its
            // file(s), appends a download link to the e-mail body, and adds a
            // line to the audit message.
            if ("1".equals(tab)) {
                logger.debug("-- gen tab file 01");
                fileName = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, datasetBean, activeStudy.getId(), parentStudy.getId(), generalFileDirCopy, userBean);
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // MessageFormat mf = new MessageFormat("");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_tab_delimited"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // auditMessage.append(
                // "You can access your tab-delimited file <a href='AccessFile?fileId="
                // + getFileIdInt(fileName) + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_tab_delimited") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            // ODM 1.2 with OpenClinica extensions.
            if ("1".equals(cdisc)) {
                String odmVersion = "oc1.2";
                fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
                logger.debug("-- gen odm file");
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // MessageFormat mf = new MessageFormat("");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_odm_12"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // auditMessage.append(
                // "You can access your ODM 1.2 w/OpenClinica Extension XML file <a href='AccessFile?fileId="
                // + getFileIdInt(fileName)
                // + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_odm_12") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            // Plain ODM 1.2.
            if ("1".equals(cdisc12)) {
                String odmVersion = "1.2";
                fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
                logger.debug("-- gen odm file 1.2 default");
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_odm_12_xml"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // // auditMessage.append(
                // "You can access your ODM 1.2 XML file <a href='AccessFile?fileId="
                // + getFileIdInt(fileName) + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_odm_12_xml") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            // Plain ODM 1.3.
            if ("1".equals(cdisc13)) {
                String odmVersion = "1.3";
                fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
                logger.debug("-- gen odm file 1.3");
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // MessageFormat mf = new MessageFormat("");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_odm_13"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // auditMessage.append(
                // "You can access your ODM 1.3 XML file <a href='AccessFile?fileId="
                // + getFileIdInt(fileName) + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_odm_13") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            // ODM 1.3 with OpenClinica extensions.
            if ("1".equals(cdisc13oc)) {
                String odmVersion = "oc1.3";
                fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
                logger.debug("-- gen odm file 1.3 oc");
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // MessageFormat mf = new MessageFormat("");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_odm_13_xml"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // auditMessage.append(
                // "You can access your ODM 1.3 w/OpenClinica Extension XML file <a href='AccessFile?fileId="
                // + getFileIdInt(fileName)
                // + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_odm_13_xml") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            // SPSS data + syntax files.
            if ("1".equals(spss)) {
                SPSSReportBean answer = new SPSSReportBean();
                fileName = generateFileService.createSPSSFile(datasetBean, eb, activeStudy, parentStudy, sysTimeBegin, generalFileDir, answer, generalFileDirCopy, userBean);
                logger.debug("-- gen spss file");
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // MessageFormat mf = new MessageFormat("");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_spss"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // auditMessage.append(
                // "You can access your SPSS files <a href='AccessFile?fileId="
                // + getFileIdInt(fileName) + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_spss") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            // wrap up the message, and send the email
            message.append("<p>" + pageMessages.getString("html_email_body_5") + "</P><P>" + pageMessages.getString("email_footer"));
            try {
                mailSender.sendEmail(alertEmail.trim(), pageMessages.getString("job_ran_for") + " " + datasetBean.getName(), message.toString(), true);
            } catch (OpenClinicaSystemException ose) {
                // Do Nothing, In the future we might want to have an email
                // status added to system.
            }
            // Record the successful run against this trigger in the audit log.
            TriggerBean triggerBean = new TriggerBean();
            triggerBean.setDataset(datasetBean);
            triggerBean.setUserAccount(userBean);
            triggerBean.setFullName(trigger.getKey().getName());
            auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, auditMessage.toString());
        } else {
            // No dataset id in the data map: record a failed run instead.
            TriggerBean triggerBean = new TriggerBean();
            // triggerBean.setDataset(datasetBean);
            triggerBean.setUserAccount(userBean);
            triggerBean.setFullName(trigger.getKey().getName());
            auditEventDAO.createRowForExtractDataJobFailure(triggerBean);
            // logger.debug("-- made it here for some reason, ds id: "
            // + dsId);
        }
        // logger.debug("-- generated file: " + fileNameStr);
        // dataSource.
    } catch (Exception e) {
        // TODO Auto-generated catch block -- ideally should generate a fail
        // msg here, tbh 02/2009
        logger.debug("-- found exception: " + e.getMessage());
        e.printStackTrace();
    }
}
Use of org.quartz.JobDataMap in the OpenClinica project (by OpenClinica):
class ImportSpringJob, method executeInternalInTransaction.
/**
 * Runs one scheduled data import: scans the configured import directory for
 * new ODM XML files, moves them to the processing directory, imports the
 * data, records an audit row, and e-mails the outcome to the configured
 * contact address.
 *
 * @param context Quartz execution context; its merged job data map supplies
 *            the user id, study name/OID, source sub-directory, locale and
 *            contact e-mail stored when the trigger was created.
 */
protected void executeInternalInTransaction(JobExecutionContext context) {
    locale = new Locale("en-US");
    ResourceBundleProvider.updateLocale(locale);
    respage = ResourceBundleProvider.getPageMessagesBundle();
    resword = ResourceBundleProvider.getWordsBundle();
    triggerService = new TriggerService();
    JobDataMap dataMap = context.getMergedJobDataMap();
    SimpleTrigger trigger = (SimpleTrigger) context.getTrigger();
    TriggerBean triggerBean = new TriggerBean();
    triggerBean.setFullName(trigger.getKey().getName());
    String contactEmail = dataMap.getString(EMAIL);
    logger.debug("=== starting to run trigger " + trigger.getKey().getName() + " ===");
    try {
        // Wire collaborators from the Spring context stored in the scheduler.
        ApplicationContext appContext = (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
        dataSource = (DataSource) appContext.getBean("dataSource");
        mailSender = (OpenClinicaMailSender) appContext.getBean("openClinicaMailSender");
        RuleSetServiceInterface ruleSetService = (RuleSetServiceInterface) appContext.getBean("ruleSetService");
        itemDataDao = new ItemDataDAO(dataSource);
        eventCrfDao = new EventCRFDAO(dataSource);
        auditEventDAO = new AuditEventDAO(dataSource);
        int userId = dataMap.getInt(USER_ID);
        UserAccountDAO userAccountDAO = new UserAccountDAO(dataSource);
        UserAccountBean ub = (UserAccountBean) userAccountDAO.findByPK(userId);
        triggerBean.setUserAccount(ub);
        String directory = dataMap.getString(DIRECTORY);
        String studyName = dataMap.getString(STUDY_NAME);
        String studyOid = dataMap.getString(STUDY_OID);
        String localeStr = dataMap.getString(ExampleSpringJob.LOCALE);
        // Switch to the locale the job was created under, if one was stored.
        if (localeStr != null) {
            locale = new Locale(localeStr);
            ResourceBundleProvider.updateLocale(locale);
            respage = ResourceBundleProvider.getPageMessagesBundle();
            resword = ResourceBundleProvider.getWordsBundle();
        }
        StudyDAO studyDAO = new StudyDAO(dataSource);
        StudyBean studyBean;
        // Prefer the OID (stable identifier); fall back to the study name.
        if (studyOid != null) {
            studyBean = studyDAO.findByOid(studyOid);
        } else {
            studyBean = (StudyBean) studyDAO.findByName(studyName);
        }
        // Resolve the directory to scan: the base scheduled-import path, plus
        // the per-job sub-directory when one was configured.
        File fileDirectory = new File(SQLInitServlet.getField("filePath") + DIR_PATH + File.separator);
        if (!"".equals(directory)) {
            // note: there is a separator at the end of the base path already
            fileDirectory = new File(SQLInitServlet.getField("filePath") + DIR_PATH + File.separator + directory + File.separator);
        }
        if (!fileDirectory.isDirectory()) {
            fileDirectory.mkdirs();
        }
        // The destination ("processed") directory must also exist the first
        // time this job runs.
        File destDirectory = new File(SQLInitServlet.getField("filePath") + DEST_DIR + File.separator);
        if (!destDirectory.isDirectory()) {
            destDirectory.mkdirs();
        }
        // Look at the directory; if there are new files, move them over and
        // read them.
        String[] files = fileDirectory.list();
        if (files == null) {
            // File.list() returns null when the path is not a readable
            // directory (e.g. mkdirs() above failed) -- treat as empty
            // instead of throwing a NullPointerException.
            files = new String[0];
        }
        logger.debug("found " + files.length + " files under directory " + SQLInitServlet.getField("filePath") + DIR_PATH + File.separator + directory);
        // Parallel arrays of source/destination files; non-XML entries and
        // sub-directories leave null "gaps" that are stripped before import.
        File[] target = new File[files.length];
        File[] destination = new File[files.length];
        for (int i = 0; i < files.length; i++) {
            if (!new File(fileDirectory + File.separator + files[i]).isDirectory()) {
                File f = new File(fileDirectory + File.separator + files[i]);
                if (f == null || f.getName() == null) {
                    logger.debug("found a null file");
                } else if (f.getName().indexOf(".xml") < 0 && f.getName().indexOf(".XML") < 0) {
                    logger.debug("does not seem to be an xml file");
                    // we need a place holder to avoid 'gaps' in the file list
                } else {
                    logger.debug("adding: " + f.getName());
                    target[i] = f;
                    destination[i] = new File(SQLInitServlet.getField("filePath") + DEST_DIR + File.separator + files[i]);
                }
            }
        }
        if (target.length > 0 && destination.length > 0) {
            cutAndPaste(target, destination);
            // @pgawade 28-June-2012: Fix for issue #13964 - Remove the null
            // elements from destination array of files which might be created
            // because of presence of sub-directories or non-xml files under
            // scheduled_data_import directory which are non-usable files for
            // import.
            destination = removeNullElements(destination);
            // Import the moved files; auditMessages.get(0) is the e-mail
            // body, auditMessages.get(1) the audit-log text.
            ArrayList<String> auditMessages = processData(destination, dataSource, respage, resword, ub, studyBean, destDirectory, triggerBean, ruleSetService);
            auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, auditMessages.get(1));
            try {
                if (contactEmail != null && !"".equals(contactEmail)) {
                    mailSender.sendEmail(contactEmail, respage.getString("job_ran_for") + " " + triggerBean.getFullName(), generateMsg(auditMessages.get(0), contactEmail), true);
                    logger.debug("email body: " + auditMessages.get(1));
                }
            } catch (OpenClinicaSystemException e) {
                // E-mail failure must not fail the import itself.
                logger.error("=== throw an ocse === " + e.getMessage());
                e.printStackTrace();
            }
        } else {
            logger.debug("no real files found");
            auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, respage.getString("job_ran_but_no_files"));
            // no email here, tbh
        }
    } catch (Exception e) {
        logger.error("found a fail exception: " + e.getMessage());
        e.printStackTrace();
        // Guard against the exception having occurred before the DAO/mailer
        // fields were wired above, which would otherwise throw a secondary
        // NullPointerException out of this handler.
        if (auditEventDAO != null) {
            auditEventDAO.createRowForExtractDataJobFailure(triggerBean, e.getMessage());
        }
        try {
            // Mirror the success path: only e-mail when a contact is set.
            if (mailSender != null && contactEmail != null && !"".equals(contactEmail)) {
                mailSender.sendEmail(contactEmail, respage.getString("job_failure_for") + " " + triggerBean.getFullName(), e.getMessage(), true);
            }
        } catch (OpenClinicaSystemException ose) {
            // Do nothing -- reporting the failure already failed once.
            logger.error("=== throw an ocse: " + ose.getMessage());
        }
    }
}
Use of org.quartz.JobDataMap in the OpenClinica project (by OpenClinica):
class TriggerService, method generateTrigger.
/**
 * Builds the Quartz trigger for a scheduled data-extract job from the
 * submitted form values.
 * <p>
 * Fixes over the previous version: the builder chain is now terminated with
 * {@code build()} (casting the builder itself to {@link SimpleTrigger} threw
 * a ClassCastException), the job data map is attached via
 * {@code usingJobData(...)} while building (calling
 * {@code trigger.getTriggerBuilder().usingJobData(...)} afterwards created a
 * new builder and discarded it, leaving the trigger with an empty map), and
 * the repeat interval -- which is computed in milliseconds -- is passed to
 * {@code withIntervalInMilliseconds} rather than {@code withIntervalInSeconds}.
 *
 * @param fp form processor holding the job parameters (dataset id, period,
 *            e-mail, job name/description, export-format flags)
 * @param userAccount the account that owns the job
 * @param study the study the extract runs against
 * @param locale locale string stored for the job's execution
 * @return a SimpleTrigger carrying the job data map the extract job reads
 */
public SimpleTrigger generateTrigger(FormProcessor fp, UserAccountBean userAccount, StudyBean study, String locale) {
    Date startDateTime = fp.getDateTime(DATE_START_JOB);
    int datasetId = fp.getInt(DATASET_ID);
    String period = fp.getString(PERIOD);
    String email = fp.getString(EMAIL);
    String jobName = fp.getString(JOB_NAME);
    String jobDesc = fp.getString(JOB_DESC);
    String spss = fp.getString(SPSS);
    String tab = fp.getString(TAB);
    String cdisc = fp.getString(CDISC);
    String cdisc12 = fp.getString(ExampleSpringJob.CDISC12);
    String cdisc13 = fp.getString(ExampleSpringJob.CDISC13);
    String cdisc13oc = fp.getString(ExampleSpringJob.CDISC13OC);
    // Repeat interval in milliseconds, derived from the chosen period.
    long interval;
    if ("monthly".equalsIgnoreCase(period)) {
        interval = 2419200000L; // 28 days
    } else if ("weekly".equalsIgnoreCase(period)) {
        interval = 604800000L; // 7 days
    } else {
        interval = 86400000L; // daily (the default)
    }
    // Everything the extract job needs at execution time goes into the job
    // data map; keys must coincide with what ExampleSpringJob reads.
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put(DATASET_ID, datasetId);
    jobDataMap.put(PERIOD, period);
    jobDataMap.put(EMAIL, email);
    jobDataMap.put(TAB, tab);
    jobDataMap.put(CDISC, cdisc);
    jobDataMap.put(ExampleSpringJob.CDISC12, cdisc12);
    jobDataMap.put(ExampleSpringJob.LOCALE, locale);
    jobDataMap.put(ExampleSpringJob.CDISC13, cdisc13);
    jobDataMap.put(ExampleSpringJob.CDISC13OC, cdisc13oc);
    jobDataMap.put(SPSS, spss);
    jobDataMap.put(USER_ID, userAccount.getId());
    jobDataMap.put(STUDY_ID, study.getId());
    jobDataMap.put(STUDY_NAME, study.getName());
    jobDataMap.put(STUDY_OID, study.getOid());
    // Repeat count 64000 = "effectively forever" for this use case.
    SimpleTrigger trigger = newTrigger()
            .forJob(jobName, "DEFAULT")
            .withDescription(jobDesc)
            .startAt(startDateTime)
            .usingJobData(jobDataMap)
            .withSchedule(simpleSchedule()
                    .withRepeatCount(64000)
                    .withIntervalInMilliseconds(interval)
                    .withMisfireHandlingInstructionNextWithExistingCount())
            .build();
    return trigger;
}
Use of org.quartz.JobDataMap in the OpenClinica project (by OpenClinica):
class TriggerService, method generateImportTrigger.
/**
 * Builds the Quartz trigger for a scheduled data-import job from the
 * submitted form values.
 * <p>
 * Fixes over the previous version: the builder chain is now terminated with
 * {@code build()} (casting the builder itself to {@link SimpleTrigger} threw
 * a ClassCastException), the job data map is attached via
 * {@code usingJobData(...)} while building (the old
 * {@code trigger.getTriggerBuilder().usingJobData(...)} call created a new
 * builder and discarded it), the interval -- computed in milliseconds -- is
 * passed to {@code withIntervalInMilliseconds}, and the hours/minutes
 * conversion uses long arithmetic so large values cannot overflow int.
 *
 * @param fp form processor holding the job parameters (name, description,
 *            e-mail, source directory, hours/minutes between runs)
 * @param userAccount the account that owns the job
 * @param study the study the import runs against
 * @param startDateTime when the trigger first fires
 * @param locale locale string stored for the job's execution
 * @return a SimpleTrigger in the IMPORT_TRIGGER group carrying the job data
 *         map the import job reads
 */
public SimpleTrigger generateImportTrigger(FormProcessor fp, UserAccountBean userAccount, StudyBean study, Date startDateTime, String locale) {
    String jobName = fp.getString(JOB_NAME);
    String email = fp.getString(EMAIL);
    String jobDesc = fp.getString(JOB_DESC);
    String directory = fp.getString(DIRECTORY);
    int hours = fp.getInt("hours");
    int minutes = fp.getInt("minutes");
    // Repeat interval in milliseconds; long literals force 64-bit arithmetic.
    long interval = 0L;
    if (hours > 0) {
        interval += hours * 3600000L;
    }
    if (minutes > 0) {
        interval += minutes * 60000L;
    }
    // Everything the import job needs at execution time goes into the job
    // data map; keys must coincide with what ImportSpringJob reads.
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put(EMAIL, email);
    jobDataMap.put(USER_ID, userAccount.getId());
    jobDataMap.put(STUDY_NAME, study.getName());
    jobDataMap.put(STUDY_OID, study.getOid());
    jobDataMap.put(DIRECTORY, directory);
    jobDataMap.put(ExampleSpringJob.LOCALE, locale);
    jobDataMap.put("hours", hours);
    jobDataMap.put("minutes", minutes);
    // Repeat count 64000 = "effectively forever" for this use case.
    SimpleTrigger trigger = newTrigger()
            .forJob(jobName, IMPORT_TRIGGER)
            .withDescription(jobDesc)
            .startAt(startDateTime)
            .usingJobData(jobDataMap)
            .withSchedule(simpleSchedule()
                    .withRepeatCount(64000)
                    .withIntervalInMilliseconds(interval)
                    .withMisfireHandlingInstructionNextWithExistingCount())
            .build();
    return trigger;
}
Aggregations