Usage of org.akaza.openclinica.bean.extract.ExtractPropertyBean in the OpenClinica project: class SystemController, method getExtractModule.
/**
 * @api {get} /pages/auth/api/v1/system/extract Retrieve Extract Properties
 * @apiName getExtractProperties
 * @apiPermission Authenticate using api-key. admin
 * @apiVersion 3.8.0
 * @apiGroup System
 * @apiDescription Retrieves Extract Properties
 * @apiSuccessExample {json} Success-Response: HTTP/1.1 200 OK
 * {
 * "extract.properties": {
 * "extract.number": {
 * "extract.number": "99"
 * },
 * "extract.1": {
 * "zip": "true",
 * "failure": "",
 * "fileDescription": "CDISC ODM XML 1.3 Full with OpenClinica extensions",
 * "linkText": "Run Now",
 * "deleteOld": "true",
 * "location": "$exportFilePath/$datasetName/ODM_1.3_Full",
 * "file": "[copyXML.xsl]",
 * "helpText": "CDISC ODM XML 1.3 Full with OpenClinica extensions. Includes discrepancy notes and audit trails.",
 * "exportname": "[odm1.3_full$datasetName_$dateTime.xml]",
 * "success": "The extract completed successfully. The file is available for download $linkURL.",
 * "odmType": "full"
 * },
 * "extract.2": {
 * "zip": "true",
 * "failure": "",
 * "fileDescription": "CDISC ODM XML 1.3 Clinical Data with OpenClinica extensions",
 * "linkText": "Run Now",
 * "deleteOld": "true",
 * "location": "$exportFilePath/$datasetName/ODM_1.3_Extensions",
 * "file": "[copyXML.xsl]",
 * "helpText": "CDISC ODM XML 1.3 Clinical Data with OpenClinica extensions. Does not include discrepancy notes or audit trails.",
 * "exportname": "[odm1.3_clinical_ext_$datasetName_$dateTime.xml]",
 * "success": "Your extract job completed successfully. The file is available for download $linkURL.",
 * "odmType": "clinical_data"
 * }
 * ...
 * }
 * }
 */
@RequestMapping(value = "/extract", method = RequestMethod.GET)
public ResponseEntity<HashMap> getExtractModule() throws Exception {
    // FIX: Locale takes language and country as separate arguments; the old
    // new Locale("en_US") created a bogus language tag "en_us" instead of
    // the intended English/US locale.
    ResourceBundleProvider.updateLocale(new Locale("en", "US"));
    HashMap<String, Object> map = new HashMap<>();
    HashMap<String, Object> extractMap = new HashMap<>();
    // One nested map per configured extract format, keyed "extract.<n>".
    ArrayList<ExtractPropertyBean> extracts = CoreResources.getExtractProperties();
    int n = 0;
    for (ExtractPropertyBean extract : extracts) {
        n++;
        HashMap<String, String> extractmap = new HashMap<>();
        extractmap.put("odmType", extract.getOdmType());
        extractmap.put("file", Arrays.toString(extract.getFileName()));
        extractmap.put("fileDescription", extract.getFiledescription());
        extractmap.put("linkText", extract.getLinkText());
        extractmap.put("helpText", extract.getHelpText());
        extractmap.put("location", extract.getFileLocation());
        extractmap.put("exportname", Arrays.toString(extract.getExportFileName()));
        extractmap.put("zip", String.valueOf(extract.getZipFormat()));
        extractmap.put("deleteOld", String.valueOf(extract.getDeleteOld()));
        extractmap.put("success", extract.getSuccessMessage());
        extractmap.put("failure", extract.getFailureMessage());
        extractMap.put("extract." + n, extractmap);
    }
    // Datamart (db1.*) settings are read to probe connectivity; the maps
    // themselves are currently not returned (see commented-out puts below).
    HashMap<String, String> extractDatamart = new HashMap<>();
    HashMap<String, String> datamartRole = new HashMap<>();
    String username = CoreResources.getExtractField("db1.username");
    String password = CoreResources.getExtractField("db1.password");
    String url = CoreResources.getExtractField("db1.url");
    extractDatamart.put("db1.username", username);
    extractDatamart.put("db1.url", url);
    extractDatamart.put("db1.dataBase", CoreResources.getExtractField("db1.dataBase"));
    HashMap<String, String> extractNumber = new HashMap<>();
    extractNumber.put("extract.number", CoreResources.getExtractField("extract.number"));
    extractMap.put("extract.number", extractNumber);
    // extractMap.put("DataMart", extractDatamart);
    HashMap<String, String> datamartMap = new HashMap<>();
    // Best-effort connectivity check: any failure is reported as a closed
    // connection rather than propagated to the caller.
    try (Connection conn = DriverManager.getConnection(url, username, password)) {
        // NOTE(review): the role lookup result is never surfaced while the
        // "Datamart Facts" entry below stays commented out — confirm intent.
        datamartRole = getDbRoleProperties(conn, datamartRole, username);
        datamartMap.put("connection", "Open");
    } catch (Exception e) {
        datamartMap.put("connection", "Close");
    }
    // map.put("Datamart Facts", datamartMap);
    map.put("extract.properties", extractMap);
    return new ResponseEntity<HashMap>(map, org.springframework.http.HttpStatus.OK);
}
Usage of org.akaza.openclinica.bean.extract.ExtractPropertyBean in the OpenClinica project: class QuartzConfiguration, method simpleTriggerFactoryBean.
@Bean
@Scope("prototype")
@Lazy
public SimpleTriggerFactoryBean simpleTriggerFactoryBean(String xslFile, String xmlFile, String endFilePath, String endFile, int datasetId, ExtractPropertyBean epBean, UserAccountBean userAccountBean, String locale, int cnt, String xsltPath) {
    // Prototype-scoped factory for a fire-once Quartz trigger that carries
    // every extract-job parameter in its JobDataMap.
    SimpleTriggerFactoryBean factory = new SimpleTriggerFactoryBean();
    factory.setBeanName("trigger1");
    factory.setGroup("group1");
    factory.setRepeatInterval(1);
    factory.setRepeatCount(0);
    factory.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_FIRE_NOW);

    // Assemble the payload handed to the job at execution time.
    JobDataMap payload = new JobDataMap();
    // File locations for the XSLT transform and its output.
    payload.put(XSL_FILE_PATH, xslFile);
    // NOTE(review): the xmlFile parameter is never used here; XML_FILE_PATH
    // is filled with endFilePath instead — confirm this is intentional.
    payload.put(XML_FILE_PATH, endFilePath);
    payload.put(POST_FILE_PATH, endFilePath);
    payload.put(POST_FILE_NAME, endFile);
    // Identity of the extract configuration, user, study and dataset.
    payload.put(EXTRACT_PROPERTY, epBean.getId());
    payload.put(USER_ID, userAccountBean.getId());
    payload.put(STUDY_ID, userAccountBean.getActiveStudyId());
    payload.put(LOCALE, locale);
    payload.put(DATASET_ID, datasetId);
    payload.put(EMAIL, userAccountBean.getEmail());
    // Output handling flags and user-facing messages.
    payload.put(ZIPPED, epBean.getZipFormat());
    payload.put(DELETE_OLD, epBean.getDeleteOld());
    payload.put(SUCCESS_MESSAGE, epBean.getSuccessMessage());
    payload.put(FAILURE_MESSAGE, epBean.getFailureMessage());
    // Post-processing configuration.
    payload.put(POST_PROC_DELETE_OLD, epBean.getPostProcDeleteOld());
    payload.put(POST_PROC_ZIP, epBean.getPostProcZip());
    payload.put(POST_PROC_LOCATION, epBean.getPostProcLocation());
    payload.put(POST_PROC_EXPORT_NAME, epBean.getPostProcExportName());
    payload.put(COUNT, cnt);
    payload.put(XSLT_PATH, xsltPath);
    // The whole bean travels along as well.
    payload.put(EP_BEAN, epBean);

    factory.setJobDataMap(payload);
    return factory;
}
Usage of org.akaza.openclinica.bean.extract.ExtractPropertyBean in the OpenClinica project: class CreateJobExportServlet, method processRequest.
@Override
protected void processRequest() throws Exception {
    // Multi-stage servlet: a blank action shows the job-creation form;
    // "confirmall" validates the form, builds a Quartz trigger/job pair for
    // the export, schedules it, and forwards to the ViewJob servlet.
    FormProcessor fp = new FormProcessor(request);
    scheduler = getScheduler();
    String action = fp.getString("action");
    ExtractUtils extractUtils = new ExtractUtils();
    if (StringUtil.isBlank(action)) {
        // First visit: set up the dataset list for the active study.
        setUpServlet();
        forwardPage(Page.CREATE_JOB_EXPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        // Collect existing trigger names so validateForm can reject duplicates.
        Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals("DEFAULT"));
        // FIX: map each TriggerKey to its name. Collecting TriggerKey objects
        // directly into a String[] (the previous code) throws
        // ArrayStoreException at runtime as soon as any trigger exists.
        String[] triggerNames = triggerKeys.stream().map(TriggerKey::getName).toArray(String[]::new);
        HashMap errors = validateForm(fp, request, triggerNames, "");
        if (!errors.isEmpty()) {
            // Re-display the form with validation messages.
            request.setAttribute("formMessages", errors);
            logger.info("has validation errors in the first section");
            logger.info("errors found: " + errors.toString());
            setUpServlet();
            forwardPage(Page.CREATE_JOB_EXPORT);
        } else {
            logger.info("found no validation errors, continuing");
            DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
            UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
            CoreResources cr = new CoreResources();
            int datasetId = fp.getInt(DATASET_ID);
            String period = fp.getString(PERIOD);
            String email = fp.getString(EMAIL);
            String jobName = fp.getString(JOB_NAME);
            String jobDesc = fp.getString(JOB_DESC);
            Date startDateTime = fp.getDateTime(DATE_START_JOB);
            Integer exportFormatId = fp.getInt(FORMAT_ID);
            ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(exportFormatId, "" + datasetId);
            DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(datasetId);
            // Set the job in motion: resolve file names and paths for this run.
            String[] files = epBean.getFileName();
            int cnt = 0;
            dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
            String[] exportFiles = epBean.getExportFileName();
            String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            // JN: resolve $-variables in every export file name up front so a
            // comma-separated second file is not treated as stale and deleted.
            String datasetFilePath = SQLInitServlet.getField("filePath") + "datasets";
            String[] temp = new String[exportFiles.length];
            for (int i = 0; i < exportFiles.length; i++) {
                temp[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
            }
            epBean.setDoNotDelFiles(temp);
            epBean.setExportFileName(temp);
            XsltTriggerService xsltService = new XsltTriggerService();
            // Output directory: <filePath>/datasets/<datasetId>/<timestamped dirs>.
            String generalFileDir = SQLInitServlet.getField("filePath");
            generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
            String exportFileName = epBean.getExportFileName()[cnt];
            String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
            String endFilePath = epBean.getFileLocation();
            endFilePath = extractUtils.getEndFilePath(endFilePath, dsBean, sdfDir, datasetFilePath);
            // Resolve post-processing export name/location when configured.
            if (epBean.getPostProcExportName() != null) {
                String preProcExportPathName = extractUtils.resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcExportName(preProcExportPathName);
            }
            if (epBean.getPostProcLocation() != null) {
                String prePocLoc = extractUtils.getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcLocation(prePocLoc);
            }
            extractUtils.setAllProps(epBean, dsBean, sdfDir, datasetFilePath);
            SimpleTrigger trigger = xsltService.generateXsltTrigger(scheduler, xsltPath, // xml_file_path
                    generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean, LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", xsltService.getTriggerGroupNameForExportJobs());
            // NOTE(review): TriggerBuilder is a copy-on-build API — the result
            // of this chain is discarded, so the user-supplied schedule, start
            // time, job name and description are never applied to the trigger
            // that is actually scheduled below. Confirm whether the rebuilt
            // trigger should be reassigned (and its job key kept consistent
            // with the JobDetail created below) before changing this.
            trigger.getTriggerBuilder().withSchedule(simpleSchedule().withRepeatCount(64000).withIntervalInSeconds(Integer.parseInt(period)).withMisfireHandlingInstructionNextWithExistingCount()).startAt(startDateTime).forJob(jobName).withDescription(jobDesc);
            // Attach user-visible metadata for the job list pages.
            trigger.getJobDataMap().put(XsltTriggerService.EMAIL, email);
            trigger.getJobDataMap().put(XsltTriggerService.PERIOD, period);
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT, epBean.getFiledescription());
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT_ID, exportFormatId);
            trigger.getJobDataMap().put(XsltTriggerService.JOB_NAME, jobName);
            trigger.getJobDataMap().put("job_type", "exportJob");
            // FIX: renamed the local from "JobDetailFactoryBean", which
            // shadowed its own class name.
            JobDetailFactoryBean jobDetailFactoryBean = new JobDetailFactoryBean();
            jobDetailFactoryBean.setGroup(xsltService.getTriggerGroupNameForExportJobs());
            jobDetailFactoryBean.setName(trigger.getKey().getName());
            jobDetailFactoryBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
            jobDetailFactoryBean.setJobDataMap(trigger.getJobDataMap());
            // Durable so the job survives even without an active trigger.
            jobDetailFactoryBean.setDurability(true);
            // Hand the pair to the scheduler.
            try {
                Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), trigger);
                logger.info("== found job date: " + dateStart.toString());
            } catch (SchedulerException se) {
                se.printStackTrace();
                setUpServlet();
                addPageMessage("Error creating Job.");
                forwardPage(Page.VIEW_JOB_SERVLET);
                return;
            }
            setUpServlet();
            addPageMessage("You have successfully created a new job: " + jobName + " which is now set to run at the time you specified.");
            forwardPage(Page.VIEW_JOB_SERVLET);
        }
    } else {
        // Unknown action: bounce back to the admin page.
        forwardPage(Page.ADMIN_SYSTEM);
    }
}
Usage of org.akaza.openclinica.bean.extract.ExtractPropertyBean in the OpenClinica project: class ScheduledJobController, method listScheduledJobs.
@RequestMapping("/listCurrentScheduledJobs")
public ModelMap listScheduledJobs(HttpServletRequest request, HttpServletResponse response) throws SchedulerException {
    // Builds the "scheduled jobs" grid: collects all non-paused simple
    // triggers from every trigger group, marks the currently executing ones,
    // and renders the result through the JMesa table facade.
    Locale locale = LocaleResolver.getLocale(request);
    ResourceBundleProvider.updateLocale(locale);
    ModelMap gridMap = new ModelMap();
    String[] triggerNames;
    // "showMoreLink" defaults to true when the parameter is absent.
    boolean showMoreLink = false;
    if (request.getParameter("showMoreLink") != null) {
        showMoreLink = Boolean.parseBoolean(request.getParameter("showMoreLink").toString());
    } else {
        showMoreLink = true;
    }
    request.setAttribute("showMoreLink", showMoreLink + "");
    request.setAttribute("imagePathPrefix", "../");
    ArrayList<String> pageMessages = (ArrayList<String>) request.getAttribute("pageMessages");
    if (pageMessages == null) {
        pageMessages = new ArrayList<String>();
    }
    request.setAttribute("pageMessages", pageMessages);
    // Record name+group of every job that is executing right now so the grid
    // can flag them and suppress their Cancel button.
    List<JobExecutionContext> listCurrentJobs = scheduler.getCurrentlyExecutingJobs();
    List<String> currentJobList = new ArrayList<String>();
    for (JobExecutionContext temp : listCurrentJobs) {
        currentJobList.add(temp.getTrigger().getJobKey().getName() + temp.getTrigger().getKey().getGroup());
    }
    List<String> triggerGroupNames = scheduler.getTriggerGroupNames();
    String[] triggerGroups = triggerGroupNames.toArray(new String[0]);
    List<SimpleTrigger> simpleTriggers = new ArrayList<SimpleTrigger>();
    int index1 = 0;
    for (String triggerGroup : triggerGroups) {
        logger.debug("Group: " + triggerGroup + " contains the following triggers");
        Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals(triggerGroup));
        // FIX: map each TriggerKey to its name. Collecting TriggerKey objects
        // directly into a String[] (the previous code) throws
        // ArrayStoreException at runtime as soon as any trigger exists.
        triggerNames = triggerKeys.stream().map(TriggerKey::getName).toArray(String[]::new);
        for (String triggerName : triggerNames) {
            Trigger.TriggerState state = scheduler.getTriggerState(TriggerKey.triggerKey(triggerName, triggerGroup));
            logger.debug("- " + triggerName);
            if (state != Trigger.TriggerState.PAUSED) {
                // FIX: guard the cast — a non-simple (e.g. cron) trigger in
                // any group would previously throw ClassCastException and
                // break the whole listing.
                Trigger candidate = scheduler.getTrigger(TriggerKey.triggerKey(triggerName, triggerGroup));
                if (candidate instanceof SimpleTrigger) {
                    simpleTriggers.add(index1, (SimpleTrigger) candidate);
                    index1++;
                }
            }
        }
    }
    // Convert each trigger with an EP_BEAN payload into a grid row.
    List<ScheduledJobs> jobsScheduled = new ArrayList<ScheduledJobs>();
    int index = 0;
    for (SimpleTrigger st : simpleTriggers) {
        boolean isExecuting = currentJobList.contains(st.getJobKey().getName() + st.getJobKey().getGroup());
        ScheduledJobs jobs = new ScheduledJobs();
        ExtractPropertyBean epBean = null;
        if (st.getJobDataMap() != null) {
            epBean = (ExtractPropertyBean) st.getJobDataMap().get(EP_BEAN);
        }
        if (epBean != null) {
            StringBuilder checkbox = new StringBuilder();
            checkbox.append("<input style='margin-right: 5px' type='checkbox'/>");
            StringBuilder actions = new StringBuilder("<table><tr><td>");
            if (isExecuting) {
                actions.append(" ");
            } else {
                // Inline JS that posts the job/trigger identity to the
                // cancelScheduledJob page.
                String contextPath = request.getContextPath();
                StringBuilder jsCodeString = new StringBuilder("this.form.method='GET'; this.form.action='").append(contextPath).append("/pages/cancelScheduledJob").append("';").append("this.form.theJobName.value='").append(st.getJobKey().getName()).append("';").append("this.form.theJobGroupName.value='").append(st.getJobKey().getGroup()).append("';").append("this.form.theTriggerName.value='").append(st.getJobKey().getName()).append("';").append("this.form.theTriggerGroupName.value='").append(st.getJobKey().getGroup()).append("';").append("this.form.submit();");
                actions.append("<td><input type=\"submit\" class=\"button\" value=\"Cancel Job\" ").append("name=\"cancelJob\" onclick=\"").append(jsCodeString.toString()).append("\" />");
            }
            actions.append("</td></tr></table>");
            jobs.setCheckbox(checkbox.toString());
            jobs.setDatasetId(epBean.getDatasetName());
            String fireTime = st.getStartTime() != null ? longFormat(locale).format(st.getStartTime()) : "";
            jobs.setFireTime(fireTime);
            if (st.getNextFireTime() != null) {
                jobs.setScheduledFireTime(longFormat(locale).format(st.getNextFireTime()));
            }
            jobs.setExportFileName(epBean.getExportFileName()[0]);
            jobs.setAction(actions.toString());
            jobs.setJobStatus(isExecuting ? "Currently Executing" : "Scheduled");
            jobsScheduled.add(index, jobs);
            index++;
        }
    }
    logger.debug("totalRows" + index);
    request.setAttribute("totalJobs", index);
    request.setAttribute("jobs", jobsScheduled);
    TableFacade facade = scheduledJobTableFactory.createTable(request, response);
    String sdvMatrix = facade.render();
    gridMap.addAttribute(SCHEDULED_TABLE_ATTRIBUTE, sdvMatrix);
    return gridMap;
}
Usage of org.akaza.openclinica.bean.extract.ExtractPropertyBean in the OpenClinica project: class ExtractController, method processSubmit.
/**
 * process the page from whence you came, i.e. extract a dataset
 * @param id, the id of the extract properties bean, gained from Core Resources
 * @param datasetId, the id of the dataset, found through DatasetDAO
 * @param request, http request
 * @return model map, but more importantly, creates a quartz job which runs right away and generates all output there
 */
@RequestMapping(method = RequestMethod.GET)
public ModelMap processSubmit(@RequestParam("id") String id, @RequestParam("datasetId") String datasetId, HttpServletRequest request, HttpServletResponse response) {
// Reject unauthenticated callers up front; the redirect swallows I/O errors
// and the method returns null (no view rendered).
if (!mayProceed(request)) {
try {
response.sendRedirect(request.getContextPath() + "/MainMenu?message=authentication_failed");
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
ModelMap map = new ModelMap();
ResourceBundleProvider.updateLocale(LocaleResolver.getLocale(request));
// String datasetId = (String)request.getAttribute("datasetId");
// String id = (String)request.getAttribute("id");
logger.debug("found both id " + id + " and dataset " + datasetId);
ExtractUtils extractUtils = new ExtractUtils();
// Look up the extract configuration and the dataset this run will export.
// NOTE(review): new Integer(...).intValue() is deprecated boxing — a
// non-numeric id/datasetId throws NumberFormatException here; confirm
// Integer.parseInt would be an acceptable replacement.
datasetDao = new DatasetDAO(dataSource);
UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
CoreResources cr = new CoreResources();
ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(new Integer(id).intValue(), datasetId);
DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(new Integer(datasetId).intValue());
// set the job in motion
String[] files = epBean.getFileName();
String exportFileName;
// fileSize is currently unused; kept as-is to preserve the original code.
int fileSize = files.length;
int cnt = 0;
SimpleTrigger simpleTrigger = null;
//TODO: if files and export names size is not same... throw an error
// Spaces in the dataset name would break generated file paths.
dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
String[] exportFiles = epBean.getExportFileName();
// Timestamped directory pattern: yyyy/MM/dd/HHmmssSSS/.
String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
int i = 0;
String[] temp = new String[exportFiles.length];
//JN: The following logic is for comma separated variables, to avoid the second file be treated as a old file and deleted.
while (i < exportFiles.length) {
temp[i] = resolveVars(exportFiles[i], dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
i++;
}
epBean.setDoNotDelFiles(temp);
epBean.setExportFileName(temp);
XsltTriggerService xsltService = new XsltTriggerService();
// TODO get a user bean somehow?
// Output directory: <filePath>/datasets/<datasetId>/<timestamped dirs>.
String generalFileDir = SQLInitServlet.getField("filePath");
generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
exportFileName = epBean.getExportFileName()[cnt];
// need to set the dataset path here, tbh
logger.debug("found odm xml file path " + generalFileDir);
// next, can already run jobs, translations, and then add a message to be notified later
//JN all the properties need to have the variables...
String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
String endFilePath = epBean.getFileLocation();
endFilePath = getEndFilePath(endFilePath, dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
// exportFileName = resolveVars(exportFileName,dsBean,sdfDir);
// Resolve post-processing export name/location when configured.
if (epBean.getPostProcExportName() != null) {
//String preProcExportPathName = getEndFilePath(epBean.getPostProcExportName(),dsBean,sdfDir);
String preProcExportPathName = resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
epBean.setPostProcExportName(preProcExportPathName);
}
if (epBean.getPostProcLocation() != null) {
String prePocLoc = getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
epBean.setPostProcLocation(prePocLoc);
}
setAllProps(epBean, dsBean, sdfDir, extractUtils);
// also need to add the status fields discussed w/ cc:
// result code, user message, optional URL, archive message, log file message
// asdf table: sort most recent at top
logger.debug("found xslt file name " + xsltPath);
// String xmlFilePath = generalFileDir + ODMXMLFileName;
// Build the run-now trigger carrying all extract parameters.
simpleTrigger = xsltService.generateXsltTrigger(scheduler, xsltPath, // xml_file_path
generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean, LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", this.TRIGGER_GROUP_NAME);
// System.out.println("just set locale: " + LocaleResolver.getLocale(request).getLanguage());
cnt++;
// Fetch the Spring context stashed in the Quartz scheduler context so the
// prototype JobDetailFactoryBean can be created with constructor args.
// NOTE(review): if getContext() throws, context stays null and the
// getBean call below will NPE — confirm whether that path can occur.
ApplicationContext context = null;
try {
context = (ApplicationContext) scheduler.getContext().get("applicationContext");
} catch (SchedulerException e) {
e.printStackTrace();
}
//WebApplicationContext context = ContextLoader.getCurrentWebApplicationContext();
JobDetailFactoryBean jobDetailFactoryBean = context.getBean(JobDetailFactoryBean.class, simpleTrigger, this.TRIGGER_GROUP_NAME);
// Schedule immediately; scheduler errors are logged but not reported to the user.
try {
Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), simpleTrigger);
logger.debug("== found job date: " + dateStart.toString());
} catch (SchedulerException se) {
se.printStackTrace();
}
request.setAttribute("datasetId", datasetId);
// set the job name here in the user's session, so that we can ping the scheduler to pull it out later
if (jobDetailFactoryBean != null)
request.getSession().setAttribute("jobName", jobDetailFactoryBean.getObject().getKey().getName());
if (simpleTrigger != null)
request.getSession().setAttribute("groupName", this.TRIGGER_GROUP_NAME);
request.getSession().setAttribute("datasetId", new Integer(dsBean.getId()));
return map;
}
End of aggregated usage examples.