Usage example of org.akaza.openclinica.service.extract.XsltTriggerService in the OpenClinica project — class UpdateJobExportServlet, method processRequest:
/**
 * Two-stage "update export job" flow. With no action parameter it pre-populates
 * the update form from the existing Quartz trigger; with action=confirmall it
 * validates the submitted values, rebuilds the trigger/job pair and
 * re-schedules it (deleting the old job first).
 *
 * @throws Exception on servlet or scheduler failures (propagated to the framework)
 */
@Override
protected void processRequest() throws Exception {
    FormProcessor fp = new FormProcessor(request);
    String action = fp.getString("action");
    String triggerName = fp.getString("tname");
    scheduler = getScheduler();
    ExtractUtils extractUtils = new ExtractUtils();
    Trigger updatingTrigger = scheduler.getTrigger(new TriggerKey(triggerName.trim(), XsltTriggerService.TRIGGER_GROUP_NAME));
    if (StringUtil.isBlank(action)) {
        // First stage: show the form pre-filled from the existing trigger.
        setUpServlet(updatingTrigger);
        forwardPage(Page.UPDATE_JOB_EXPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        // Validate first, then update the trigger or send the user back with errors.
        String name = XsltTriggerService.TRIGGER_GROUP_NAME;
        Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals(name));
        // BUGFIX: the keys must be mapped to their names; streaming TriggerKey
        // objects straight into a String[] throws ArrayStoreException at runtime.
        String[] triggerNames = triggerKeys.stream().map(TriggerKey::getName).toArray(String[]::new);
        HashMap errors = validateForm(fp, request, triggerNames, updatingTrigger.getKey().getName());
        if (!errors.isEmpty()) {
            // Validation failed — send the user back to the form.
            addPageMessage("Your modifications caused an error, please see the messages for more information.");
            setUpServlet(updatingTrigger);
            logger.error("errors : " + errors.toString());
            forwardPage(Page.UPDATE_JOB_EXPORT);
        } else {
            // Valid input: rebuild the trigger from the form values and re-schedule it.
            DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
            CoreResources cr = new CoreResources();
            UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
            int datasetId = fp.getInt(DATASET_ID);
            String period = fp.getString(PERIOD);
            String email = fp.getString(EMAIL);
            String jobName = fp.getString(JOB_NAME);
            String jobDesc = fp.getString(JOB_DESC);
            Date startDateTime = fp.getDateTime(DATE_START_JOB);
            Integer exportFormatId = fp.getInt(FORMAT_ID);
            ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(exportFormatId, "" + datasetId);
            DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(datasetId);
            String[] files = epBean.getFileName();
            int cnt = 0;
            // Blanks in the dataset name would break generated file paths.
            dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
            String[] exportFiles = epBean.getExportFileName();
            String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            // JN: Resolve placeholder variables in every export file name up front so a
            // comma-separated second file is not treated as an old file and deleted.
            String datasetFilePath = SQLInitServlet.getField("filePath") + "datasets";
            String[] temp = new String[exportFiles.length];
            for (int i = 0; i < exportFiles.length; i++) {
                temp[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
            }
            epBean.setDoNotDelFiles(temp);
            epBean.setExportFileName(temp);
            XsltTriggerService xsltService = new XsltTriggerService();
            String generalFileDir = SQLInitServlet.getField("filePath") + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
            String exportFileName = epBean.getExportFileName()[cnt];
            String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
            String endFilePath = extractUtils.getEndFilePath(epBean.getFileLocation(), dsBean, sdfDir, datasetFilePath);
            // Post-processing names/locations may also contain placeholder variables.
            if (epBean.getPostProcExportName() != null) {
                epBean.setPostProcExportName(extractUtils.resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, datasetFilePath));
            }
            if (epBean.getPostProcLocation() != null) {
                epBean.setPostProcLocation(extractUtils.getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, datasetFilePath));
            }
            extractUtils.setAllProps(epBean, dsBean, sdfDir, datasetFilePath);
            SimpleTrigger trigger = xsltService.generateXsltTrigger(scheduler, xsltPath, // xml_file_path
                generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean, LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", TRIGGER_GROUP_JOB);
            // NOTE(review): TriggerBuilder returns a NEW builder and the built trigger
            // is never captured, so the user's description/startAt/schedule changes
            // below are silently discarded. Capturing the rebuilt trigger requires
            // reconciling its job key (forJob(jobName)) with the JobDetail created
            // below — confirm intent before changing.
            trigger.getTriggerBuilder().withDescription(jobDesc).startAt(startDateTime).forJob(jobName).withSchedule(simpleSchedule().withIntervalInSeconds(new Long(XsltTriggerService.getIntervalTime(period)).intValue()).withRepeatCount(64000).withMisfireHandlingInstructionNextWithExistingCount());
            trigger.getJobDataMap().put(XsltTriggerService.EMAIL, email);
            trigger.getJobDataMap().put(XsltTriggerService.PERIOD, period);
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT, epBean.getFiledescription());
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT_ID, exportFormatId);
            trigger.getJobDataMap().put(XsltTriggerService.JOB_NAME, jobName);
            JobDetailFactoryBean jobDetailFactoryBean = new JobDetailFactoryBean();
            jobDetailFactoryBean.setGroup(XsltTriggerService.TRIGGER_GROUP_NAME);
            jobDetailFactoryBean.setName(trigger.getKey().getName());
            jobDetailFactoryBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
            jobDetailFactoryBean.setJobDataMap(trigger.getJobDataMap());
            // Durable so the job definition survives even without an attached trigger.
            jobDetailFactoryBean.setDurability(true);
            try {
                // Replace the old job outright, then schedule the rebuilt one.
                scheduler.deleteJob(new JobKey(triggerName, XsltTriggerService.TRIGGER_GROUP_NAME));
                scheduler.scheduleJob(jobDetailFactoryBean.getObject(), trigger);
                addPageMessage("Your job has been successfully modified.");
                forwardPage(Page.VIEW_JOB_SERVLET);
            } catch (SchedulerException se) {
                se.printStackTrace();
                // Scheduling failed — show the form again with a generic message.
                setUpServlet(trigger);
                addPageMessage("There was an unspecified error with your creation, please contact an administrator.");
                forwardPage(Page.UPDATE_JOB_EXPORT);
            }
        }
    }
}
Usage example of org.akaza.openclinica.service.extract.XsltTriggerService in the OpenClinica project — class CreateJobExportServlet, method processRequest:
/**
 * Multi-stage servlet that creates a scheduled export job. With no action
 * parameter it renders the job-creation form; with action=confirmall it
 * validates the submitted values, builds a Quartz trigger/job pair for the
 * XSLT export and schedules it. Any other action falls back to the admin page.
 *
 * @throws Exception on servlet or scheduler failures (propagated to the framework)
 */
@Override
protected void processRequest() throws Exception {
    FormProcessor fp = new FormProcessor(request);
    scheduler = getScheduler();
    String action = fp.getString("action");
    ExtractUtils extractUtils = new ExtractUtils();
    if (StringUtil.isBlank(action)) {
        // First stage: show the list of data sets for the active study.
        setUpServlet();
        forwardPage(Page.CREATE_JOB_EXPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        // Collect form information and validate against existing trigger names.
        Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals("DEFAULT"));
        // BUGFIX: the keys must be mapped to their names; streaming TriggerKey
        // objects straight into a String[] throws ArrayStoreException at runtime.
        String[] triggerNames = triggerKeys.stream().map(TriggerKey::getName).toArray(String[]::new);
        HashMap errors = validateForm(fp, request, triggerNames, "");
        if (!errors.isEmpty()) {
            // Validation failed — re-render the form with the error messages.
            request.setAttribute("formMessages", errors);
            logger.info("has validation errors in the first section");
            logger.info("errors found: " + errors.toString());
            setUpServlet();
            forwardPage(Page.CREATE_JOB_EXPORT);
        } else {
            logger.info("found no validation errors, continuing");
            DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
            UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
            CoreResources cr = new CoreResources();
            int datasetId = fp.getInt(DATASET_ID);
            String period = fp.getString(PERIOD);
            String email = fp.getString(EMAIL);
            String jobName = fp.getString(JOB_NAME);
            String jobDesc = fp.getString(JOB_DESC);
            Date startDateTime = fp.getDateTime(DATE_START_JOB);
            Integer exportFormatId = fp.getInt(FORMAT_ID);
            ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(exportFormatId, "" + datasetId);
            DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(datasetId);
            // Set the job in motion: resolve output file names and build the trigger.
            String[] files = epBean.getFileName();
            int cnt = 0;
            // Blanks in the dataset name would break generated file paths.
            dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
            String[] exportFiles = epBean.getExportFileName();
            String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            // JN: Resolve placeholder variables in every export file name up front so a
            // comma-separated second file is not treated as an old file and deleted.
            String datasetFilePath = SQLInitServlet.getField("filePath") + "datasets";
            String[] temp = new String[exportFiles.length];
            for (int i = 0; i < exportFiles.length; i++) {
                temp[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
            }
            epBean.setDoNotDelFiles(temp);
            epBean.setExportFileName(temp);
            XsltTriggerService xsltService = new XsltTriggerService();
            String generalFileDir = SQLInitServlet.getField("filePath") + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
            String exportFileName = epBean.getExportFileName()[cnt];
            // JN: all the post-processing properties need their variables resolved too.
            String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
            String endFilePath = extractUtils.getEndFilePath(epBean.getFileLocation(), dsBean, sdfDir, datasetFilePath);
            if (epBean.getPostProcExportName() != null) {
                epBean.setPostProcExportName(extractUtils.resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, datasetFilePath));
            }
            if (epBean.getPostProcLocation() != null) {
                epBean.setPostProcLocation(extractUtils.getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, datasetFilePath));
            }
            extractUtils.setAllProps(epBean, dsBean, sdfDir, datasetFilePath);
            SimpleTrigger trigger = xsltService.generateXsltTrigger(scheduler, xsltPath, // xml_file_path
                generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean, LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", xsltService.getTriggerGroupNameForExportJobs());
            // NOTE(review): TriggerBuilder returns a NEW builder and the built trigger
            // is never captured, so the schedule/startAt/description below are
            // silently discarded; the trigger keeps whatever generateXsltTrigger
            // produced. Capturing the rebuilt trigger requires reconciling its job
            // key (forJob(jobName)) with the JobDetail created below — confirm
            // intent before changing.
            trigger.getTriggerBuilder().withSchedule(simpleSchedule().withRepeatCount(64000).withIntervalInSeconds(Integer.parseInt(period)).withMisfireHandlingInstructionNextWithExistingCount()).startAt(startDateTime).forJob(jobName).withDescription(jobDesc);
            trigger.getJobDataMap().put(XsltTriggerService.EMAIL, email);
            trigger.getJobDataMap().put(XsltTriggerService.PERIOD, period);
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT, epBean.getFiledescription());
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT_ID, exportFormatId);
            trigger.getJobDataMap().put(XsltTriggerService.JOB_NAME, jobName);
            trigger.getJobDataMap().put("job_type", "exportJob");
            JobDetailFactoryBean jobDetailFactoryBean = new JobDetailFactoryBean();
            jobDetailFactoryBean.setGroup(xsltService.getTriggerGroupNameForExportJobs());
            jobDetailFactoryBean.setName(trigger.getKey().getName());
            jobDetailFactoryBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
            jobDetailFactoryBean.setJobDataMap(trigger.getJobDataMap());
            // Durable so the job definition survives even without an attached trigger.
            jobDetailFactoryBean.setDurability(true);
            // Hand the job/trigger pair to the scheduler.
            try {
                Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), trigger);
                logger.info("== found job date: " + dateStart.toString());
            } catch (SchedulerException se) {
                se.printStackTrace();
                setUpServlet();
                addPageMessage("Error creating Job.");
                forwardPage(Page.VIEW_JOB_SERVLET);
                return;
            }
            setUpServlet();
            addPageMessage("You have successfully created a new job: " + jobName + " which is now set to run at the time you specified.");
            forwardPage(Page.VIEW_JOB_SERVLET);
        }
    } else {
        // Unknown action — fall back to the admin landing page.
        forwardPage(Page.ADMIN_SYSTEM);
    }
}
Usage example of org.akaza.openclinica.service.extract.XsltTriggerService in the OpenClinica project — class ViewJobServlet, method processRequest:
/**
 * Single-stage servlet that lists all scheduled export jobs: reads every
 * trigger in the export-jobs group, wraps each in a TriggerBean with its
 * dataset/study details and paused/active state, and renders them as a table
 * on the view-jobs page.
 *
 * @throws Exception on scheduler or servlet failures (propagated to the framework)
 */
@Override
protected void processRequest() throws Exception {
    FormProcessor fp = new FormProcessor(request);
    // First we must get a reference to a scheduler.
    scheduler = getScheduler();
    XsltTriggerService xsltTriggerSrvc = new XsltTriggerService();
    Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals(xsltTriggerSrvc.getTriggerGroupNameForExportJobs()));
    // BUGFIX: the keys must be mapped to their names; streaming TriggerKey
    // objects straight into a String[] throws ArrayStoreException at runtime.
    String[] triggerNames = triggerKeys.stream().map(TriggerKey::getName).toArray(String[]::new);
    // Wrapper beans used to show the triggers on the JSP page.
    ArrayList triggerBeans = new ArrayList();
    // DAOs are loop-invariant; create them once.
    DatasetDAO datasetDAO = new DatasetDAO(sm.getDataSource());
    StudyDAO studyDao = new StudyDAO(sm.getDataSource());
    for (String triggerName : triggerNames) {
        Trigger trigger = scheduler.getTrigger(TriggerKey.triggerKey(triggerName, xsltTriggerSrvc.getTriggerGroupNameForExportJobs()));
        try {
            logger.debug("prev fire time " + trigger.getPreviousFireTime().toString());
            logger.debug("next fire time " + trigger.getNextFireTime().toString());
            logger.debug("final fire time: " + trigger.getFinalFireTime().toString());
        } catch (NullPointerException npe) {
            // Fire times may legitimately be null (e.g. job never fired) — ignore.
        }
        TriggerBean triggerBean = new TriggerBean();
        triggerBean.setFullName(trigger.getKey().getName());
        triggerBean.setPreviousDate(trigger.getPreviousFireTime());
        triggerBean.setNextDate(trigger.getNextFireTime());
        if (trigger.getDescription() != null) {
            triggerBean.setDescription(trigger.getDescription());
        }
        // Frequency and dataset/study details come from the trigger's job data map.
        if (trigger.getJobDataMap().size() > 0) {
            JobDataMap dataMap = trigger.getJobDataMap();
            int dsId = dataMap.getInt(ExampleSpringJob.DATASET_ID);
            String periodToRun = dataMap.getString(ExampleSpringJob.PERIOD);
            triggerBean.setPeriodToRun(periodToRun);
            DatasetBean dataset = (DatasetBean) datasetDAO.findByPK(dsId);
            triggerBean.setDataset(dataset);
            triggerBean.setDatasetName(dataset.getName());
            StudyBean study = (StudyBean) studyDao.findByPK(dataset.getStudyId());
            triggerBean.setStudyName(study.getName());
        }
        logger.debug("Trigger Priority: " + trigger.getKey().getName() + " " + trigger.getPriority());
        // BUGFIX: query the state in the same group the trigger was fetched from;
        // using the static TRIGGER_GROUP_NAME here can name a different group, in
        // which case the state comes back NONE and every job displays as active.
        if (scheduler.getTriggerState(TriggerKey.triggerKey(triggerName, xsltTriggerSrvc.getTriggerGroupNameForExportJobs())) == Trigger.TriggerState.PAUSED) {
            triggerBean.setActive(false);
            logger.debug("setting active to false for trigger: " + trigger.getKey().getName());
        } else {
            triggerBean.setActive(true);
            logger.debug("setting active to TRUE for trigger: " + trigger.getKey().getName());
        }
        triggerBeans.add(triggerBean);
    }
    ArrayList allRows = TriggerRow.generateRowsFromBeans(triggerBeans);
    EntityBeanTable table = fp.getEntityBeanTable();
    String[] columns = { resword.getString("name"), resword.getString("previous_fire_time"), resword.getString("next_fire_time"), resword.getString("description"), resword.getString("period_to_run"), resword.getString("dataset"), resword.getString("study"), resword.getString("actions") };
    table.setColumns(new ArrayList(Arrays.asList(columns)));
    // Description (3) and actions (7) columns are plain text, not sort links.
    table.hideColumnLink(3);
    table.hideColumnLink(7);
    table.setQuery("ViewJob", new HashMap());
    table.setSortingColumnInd(0);
    table.setRows(allRows);
    table.computeDisplay();
    request.setAttribute("table", table);
    forwardPage(Page.VIEW_JOB);
}
Usage example of org.akaza.openclinica.service.extract.XsltTriggerService in the OpenClinica project — class ExtractController, method processSubmit:
/**
 * Extracts a dataset: builds and immediately schedules a one-off Quartz XSLT
 * job that generates all output files for the chosen extract format.
 *
 * @param id the id of the extract properties bean, gained from Core Resources
 * @param datasetId the id of the dataset, found through DatasetDAO
 * @param request the http request
 * @param response the http response; only used to redirect when authentication fails
 * @return an (empty) model map — the real work is the quartz job which runs right away and generates all output there; null when authentication fails
 */
@RequestMapping(method = RequestMethod.GET)
public ModelMap processSubmit(@RequestParam("id") String id, @RequestParam("datasetId") String datasetId, HttpServletRequest request, HttpServletResponse response) {
// Gate: unauthenticated users are redirected back to the main menu.
if (!mayProceed(request)) {
try {
response.sendRedirect(request.getContextPath() + "/MainMenu?message=authentication_failed");
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
ModelMap map = new ModelMap();
ResourceBundleProvider.updateLocale(LocaleResolver.getLocale(request));
logger.debug("found both id " + id + " and dataset " + datasetId);
ExtractUtils extractUtils = new ExtractUtils();
// Look up the extract format (by extract-properties id) and the dataset.
datasetDao = new DatasetDAO(dataSource);
UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
CoreResources cr = new CoreResources();
ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(new Integer(id).intValue(), datasetId);
DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(new Integer(datasetId).intValue());
// set the job in motion
String[] files = epBean.getFileName();
String exportFileName;
int fileSize = files.length;
int cnt = 0;
SimpleTrigger simpleTrigger = null;
//TODO: if files and export names size is not same... throw an error
// Blanks in the dataset name would otherwise appear in generated file paths.
dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
String[] exportFiles = epBean.getExportFileName();
// Timestamped subdirectory pattern, e.g. yyyy/MM/dd/HHmmssSSS/.
String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
int i = 0;
String[] temp = new String[exportFiles.length];
//JN: The following logic is for comma separated variables, to avoid the second file be treated as a old file and deleted.
while (i < exportFiles.length) {
temp[i] = resolveVars(exportFiles[i], dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
i++;
}
epBean.setDoNotDelFiles(temp);
epBean.setExportFileName(temp);
XsltTriggerService xsltService = new XsltTriggerService();
// TODO get a user bean somehow?
// Output directory: <filePath>/datasets/<datasetId>/<timestamp>/.
String generalFileDir = SQLInitServlet.getField("filePath");
generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
exportFileName = epBean.getExportFileName()[cnt];
// need to set the dataset path here, tbh
logger.debug("found odm xml file path " + generalFileDir);
// next, can already run jobs, translations, and then add a message to be notified later
//JN all the properties need to have the variables...
String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
String endFilePath = epBean.getFileLocation();
endFilePath = getEndFilePath(endFilePath, dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
// exportFileName = resolveVars(exportFileName,dsBean,sdfDir);
// Post-processing names/locations may also contain placeholder variables.
if (epBean.getPostProcExportName() != null) {
//String preProcExportPathName = getEndFilePath(epBean.getPostProcExportName(),dsBean,sdfDir);
String preProcExportPathName = resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
epBean.setPostProcExportName(preProcExportPathName);
}
if (epBean.getPostProcLocation() != null) {
String prePocLoc = getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
epBean.setPostProcLocation(prePocLoc);
}
setAllProps(epBean, dsBean, sdfDir, extractUtils);
// also need to add the status fields discussed w/ cc:
// result code, user message, optional URL, archive message, log file message
// asdf table: sort most recent at top
logger.debug("found xslt file name " + xsltPath);
// String xmlFilePath = generalFileDir + ODMXMLFileName;
// Build the trigger that will run the XSLT transformation job.
simpleTrigger = xsltService.generateXsltTrigger(scheduler, xsltPath, // xml_file_path
generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean, LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", this.TRIGGER_GROUP_NAME);
cnt++;
// Obtain the Spring application context stashed in the scheduler context.
ApplicationContext context = null;
try {
context = (ApplicationContext) scheduler.getContext().get("applicationContext");
} catch (SchedulerException e) {
e.printStackTrace();
}
//WebApplicationContext context = ContextLoader.getCurrentWebApplicationContext();
// NOTE(review): if the scheduler context lookup above failed, context is still
// null here and this getBean call throws NullPointerException — confirm whether
// that failure mode needs explicit handling.
JobDetailFactoryBean jobDetailFactoryBean = context.getBean(JobDetailFactoryBean.class, simpleTrigger, this.TRIGGER_GROUP_NAME);
try {
Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), simpleTrigger);
logger.debug("== found job date: " + dateStart.toString());
} catch (SchedulerException se) {
se.printStackTrace();
}
request.setAttribute("datasetId", datasetId);
// set the job name here in the user's session, so that we can ping the scheduler to pull it out later
if (jobDetailFactoryBean != null)
request.getSession().setAttribute("jobName", jobDetailFactoryBean.getObject().getKey().getName());
if (simpleTrigger != null)
request.getSession().setAttribute("groupName", this.TRIGGER_GROUP_NAME);
request.getSession().setAttribute("datasetId", new Integer(dsBean.getId()));
return map;
}
Aggregations