Search in sources :

Example 6 with JobDetailBean

use of org.springframework.scheduling.quartz.JobDetailBean in project OpenClinica by OpenClinica.

In the class ExtractController, the method processSubmit:

/**
 * Process the page from whence you came, i.e. extract a dataset.
 * Looks up the extract properties and the dataset, resolves all output file
 * paths, then creates a quartz job which runs right away and generates all
 * output there.
 *
 * @param id the id of the extract properties bean, gained from Core Resources
 * @param datasetId the id of the dataset, found through DatasetDAO
 * @param request http request
 * @param response http response, used to redirect when authentication fails
 * @return the model map, or null when the user may not proceed (a redirect to
 *         the main menu has been sent instead)
 */
@RequestMapping(method = RequestMethod.GET)
public ModelMap processSubmit(@RequestParam("id") String id, @RequestParam("datasetId") String datasetId, HttpServletRequest request, HttpServletResponse response) {
    if (!mayProceed(request)) {
        try {
            response.sendRedirect(request.getContextPath() + "/MainMenu?message=authentication_failed");
        } catch (Exception e) {
            // log instead of printStackTrace so the failure reaches the application log
            logger.error("Failed to redirect to MainMenu after failed authentication", e);
        }
        return null;
    }
    ModelMap map = new ModelMap();
    ResourceBundleProvider.updateLocale(LocaleResolver.getLocale(request));
    logger.debug("found both id " + id + " and dataset " + datasetId);
    ExtractUtils extractUtils = new ExtractUtils();
    datasetDao = new DatasetDAO(dataSource);
    UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
    CoreResources cr = new CoreResources();
    // Integer.parseInt replaces the deprecated new Integer(...).intValue() idiom
    ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(Integer.parseInt(id), datasetId);
    DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(Integer.parseInt(datasetId));
    // set the job in motion
    String[] files = epBean.getFileName();
    String exportFileName;
    int cnt = 0;
    SimpleTrigger simpleTrigger = null;
    // TODO: if files and export names size is not same... throw an error
    dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
    String[] exportFiles = epBean.getExportFileName();
    // per-run output directory, e.g. 2014/01/31/123456789/
    String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
    SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
    // JN: The following logic is for comma separated variables, to avoid the second file be treated as a old file and deleted.
    String[] temp = new String[exportFiles.length];
    for (int i = 0; i < exportFiles.length; i++) {
        temp[i] = resolveVars(exportFiles[i], dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
    }
    epBean.setDoNotDelFiles(temp);
    epBean.setExportFileName(temp);
    XsltTriggerService xsltService = new XsltTriggerService();
    String generalFileDir = SQLInitServlet.getField("filePath");
    generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
    exportFileName = epBean.getExportFileName()[cnt];
    // need to set the dataset path here, tbh
    logger.debug("found odm xml file path " + generalFileDir);
    // JN all the properties need to have the variables...
    String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
    String endFilePath = epBean.getFileLocation();
    endFilePath = getEndFilePath(endFilePath, dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
    if (epBean.getPostProcExportName() != null) {
        String preProcExportPathName = resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
        epBean.setPostProcExportName(preProcExportPathName);
    }
    if (epBean.getPostProcLocation() != null) {
        String prePocLoc = getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
        epBean.setPostProcLocation(prePocLoc);
    }
    setAllProps(epBean, dsBean, sdfDir, extractUtils);
    // also need to add the status fields discussed w/ cc:
    // result code, user message, optional URL, archive message, log file message
    logger.debug("found xslt file name " + xsltPath);
    simpleTrigger = xsltService.generateXsltTrigger(xsltPath, // xml_file_path
    generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean, LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", this.TRIGGER_GROUP_NAME);
    // single allocation here; the original also created a throwaway bean earlier
    JobDetailBean jobDetailBean = new JobDetailBean();
    jobDetailBean.setGroup(this.TRIGGER_GROUP_NAME);
    // timestamp suffix keeps repeated extracts of the same dataset from colliding
    jobDetailBean.setName(simpleTrigger.getName() + System.currentTimeMillis());
    jobDetailBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
    jobDetailBean.setJobDataMap(simpleTrigger.getJobDataMap());
    // need durability? YES - we will want to see if it's finished
    jobDetailBean.setDurability(true);
    jobDetailBean.setVolatility(false);
    try {
        Date dateStart = scheduler.scheduleJob(jobDetailBean, simpleTrigger);
        logger.debug("== found job date: " + dateStart.toString());
    } catch (SchedulerException se) {
        // surface scheduling failures in the log instead of stderr
        logger.error("Error scheduling extract job " + jobDetailBean.getName(), se);
    }
    request.setAttribute("datasetId", datasetId);
    // set the job name here in the user's session, so that we can ping the scheduler to pull it out later;
    // both refs are non-null here (already dereferenced above), so no null guards are needed
    request.getSession().setAttribute("jobName", jobDetailBean.getName());
    request.getSession().setAttribute("groupName", this.TRIGGER_GROUP_NAME);
    request.getSession().setAttribute("datasetId", Integer.valueOf(dsBean.getId()));
    return map;
}
Also used : SchedulerException(org.quartz.SchedulerException) Date(java.util.Date) ModelMap(org.springframework.ui.ModelMap) CoreResources(org.akaza.openclinica.dao.core.CoreResources) DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean) JobDetailBean(org.springframework.scheduling.quartz.JobDetailBean) DatasetDAO(org.akaza.openclinica.dao.extract.DatasetDAO) SchedulerException(org.quartz.SchedulerException) Date(java.util.Date) ExtractUtils(org.akaza.openclinica.service.extract.ExtractUtils) UserAccountBean(org.akaza.openclinica.bean.login.UserAccountBean) ExtractPropertyBean(org.akaza.openclinica.bean.extract.ExtractPropertyBean) XsltTriggerService(org.akaza.openclinica.service.extract.XsltTriggerService) SimpleTrigger(org.quartz.SimpleTrigger) SimpleDateFormat(java.text.SimpleDateFormat) RequestMapping(org.springframework.web.bind.annotation.RequestMapping)

Example 7 with JobDetailBean

use of org.springframework.scheduling.quartz.JobDetailBean in project OpenClinica by OpenClinica.

In the class CreateJobImportServlet, the method processRequest:

/**
 * Multi-stage servlet entry point that generates import jobs: with no
 * action it shows the creation form; with action "confirmall" it validates
 * the form, builds a quartz trigger/job-detail pair and schedules it;
 * anything else falls through to the admin system page.
 *
 * @throws Exception propagated from form processing, scheduling, or page forwarding
 */
@Override
protected void processRequest() throws Exception {
    // validate form, create job and return to view jobs servlet
    FormProcessor fp = new FormProcessor(request);
    TriggerService triggerService = new TriggerService();
    scheduler = getScheduler();
    String action = fp.getString("action");
    if (StringUtil.isBlank(action)) {
        // first visit: set up list of data sets, selected by active study
        setUpServlet();
        forwardPage(Page.CREATE_JOB_IMPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        // collect form information
        HashMap errors = triggerService.validateImportJobForm(fp, request, scheduler.getTriggerNames(IMPORT_TRIGGER));
        if (!errors.isEmpty()) {
            // set errors to request and send the user back to the form
            request.setAttribute("formMessages", errors);
            logger.debug("has validation errors in the first section" + errors.toString());
            setUpServlet();
            forwardPage(Page.CREATE_JOB_IMPORT);
        } else {
            logger.info("found no validation errors, continuing");
            int studyId = fp.getInt(STUDY_ID);
            StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
            StudyBean studyBean = (StudyBean) studyDAO.findByPK(studyId);
            SimpleTrigger trigger = triggerService.generateImportTrigger(fp, sm.getUserBean(), studyBean, LocaleResolver.getLocale(request).getLanguage());
            JobDetailBean jobDetailBean = new JobDetailBean();
            jobDetailBean.setGroup(IMPORT_TRIGGER);
            jobDetailBean.setName(trigger.getName());
            jobDetailBean.setJobClass(org.akaza.openclinica.web.job.ImportStatefulJob.class);
            jobDetailBean.setJobDataMap(trigger.getJobDataMap());
            // durable so the job detail can be inspected later
            jobDetailBean.setDurability(true);
            jobDetailBean.setVolatility(false);
            // set to the scheduler
            try {
                Date dateStart = scheduler.scheduleJob(jobDetailBean, trigger);
                logger.debug("== found job date: " + dateStart.toString());
                // set a success message here
                addPageMessage("You have successfully created a new job: " + trigger.getName() + " which is now set to run at the time you specified.");
                forwardPage(Page.VIEW_IMPORT_JOB_SERVLET);
            } catch (SchedulerException se) {
                // log instead of printStackTrace so the failure is captured by the app log
                logger.error("Error scheduling import job " + trigger.getName(), se);
                setUpServlet();
                addPageMessage("There was an unspecified error with your creation, please contact an administrator.");
                forwardPage(Page.CREATE_JOB_IMPORT);
            }
        }
    } else {
        // unknown action - should we even get to this part?
        forwardPage(Page.ADMIN_SYSTEM);
    }
}
Also used : SchedulerException(org.quartz.SchedulerException) HashMap(java.util.HashMap) FormProcessor(org.akaza.openclinica.control.form.FormProcessor) StudyBean(org.akaza.openclinica.bean.managestudy.StudyBean) JobDetailBean(org.springframework.scheduling.quartz.JobDetailBean) Date(java.util.Date) TriggerService(org.akaza.openclinica.web.job.TriggerService) SimpleTrigger(org.quartz.SimpleTrigger) StudyDAO(org.akaza.openclinica.dao.managestudy.StudyDAO)

Example 8 with JobDetailBean

use of org.springframework.scheduling.quartz.JobDetailBean in project OpenClinica by OpenClinica.

In the class CreateJobExportServlet, the method processRequest:

/**
 * Multi-stage servlet entry point that creates export jobs: with no action
 * it shows the creation form; with action "confirmall" it validates the
 * form, resolves all extract output paths, builds a repeating quartz
 * trigger/job-detail pair and schedules it; anything else falls through to
 * the admin system page.
 *
 * @throws Exception propagated from form processing, scheduling, or page forwarding
 */
@Override
protected void processRequest() throws Exception {
    // will accept, create, and return the ViewJob servlet
    FormProcessor fp = new FormProcessor(request);
    TriggerService triggerService = new TriggerService();
    scheduler = getScheduler();
    String action = fp.getString("action");
    ExtractUtils extractUtils = new ExtractUtils();
    if (StringUtil.isBlank(action)) {
        // first visit: set up list of data sets, selected by active study
        setUpServlet();
        forwardPage(Page.CREATE_JOB_EXPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        // collect form information
        HashMap errors = validateForm(fp, request, scheduler.getTriggerNames(XsltTriggerService.TRIGGER_GROUP_NAME), "");
        if (!errors.isEmpty()) {
            // set errors to request and send the user back to the form
            request.setAttribute("formMessages", errors);
            logger.info("has validation errors in the first section");
            logger.info("errors found: " + errors.toString());
            setUpServlet();
            forwardPage(Page.CREATE_JOB_EXPORT);
        } else {
            logger.info("found no validation errors, continuing");
            StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
            DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
            UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
            CoreResources cr = new CoreResources();
            int datasetId = fp.getInt(DATASET_ID);
            String period = fp.getString(PERIOD);
            String email = fp.getString(EMAIL);
            String jobName = fp.getString(JOB_NAME);
            String jobDesc = fp.getString(JOB_DESC);
            Date startDateTime = fp.getDateTime(DATE_START_JOB);
            Integer exportFormatId = fp.getInt(FORMAT_ID);
            ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(exportFormatId, "" + datasetId);
            // datasetId is already an int; no boxing round-trip needed
            DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(datasetId);
            // set the job in motion
            String[] files = epBean.getFileName();
            String exportFileName;
            int cnt = 0;
            dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
            String[] exportFiles = epBean.getExportFileName();
            // per-run output directory, e.g. 2014/01/31/123456789/
            String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            // JN: The following logic is for comma separated variables, to avoid the second file be treated as a old file and deleted.
            String datasetFilePath = SQLInitServlet.getField("filePath") + "datasets";
            String[] temp = new String[exportFiles.length];
            for (int i = 0; i < exportFiles.length; i++) {
                temp[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
            }
            epBean.setDoNotDelFiles(temp);
            epBean.setExportFileName(temp);
            XsltTriggerService xsltService = new XsltTriggerService();
            String generalFileDir = SQLInitServlet.getField("filePath");
            generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
            exportFileName = epBean.getExportFileName()[cnt];
            // need to set the dataset path here, tbh
            // JN all the properties need to have the variables...
            String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
            String endFilePath = epBean.getFileLocation();
            endFilePath = extractUtils.getEndFilePath(endFilePath, dsBean, sdfDir, datasetFilePath);
            if (epBean.getPostProcExportName() != null) {
                String preProcExportPathName = extractUtils.resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcExportName(preProcExportPathName);
            }
            if (epBean.getPostProcLocation() != null) {
                String prePocLoc = extractUtils.getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcLocation(prePocLoc);
            }
            extractUtils.setAllProps(epBean, dsBean, sdfDir, datasetFilePath);
            SimpleTrigger trigger = xsltService.generateXsltTrigger(xsltPath, // xml_file_path
            generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean, LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", xsltService.getTriggerGroupNameForExportJobs());
            // Updating the original trigger with user given inputs
            trigger.setRepeatCount(64000);
            trigger.setRepeatInterval(XsltTriggerService.getIntervalTime(period));
            trigger.setDescription(jobDesc);
            // set just the start date
            trigger.setStartTime(startDateTime);
            trigger.setName(jobName);
            trigger.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT);
            trigger.getJobDataMap().put(XsltTriggerService.EMAIL, email);
            trigger.getJobDataMap().put(XsltTriggerService.PERIOD, period);
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT, epBean.getFiledescription());
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT_ID, exportFormatId);
            trigger.getJobDataMap().put(XsltTriggerService.JOB_NAME, jobName);
            trigger.getJobDataMap().put("job_type", "exportJob");
            JobDetailBean jobDetailBean = new JobDetailBean();
            jobDetailBean.setGroup(xsltService.getTriggerGroupNameForExportJobs());
            jobDetailBean.setName(trigger.getName());
            jobDetailBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
            jobDetailBean.setJobDataMap(trigger.getJobDataMap());
            // durable so the job detail can be inspected later
            jobDetailBean.setDurability(true);
            jobDetailBean.setVolatility(false);
            // set to the scheduler
            try {
                Date dateStart = scheduler.scheduleJob(jobDetailBean, trigger);
                logger.info("== found job date: " + dateStart.toString());
            } catch (SchedulerException se) {
                // log instead of printStackTrace so the failure is captured by the app log
                logger.error("Error creating export job " + jobName, se);
                setUpServlet();
                addPageMessage("Error creating Job.");
                forwardPage(Page.VIEW_JOB_SERVLET);
                return;
            }
            setUpServlet();
            addPageMessage("You have successfully created a new job: " + jobName + " which is now set to run at the time you specified.");
            forwardPage(Page.VIEW_JOB_SERVLET);
        }
    } else {
        // unknown action - should we even get to this part?
        forwardPage(Page.ADMIN_SYSTEM);
    }
}
Also used : SchedulerException(org.quartz.SchedulerException) HashMap(java.util.HashMap) FormProcessor(org.akaza.openclinica.control.form.FormProcessor) CoreResources(org.akaza.openclinica.dao.core.CoreResources) DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean) JobDetailBean(org.springframework.scheduling.quartz.JobDetailBean) DatasetDAO(org.akaza.openclinica.dao.extract.DatasetDAO) Date(java.util.Date) XsltTriggerService(org.akaza.openclinica.service.extract.XsltTriggerService) TriggerService(org.akaza.openclinica.web.job.TriggerService) ExtractUtils(org.akaza.openclinica.service.extract.ExtractUtils) UserAccountBean(org.akaza.openclinica.bean.login.UserAccountBean) ExtractPropertyBean(org.akaza.openclinica.bean.extract.ExtractPropertyBean) XsltTriggerService(org.akaza.openclinica.service.extract.XsltTriggerService) SimpleTrigger(org.quartz.SimpleTrigger) StudyDAO(org.akaza.openclinica.dao.managestudy.StudyDAO) SimpleDateFormat(java.text.SimpleDateFormat)

Example 9 with JobDetailBean

use of org.springframework.scheduling.quartz.JobDetailBean in project OpenClinica by OpenClinica.

In the class UpdateJobImportServlet, the method processRequest:

/**
 * Servlet entry point that updates an existing import job: with no action
 * it shows the edit form for the trigger named by the "tname" parameter;
 * with action "confirmall" it validates the form, regenerates the trigger,
 * deletes the old job and schedules the replacement.
 *
 * NOTE(review): unlike the create servlets, an unrecognized non-blank action
 * falls through without forwarding anywhere — confirm this is intentional.
 *
 * @throws Exception propagated from form processing, scheduling, or page forwarding
 */
@Override
protected void processRequest() throws Exception {
    FormProcessor fp = new FormProcessor(request);
    TriggerService triggerService = new TriggerService();
    String action = fp.getString("action");
    String triggerName = fp.getString("tname");
    scheduler = getScheduler();
    logger.debug("found trigger name " + triggerName);
    Trigger trigger = scheduler.getTrigger(triggerName, TRIGGER_IMPORT_GROUP);
    if (StringUtil.isBlank(action)) {
        // first visit: show the edit form populated from the existing trigger
        setUpServlet(trigger);
        forwardPage(Page.UPDATE_JOB_IMPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        HashMap errors = triggerService.validateImportJobForm(fp, request, scheduler.getTriggerNames("DEFAULT"), trigger.getName());
        if (!errors.isEmpty()) {
            // send back
            addPageMessage("Your modifications caused an error, please see the messages for more information.");
            setUpServlet(trigger);
            forwardPage(Page.UPDATE_JOB_IMPORT);
        } else {
            StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
            int studyId = fp.getInt(CreateJobImportServlet.STUDY_ID);
            StudyBean study = (StudyBean) studyDAO.findByPK(studyId);
            // keep the original start time, in the place of a users' current study, tbh
            Date startDate = trigger.getStartTime();
            trigger = triggerService.generateImportTrigger(fp, sm.getUserBean(), study, startDate, LocaleResolver.getLocale(request).getLanguage());
            JobDetailBean jobDetailBean = new JobDetailBean();
            jobDetailBean.setGroup(TRIGGER_IMPORT_GROUP);
            jobDetailBean.setName(trigger.getName());
            jobDetailBean.setJobClass(org.akaza.openclinica.web.job.ImportStatefulJob.class);
            jobDetailBean.setJobDataMap(trigger.getJobDataMap());
            // durable so the job detail can be inspected later
            jobDetailBean.setDurability(true);
            jobDetailBean.setVolatility(false);
            try {
                // replace the old job atomically: delete, then schedule the regenerated pair
                scheduler.deleteJob(triggerName, TRIGGER_IMPORT_GROUP);
                Date dateStart = scheduler.scheduleJob(jobDetailBean, trigger);
                logger.debug("== found job date: " + dateStart.toString());
                addPageMessage("Your job has been successfully modified.");
                forwardPage(Page.VIEW_IMPORT_JOB_SERVLET);
            } catch (SchedulerException se) {
                // log instead of printStackTrace so the failure is captured by the app log
                logger.error("Error updating import job " + triggerName, se);
                setUpServlet(trigger);
                addPageMessage("There was an unspecified error with your creation, please contact an administrator.");
                forwardPage(Page.UPDATE_JOB_IMPORT);
            }
        }
    }
}
Also used : SchedulerException(org.quartz.SchedulerException) HashMap(java.util.HashMap) FormProcessor(org.akaza.openclinica.control.form.FormProcessor) StudyBean(org.akaza.openclinica.bean.managestudy.StudyBean) JobDetailBean(org.springframework.scheduling.quartz.JobDetailBean) Date(java.util.Date) TriggerService(org.akaza.openclinica.web.job.TriggerService) Trigger(org.quartz.Trigger) SimpleTrigger(org.quartz.SimpleTrigger) StudyDAO(org.akaza.openclinica.dao.managestudy.StudyDAO)

Aggregations

SimpleTrigger (org.quartz.SimpleTrigger)9 JobDetailBean (org.springframework.scheduling.quartz.JobDetailBean)9 SchedulerException (org.quartz.SchedulerException)8 Date (java.util.Date)7 HashMap (java.util.HashMap)5 FormProcessor (org.akaza.openclinica.control.form.FormProcessor)5 StudyDAO (org.akaza.openclinica.dao.managestudy.StudyDAO)5 SimpleDateFormat (java.text.SimpleDateFormat)4 DatasetBean (org.akaza.openclinica.bean.extract.DatasetBean)4 StudyBean (org.akaza.openclinica.bean.managestudy.StudyBean)4 DatasetDAO (org.akaza.openclinica.dao.extract.DatasetDAO)4 TriggerService (org.akaza.openclinica.web.job.TriggerService)4 ExtractPropertyBean (org.akaza.openclinica.bean.extract.ExtractPropertyBean)3 UserAccountBean (org.akaza.openclinica.bean.login.UserAccountBean)3 CoreResources (org.akaza.openclinica.dao.core.CoreResources)3 ExtractUtils (org.akaza.openclinica.service.extract.ExtractUtils)3 XsltTriggerService (org.akaza.openclinica.service.extract.XsltTriggerService)3 IOException (java.io.IOException)2 ArrayList (java.util.ArrayList)2 MifosRuntimeException (org.mifos.core.MifosRuntimeException)2