Search in sources :

Example 41 with SchedulerException

use of org.quartz.SchedulerException in project OpenClinica by OpenClinica.

This example is from the class ExportDatasetServlet, method processRequest.

/**
 * Handles dataset export: with no "action" parameter it lists previously
 * generated export files; with action "delete" it removes an archived file;
 * otherwise it generates a new extract in the requested format
 * (sas, odm, txt, html, spss, csv, excel) and forwards to the matching page.
 *
 * @throws Exception on any unrecoverable processing failure (servlet framework contract)
 */
@Override
public void processRequest() throws Exception {
    // DAOs and services used throughout: dataset records, archived export
    // files, request-parameter parsing, and the extract-file generator.
    DatasetDAO dsdao = new DatasetDAO(sm.getDataSource());
    ArchivedDatasetFileDAO asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
    FormProcessor fp = new FormProcessor(request);
    GenerateExtractFileService generateFileService = new GenerateExtractFileService(sm.getDataSource(), (CoreResources) SpringServletAccess.getApplicationContext(context).getBean("coreResources"), (RuleSetRuleDao) SpringServletAccess.getApplicationContext(context).getBean("ruleSetRuleDao"));
    String action = fp.getString("action");
    int datasetId = fp.getInt("datasetId");
    int adfId = fp.getInt("adfId");
    // Fall back to the dataset stashed in the session when no id was passed
    // on the request (e.g. arriving here straight after dataset creation).
    if (datasetId == 0) {
        try {
            DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
            datasetId = dsb.getId();
            logger.info("dataset id was zero, trying session: " + datasetId);
        } catch (NullPointerException e) {
            // NOTE(review): catching NPE as control flow — if the session
            // attribute is absent, datasetId stays 0 and findByPK below gets 0.
            // An explicit null check would be cleaner; left as-is here.
            e.printStackTrace();
            logger.info("tripped over null pointer exception");
        }
    }
    DatasetBean db = (DatasetBean) dsdao.findByPK(datasetId);
    StudyDAO sdao = new StudyDAO(sm.getDataSource());
    StudyBean study = (StudyBean) sdao.findByPK(db.getStudyId());
    // Authorization: the user must hold a role in the dataset's study or its parent.
    checkRoleByUserAndStudy(ub, study.getParentStudyId(), study.getId());
    //Checks if the study is current study or child of current study
    if (study.getId() != currentStudy.getId() && study.getParentStudyId() != currentStudy.getId()) {
        addPageMessage(respage.getString("no_have_correct_privilege_current_study") + " " + respage.getString("change_active_study_or_contact"));
        forwardPage(Page.MENU_SERVLET);
        return;
    }
    /**
         * @vbc 08/06/2008 NEW EXTRACT DATA IMPLEMENTATION get study_id and
         *      parentstudy_id int currentstudyid = currentStudy.getId(); int
         *      parentstudy = currentStudy.getParentStudyId(); if (parentstudy >
         *      0) { // is OK } else { // same parentstudy = currentstudyid; } //
         */
    int currentstudyid = currentStudy.getId();
    // YW 11-09-2008 << modified logic here.
    int parentstudy = currentstudyid;
    // YW 11-09-2008 >>
    StudyBean parentStudy = new StudyBean();
    if (currentStudy.getParentStudyId() > 0) {
        //StudyDAO sdao = new StudyDAO(sm.getDataSource());
        parentStudy = (StudyBean) sdao.findByPK(currentStudy.getParentStudyId());
    }
    ExtractBean eb = generateFileService.generateExtractBean(db, currentStudy, parentStudy);
    if (StringUtil.isBlank(action)) {
        // No action: just show the list of previously generated export files.
        loadList(db, asdfdao, datasetId, fp, eb);
        forwardPage(Page.EXPORT_DATASETS);
    } else if ("delete".equalsIgnoreCase(action) && adfId > 0) {
        // Delete one archived export file from disk and, only if the disk
        // delete succeeded, remove its database record as well.
        boolean success = false;
        ArchivedDatasetFileBean adfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(adfId);
        File file = new File(adfBean.getFileReference());
        if (!file.canWrite()) {
            addPageMessage(respage.getString("write_protected"));
        } else {
            success = file.delete();
            if (success) {
                asdfdao.deleteArchiveDataset(adfBean);
                addPageMessage(respage.getString("file_removed"));
            } else {
                addPageMessage(respage.getString("error_removing_file"));
            }
        }
        loadList(db, asdfdao, datasetId, fp, eb);
        forwardPage(Page.EXPORT_DATASETS);
    } else {
        // Any other action value selects an export format below.
        logger.info("**** found action ****: " + action);
        String generateReport = "";
        // generate file, and show screen export
        // String generalFileDir = DATASET_DIR + db.getId() +
        // File.separator;
        // change this up, so that we don't overwrite anything
        // Timestamped subdirectory (yyyy/MM/dd/HHmmssSSS/) so each run writes
        // to a fresh location and never overwrites an earlier extract.
        String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
        SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
        String generalFileDir = DATASET_DIR + db.getId() + File.separator + sdfDir.format(new java.util.Date());
        String fileName = "";
        // Dataset name feeds into generated file names; spaces would break them.
        db.setName(db.getName().replaceAll(" ", "_"));
        Page finalTarget = Page.GENERATE_DATASET;
        finalTarget = Page.EXPORT_DATA_CUSTOM;
        // now display report according to format specified
        // TODO revise final target to set to fileReference????
        long sysTimeBegin = System.currentTimeMillis();
        // fId collects the archived-file id produced by whichever branch runs;
        // it is used after the if/else chain to look the record back up.
        int fId = 0;
        if ("sas".equalsIgnoreCase(action)) {
            // generateReport =
            // dsdao.generateDataset(db,
            // ExtractBean.SAS_FORMAT,
            // currentStudy,
            // parentStudy);
            // NOTE(review): generateReport is still "" here, so the SAS file is
            // written with empty content — presumably a known gap; confirm.
            long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
            String SASFileName = db.getName() + "_sas.sas";
            // logger.info("found data set: "+generateReport);
            generateFileService.createFile(SASFileName, generalFileDir, generateReport, db, sysTimeEnd, ExportFormatBean.TXTFILE, true, ub);
            logger.info("created sas file");
            request.setAttribute("generate", generalFileDir + SASFileName);
            finalTarget.setFileName(generalFileDir + SASFileName);
            fileName = SASFileName;
        // won't work since page creator is private
        } else if ("odm".equalsIgnoreCase(action)) {
            // CDISC ODM XML export, optionally followed by a Xalan XSLT job
            // that transforms the XML into SQL via the Quartz scheduler.
            String odmVersion = fp.getString("odmVersion");
            String ODMXMLFileName = "";
            // DRY
            // HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "");
            HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "", eb, currentStudy.getId(), currentStudy.getParentStudyId(), "99", true, true, true, null, ub);
            // The map holds a single fileName -> fileId pair; the loop unpacks it.
            for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
                java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
                Object key = entry.getKey();
                Object value = entry.getValue();
                ODMXMLFileName = (String) key;
                Integer fileID = (Integer) value;
                fId = fileID.intValue();
            }
            fileName = ODMXMLFileName;
            request.setAttribute("generate", generalFileDir + ODMXMLFileName);
            logger.debug("+++ set the following: " + generalFileDir + ODMXMLFileName);
            // send a link with the SQL file? put the generated SQL file with the dataset?
            if (fp.getString("xalan") != null) {
                XalanTriggerService xts = new XalanTriggerService();
                String propertiesPath = SQLInitServlet.getField("filePath");
                // the trick there, we need to open up the zipped file and get at the XML
                openZipFile(generalFileDir + ODMXMLFileName + ".zip");
                // need to find out how to copy this xml file from /bin to the generalFileDir
                SimpleTrigger simpleTrigger = xts.generateXalanTrigger(propertiesPath + File.separator + "ODMReportStylesheet.xsl", ODMXMLFileName, generalFileDir + "output.sql", db.getId());
                scheduler = getScheduler();
                // NOTE(review): local variable shadows the class name
                // JobDetailFactoryBean — renaming it (e.g. jobDetailFactory)
                // would improve readability.
                JobDetailFactoryBean JobDetailFactoryBean = new JobDetailFactoryBean();
                JobDetailFactoryBean.setGroup(xts.TRIGGER_GROUP_NAME);
                JobDetailFactoryBean.setName(simpleTrigger.getKey().getName());
                JobDetailFactoryBean.setJobClass(org.akaza.openclinica.web.job.XalanStatefulJob.class);
                JobDetailFactoryBean.setJobDataMap(simpleTrigger.getJobDataMap());
                // need durability?
                JobDetailFactoryBean.setDurability(true);
                try {
                    Date dateStart = scheduler.scheduleJob(JobDetailFactoryBean.getObject(), simpleTrigger);
                    logger.info("== found job date: " + dateStart.toString());
                } catch (SchedulerException se) {
                    // NOTE(review): failure to schedule is only printed, not
                    // surfaced to the user or the logger — consider logger.error.
                    se.printStackTrace();
                }
            }
        } else if ("txt".equalsIgnoreCase(action)) {
            // Tab-delimited text export.
            // generateReport =
            // dsdao.generateDataset(db,
            // ExtractBean.TXT_FORMAT,
            // currentStudy,
            // parentStudy);
            // eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            String TXTFileName = "";
            HashMap answerMap = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, db, currentstudyid, parentstudy, "", ub);
            // and of course DRY
            // Same single-entry unpacking pattern as the ODM branch above.
            for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
                java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
                Object key = entry.getKey();
                Object value = entry.getValue();
                TXTFileName = (String) key;
                Integer fileID = (Integer) value;
                fId = fileID.intValue();
            }
            fileName = TXTFileName;
            request.setAttribute("generate", generalFileDir + TXTFileName);
            // finalTarget.setFileName(generalFileDir+TXTFileName);
            logger.debug("+++ set the following: " + generalFileDir + TXTFileName);
        } else if ("html".equalsIgnoreCase(action)) {
            // html based dataset browser
            // Renders in the browser instead of writing an archived file,
            // hence no fId is produced in this branch.
            TabReportBean answer = new TabReportBean();
            eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            eb.getMetadata();
            eb.computeReport(answer);
            request.setAttribute("dataset", db);
            request.setAttribute("extractBean", eb);
            finalTarget = Page.GENERATE_DATASET_HTML;
        } else if ("spss".equalsIgnoreCase(action)) {
            // SPSS export: data file plus DDL syntax file.
            SPSSReportBean answer = new SPSSReportBean();
            // removed three lines here and put them in generate file
            // service, createSPSSFile method. tbh 01/2009
            eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            eb.getMetadata();
            eb.computeReport(answer);
            // System.out.println("*** isShowCRFversion:
            // "+db.isShowCRFversion());
            // TODO in the spirit of DRY, if this works we need to remove
            // lines 443-776 in this servlet, tbh 01/2009
            String DDLFileName = "";
            HashMap answerMap = generateFileService.createSPSSFile(db, eb, currentStudy, parentStudy, sysTimeBegin, generalFileDir, answer, "", ub);
            // hmm, DRY?
            for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
                java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
                Object key = entry.getKey();
                Object value = entry.getValue();
                DDLFileName = (String) key;
                Integer fileID = (Integer) value;
                fId = fileID.intValue();
            }
            // NOTE(review): unlike other branches, fileName is not set here —
            // confirm whether the commented-out delete block at the end ever
            // needs it for SPSS.
            request.setAttribute("generate", generalFileDir + DDLFileName);
            logger.debug("+++ set the following: " + generalFileDir + DDLFileName);
        } else if ("csv".equalsIgnoreCase(action)) {
            // Comma-separated export (written with a .txt extension).
            CommaReportBean answer = new CommaReportBean();
            eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            eb.getMetadata();
            eb.computeReport(answer);
            long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
            // logger.info("found data set: "+generateReport);
            String CSVFileName = db.getName() + "_comma.txt";
            fId = generateFileService.createFile(CSVFileName, generalFileDir, answer.toString(), db, sysTimeEnd, ExportFormatBean.CSVFILE, true, ub);
            fileName = CSVFileName;
            logger.info("just created csv file");
            request.setAttribute("generate", generalFileDir + CSVFileName);
        // finalTarget.setFileName(generalFileDir+CSVFileName);
        } else if ("excel".equalsIgnoreCase(action)) {
            // Excel export: streamed straight to the response as an attachment
            // rather than archived on disk (note the Content-Disposition header).
            // HSSFWorkbook excelReport = dsdao.generateExcelDataset(db,
            // ExtractBean.XLS_FORMAT,
            // currentStudy,
            // parentStudy);
            long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
            // TODO this will change and point to a created excel
            // spreadsheet, tbh
            String excelFileName = db.getName() + "_excel.xls";
            // fId = this.createFile(excelFileName,
            // generalFileDir,
            // excelReport,
            // db, sysTimeEnd,
            // ExportFormatBean.EXCELFILE);
            // logger.info("just created csv file, for excel output");
            // response.setHeader("Content-disposition","attachment;
            // filename="+CSVFileName);
            // logger.info("csv file name: "+CSVFileName);
            finalTarget = Page.GENERATE_EXCEL_DATASET;
            // response.setContentType("application/vnd.ms-excel");
            response.setHeader("Content-Disposition", "attachment; filename=" + db.getName() + "_excel.xls");
            request.setAttribute("generate", generalFileDir + excelFileName);
            logger.info("set 'generate' to :" + generalFileDir + excelFileName);
            fileName = excelFileName;
        // excelReport.write(stream);
        // stream.flush();
        // stream.close();
        // finalTarget.setFileName(WEB_DIR+db.getId()+"/"+excelFileName);
        }
        // <%@page contentType="application/vnd.ms-excel"%>
        // For file-producing formats (everything except excel and html), build
        // the single-row summary table of the file just generated.
        if (!finalTarget.equals(Page.GENERATE_EXCEL_DATASET) && !finalTarget.equals(Page.GENERATE_DATASET_HTML)) {
            // to catch all the others and try to set a new path for file
            // capture
            // tbh, 4-18-05
            // request.setAttribute("generate",finalTarget.getFileName());
            // TODO changing path to show refresh page, then window with
            // link to download file, tbh 06-08-05
            // finalTarget.setFileName(
            // "/WEB-INF/jsp/extract/generatedFileDataset.jsp");
            finalTarget.setFileName("" + "/WEB-INF/jsp/extract/generateMetadataCore.jsp");
            // also set up table here???
            asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
            ArchivedDatasetFileBean asdfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(fId);
            // *** do we need this below? tbh
            ArrayList newFileList = new ArrayList();
            newFileList.add(asdfBean);
            // request.setAttribute("filelist",newFileList);
            ArrayList filterRows = ArchivedDatasetFileRow.generateRowsFromBeans(newFileList);
            EntityBeanTable table = fp.getEntityBeanTable();
            // sort by date
            table.setSortingIfNotExplicitlySet(3, false);
            String[] columns = { resword.getString("file_name"), resword.getString("run_time"), resword.getString("file_size"), resword.getString("created_date"), resword.getString("created_by") };
            table.setColumns(new ArrayList(Arrays.asList(columns)));
            table.hideColumnLink(0);
            table.hideColumnLink(1);
            table.hideColumnLink(2);
            table.hideColumnLink(3);
            table.hideColumnLink(4);
            // table.setQuery("ExportDataset?datasetId=" +db.getId(), new
            // HashMap());
            // trying to continue...
            // session.setAttribute("newDataset",db);
            request.setAttribute("dataset", db);
            request.setAttribute("file", asdfBean);
            table.setRows(filterRows);
            table.computeDisplay();
            request.setAttribute("table", table);
        // *** do we need this above? tbh
        }
        logger.info("set first part of 'generate' to :" + generalFileDir);
        logger.info("found file name: " + finalTarget.getFileName());
        //            String del = CoreResources.getField("dataset_file_delete");
        //            if (del.equalsIgnoreCase("true") || del.equals("")) {
        //                File deleteFile = new File(generalFileDir + fileName);
        //                deleteFile.delete();
        //            }
        forwardPage(finalTarget);
    }
}
Also used : ArchivedDatasetFileDAO(org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO) SchedulerException(org.quartz.SchedulerException) HashMap(java.util.HashMap) Date(java.util.Date) EntityBeanTable(org.akaza.openclinica.web.bean.EntityBeanTable) DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean) ArrayList(java.util.ArrayList) Page(org.akaza.openclinica.view.Page) CommaReportBean(org.akaza.openclinica.bean.extract.CommaReportBean) ZipEntry(java.util.zip.ZipEntry) Iterator(java.util.Iterator) TabReportBean(org.akaza.openclinica.bean.extract.TabReportBean) SimpleTrigger(org.quartz.SimpleTrigger) StudyDAO(org.akaza.openclinica.dao.managestudy.StudyDAO) ArchivedDatasetFileBean(org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean) SPSSReportBean(org.akaza.openclinica.bean.extract.SPSSReportBean) GenerateExtractFileService(org.akaza.openclinica.service.extract.GenerateExtractFileService) FormProcessor(org.akaza.openclinica.control.form.FormProcessor) StudyBean(org.akaza.openclinica.bean.managestudy.StudyBean) DatasetDAO(org.akaza.openclinica.dao.extract.DatasetDAO) Date(java.util.Date) XalanTriggerService(org.akaza.openclinica.web.job.XalanTriggerService) ExtractBean(org.akaza.openclinica.bean.extract.ExtractBean) ZipFile(java.util.zip.ZipFile) File(java.io.File) SimpleDateFormat(java.text.SimpleDateFormat) HashMap(java.util.HashMap) JobDetailFactoryBean(org.springframework.scheduling.quartz.JobDetailFactoryBean)

Example 42 with SchedulerException

use of org.quartz.SchedulerException in project OpenClinica by OpenClinica.

This example is from the class SecureController, method pingJobServer.

/**
 * Polls the Quartz scheduler for the state of the extract job referenced by
 * the session attributes "jobName"/"groupName". When the job's trigger has
 * finished (NONE or COMPLETE), reports either the failure message or a
 * success message to the page, then clears the session attributes.
 *
 * @param request the current HTTP request; its session carries the job identifiers
 */
private void pingJobServer(HttpServletRequest request) {
    String jobName = (String) request.getSession().getAttribute("jobName");
    String groupName = (String) request.getSession().getAttribute("groupName");
    Integer datasetId = (Integer) request.getSession().getAttribute("datasetId");
    try {
        if (jobName != null && groupName != null) {
            Trigger.TriggerState triggerState = getScheduler(request).getTriggerState(new TriggerKey(jobName, groupName));
            org.quartz.JobDetail details = getScheduler(request).getJobDetail(new JobKey(jobName, groupName));
            List contexts = getScheduler(request).getCurrentlyExecutingJobs();
            // will we get the above, even if its completed running?
            // The job stores its outcome in its own data map.
            org.quartz.JobDataMap dataMap = details.getJobDataMap();
            String failMessage = dataMap.getString("failMessage");
            if (triggerState == Trigger.TriggerState.NONE || triggerState == Trigger.TriggerState.COMPLETE) {
                // TODO i18n
                if (failMessage != null) {
                    // The extract data job failed with the message:
                    // ERROR: relation "demographics" already exists
                    // More information may be available in the log files.
                    addPageMessage("The extract data job failed with the message: <br/><br/>" + failMessage + "<br/><br/>More information may be available in the log files.");
                    request.getSession().removeAttribute("jobName");
                    request.getSession().removeAttribute("groupName");
                    request.getSession().removeAttribute("datasetId");
                } else {
                    // NOTE(review): the two keys look swapped — "successMsg" is
                    // only used as a completion flag while "SUCCESS_MESSAGE"
                    // carries the text; confirmed to match the job's writer side?
                    String successMsg = dataMap.getString("SUCCESS_MESSAGE");
                    String success = dataMap.getString("successMsg");
                    if (success != null) {
                        // Fix: successMsg itself may be null even when the
                        // completion flag is set — guard before contains() to
                        // avoid a NullPointerException.
                        if (successMsg != null && successMsg.contains("$linkURL")) {
                            successMsg = decodeLINKURL(successMsg, datasetId);
                        }
                        if (successMsg != null && !successMsg.isEmpty()) {
                            addPageMessage(successMsg);
                        } else {
                            addPageMessage("Your Extract is now completed. Please go to review them at <a href='ExportDataset?datasetId=" + datasetId + "'> Here </a>.");
                        }
                        request.getSession().removeAttribute("jobName");
                        request.getSession().removeAttribute("groupName");
                        request.getSession().removeAttribute("datasetId");
                    }
                }
            } else {
                // Job still running: leave the session attributes in place so a
                // later request polls again.
            }
        }
    } catch (SchedulerException se) {
        // NOTE(review): consider routing through the class logger instead of stderr.
        se.printStackTrace();
    }
}
Also used : TriggerKey(org.quartz.TriggerKey) JobKey(org.quartz.JobKey) Trigger(org.quartz.Trigger) SchedulerException(org.quartz.SchedulerException) List(java.util.List) ArrayList(java.util.ArrayList)

Example 43 with SchedulerException

use of org.quartz.SchedulerException in project Dempsy by Dempsy.

This example is from the class CronOutputSchedule, method start.

/* (non-Javadoc)
 * @see com.nokia.dempsy.output.OutputExecuter#start()
 *
 * Registers this schedule's job with the default Quartz scheduler under a
 * cron trigger built from the configured expression, then starts the
 * scheduler. Scheduling failures are logged, not propagated.
 */
public void start() {
    try {
        // Build both Quartz artifacts up front, then wire and launch.
        final Trigger cronTrigger = getCronTrigger(cronExpression);
        final JobDetail detail = super.getJobDetail();
        scheduler = StdSchedulerFactory.getDefaultScheduler();
        scheduler.scheduleJob(detail, cronTrigger);
        scheduler.start();
    } catch (SchedulerException se) {
        logger.error("Error occurred while starting the cron scheduler : " + se.getMessage(), se);
    }
}
Also used : JobDetail(org.quartz.JobDetail) Trigger(org.quartz.Trigger) SchedulerException(org.quartz.SchedulerException)

Example 44 with SchedulerException

use of org.quartz.SchedulerException in project Dempsy by Dempsy.

This example is from the class RelativeOutputSchedule, method start.

/**
 * Container will invoke this method. Registers this schedule's job with the
 * default Quartz scheduler under a simple fixed-interval trigger, then starts
 * the scheduler. Scheduling failures are logged, not propagated.
 */
@Override
public void start() {
    try {
        // Build both Quartz artifacts up front, then wire and launch.
        final Trigger intervalTrigger = getSimpleTrigger(timeUnit, (int) interval);
        final JobDetail detail = super.getJobDetail();
        scheduler = StdSchedulerFactory.getDefaultScheduler();
        scheduler.scheduleJob(detail, intervalTrigger);
        scheduler.start();
    } catch (SchedulerException se) {
        logger.error("Error occurred while starting the relative scheduler : " + se.getMessage(), se);
    }
}
Also used : JobDetail(org.quartz.JobDetail) Trigger(org.quartz.Trigger) SchedulerException(org.quartz.SchedulerException)

Example 45 with SchedulerException

use of org.quartz.SchedulerException in project series-rest-api by 52North.

This example is from the class JobScheduler, method scheduleJob.

/**
 * Schedules the given task with its own trigger and, when requested, adds an
 * extra one-shot trigger so the job also fires once immediately at startup.
 * Scheduling failures are logged, not propagated.
 *
 * @param taskToSchedule the job definition to register with the scheduler
 */
private void scheduleJob(ScheduledJob taskToSchedule) {
    try {
        JobDetail details = taskToSchedule.createJobDetails();
        Trigger trigger = taskToSchedule.createTrigger(details.getKey());
        scheduler.scheduleJob(details, trigger);
        if (taskToSchedule.isTriggerAtStartup()) {
            LOGGER.debug("Schedule job '{}' to run once at startup.", details.getKey());
            // Fix: the trigger identity must be unique per job — a fixed
            // "onceAtStartup" name collides (ObjectAlreadyExistsException) as
            // soon as a second job requests a startup run.
            Trigger onceAtStartup = TriggerBuilder.newTrigger().withIdentity("onceAtStartup_" + details.getKey()).forJob(details.getKey()).build();
            scheduler.scheduleJob(onceAtStartup);
        }
    } catch (SchedulerException e) {
        LOGGER.warn("Could not schedule Job '{}'.", taskToSchedule.getJobName(), e);
    }
}
Also used : JobDetail(org.quartz.JobDetail) Trigger(org.quartz.Trigger) SchedulerException(org.quartz.SchedulerException)

Aggregations

SchedulerException (org.quartz.SchedulerException)133 JobDetail (org.quartz.JobDetail)59 Trigger (org.quartz.Trigger)42 Scheduler (org.quartz.Scheduler)37 JobKey (org.quartz.JobKey)33 SimpleTrigger (org.quartz.SimpleTrigger)19 JobDataMap (org.quartz.JobDataMap)18 CronTrigger (org.quartz.CronTrigger)17 TriggerBuilder.newTrigger (org.quartz.TriggerBuilder.newTrigger)15 ApplicationContext (org.springframework.context.ApplicationContext)15 ArrayList (java.util.ArrayList)12 SchedulerContext (org.quartz.SchedulerContext)12 IOException (java.io.IOException)11 TriggerKey (org.quartz.TriggerKey)10 Date (java.util.Date)9 JobExecutionException (org.quartz.JobExecutionException)9 StdSchedulerFactory (org.quartz.impl.StdSchedulerFactory)6 ParseException (java.text.ParseException)5 Command (org.openhab.core.types.Command)5 JobSystemException (com.dangdang.ddframe.job.exception.JobSystemException)4