
Example 1 with ExtractUtils

Use of org.akaza.openclinica.service.extract.ExtractUtils in project OpenClinica by OpenClinica.

The class UpdateJobExportServlet, method processRequest.

@Override
protected void processRequest() throws Exception {
    FormProcessor fp = new FormProcessor(request);
    TriggerService triggerService = new TriggerService();
    String action = fp.getString("action");
    String triggerName = fp.getString("tname");
    scheduler = getScheduler();
    ExtractUtils extractUtils = new ExtractUtils();
    Trigger updatingTrigger = scheduler.getTrigger(new TriggerKey(triggerName.trim(), XsltTriggerService.TRIGGER_GROUP_NAME));
    if (StringUtil.isBlank(action)) {
        setUpServlet(updatingTrigger);
        forwardPage(Page.UPDATE_JOB_EXPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        // change and update trigger here
        // validate first
        // then update or send back
        String name = XsltTriggerService.TRIGGER_GROUP_NAME;
        Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals(name));
        String[] triggerNames = triggerKeys.stream().map(TriggerKey::getName).toArray(String[]::new);
        HashMap errors = validateForm(fp, request, triggerNames, updatingTrigger.getKey().getName());
        if (!errors.isEmpty()) {
            // send back
            addPageMessage("Your modifications caused an error, please see the messages for more information.");
            setUpServlet(updatingTrigger);
            logger.error("errors : " + errors.toString());
            forwardPage(Page.UPDATE_JOB_EXPORT);
        } else {
            // change trigger, update in database
            StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
            StudyBean study = (StudyBean) studyDAO.findByPK(sm.getUserBean().getActiveStudyId());
            DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
            CoreResources cr = new CoreResources();
            UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
            int datasetId = fp.getInt(DATASET_ID);
            String period = fp.getString(PERIOD);
            String email = fp.getString(EMAIL);
            String jobName = fp.getString(JOB_NAME);
            String jobDesc = fp.getString(JOB_DESC);
            Date startDateTime = fp.getDateTime(DATE_START_JOB);
            Integer exportFormatId = fp.getInt(FORMAT_ID);
            ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(exportFormatId, "" + datasetId);
            DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(datasetId);
            String[] files = epBean.getFileName();
            String exportFileName;
            int fileSize = files.length;
            int cnt = 0;
            dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
            String[] exportFiles = epBean.getExportFileName();
            String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            int i = 0;
            String[] temp = new String[exportFiles.length];
            //JN: The following logic handles comma-separated values, so the second file is not treated as an old file and deleted.
            String datasetFilePath = SQLInitServlet.getField("filePath") + "datasets";
            while (i < exportFiles.length) {
                temp[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
                i++;
            }
            epBean.setDoNotDelFiles(temp);
            epBean.setExportFileName(temp);
            XsltTriggerService xsltService = new XsltTriggerService();
            String generalFileDir = SQLInitServlet.getField("filePath");
            generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
            exportFileName = epBean.getExportFileName()[cnt];
            String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
            String endFilePath = epBean.getFileLocation();
            endFilePath = extractUtils.getEndFilePath(endFilePath, dsBean, sdfDir, datasetFilePath);
            //  exportFileName = resolveVars(exportFileName,dsBean,sdfDir);
            if (epBean.getPostProcExportName() != null) {
                String preProcExportPathName = extractUtils.resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcExportName(preProcExportPathName);
            }
            if (epBean.getPostProcLocation() != null) {
                String prePocLoc = extractUtils.getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcLocation(prePocLoc);
            }
            extractUtils.setAllProps(epBean, dsBean, sdfDir, datasetFilePath);
            SimpleTrigger trigger = xsltService.generateXsltTrigger(scheduler, xsltPath, // xml_file_path
                    generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean,
                    LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", TRIGGER_GROUP_JOB);
            // Update the original trigger with the user-given inputs. TriggerBuilder
            // returns a new trigger from build(), so the result must be captured.
            trigger = trigger.getTriggerBuilder().withDescription(jobDesc).startAt(startDateTime).forJob(jobName)
                    .withSchedule(simpleSchedule().withIntervalInSeconds(new Long(XsltTriggerService.getIntervalTime(period)).intValue())
                            .withRepeatCount(64000).withMisfireHandlingInstructionNextWithExistingCount())
                    .build();
            trigger.getJobDataMap().put(XsltTriggerService.EMAIL, email);
            trigger.getJobDataMap().put(XsltTriggerService.PERIOD, period);
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT, epBean.getFiledescription());
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT_ID, exportFormatId);
            trigger.getJobDataMap().put(XsltTriggerService.JOB_NAME, jobName);
            JobDetailFactoryBean jobDetailFactoryBean = new JobDetailFactoryBean();
            jobDetailFactoryBean.setGroup(XsltTriggerService.TRIGGER_GROUP_NAME);
            jobDetailFactoryBean.setName(trigger.getKey().getName());
            jobDetailFactoryBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
            jobDetailFactoryBean.setJobDataMap(trigger.getJobDataMap());
            // Durable jobs stay in the job store even when no trigger points at them.
            jobDetailFactoryBean.setDurability(true);
            try {
                // scheduler.unscheduleJob(triggerName, "DEFAULT");
                scheduler.deleteJob(new JobKey(triggerName, XsltTriggerService.TRIGGER_GROUP_NAME));
                // The factory bean was created with new, not by the container, so it must
                // be initialized by hand before getObject() returns a JobDetail.
                jobDetailFactoryBean.afterPropertiesSet();
                Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), trigger);
                // Date dateStart = scheduler.rescheduleJob(triggerName,
                // "DEFAULT", trigger);
                // scheduler.rescheduleJob(triggerName, groupName,
                // newTrigger)
                addPageMessage("Your job has been successfully modified.");
                forwardPage(Page.VIEW_JOB_SERVLET);
            } catch (SchedulerException se) {
                se.printStackTrace();
                // set a message here with the exception message
                setUpServlet(trigger);
                addPageMessage("There was an unspecified error with your creation, please contact an administrator.");
                forwardPage(Page.UPDATE_JOB_EXPORT);
            }
        }
    }
}
Also used : CoreResources(org.akaza.openclinica.dao.core.CoreResources), DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean), ExtractUtils(org.akaza.openclinica.service.extract.ExtractUtils), UserAccountBean(org.akaza.openclinica.bean.login.UserAccountBean), ExtractPropertyBean(org.akaza.openclinica.bean.extract.ExtractPropertyBean), XsltTriggerService(org.akaza.openclinica.service.extract.XsltTriggerService), StudyDAO(org.akaza.openclinica.dao.managestudy.StudyDAO), FormProcessor(org.akaza.openclinica.control.form.FormProcessor), StudyBean(org.akaza.openclinica.bean.managestudy.StudyBean), DatasetDAO(org.akaza.openclinica.dao.extract.DatasetDAO), TriggerService(org.akaza.openclinica.web.job.TriggerService), SimpleDateFormat(java.text.SimpleDateFormat), JobDetailFactoryBean(org.springframework.scheduling.quartz.JobDetailFactoryBean)
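
To make the shared pattern easier to see, here is a minimal, self-contained sketch of the filename-resolution loop that all three examples repeat. It assumes, based on the call sites above, that ExtractUtils.resolveVars substitutes dataset- and timestamp-derived values into each configured name; the class and method names in the sketch are illustrative, not part of OpenClinica.

import java.io.File;
import java.text.SimpleDateFormat;
import org.akaza.openclinica.bean.extract.DatasetBean;
import org.akaza.openclinica.service.extract.ExtractUtils;

public class ResolveExportNamesSketch {

    // Resolve each configured export file name against the dataset and a
    // timestamped output directory, mirroring the while-loop in the servlet above.
    public static String[] resolveAll(String[] exportFiles, DatasetBean dsBean, String filePathField) {
        ExtractUtils extractUtils = new ExtractUtils();
        String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
        SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
        String datasetFilePath = filePathField + "datasets";
        String[] resolved = new String[exportFiles.length];
        for (int i = 0; i < exportFiles.length; i++) {
            resolved[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
        }
        return resolved;
    }
}

The servlet stores the resolved array on the ExtractPropertyBean twice, via setExportFileName and setDoNotDelFiles, so that when several comma-separated names are configured the later files are not mistaken for stale output and deleted.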

Example 2 with ExtractUtils

Use of org.akaza.openclinica.service.extract.ExtractUtils in project OpenClinica by OpenClinica.

The class CreateJobExportServlet, method processRequest.

@Override
protected void processRequest() throws Exception {
    // TODO multi stage servlet which will create export jobs
    // will accept, create, and return the ViewJob servlet
    FormProcessor fp = new FormProcessor(request);
    TriggerService triggerService = new TriggerService();
    scheduler = getScheduler();
    String action = fp.getString("action");
    ExtractUtils extractUtils = new ExtractUtils();
    if (StringUtil.isBlank(action)) {
        // set up list of data sets
        // select by ... active study
        setUpServlet();
        forwardPage(Page.CREATE_JOB_EXPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        // collect form information
        Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals("DEFAULT"));
        String[] triggerNames = triggerKeys.stream().map(TriggerKey::getName).toArray(String[]::new);
        HashMap errors = validateForm(fp, request, triggerNames, "");
        if (!errors.isEmpty()) {
            // set errors to request
            request.setAttribute("formMessages", errors);
            logger.info("has validation errors in the first section");
            logger.info("errors found: " + errors.toString());
            setUpServlet();
            forwardPage(Page.CREATE_JOB_EXPORT);
        } else {
            logger.info("found no validation errors, continuing");
            StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
            DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
            UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
            CoreResources cr = new CoreResources();
            int datasetId = fp.getInt(DATASET_ID);
            String period = fp.getString(PERIOD);
            String email = fp.getString(EMAIL);
            String jobName = fp.getString(JOB_NAME);
            String jobDesc = fp.getString(JOB_DESC);
            Date startDateTime = fp.getDateTime(DATE_START_JOB);
            Integer exportFormatId = fp.getInt(FORMAT_ID);
            ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(exportFormatId, "" + datasetId);
            DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(datasetId);
            // set the job in motion
            String[] files = epBean.getFileName();
            String exportFileName;
            int fileSize = files.length;
            int cnt = 0;
            dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
            String[] exportFiles = epBean.getExportFileName();
            String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            int i = 0;
            String[] temp = new String[exportFiles.length];
            //JN: The following logic handles comma-separated values, so the second file is not treated as an old file and deleted.
            String datasetFilePath = SQLInitServlet.getField("filePath") + "datasets";
            while (i < exportFiles.length) {
                temp[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
                i++;
            }
            epBean.setDoNotDelFiles(temp);
            epBean.setExportFileName(temp);
            XsltTriggerService xsltService = new XsltTriggerService();
            String generalFileDir = SQLInitServlet.getField("filePath");
            generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
            exportFileName = epBean.getExportFileName()[cnt];
            // need to set the dataset path here, tbh
            // next, can already run jobs, translations, and then add a message to be notified later
            //JN all the properties need to have the variables...
            String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
            String endFilePath = epBean.getFileLocation();
            endFilePath = extractUtils.getEndFilePath(endFilePath, dsBean, sdfDir, datasetFilePath);
            //  exportFileName = resolveVars(exportFileName,dsBean,sdfDir);
            if (epBean.getPostProcExportName() != null) {
                //String preProcExportPathName = getEndFilePath(epBean.getPostProcExportName(),dsBean,sdfDir);
                String preProcExportPathName = extractUtils.resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcExportName(preProcExportPathName);
            }
            if (epBean.getPostProcLocation() != null) {
                String prePocLoc = extractUtils.getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcLocation(prePocLoc);
            }
            extractUtils.setAllProps(epBean, dsBean, sdfDir, datasetFilePath);
            SimpleTrigger trigger = xsltService.generateXsltTrigger(scheduler, xsltPath, // xml_file_path
                    generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean,
                    LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt",
                    xsltService.getTriggerGroupNameForExportJobs());
            // Update the original trigger with the user-given inputs; capture the rebuilt
            // trigger, since TriggerBuilder does not mutate the existing one in place.
            trigger = trigger.getTriggerBuilder()
                    .withSchedule(simpleSchedule().withRepeatCount(64000).withIntervalInSeconds(Integer.parseInt(period))
                            .withMisfireHandlingInstructionNextWithExistingCount())
                    .startAt(startDateTime).forJob(jobName).withDescription(jobDesc).build();
            trigger.getJobDataMap().put(XsltTriggerService.EMAIL, email);
            trigger.getJobDataMap().put(XsltTriggerService.PERIOD, period);
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT, epBean.getFiledescription());
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT_ID, exportFormatId);
            trigger.getJobDataMap().put(XsltTriggerService.JOB_NAME, jobName);
            trigger.getJobDataMap().put("job_type", "exportJob");
            JobDetailFactoryBean jobDetailFactoryBean = new JobDetailFactoryBean();
            jobDetailFactoryBean.setGroup(xsltService.getTriggerGroupNameForExportJobs());
            jobDetailFactoryBean.setName(trigger.getKey().getName());
            jobDetailFactoryBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
            jobDetailFactoryBean.setJobDataMap(trigger.getJobDataMap());
            // Durable jobs stay in the job store even when no trigger points at them.
            jobDetailFactoryBean.setDurability(true);
            // set to the scheduler
            try {
                // The factory bean was created with new, not by the container, so it must
                // be initialized by hand before getObject() returns a JobDetail.
                jobDetailFactoryBean.afterPropertiesSet();
                Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), trigger);
                logger.info("== found job date: " + dateStart.toString());
            // set a success message here
            } catch (SchedulerException se) {
                se.printStackTrace();
                setUpServlet();
                addPageMessage("Error creating Job.");
                forwardPage(Page.VIEW_JOB_SERVLET);
                return;
            }
            setUpServlet();
            addPageMessage("You have successfully created a new job: " + jobName + " which is now set to run at the time you specified.");
            forwardPage(Page.VIEW_JOB_SERVLET);
        }
    } else {
        // forward to form; should we even get to this part?
        forwardPage(Page.ADMIN_SYSTEM);
    }
}
Also used : SchedulerException(org.quartz.SchedulerException), FormProcessor(org.akaza.openclinica.control.form.FormProcessor), CoreResources(org.akaza.openclinica.dao.core.CoreResources), DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean), DatasetDAO(org.akaza.openclinica.dao.extract.DatasetDAO), XsltTriggerService(org.akaza.openclinica.service.extract.XsltTriggerService), TriggerService(org.akaza.openclinica.web.job.TriggerService), ExtractUtils(org.akaza.openclinica.service.extract.ExtractUtils), UserAccountBean(org.akaza.openclinica.bean.login.UserAccountBean), ExtractPropertyBean(org.akaza.openclinica.bean.extract.ExtractPropertyBean), SimpleTrigger(org.quartz.SimpleTrigger), StudyDAO(org.akaza.openclinica.dao.managestudy.StudyDAO), SimpleDateFormat(java.text.SimpleDateFormat), JobDetailFactoryBean(org.springframework.scheduling.quartz.JobDetailFactoryBean)
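
Both servlets assemble the Quartz JobDetail through Spring's JobDetailFactoryBean rather than Quartz's own JobBuilder. The sketch below isolates that wiring; it assumes, as in the code above, that the factory bean is instantiated with new outside the Spring container, in which case afterPropertiesSet() has to be called by hand before getObject() yields a non-null JobDetail. The helper class and method names are hypothetical.

import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.springframework.scheduling.quartz.JobDetailFactoryBean;

public class ExportJobDetailSketch {

    // Build a durable JobDetail for an export job. The job class and the
    // durability flag come from the servlet code above; name, group, and the
    // data map are caller-supplied.
    public static JobDetail buildJobDetail(String name, String group, JobDataMap dataMap) {
        JobDetailFactoryBean factory = new JobDetailFactoryBean();
        factory.setGroup(group);
        factory.setName(name);
        factory.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
        factory.setJobDataMap(dataMap);
        // Durable jobs survive in the job store even with no associated trigger.
        factory.setDurability(true);
        // Created with new rather than by the container, so initialize by hand.
        factory.afterPropertiesSet();
        return factory.getObject();
    }
}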

Example 3 with ExtractUtils

Use of org.akaza.openclinica.service.extract.ExtractUtils in project OpenClinica by OpenClinica.

The class ExtractController, method processSubmit.

/**
 * Processes the page from whence you came, i.e. extracts a dataset.
 * @param id the id of the extract properties bean, gained from Core Resources
 * @param datasetId the id of the dataset, found through DatasetDAO
 * @param request http request
 * @return model map; more importantly, creates a quartz job which runs right away and generates all output there
 */
@RequestMapping(method = RequestMethod.GET)
public ModelMap processSubmit(@RequestParam("id") String id, @RequestParam("datasetId") String datasetId, HttpServletRequest request, HttpServletResponse response) {
    if (!mayProceed(request)) {
        try {
            response.sendRedirect(request.getContextPath() + "/MainMenu?message=authentication_failed");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
    ModelMap map = new ModelMap();
    ResourceBundleProvider.updateLocale(LocaleResolver.getLocale(request));
    // String datasetId = (String)request.getAttribute("datasetId");
    // String id = (String)request.getAttribute("id");
    logger.debug("found both id " + id + " and dataset " + datasetId);
    ExtractUtils extractUtils = new ExtractUtils();
    // get extract id
    // get dataset id
    // if id is a number and dataset id is a number ...
    datasetDao = new DatasetDAO(dataSource);
    UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
    CoreResources cr = new CoreResources();
    ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(Integer.parseInt(id), datasetId);
    DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(Integer.parseInt(datasetId));
    // set the job in motion
    String[] files = epBean.getFileName();
    String exportFileName;
    int fileSize = files.length;
    int cnt = 0;
    SimpleTrigger simpleTrigger = null;
    //TODO: if files and export names size is not same... throw an error
    dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
    String[] exportFiles = epBean.getExportFileName();
    String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
    SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
    int i = 0;
    String[] temp = new String[exportFiles.length];
    //JN: The following logic handles comma-separated values, so the second file is not treated as an old file and deleted.
    while (i < exportFiles.length) {
        temp[i] = resolveVars(exportFiles[i], dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
        i++;
    }
    epBean.setDoNotDelFiles(temp);
    epBean.setExportFileName(temp);
    XsltTriggerService xsltService = new XsltTriggerService();
    // TODO get a user bean somehow?
    String generalFileDir = SQLInitServlet.getField("filePath");
    generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
    exportFileName = epBean.getExportFileName()[cnt];
    // need to set the dataset path here, tbh
    logger.debug("found odm xml file path " + generalFileDir);
    // next, can already run jobs, translations, and then add a message to be notified later
    //JN all the properties need to have the variables...
    String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
    String endFilePath = epBean.getFileLocation();
    endFilePath = getEndFilePath(endFilePath, dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
    //  exportFileName = resolveVars(exportFileName,dsBean,sdfDir);
    if (epBean.getPostProcExportName() != null) {
        //String preProcExportPathName = getEndFilePath(epBean.getPostProcExportName(),dsBean,sdfDir);
        String preProcExportPathName = resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
        epBean.setPostProcExportName(preProcExportPathName);
    }
    if (epBean.getPostProcLocation() != null) {
        String prePocLoc = getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
        epBean.setPostProcLocation(prePocLoc);
    }
    setAllProps(epBean, dsBean, sdfDir, extractUtils);
    // also need to add the status fields discussed w/ cc:
    // result code, user message, optional URL, archive message, log file message
    // asdf table: sort most recent at top
    logger.debug("found xslt file name " + xsltPath);
    // String xmlFilePath = generalFileDir + ODMXMLFileName;
    simpleTrigger = xsltService.generateXsltTrigger(scheduler, xsltPath, // xml_file_path
            generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean,
            LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt",
            this.TRIGGER_GROUP_NAME);
    // System.out.println("just set locale: " + LocaleResolver.getLocale(request).getLanguage());
    cnt++;
    ApplicationContext context = null;
    try {
        context = (ApplicationContext) scheduler.getContext().get("applicationContext");
    } catch (SchedulerException e) {
        e.printStackTrace();
    }
    //WebApplicationContext context = ContextLoader.getCurrentWebApplicationContext();
    JobDetailFactoryBean jobDetailFactoryBean = context.getBean(JobDetailFactoryBean.class, simpleTrigger, this.TRIGGER_GROUP_NAME);
    try {
        Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), simpleTrigger);
        logger.debug("== found job date: " + dateStart.toString());
    } catch (SchedulerException se) {
        se.printStackTrace();
    }
    request.setAttribute("datasetId", datasetId);
    // set the job name here in the user's session, so that we can ping the scheduler to pull it out later
    if (jobDetailFactoryBean != null)
        request.getSession().setAttribute("jobName", jobDetailFactoryBean.getObject().getKey().getName());
    if (simpleTrigger != null)
        request.getSession().setAttribute("groupName", this.TRIGGER_GROUP_NAME);
    request.getSession().setAttribute("datasetId", new Integer(dsBean.getId()));
    return map;
}
Also used : SchedulerException(org.quartz.SchedulerException), Date(java.util.Date), ModelMap(org.springframework.ui.ModelMap), CoreResources(org.akaza.openclinica.dao.core.CoreResources), DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean), DatasetDAO(org.akaza.openclinica.dao.extract.DatasetDAO), WebApplicationContext(org.springframework.web.context.WebApplicationContext), ApplicationContext(org.springframework.context.ApplicationContext), ExtractUtils(org.akaza.openclinica.service.extract.ExtractUtils), UserAccountBean(org.akaza.openclinica.bean.login.UserAccountBean), ExtractPropertyBean(org.akaza.openclinica.bean.extract.ExtractPropertyBean), XsltTriggerService(org.akaza.openclinica.service.extract.XsltTriggerService), SimpleTrigger(org.quartz.SimpleTrigger), SimpleDateFormat(java.text.SimpleDateFormat), JobDetailFactoryBean(org.springframework.scheduling.quartz.JobDetailFactoryBean), RequestMapping(org.springframework.web.bind.annotation.RequestMapping)
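
The controller stashes jobName and groupName in the session "so that we can ping the scheduler to pull it out later". Below is a hypothetical sketch of that later ping, using only standard Quartz 2 scheduler calls; the class and method are illustrative, while the session-attribute keys mirror the controller above.

import javax.servlet.http.HttpSession;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;

public class ExportJobStatusSketch {

    // Look the scheduled job back up from the names stored in the session and
    // report whether it is executing right now.
    public static boolean isExportJobRunning(Scheduler scheduler, HttpSession session) throws SchedulerException {
        String jobName = (String) session.getAttribute("jobName");
        String groupName = (String) session.getAttribute("groupName");
        JobKey key = new JobKey(jobName, groupName);
        JobDetail detail = scheduler.getJobDetail(key);
        if (detail == null) {
            // Never scheduled, already deleted, or the session attributes are stale.
            return false;
        }
        for (JobExecutionContext ctx : scheduler.getCurrentlyExecutingJobs()) {
            if (ctx.getJobDetail().getKey().equals(key)) {
                return true;
            }
        }
        return false;
    }
}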

Aggregations

SimpleDateFormat (java.text.SimpleDateFormat): 3
DatasetBean (org.akaza.openclinica.bean.extract.DatasetBean): 3
ExtractPropertyBean (org.akaza.openclinica.bean.extract.ExtractPropertyBean): 3
UserAccountBean (org.akaza.openclinica.bean.login.UserAccountBean): 3
CoreResources (org.akaza.openclinica.dao.core.CoreResources): 3
DatasetDAO (org.akaza.openclinica.dao.extract.DatasetDAO): 3
ExtractUtils (org.akaza.openclinica.service.extract.ExtractUtils): 3
XsltTriggerService (org.akaza.openclinica.service.extract.XsltTriggerService): 3
JobDetailFactoryBean (org.springframework.scheduling.quartz.JobDetailFactoryBean): 3
FormProcessor (org.akaza.openclinica.control.form.FormProcessor): 2
StudyDAO (org.akaza.openclinica.dao.managestudy.StudyDAO): 2
TriggerService (org.akaza.openclinica.web.job.TriggerService): 2
SchedulerException (org.quartz.SchedulerException): 2
SimpleTrigger (org.quartz.SimpleTrigger): 2
Date (java.util.Date): 1
StudyBean (org.akaza.openclinica.bean.managestudy.StudyBean): 1
ApplicationContext (org.springframework.context.ApplicationContext): 1
ModelMap (org.springframework.ui.ModelMap): 1
RequestMapping (org.springframework.web.bind.annotation.RequestMapping): 1
WebApplicationContext (org.springframework.web.context.WebApplicationContext): 1