
Example 11 with JobDataMap

use of org.quartz.JobDataMap in project cachecloud by sohutv.

the class RedisSlowLogJob method action.

@Override
public void action(JobExecutionContext context) {
    try {
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        ApplicationContext applicationContext = (ApplicationContext) schedulerContext.get(APPLICATION_CONTEXT_KEY);
        RedisCenter redisCenter = (RedisCenter) applicationContext.getBean("redisCenter");
        JobDataMap dataMap = context.getMergedJobDataMap();
        String host = dataMap.getString(ConstUtils.HOST_KEY);
        int port = dataMap.getInt(ConstUtils.PORT_KEY);
        long appId = dataMap.getLong(ConstUtils.APP_KEY);
        Trigger trigger = context.getTrigger();
        long collectTime = ScheduleUtil.getCollectTime(trigger.getPreviousFireTime());
        redisCenter.collectRedisSlowLog(appId, collectTime, host, port);
    } catch (SchedulerException e) {
        logger.error(e.getMessage(), e);
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
    }
}
Also used : ApplicationContext(org.springframework.context.ApplicationContext) JobDataMap(org.quartz.JobDataMap) Trigger(org.quartz.Trigger) SchedulerException(org.quartz.SchedulerException) RedisCenter(com.sohu.cache.redis.RedisCenter) SchedulerContext(org.quartz.SchedulerContext)
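
The job above only reads from the merged JobDataMap; the values are supplied when the job is registered with the scheduler. A minimal sketch of that producer side, assuming a Quartz Scheduler is already available (the key literals, job identities, and cron expression are illustrative stand-ins for the ConstUtils constants and cachecloud's real scheduling code; RedisSlowLogJob is the job class quoted above, its import is omitted):

import org.quartz.CronScheduleBuilder;
import org.quartz.JobBuilder;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;

public class RedisSlowLogJobSchedulingSketch {

    // Illustrative key names; the job above resolves them through ConstUtils constants.
    private static final String HOST_KEY = "host";
    private static final String PORT_KEY = "port";
    private static final String APP_KEY = "appId";

    public void schedule(Scheduler scheduler, long appId, String host, int port) throws SchedulerException {
        // values read back in action() via getString/getInt/getLong
        JobDataMap dataMap = new JobDataMap();
        dataMap.put(HOST_KEY, host);
        dataMap.put(PORT_KEY, port);
        dataMap.put(APP_KEY, appId);

        JobDetail job = JobBuilder.newJob(RedisSlowLogJob.class)
                .withIdentity("redis-slowlog-" + host + "-" + port, "redis-slowlog")
                .usingJobData(dataMap)
                .build();

        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("redis-slowlog-trigger-" + host + "-" + port, "redis-slowlog")
                // every 5 minutes; the real collection interval is an assumption
                .withSchedule(CronScheduleBuilder.cronSchedule("0 0/5 * * * ?"))
                .build();

        scheduler.scheduleJob(job, trigger);
    }
}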

Example 12 with JobDataMap

use of org.quartz.JobDataMap in project camel by apache.

the class QuartzHelper method updateJobDataMap.

/**
     * Adds the current CamelContext name and endpoint URI to the Job's jobData
     * map.
     * 
     * @param camelContext The currently active camelContext
     * @param jobDetail The job for which the jobData map shall be updated
     * @param endpointUri URI of the endpoint, if any; may be {@code null}
     * @param usingFixedCamelContextName if true, the job data map uses the fixed CamelContext name;
     *  if false, it uses the CamelContext management name, which can change at deploy time
     */
public static void updateJobDataMap(CamelContext camelContext, JobDetail jobDetail, String endpointUri, boolean usingFixedCamelContextName) {
    // Store this camelContext name into the job data
    JobDataMap jobDataMap = jobDetail.getJobDataMap();
    String camelContextName = camelContext.getName();
    if (!usingFixedCamelContextName) {
        camelContextName = QuartzHelper.getQuartzContextName(camelContext);
    }
    LOG.debug("Adding camelContextName={}, endpointUri={} into job data map.", camelContextName, endpointUri);
    jobDataMap.put(QuartzConstants.QUARTZ_CAMEL_CONTEXT_NAME, camelContextName);
    jobDataMap.put(QuartzConstants.QUARTZ_ENDPOINT_URI, endpointUri);
}
Also used : JobDataMap(org.quartz.JobDataMap)
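
The helper above only writes into the job's JobDataMap; those entries are read back when the trigger fires. A hypothetical consumer of the same two keys (this is not the actual camel-quartz2 job class, only a sketch of the read side; the QuartzConstants keys themselves come from the code above):

import org.apache.camel.component.quartz2.QuartzConstants;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class CamelContextAwareJobSketch implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        JobDataMap dataMap = context.getMergedJobDataMap();
        // keys written by QuartzHelper.updateJobDataMap(...)
        String camelContextName = dataMap.getString(QuartzConstants.QUARTZ_CAMEL_CONTEXT_NAME);
        String endpointUri = dataMap.getString(QuartzConstants.QUARTZ_ENDPOINT_URI);
        // a real job would look up the CamelContext by this name and route the
        // exchange to endpointUri; here we only log what was stored
        System.out.println("fired for camelContext=" + camelContextName + ", endpoint=" + endpointUri);
    }
}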

Example 13 with JobDataMap

use of org.quartz.JobDataMap in project camel by apache.

the class QuartzScheduledPollConsumerScheduler method doStart.

@Override
protected void doStart() throws Exception {
    ObjectHelper.notEmpty(cron, "cron", this);
    if (quartzScheduler == null) {
        // get the scheduler from the quartz component
        QuartzComponent quartz = getCamelContext().getComponent("quartz2", QuartzComponent.class);
        setQuartzScheduler(quartz.getScheduler());
    }
    JobDataMap map = new JobDataMap();
    // if we have a route id, do not store the task, as it is not serializable
    if (routeId != null) {
        map.put("routeId", routeId);
    } else {
        map.put("task", runnable);
    }
    map.put(QuartzConstants.QUARTZ_TRIGGER_TYPE, "cron");
    map.put(QuartzConstants.QUARTZ_TRIGGER_CRON_EXPRESSION, getCron());
    map.put(QuartzConstants.QUARTZ_TRIGGER_CRON_TIMEZONE, getTimeZone().getID());
    job = JobBuilder.newJob(QuartzScheduledPollConsumerJob.class).usingJobData(map).build();
    // store additional information on job such as camel context etc
    QuartzHelper.updateJobDataMap(getCamelContext(), job, null);
    String id = triggerId;
    if (id == null) {
        id = "trigger-" + getCamelContext().getUuidGenerator().generateUuid();
    }
    trigger = TriggerBuilder.newTrigger().withIdentity(id, triggerGroup).withSchedule(CronScheduleBuilder.cronSchedule(getCron()).inTimeZone(getTimeZone())).build();
    LOG.debug("Scheduling job: {} with trigger: {}", job, trigger.getKey());
    quartzScheduler.scheduleJob(job, trigger);
}
Also used : JobDataMap(org.quartz.JobDataMap) QuartzComponent(org.apache.camel.component.quartz2.QuartzComponent)
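
doStart() stores either the route id or the Runnable itself in the JobDataMap, depending on whether the task can be recovered without serializing it. A hedged sketch of the consuming side that mirrors that decision (this is not the real QuartzScheduledPollConsumerJob; only the "routeId" and "task" keys come from the code above):

import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class PollConsumerJobSketch implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        JobDataMap map = context.getMergedJobDataMap();
        String routeId = map.getString("routeId");
        if (routeId != null) {
            // durable path: resolve the poll task via the route id instead of
            // deserializing it from the job store
            System.out.println("running poll task for route " + routeId);
        } else {
            // transient path: the Runnable itself was stored in the map
            Runnable task = (Runnable) map.get("task");
            if (task != null) {
                task.run();
            }
        }
    }
}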

Example 14 with JobDataMap

use of org.quartz.JobDataMap in project OpenClinica by OpenClinica.

the class XsltTransformJob method postErrorMessage.

private void postErrorMessage(String message, JobExecutionContext context) {
    String SCHEDULER = "schedulerFactoryBean";
    try {
        ApplicationContext appContext = (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
        StdScheduler scheduler = (StdScheduler) appContext.getBean(SCHEDULER);
        JobDetail jobDetail = context.getJobDetail();
        JobDataMap dataMap = jobDetail.getJobDataMap();
        dataMap.put("failMessage", message);
        jobDetail.getJobBuilder().usingJobData(dataMap);
        // replace the job with the extra data
        scheduler.addJob(jobDetail, true);
    } catch (SchedulerException e) {
        throw new IllegalStateException("Error processing post error message", e);
    }
}
Also used : ApplicationContext(org.springframework.context.ApplicationContext) JobDetail(org.quartz.JobDetail) JobDataMap(org.quartz.JobDataMap) SchedulerException(org.quartz.SchedulerException) StdScheduler(org.quartz.impl.StdScheduler)
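
postErrorMessage() persists the failure text by re-registering the JobDetail with replace=true, so the message survives in the job's own JobDataMap between executions. A small hypothetical reader for that entry (the class and method names are invented; only the "failMessage" key comes from the code above):

import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;

public final class JobFailureMessages {

    private JobFailureMessages() {
    }

    public static String lastFailureMessage(Scheduler scheduler, JobKey jobKey) throws SchedulerException {
        JobDetail jobDetail = scheduler.getJobDetail(jobKey);
        if (jobDetail == null) {
            // job no longer registered with the scheduler
            return null;
        }
        JobDataMap dataMap = jobDetail.getJobDataMap();
        // null if postErrorMessage() never ran for this job
        return dataMap.getString("failMessage");
    }
}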

Example 15 with JobDataMap

use of org.quartz.JobDataMap in project OpenClinica by OpenClinica.

the class XsltTransformJob method executeInternal.

@Override
protected void executeInternal(JobExecutionContext context) {
    logger.info("Job " + context.getJobDetail().getDescription() + " started.");
    initDependencies(context.getScheduler());
    // need to generate a Locale for emailing users with i18n
    // TODO make dynamic?
    Locale locale = new Locale("en-US");
    ResourceBundleProvider.updateLocale(locale);
    ResourceBundle pageMessages = ResourceBundleProvider.getPageMessagesBundle();
    List<File> markForDelete = new LinkedList<File>();
    Boolean zipped = true;
    Boolean deleteOld = true;
    Boolean exceptions = false;
    JobDataMap dataMap = context.getMergedJobDataMap();
    String localeStr = dataMap.getString(LOCALE);
    String[] doNotDeleteUntilExtract = new String[4];
    int cnt = dataMap.getInt("count");
    DatasetBean datasetBean = null;
    if (localeStr != null) {
        locale = new Locale(localeStr);
        ResourceBundleProvider.updateLocale(locale);
        pageMessages = ResourceBundleProvider.getPageMessagesBundle();
    }
    // get the file information from the job
    String alertEmail = dataMap.getString(EMAIL);
    java.io.InputStream in = null;
    FileOutputStream endFileStream = null;
    UserAccountBean userBean = null;
    try {
        // init all fields from the data map
        int userAccountId = dataMap.getInt(USER_ID);
        int studyId = dataMap.getInt(STUDY_ID);
        String outputPath = dataMap.getString(POST_FILE_PATH);
        // get all user info, generate xml
        logger.debug("found output path: " + outputPath);
        String generalFileDir = dataMap.getString(XML_FILE_PATH);
        int dsId = dataMap.getInt(DATASET_ID);
        // JN: Changed from earlier versions; a static reference cannot be used here
        // because values such as datasetId differ for each dataset and must be
        // loaded dynamically
        ExtractPropertyBean epBean = (ExtractPropertyBean) dataMap.get(EP_BEAN);
        File doNotDelDir = new File(generalFileDir);
        if (doNotDelDir.isDirectory()) {
            doNotDeleteUntilExtract = doNotDelDir.list();
        }
        zipped = epBean.getZipFormat();
        deleteOld = epBean.getDeleteOld();
        long sysTimeBegin = System.currentTimeMillis();
        userBean = (UserAccountBean) userAccountDao.findByPK(userAccountId);
        StudyBean currentStudy = (StudyBean) studyDao.findByPK(studyId);
        StudyBean parentStudy = (StudyBean) studyDao.findByPK(currentStudy.getParentStudyId());
        String successMsg = epBean.getSuccessMessage();
        String failureMsg = epBean.getFailureMessage();
        final long start = System.currentTimeMillis();
        datasetBean = (DatasetBean) datasetDao.findByPK(dsId);
        ExtractBean eb = generateFileService.generateExtractBean(datasetBean, currentStudy, parentStudy);
        // generate file directory for file service
        datasetBean.setName(datasetBean.getName().replaceAll(" ", "_"));
        logger.debug("--> job starting: ");
        HashMap<String, Integer> answerMap = odmFileCreation.createODMFile(epBean.getFormat(), sysTimeBegin, generalFileDir, datasetBean, currentStudy, "", eb, currentStudy.getId(), currentStudy.getParentStudyId(), "99", (Boolean) dataMap.get(ZIPPED), false, (Boolean) dataMap.get(DELETE_OLD), epBean.getOdmType(), userBean);
        // won't save a record of the XML to db
        // won't be a zipped file, so that we can submit it for
        // transformation
        // this will have to be toggled by the export data format? no, the
        // export file will have to be zipped/not zipped
        String ODMXMLFileName = "";
        int fId = 0;
        Iterator<Entry<String, Integer>> it = answerMap.entrySet().iterator();
        while (it.hasNext()) {
            JobTerminationMonitor.check();
            Entry<String, Integer> entry = it.next();
            String key = entry.getKey();
            Integer value = entry.getValue();
            // JN: Since there is a logic to delete all the intermittent files,
            // this file could be a zip file.
            ODMXMLFileName = key;
            Integer fileID = value;
            fId = fileID.intValue();
            logger.debug("found " + fId + " and " + ODMXMLFileName);
        }
        logger.info("Finished ODM generation of job " + context.getJobDetail().getDescription());
        // create dirs
        File output = new File(outputPath);
        if (!output.isDirectory()) {
            output.mkdirs();
        }
        int numXLS = epBean.getFileName().length;
        int fileCntr = 0;
        String xmlFilePath = new File(generalFileDir + ODMXMLFileName).toURI().toURL().toExternalForm();
        String endFile = null;
        File oldFilesPath = new File(generalFileDir);
        while (fileCntr < numXLS) {
            JobTerminationMonitor.check();
            String xsltPath = dataMap.getString(XSLT_PATH) + File.separator + epBean.getFileName()[fileCntr];
            in = new java.io.FileInputStream(xsltPath);
            Transformer transformer = transformerFactory.newTransformer(new StreamSource(in));
            endFile = outputPath + File.separator + epBean.getExportFileName()[fileCntr];
            endFileStream = new FileOutputStream(endFile);
            transformer.transform(new StreamSource(xmlFilePath), new StreamResult(endFileStream));
            // JN...CLOSE THE STREAM...HMMMM
            in.close();
            endFileStream.close();
            fileCntr++;
            JobTerminationMonitor.check();
        }
        if (oldFilesPath.isDirectory()) {
            markForDelete = Arrays.asList(oldFilesPath.listFiles());
        // logic to prevent deleting the file being created.
        }
        final double done = setFormat(new Double(System.currentTimeMillis() - start) / 1000);
        logger.info("--> job completed in " + done + " ms");
        // run post processing
        ProcessingFunction function = epBean.getPostProcessing();
        String subject = "";
        String jobName = dataMap.getString(XsltTriggerService.JOB_NAME);
        StringBuffer emailBuffer = new StringBuffer("");
        emailBuffer.append("<p>" + pageMessages.getString("email_header_1") + " " + EmailEngine.getAdminEmail() + " " + pageMessages.getString("email_header_2") + " Job Execution " + pageMessages.getString("email_header_3") + "</p>");
        emailBuffer.append("<P>Dataset: " + datasetBean.getName() + "</P>");
        emailBuffer.append("<P>Study: " + currentStudy.getName() + "</P>");
        if (function != null && function.getClass().equals(org.akaza.openclinica.bean.service.SqlProcessingFunction.class)) {
            String dbUrl = ((org.akaza.openclinica.bean.service.SqlProcessingFunction) function).getDatabaseUrl();
            int lastIndex = dbUrl.lastIndexOf('/');
            String schemaName = dbUrl.substring(lastIndex);
            int HostIndex = dbUrl.substring(0, lastIndex).indexOf("//");
            String Host = dbUrl.substring(HostIndex, lastIndex);
            emailBuffer.append("<P>Database: " + ((org.akaza.openclinica.bean.service.SqlProcessingFunction) function).getDatabaseType() + "</P>");
            emailBuffer.append("<P>Schema: " + schemaName.replace("/", "") + "</P>");
            emailBuffer.append("<P>Host: " + Host.replace("//", "") + "</P>");
        }
        emailBuffer.append("<p>" + pageMessages.getString("html_email_body_1") + datasetBean.getName() + pageMessages.getString("html_email_body_2_2") + "</p>");
        if (function != null) {
            function.setTransformFileName(outputPath + File.separator + dataMap.getString(POST_FILE_NAME));
            function.setODMXMLFileName(endFile);
            function.setXslFileName(dataMap.getString(XSL_FILE_PATH));
            function.setDeleteOld((Boolean) dataMap.get(POST_PROC_DELETE_OLD));
            function.setZip((Boolean) dataMap.get(POST_PROC_ZIP));
            function.setLocation(dataMap.getString(POST_PROC_LOCATION));
            function.setExportFileName(dataMap.getString(POST_PROC_EXPORT_NAME));
            File[] oldFiles = getOldFiles(outputPath, dataMap.getString(POST_PROC_LOCATION));
            function.setOldFiles(oldFiles);
            File[] intermediateFiles = getInterFiles(dataMap.getString(POST_FILE_PATH));
            ProcessingResultType message = function.run();
            // Delete these files only when there is no failure
            if (message.getCode().intValue() != 2) {
                deleteOldFiles(intermediateFiles);
            }
            final long done2 = System.currentTimeMillis() - start;
            logger.info("--> postprocessing completed in " + done2 + " ms, found result type " + message.getCode());
            logger.info("--> postprocessing completed in " + done2 + " ms, found result type " + message.getCode());
            if (!function.getClass().equals(org.akaza.openclinica.bean.service.SqlProcessingFunction.class)) {
                String archivedFile = dataMap.getString(POST_FILE_NAME) + "." + function.getFileType();
                // download the zip file
                if (function.isZip()) {
                    archivedFile = archivedFile + ".zip";
                }
                // post processing as well.
                if (function.getClass().equals(org.akaza.openclinica.bean.service.PdfProcessingFunction.class)) {
                    archivedFile = function.getArchivedFileName();
                }
                ArchivedDatasetFileBean fbFinal = generateFileRecord(archivedFile, outputPath, datasetBean, done, new File(outputPath + File.separator + archivedFile).length(), ExportFormatBean.PDFFILE, userAccountId);
                if (successMsg.contains("$linkURL")) {
                    successMsg = successMsg.replace("$linkURL", "<a href=\"" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + "\">" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + " </a>");
                }
                emailBuffer.append("<p>" + successMsg + "</p>");
                logger.debug("System time begining.." + sysTimeBegin);
                logger.debug("System time end.." + System.currentTimeMillis());
                double sysTimeEnd = setFormat((System.currentTimeMillis() - sysTimeBegin) / 1000);
                logger.debug("difference" + sysTimeEnd);
                if (fbFinal != null) {
                    fbFinal.setFileSize((int) bytesToKilo(new File(archivedFile).length()));
                    fbFinal.setRunTime(sysTimeEnd);
                }
            }
            // otherwise don't do it
            if (message.getCode().intValue() == 1) {
                if (jobName != null) {
                    subject = "Success: " + jobName;
                } else {
                    subject = "Success: " + datasetBean.getName();
                }
            } else if (message.getCode().intValue() == 2) {
                if (jobName != null) {
                    subject = "Failure: " + jobName;
                } else {
                    subject = "Failure: " + datasetBean.getName();
                }
                if (failureMsg != null && !failureMsg.isEmpty()) {
                    emailBuffer.append(failureMsg);
                }
                emailBuffer.append("<P>").append(message.getDescription());
                postErrorMessage(message.getDescription(), context);
            } else if (message.getCode().intValue() == 3) {
                if (jobName != null) {
                    subject = "Update: " + jobName;
                } else {
                    subject = "Update: " + datasetBean.getName();
                }
            }
        } else {
            // extract ran but no post-processing - we send an email with
            // success and url to link to
            // generate archived dataset file bean here, and use the id to
            // build the URL
            String archivedFilename = dataMap.getString(POST_FILE_NAME);
            // the zip file
            if (zipped) {
                archivedFilename = dataMap.getString(POST_FILE_NAME) + ".zip";
            }
            // delete old files now
            List<File> intermediateFiles = generateFileService.getOldFiles();
            String[] dontDelFiles = epBean.getDoNotDelFiles();
            //JN: The following is the code for zipping up the files, in case of more than one xsl being provided.
            if (dontDelFiles.length > 1 && zipped) {
                logger.debug("count =====" + cnt + "dontDelFiles length==---" + dontDelFiles.length);
                logger.debug("Entering this?" + cnt + "dontDelFiles" + dontDelFiles);
                String path = outputPath + File.separator;
                logger.debug("path = " + path);
                logger.debug("zipName?? = " + epBean.getZipName());
                String zipName = epBean.getZipName() == null || epBean.getZipName().isEmpty() ? endFile + ".zip" : path + epBean.getZipName() + ".zip";
                archivedFilename = new File(zipName).getName();
                zipAll(path, epBean.getDoNotDelFiles(), zipName);
                String[] tempArray = { archivedFilename };
                dontDelFiles = tempArray;
                endFile = archivedFilename;
            } else if (zipped) {
                markForDelete = zipxmls(markForDelete, endFile);
                endFile = endFile + ".zip";
                String[] temp = new String[dontDelFiles.length];
                int i = 0;
                while (i < dontDelFiles.length) {
                    temp[i] = dontDelFiles[i] + ".zip";
                    i++;
                }
                dontDelFiles = temp;
                // Actually deleting all the xml files which are produced
                // since its zipped
                FilenameFilter xmlFilter = new XMLFileFilter();
                File tempFile = new File(generalFileDir);
                deleteOldFiles(tempFile.listFiles(xmlFilter));
            }
            ArchivedDatasetFileBean fbFinal = generateFileRecord(archivedFilename, outputPath, datasetBean, done, new File(outputPath + File.separator + archivedFilename).length(), ExportFormatBean.TXTFILE, userAccountId);
            if (jobName != null) {
                subject = "Job Ran: " + jobName;
            } else {
                subject = "Job Ran: " + datasetBean.getName();
            }
            if (successMsg == null || successMsg.isEmpty()) {
                logger.info("email buffer??" + emailBuffer);
            } else {
                if (successMsg.contains("$linkURL")) {
                    successMsg = successMsg.replace("$linkURL", "<a href=\"" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + "\">" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + " </a>");
                }
                emailBuffer.append("<p>" + successMsg + "</p>");
            }
            if (deleteOld) {
                deleteIntermFiles(intermediateFiles, endFile, dontDelFiles);
                deleteIntermFiles(markForDelete, endFile, dontDelFiles);
            }
        }
        // email the message to the user
        emailBuffer.append("<p>" + pageMessages.getString("html_email_body_5") + "</p>");
        try {
            // @pgawade 19-April-2011 Log the event into audit_event table
            if (null != dataMap.get("job_type") && ((String) dataMap.get("job_type")).equalsIgnoreCase("exportJob")) {
                String extractName = (String) dataMap.get(XsltTriggerService.JOB_NAME);
                TriggerBean triggerBean = new TriggerBean();
                triggerBean.setDataset(datasetBean);
                triggerBean.setUserAccount(userBean);
                triggerBean.setFullName(extractName);
                String actionMsg = "You may access the " + (String) dataMap.get(XsltTriggerService.EXPORT_FORMAT) + " file by changing your study/site to " + currentStudy.getName() + " and selecting the Export Data icon for " + datasetBean.getName() + " dataset on the View Datasets page.";
                auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, actionMsg);
            }
            mailSender.sendEmail(alertEmail, EmailEngine.getAdminEmail(), subject, emailBuffer.toString(), true);
        } catch (OpenClinicaSystemException ose) {
            // Do Nothing, In the future we might want to have an email
            // status added to system.
            logger.info("exception sending mail: " + ose.getMessage());
            logger.error("exception sending mail: " + ose.getMessage());
        }
        logger.info("just sent email to " + alertEmail + ", from " + EmailEngine.getAdminEmail());
        if (successMsg == null) {
            successMsg = " ";
        }
        postSuccessMessage(successMsg, context);
    } catch (JobInterruptedException e) {
        logger.info("Job was cancelled by the user");
        exceptions = true;
    } catch (TransformerConfigurationException e) {
        sendErrorEmail(e.getMessage(), context, alertEmail);
        postErrorMessage(e.getMessage(), context);
        logger.error("Error executing extract", e);
        exceptions = true;
    } catch (FileNotFoundException e) {
        sendErrorEmail(e.getMessage(), context, alertEmail);
        postErrorMessage(e.getMessage(), context);
        logger.error("Error executing extract", e);
        exceptions = true;
    } catch (TransformerFactoryConfigurationError e) {
        sendErrorEmail(e.getMessage(), context, alertEmail);
        postErrorMessage(e.getMessage(), context);
        logger.error("Error executing extract", e);
        exceptions = true;
    } catch (TransformerException e) {
        sendErrorEmail(e.getMessage(), context, alertEmail);
        postErrorMessage(e.getMessage(), context);
        logger.error("Error executing extract", e);
        exceptions = true;
    } catch (Exception ee) {
        sendErrorEmail(ee.getMessage(), context, alertEmail);
        postErrorMessage(ee.getMessage(), context);
        logger.error("Error executing extract", ee);
        exceptions = true;
        if (null != dataMap.get("job_type") && ((String) dataMap.get("job_type")).equalsIgnoreCase("exportJob")) {
            TriggerBean triggerBean = new TriggerBean();
            triggerBean.setUserAccount(userBean);
            triggerBean.setFullName((String) dataMap.get(XsltTriggerService.JOB_NAME));
            auditEventDAO.createRowForExtractDataJobFailure(triggerBean);
        }
    } finally {
        if (in != null)
            try {
                in.close();
            } catch (IOException e) {
                logger.error("Error executing extract", e);
            }
        if (endFileStream != null)
            try {
                endFileStream.close();
            } catch (IOException e) {
                logger.error("Error executing extract", e);
            }
        if (exceptions) {
            logger.debug("EXCEPTIONS... EVEN TEHN DELETING OFF OLD FILES");
            String generalFileDir = dataMap.getString(XML_FILE_PATH);
            File oldFilesPath = new File(generalFileDir);
            if (oldFilesPath.isDirectory()) {
                markForDelete = Arrays.asList(oldFilesPath.listFiles());
            }
            logger.debug("deleting the old files reference from archive dataset");
            if (deleteOld) {
                deleteIntermFiles(markForDelete, "", doNotDeleteUntilExtract);
            }
        }
        if (datasetBean != null)
            resetArchiveDataset(datasetBean.getId());
        logger.info("Job " + context.getJobDetail().getDescription() + " finished.");
    }
}
Also used : Locale(java.util.Locale) DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean) FileNotFoundException(java.io.FileNotFoundException) ExtractPropertyBean(org.akaza.openclinica.bean.extract.ExtractPropertyBean) JobDataMap(org.quartz.JobDataMap) TriggerBean(org.akaza.openclinica.bean.admin.TriggerBean) StreamResult(javax.xml.transform.stream.StreamResult) ProcessingFunction(org.akaza.openclinica.bean.service.ProcessingFunction) StudyBean(org.akaza.openclinica.bean.managestudy.StudyBean) OpenClinicaSystemException(org.akaza.openclinica.exception.OpenClinicaSystemException) XMLFileFilter(org.akaza.openclinica.core.util.XMLFileFilter) LinkedList(java.util.LinkedList) ExtractBean(org.akaza.openclinica.bean.extract.ExtractBean) FileOutputStream(java.io.FileOutputStream) ResourceBundle(java.util.ResourceBundle) File(java.io.File) Transformer(javax.xml.transform.Transformer) TransformerConfigurationException(javax.xml.transform.TransformerConfigurationException) FilenameFilter(java.io.FilenameFilter) ZipEntry(java.util.zip.ZipEntry) Entry(java.util.Map.Entry) UserAccountBean(org.akaza.openclinica.bean.login.UserAccountBean) TransformerException(javax.xml.transform.TransformerException) ArchivedDatasetFileBean(org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean) TransformerFactoryConfigurationError(javax.xml.transform.TransformerFactoryConfigurationError) StreamSource(javax.xml.transform.stream.StreamSource) FileInputStream(java.io.FileInputStream) IOException(java.io.IOException) SchedulerException(org.quartz.SchedulerException) ProcessingResultType(org.akaza.openclinica.bean.service.ProcessingResultType)
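
executeInternal() pulls everything it needs (user, study, dataset, extract properties, alert e-mail) out of the merged JobDataMap with typed getters or casts. A sketch of how the producer side could populate that map when the export job is scheduled; the string keys are illustrative literals, whereas the real OpenClinica code shares constants (EMAIL, USER_ID, STUDY_ID, DATASET_ID, EP_BEAN, ...) between XsltTriggerService and XsltTransformJob and stores considerably more state (paths, formats, locale, XSL file names):

import org.akaza.openclinica.bean.extract.ExtractPropertyBean;
import org.quartz.JobBuilder;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;

public class XsltExportSchedulingSketch {

    public void scheduleExport(Scheduler scheduler, int userId, int studyId, int datasetId,
            ExtractPropertyBean epBean, String alertEmail) throws SchedulerException {
        JobDataMap dataMap = new JobDataMap();
        dataMap.put("email", alertEmail);      // read via dataMap.getString(EMAIL)
        dataMap.put("user_id", userId);        // read via dataMap.getInt(USER_ID)
        dataMap.put("studyId", studyId);       // read via dataMap.getInt(STUDY_ID)
        dataMap.put("dsId", datasetId);        // read via dataMap.getInt(DATASET_ID)
        dataMap.put("epBean", epBean);         // read via dataMap.get(EP_BEAN) and cast
        dataMap.put("job_type", "exportJob");  // checked before the audit_event row is written

        // XsltTransformJob is the class quoted above; its import is omitted here
        JobDetail job = JobBuilder.newJob(XsltTransformJob.class)
                .withIdentity("export-dataset-" + datasetId, "export")
                .usingJobData(dataMap)
                .build();

        // fires once, immediately (the default schedule); the real jobs may be cron-driven
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("export-dataset-trigger-" + datasetId, "export")
                .startNow()
                .build();

        scheduler.scheduleJob(job, trigger);
    }
}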

Aggregations

JobDataMap (org.quartz.JobDataMap): 56
Trigger (org.quartz.Trigger): 21
SchedulerException (org.quartz.SchedulerException): 20
JobDetail (org.quartz.JobDetail): 18
TriggerBuilder.newTrigger (org.quartz.TriggerBuilder.newTrigger): 11
Scheduler (org.quartz.Scheduler): 10
JobExecutionException (org.quartz.JobExecutionException): 9
ApplicationContext (org.springframework.context.ApplicationContext): 9
ArrayList (java.util.ArrayList): 6
Date (java.util.Date): 6
HashMap (java.util.HashMap): 6
TriggerBean (org.akaza.openclinica.bean.admin.TriggerBean): 6
Command (org.openhab.core.types.Command): 5
SchedulerContext (org.quartz.SchedulerContext): 5
SimpleTrigger (org.quartz.SimpleTrigger): 5
IOException (java.io.IOException): 4
InetSocketAddress (java.net.InetSocketAddress): 4
SocketChannel (java.nio.channels.SocketChannel): 4
Locale (java.util.Locale): 4
DatasetBean (org.akaza.openclinica.bean.extract.DatasetBean): 4