Example 1 with ExtractBean

use of org.akaza.openclinica.bean.extract.ExtractBean in project OpenClinica by OpenClinica.

In the class XsltTransformJob, the method executeInternal:

@Override
protected void executeInternal(JobExecutionContext context) {
    logger.info("Job " + context.getJobDetail().getDescription() + " started.");
    initDependencies(context.getScheduler());
    // need to generate a Locale for emailing users with i18n
    // TODO make dynamic?
    Locale locale = new Locale("en", "US");
    ResourceBundleProvider.updateLocale(locale);
    ResourceBundle pageMessages = ResourceBundleProvider.getPageMessagesBundle();
    List<File> markForDelete = new LinkedList<File>();
    Boolean zipped = true;
    Boolean deleteOld = true;
    Boolean exceptions = false;
    JobDataMap dataMap = context.getMergedJobDataMap();
    String localeStr = dataMap.getString(LOCALE);
    String[] doNotDeleteUntilExtract = new String[4];
    int cnt = dataMap.getInt("count");
    DatasetBean datasetBean = null;
    if (localeStr != null) {
        locale = new Locale(localeStr);
        ResourceBundleProvider.updateLocale(locale);
        pageMessages = ResourceBundleProvider.getPageMessagesBundle();
    }
    // get the file information from the job
    String alertEmail = dataMap.getString(EMAIL);
    java.io.InputStream in = null;
    FileOutputStream endFileStream = null;
    UserAccountBean userBean = null;
    try {
        // init all fields from the data map
        int userAccountId = dataMap.getInt(USER_ID);
        int studyId = dataMap.getInt(STUDY_ID);
        String outputPath = dataMap.getString(POST_FILE_PATH);
        // get all user info, generate xml
        logger.debug("found output path: " + outputPath);
        String generalFileDir = dataMap.getString(XML_FILE_PATH);
        int dsId = dataMap.getInt(DATASET_ID);
        // JN: Change from earlier versions, cannot get static reference as
        // static references don't work. Reason being for example there could be
        // datasetId as a variable which is different for each dataset and
        // that needs to be loaded dynamically
        ExtractPropertyBean epBean = (ExtractPropertyBean) dataMap.get(EP_BEAN);
        File doNotDelDir = new File(generalFileDir);
        if (doNotDelDir.isDirectory()) {
            doNotDeleteUntilExtract = doNotDelDir.list();
        }
        zipped = epBean.getZipFormat();
        deleteOld = epBean.getDeleteOld();
        long sysTimeBegin = System.currentTimeMillis();
        userBean = (UserAccountBean) userAccountDao.findByPK(userAccountId);
        StudyBean currentStudy = (StudyBean) studyDao.findByPK(studyId);
        StudyBean parentStudy = (StudyBean) studyDao.findByPK(currentStudy.getParentStudyId());
        String successMsg = epBean.getSuccessMessage();
        String failureMsg = epBean.getFailureMessage();
        final long start = System.currentTimeMillis();
        datasetBean = (DatasetBean) datasetDao.findByPK(dsId);
        ExtractBean eb = generateFileService.generateExtractBean(datasetBean, currentStudy, parentStudy);
        // generate file directory for file service
        datasetBean.setName(datasetBean.getName().replaceAll(" ", "_"));
        logger.debug("--> job starting: ");
        HashMap<String, Integer> answerMap = odmFileCreation.createODMFile(epBean.getFormat(), sysTimeBegin, generalFileDir, datasetBean, currentStudy, "", eb, currentStudy.getId(), currentStudy.getParentStudyId(), "99", (Boolean) dataMap.get(ZIPPED), false, (Boolean) dataMap.get(DELETE_OLD), epBean.getOdmType(), userBean);
        // won't save a record of the XML to db
        // won't be a zipped file, so that we can submit it for
        // transformation
        // this will have to be toggled by the export data format? no, the
        // export file will have to be zipped/not zipped
        String ODMXMLFileName = "";
        int fId = 0;
        Iterator<Entry<String, Integer>> it = answerMap.entrySet().iterator();
        while (it.hasNext()) {
            JobTerminationMonitor.check();
            Entry<String, Integer> entry = it.next();
            String key = entry.getKey();
            Integer value = entry.getValue();
            // JN: Since there is a logic to
            ODMXMLFileName = key;
            // delete all the intermittent
            // files, this file could be a zip
            // file.
            Integer fileID = value;
            fId = fileID.intValue();
            logger.debug("found " + fId + " and " + ODMXMLFileName);
        }
        logger.info("Finished ODM generation of job " + context.getJobDetail().getDescription());
        // create dirs
        File output = new File(outputPath);
        if (!output.isDirectory()) {
            output.mkdirs();
        }
        int numXLS = epBean.getFileName().length;
        int fileCntr = 0;
        String xmlFilePath = new File(generalFileDir + ODMXMLFileName).toURI().toURL().toExternalForm();
        String endFile = null;
        File oldFilesPath = new File(generalFileDir);
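        // Descriptive note: the loop below applies each configured XSL stylesheet
        // (epBean.getFileName()) to the generated ODM XML and writes one export
        // file per stylesheet into the job's output path.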
        while (fileCntr < numXLS) {
            JobTerminationMonitor.check();
            String xsltPath = dataMap.getString(XSLT_PATH) + File.separator + epBean.getFileName()[fileCntr];
            in = new java.io.FileInputStream(xsltPath);
            Transformer transformer = transformerFactory.newTransformer(new StreamSource(in));
            endFile = outputPath + File.separator + epBean.getExportFileName()[fileCntr];
            endFileStream = new FileOutputStream(endFile);
            transformer.transform(new StreamSource(xmlFilePath), new StreamResult(endFileStream));
            // JN...CLOSE THE STREAM...HMMMM
            in.close();
            endFileStream.close();
            fileCntr++;
            JobTerminationMonitor.check();
        }
        if (oldFilesPath.isDirectory()) {
            markForDelete = Arrays.asList(oldFilesPath.listFiles());
        // logic to prevent deleting the file being created.
        }
        final double done = setFormat(new Double(System.currentTimeMillis() - start) / 1000);
        logger.info("--> job completed in " + done + " ms");
        // run post processing
        ProcessingFunction function = epBean.getPostProcessing();
        String subject = "";
        String jobName = dataMap.getString(XsltTriggerService.JOB_NAME);
        StringBuffer emailBuffer = new StringBuffer("");
        emailBuffer.append("<p>" + pageMessages.getString("email_header_1") + " " + EmailEngine.getAdminEmail() + " " + pageMessages.getString("email_header_2") + " Job Execution " + pageMessages.getString("email_header_3") + "</p>");
        emailBuffer.append("<P>Dataset: " + datasetBean.getName() + "</P>");
        emailBuffer.append("<P>Study: " + currentStudy.getName() + "</P>");
        if (function != null && function.getClass().equals(org.akaza.openclinica.bean.service.SqlProcessingFunction.class)) {
            String dbUrl = ((org.akaza.openclinica.bean.service.SqlProcessingFunction) function).getDatabaseUrl();
            int lastIndex = dbUrl.lastIndexOf('/');
            String schemaName = dbUrl.substring(lastIndex);
            int hostIndex = dbUrl.substring(0, lastIndex).indexOf("//");
            String host = dbUrl.substring(hostIndex, lastIndex);
            emailBuffer.append("<P>Database: " + ((org.akaza.openclinica.bean.service.SqlProcessingFunction) function).getDatabaseType() + "</P>");
            emailBuffer.append("<P>Schema: " + schemaName.replace("/", "") + "</P>");
            emailBuffer.append("<P>Host: " + host.replace("//", "") + "</P>");
        }
        emailBuffer.append("<p>" + pageMessages.getString("html_email_body_1") + datasetBean.getName() + pageMessages.getString("html_email_body_2_2") + "</p>");
        if (function != null) {
            function.setTransformFileName(outputPath + File.separator + dataMap.getString(POST_FILE_NAME));
            function.setODMXMLFileName(endFile);
            function.setXslFileName(dataMap.getString(XSL_FILE_PATH));
            function.setDeleteOld((Boolean) dataMap.get(POST_PROC_DELETE_OLD));
            function.setZip((Boolean) dataMap.get(POST_PROC_ZIP));
            function.setLocation(dataMap.getString(POST_PROC_LOCATION));
            function.setExportFileName(dataMap.getString(POST_PROC_EXPORT_NAME));
            File[] oldFiles = getOldFiles(outputPath, dataMap.getString(POST_PROC_LOCATION));
            function.setOldFiles(oldFiles);
            File[] intermediateFiles = getInterFiles(dataMap.getString(POST_FILE_PATH));
            ProcessingResultType message = function.run();
            // Delete these files only in case when there is no failure
            if (message.getCode().intValue() != 2) {
                deleteOldFiles(intermediateFiles);
            }
            final long done2 = System.currentTimeMillis() - start;
            logger.info("--> postprocessing completed in " + done2 + " ms, found result type " + message.getCode());
            logger.info("--> postprocessing completed in " + done2 + " ms, found result type " + message.getCode());
            if (!function.getClass().equals(org.akaza.openclinica.bean.service.SqlProcessingFunction.class)) {
                String archivedFile = dataMap.getString(POST_FILE_NAME) + "." + function.getFileType();
                // download the zip file
                if (function.isZip()) {
                    archivedFile = archivedFile + ".zip";
                }
                // post processing as well.
                if (function.getClass().equals(org.akaza.openclinica.bean.service.PdfProcessingFunction.class)) {
                    archivedFile = function.getArchivedFileName();
                }
                ArchivedDatasetFileBean fbFinal = generateFileRecord(archivedFile, outputPath, datasetBean, done, new File(outputPath + File.separator + archivedFile).length(), ExportFormatBean.PDFFILE, userAccountId);
                if (successMsg.contains("$linkURL")) {
                    successMsg = successMsg.replace("$linkURL", "<a href=\"" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + "\">" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + " </a>");
                }
                emailBuffer.append("<p>" + successMsg + "</p>");
                logger.debug("System time begining.." + sysTimeBegin);
                logger.debug("System time end.." + System.currentTimeMillis());
                double sysTimeEnd = setFormat((System.currentTimeMillis() - sysTimeBegin) / 1000);
                logger.debug("difference" + sysTimeEnd);
                if (fbFinal != null) {
                    fbFinal.setFileSize((int) bytesToKilo(new File(outputPath + File.separator + archivedFile).length()));
                    fbFinal.setRunTime(sysTimeEnd);
                }
            }
            // otherwise don't do it
            if (message.getCode().intValue() == 1) {
                if (jobName != null) {
                    subject = "Success: " + jobName;
                } else {
                    subject = "Success: " + datasetBean.getName();
                }
            } else if (message.getCode().intValue() == 2) {
                if (jobName != null) {
                    subject = "Failure: " + jobName;
                } else {
                    subject = "Failure: " + datasetBean.getName();
                }
                if (failureMsg != null && !failureMsg.isEmpty()) {
                    emailBuffer.append(failureMsg);
                }
                emailBuffer.append("<P>").append(message.getDescription());
                postErrorMessage(message.getDescription(), context);
            } else if (message.getCode().intValue() == 3) {
                if (jobName != null) {
                    subject = "Update: " + jobName;
                } else {
                    subject = "Update: " + datasetBean.getName();
                }
            }
        } else {
            // extract ran but no post-processing - we send an email with
            // success and url to link to
            // generate archived dataset file bean here, and use the id to
            // build the URL
            String archivedFilename = dataMap.getString(POST_FILE_NAME);
            // the zip file
            if (zipped) {
                archivedFilename = dataMap.getString(POST_FILE_NAME) + ".zip";
            }
            // delete old files now
            List<File> intermediateFiles = generateFileService.getOldFiles();
            String[] dontDelFiles = epBean.getDoNotDelFiles();
            //JN: The following is the code for zipping up the files, in case of more than one xsl being provided.
            if (dontDelFiles.length > 1 && zipped) {
                logger.debug("count =====" + cnt + "dontDelFiles length==---" + dontDelFiles.length);
                logger.debug("Entering this?" + cnt + "dontDelFiles" + dontDelFiles);
                String path = outputPath + File.separator;
                logger.debug("path = " + path);
                logger.debug("zipName?? = " + epBean.getZipName());
                String zipName = epBean.getZipName() == null || epBean.getZipName().isEmpty() ? endFile + ".zip" : path + epBean.getZipName() + ".zip";
                archivedFilename = new File(zipName).getName();
                zipAll(path, epBean.getDoNotDelFiles(), zipName);
                String[] tempArray = { archivedFilename };
                dontDelFiles = tempArray;
                endFile = archivedFilename;
            } else if (zipped) {
                markForDelete = zipxmls(markForDelete, endFile);
                endFile = endFile + ".zip";
                String[] temp = new String[dontDelFiles.length];
                int i = 0;
                while (i < dontDelFiles.length) {
                    temp[i] = dontDelFiles[i] + ".zip";
                    i++;
                }
                dontDelFiles = temp;
                // Actually deleting all the XML files which are produced,
                // since it's zipped
                FilenameFilter xmlFilter = new XMLFileFilter();
                File tempFile = new File(generalFileDir);
                deleteOldFiles(tempFile.listFiles(xmlFilter));
            }
            ArchivedDatasetFileBean fbFinal = generateFileRecord(archivedFilename, outputPath, datasetBean, done, new File(outputPath + File.separator + archivedFilename).length(), ExportFormatBean.TXTFILE, userAccountId);
            if (jobName != null) {
                subject = "Job Ran: " + jobName;
            } else {
                subject = "Job Ran: " + datasetBean.getName();
            }
            if (successMsg == null || successMsg.isEmpty()) {
                logger.info("email buffer??" + emailBuffer);
            } else {
                if (successMsg.contains("$linkURL")) {
                    successMsg = successMsg.replace("$linkURL", "<a href=\"" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + "\">" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + " </a>");
                }
                emailBuffer.append("<p>" + successMsg + "</p>");
            }
            if (deleteOld) {
                deleteIntermFiles(intermediateFiles, endFile, dontDelFiles);
                deleteIntermFiles(markForDelete, endFile, dontDelFiles);
            }
        }
        // email the message to the user
        emailBuffer.append("<p>" + pageMessages.getString("html_email_body_5") + "</p>");
        try {
            // @pgawade 19-April-2011 Log the event into audit_event table
            if (null != dataMap.get("job_type") && ((String) dataMap.get("job_type")).equalsIgnoreCase("exportJob")) {
                String extractName = (String) dataMap.get(XsltTriggerService.JOB_NAME);
                TriggerBean triggerBean = new TriggerBean();
                triggerBean.setDataset(datasetBean);
                triggerBean.setUserAccount(userBean);
                triggerBean.setFullName(extractName);
                String actionMsg = "You may access the " + (String) dataMap.get(XsltTriggerService.EXPORT_FORMAT) + " file by changing your study/site to " + currentStudy.getName() + " and selecting the Export Data icon for " + datasetBean.getName() + " dataset on the View Datasets page.";
                auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, actionMsg);
            }
            mailSender.sendEmail(alertEmail, EmailEngine.getAdminEmail(), subject, emailBuffer.toString(), true);
        } catch (OpenClinicaSystemException ose) {
            // Do Nothing, In the future we might want to have an email
            // status added to system.
            logger.info("exception sending mail: " + ose.getMessage());
            logger.error("exception sending mail: " + ose.getMessage());
        }
        logger.info("just sent email to " + alertEmail + ", from " + EmailEngine.getAdminEmail());
        if (successMsg == null) {
            successMsg = " ";
        }
        postSuccessMessage(successMsg, context);
    } catch (JobInterruptedException e) {
        logger.info("Job was cancelled by the user");
        exceptions = true;
    } catch (TransformerConfigurationException e) {
        sendErrorEmail(e.getMessage(), context, alertEmail);
        postErrorMessage(e.getMessage(), context);
        logger.error("Error executing extract", e);
        exceptions = true;
    } catch (FileNotFoundException e) {
        sendErrorEmail(e.getMessage(), context, alertEmail);
        postErrorMessage(e.getMessage(), context);
        logger.error("Error executing extract", e);
        exceptions = true;
    } catch (TransformerFactoryConfigurationError e) {
        sendErrorEmail(e.getMessage(), context, alertEmail);
        postErrorMessage(e.getMessage(), context);
        logger.error("Error executing extract", e);
        exceptions = true;
    } catch (TransformerException e) {
        sendErrorEmail(e.getMessage(), context, alertEmail);
        postErrorMessage(e.getMessage(), context);
        logger.error("Error executing extract", e);
        exceptions = true;
    } catch (Exception ee) {
        sendErrorEmail(ee.getMessage(), context, alertEmail);
        postErrorMessage(ee.getMessage(), context);
        logger.error("Error executing extract", ee);
        exceptions = true;
        if (null != dataMap.get("job_type") && ((String) dataMap.get("job_type")).equalsIgnoreCase("exportJob")) {
            TriggerBean triggerBean = new TriggerBean();
            triggerBean.setUserAccount(userBean);
            triggerBean.setFullName((String) dataMap.get(XsltTriggerService.JOB_NAME));
            auditEventDAO.createRowForExtractDataJobFailure(triggerBean);
        }
    } finally {
        if (in != null)
            try {
                in.close();
            } catch (IOException e) {
                logger.error("Error executing extract", e);
            }
        if (endFileStream != null)
            try {
                endFileStream.close();
            } catch (IOException e) {
                logger.error("Error executing extract", e);
            }
        if (exceptions) {
            logger.debug("EXCEPTIONS... EVEN TEHN DELETING OFF OLD FILES");
            String generalFileDir = dataMap.getString(XML_FILE_PATH);
            File oldFilesPath = new File(generalFileDir);
            if (oldFilesPath.isDirectory()) {
                markForDelete = Arrays.asList(oldFilesPath.listFiles());
            }
            logger.debug("deleting the old files reference from archive dataset");
            if (deleteOld) {
                deleteIntermFiles(markForDelete, "", doNotDeleteUntilExtract);
            }
        }
        if (datasetBean != null)
            resetArchiveDataset(datasetBean.getId());
        logger.info("Job " + context.getJobDetail().getDescription() + " finished.");
    }
}
Also used : Locale(java.util.Locale) DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean) FileNotFoundException(java.io.FileNotFoundException) ExtractPropertyBean(org.akaza.openclinica.bean.extract.ExtractPropertyBean) JobDataMap(org.quartz.JobDataMap) TriggerBean(org.akaza.openclinica.bean.admin.TriggerBean) StreamResult(javax.xml.transform.stream.StreamResult) ProcessingFunction(org.akaza.openclinica.bean.service.ProcessingFunction) StudyBean(org.akaza.openclinica.bean.managestudy.StudyBean) OpenClinicaSystemException(org.akaza.openclinica.exception.OpenClinicaSystemException) XMLFileFilter(org.akaza.openclinica.core.util.XMLFileFilter) LinkedList(java.util.LinkedList) ExtractBean(org.akaza.openclinica.bean.extract.ExtractBean) FileOutputStream(java.io.FileOutputStream) ResourceBundle(java.util.ResourceBundle) File(java.io.File) Transformer(javax.xml.transform.Transformer) TransformerConfigurationException(javax.xml.transform.TransformerConfigurationException) FilenameFilter(java.io.FilenameFilter) ZipEntry(java.util.zip.ZipEntry) Entry(java.util.Map.Entry) UserAccountBean(org.akaza.openclinica.bean.login.UserAccountBean) TransformerException(javax.xml.transform.TransformerException) ArchivedDatasetFileBean(org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean) TransformerFactoryConfigurationError(javax.xml.transform.TransformerFactoryConfigurationError) StreamSource(javax.xml.transform.stream.StreamSource) FileInputStream(java.io.FileInputStream) IOException(java.io.IOException) SchedulerException(org.quartz.SchedulerException) ProcessingResultType(org.akaza.openclinica.bean.service.ProcessingResultType)
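
The loop above that applies each XSL stylesheet to the generated ODM XML is the core of XsltTransformJob. The following is a minimal standalone sketch of that single step, not the project's code: the file paths are placeholders rather than values from the JobDataMap, and try-with-resources replaces the manual close() calls so the streams are released even when the transform fails.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

public class OdmXsltSketch {

    public static void main(String[] args) throws Exception {
        // Placeholder paths; in the job these come from the JobDataMap and ExtractPropertyBean.
        File odmXml = new File("odm-extract.xml");
        File stylesheet = new File("stylesheet.xsl");
        File exportFile = new File("export.txt");

        TransformerFactory factory = TransformerFactory.newInstance();
        // try-with-resources closes both streams even if the transform throws
        try (FileInputStream xslIn = new FileInputStream(stylesheet);
             FileOutputStream out = new FileOutputStream(exportFile)) {
            Transformer transformer = factory.newTransformer(new StreamSource(xslIn));
            transformer.transform(new StreamSource(odmXml), new StreamResult(out));
        }
    }
}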

Example 2 with ExtractBean

use of org.akaza.openclinica.bean.extract.ExtractBean in project OpenClinica by OpenClinica.

In the class GenerateExtractFileService, the method generateExtractBean:

public ExtractBean generateExtractBean(DatasetBean dsetBean, StudyBean currentStudy, StudyBean parentStudy) {
    ExtractBean eb = new ExtractBean(ds);
    eb.setDataset(dsetBean);
    eb.setShowUniqueId(CoreResources.getField("show_unique_id"));
    eb.setStudy(currentStudy);
    eb.setParentStudy(parentStudy);
    eb.setDateCreated(new java.util.Date());
    return eb;
}
Also used : ExtractBean(org.akaza.openclinica.bean.extract.ExtractBean) Date(java.util.Date)
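
A caller typically loads the DatasetBean and the current and parent StudyBean through their DAOs before invoking generateExtractBean, as Example 1 above and Examples 3 and 4 below do. The sketch here only illustrates that wiring: the class and method are hypothetical, and dataSource and generateFileService are assumed to be provided the same way the examples obtain them.

import javax.sql.DataSource;

import org.akaza.openclinica.bean.extract.DatasetBean;
import org.akaza.openclinica.bean.extract.ExtractBean;
import org.akaza.openclinica.bean.managestudy.StudyBean;
import org.akaza.openclinica.dao.extract.DatasetDAO;
import org.akaza.openclinica.dao.managestudy.StudyDAO;
import org.akaza.openclinica.service.extract.GenerateExtractFileService;

public class ExtractBeanUsageSketch {

    // Illustrative helper; dataSource and generateFileService are assumed to be
    // wired elsewhere, as in the examples in this listing.
    public ExtractBean buildExtractBean(DataSource dataSource,
                                        GenerateExtractFileService generateFileService,
                                        int datasetId) {
        DatasetDAO datasetDao = new DatasetDAO(dataSource);
        StudyDAO studyDao = new StudyDAO(dataSource);

        DatasetBean dataset = (DatasetBean) datasetDao.findByPK(datasetId);
        StudyBean study = (StudyBean) studyDao.findByPK(dataset.getStudyId());
        // Fall back to the study itself when there is no parent, as Example 4 does.
        StudyBean parentStudy = study.getParentStudyId() > 0
                ? (StudyBean) studyDao.findByPK(study.getParentStudyId())
                : study;

        return generateFileService.generateExtractBean(dataset, study, parentStudy);
    }
}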

Example 3 with ExtractBean

use of org.akaza.openclinica.bean.extract.ExtractBean in project OpenClinica by OpenClinica.

In the class ExportDatasetServlet, the method processRequest:

@Override
public void processRequest() throws Exception {
    DatasetDAO dsdao = new DatasetDAO(sm.getDataSource());
    ArchivedDatasetFileDAO asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
    FormProcessor fp = new FormProcessor(request);
    GenerateExtractFileService generateFileService = new GenerateExtractFileService(sm.getDataSource(), (CoreResources) SpringServletAccess.getApplicationContext(context).getBean("coreResources"), (RuleSetRuleDao) SpringServletAccess.getApplicationContext(context).getBean("ruleSetRuleDao"));
    String action = fp.getString("action");
    int datasetId = fp.getInt("datasetId");
    int adfId = fp.getInt("adfId");
    if (datasetId == 0) {
        try {
            DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
            datasetId = dsb.getId();
            logger.info("dataset id was zero, trying session: " + datasetId);
        } catch (NullPointerException e) {
            e.printStackTrace();
            logger.info("tripped over null pointer exception");
        }
    }
    DatasetBean db = (DatasetBean) dsdao.findByPK(datasetId);
    StudyDAO sdao = new StudyDAO(sm.getDataSource());
    StudyBean study = (StudyBean) sdao.findByPK(db.getStudyId());
    checkRoleByUserAndStudy(ub, study.getParentStudyId(), study.getId());
    //Checks if the study is current study or child of current study
    if (study.getId() != currentStudy.getId() && study.getParentStudyId() != currentStudy.getId()) {
        addPageMessage(respage.getString("no_have_correct_privilege_current_study") + " " + respage.getString("change_active_study_or_contact"));
        forwardPage(Page.MENU_SERVLET);
        return;
    }
    /**
         * @vbc 08/06/2008 NEW EXTRACT DATA IMPLEMENTATION get study_id and
         *      parentstudy_id int currentstudyid = currentStudy.getId(); int
         *      parentstudy = currentStudy.getParentStudyId(); if (parentstudy >
         *      0) { // is OK } else { // same parentstudy = currentstudyid; } //
         */
    int currentstudyid = currentStudy.getId();
    // YW 11-09-2008 << modified logic here.
    int parentstudy = currentstudyid;
    // YW 11-09-2008 >>
    StudyBean parentStudy = new StudyBean();
    if (currentStudy.getParentStudyId() > 0) {
        //StudyDAO sdao = new StudyDAO(sm.getDataSource());
        parentStudy = (StudyBean) sdao.findByPK(currentStudy.getParentStudyId());
    }
    ExtractBean eb = generateFileService.generateExtractBean(db, currentStudy, parentStudy);
    if (StringUtil.isBlank(action)) {
        loadList(db, asdfdao, datasetId, fp, eb);
        forwardPage(Page.EXPORT_DATASETS);
    } else if ("delete".equalsIgnoreCase(action) && adfId > 0) {
        boolean success = false;
        ArchivedDatasetFileBean adfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(adfId);
        File file = new File(adfBean.getFileReference());
        if (!file.canWrite()) {
            addPageMessage(respage.getString("write_protected"));
        } else {
            success = file.delete();
            if (success) {
                asdfdao.deleteArchiveDataset(adfBean);
                addPageMessage(respage.getString("file_removed"));
            } else {
                addPageMessage(respage.getString("error_removing_file"));
            }
        }
        loadList(db, asdfdao, datasetId, fp, eb);
        forwardPage(Page.EXPORT_DATASETS);
    } else {
        logger.info("**** found action ****: " + action);
        String generateReport = "";
        // generate file, and show screen export
        // String generalFileDir = DATASET_DIR + db.getId() +
        // File.separator;
        // change this up, so that we don't overwrite anything
        String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
        SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
        String generalFileDir = DATASET_DIR + db.getId() + File.separator + sdfDir.format(new java.util.Date());
        String fileName = "";
        db.setName(db.getName().replaceAll(" ", "_"));
        Page finalTarget = Page.GENERATE_DATASET;
        finalTarget = Page.EXPORT_DATA_CUSTOM;
        // now display report according to format specified
        // TODO revise final target to set to fileReference????
        long sysTimeBegin = System.currentTimeMillis();
        int fId = 0;
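        // The branches below dispatch on the requested export format
        // (sas, odm, txt, html, spss, csv, excel) and set fileName/finalTarget accordingly.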
        if ("sas".equalsIgnoreCase(action)) {
            // generateReport =
            // dsdao.generateDataset(db,
            // ExtractBean.SAS_FORMAT,
            // currentStudy,
            // parentStudy);
            long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
            String SASFileName = db.getName() + "_sas.sas";
            // logger.info("found data set: "+generateReport);
            generateFileService.createFile(SASFileName, generalFileDir, generateReport, db, sysTimeEnd, ExportFormatBean.TXTFILE, true, ub);
            logger.info("created sas file");
            request.setAttribute("generate", generalFileDir + SASFileName);
            finalTarget.setFileName(generalFileDir + SASFileName);
            fileName = SASFileName;
        // won't work since page creator is private
        } else if ("odm".equalsIgnoreCase(action)) {
            String odmVersion = fp.getString("odmVersion");
            String ODMXMLFileName = "";
            // DRY
            // HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "");
            HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "", eb, currentStudy.getId(), currentStudy.getParentStudyId(), "99", true, true, true, null, ub);
            for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
                java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
                Object key = entry.getKey();
                Object value = entry.getValue();
                ODMXMLFileName = (String) key;
                Integer fileID = (Integer) value;
                fId = fileID.intValue();
            }
            fileName = ODMXMLFileName;
            request.setAttribute("generate", generalFileDir + ODMXMLFileName);
            logger.debug("+++ set the following: " + generalFileDir + ODMXMLFileName);
            // send a link with the SQL file? put the generated SQL file with the dataset?
            if (fp.getString("xalan") != null) {
                XalanTriggerService xts = new XalanTriggerService();
                String propertiesPath = SQLInitServlet.getField("filePath");
                // the trick there, we need to open up the zipped file and get at the XML
                openZipFile(generalFileDir + ODMXMLFileName + ".zip");
                // need to find out how to copy this xml file from /bin to the generalFileDir
                SimpleTrigger simpleTrigger = xts.generateXalanTrigger(propertiesPath + File.separator + "ODMReportStylesheet.xsl", ODMXMLFileName, generalFileDir + "output.sql", db.getId());
                scheduler = getScheduler();
                JobDetailFactoryBean jobDetailFactoryBean = new JobDetailFactoryBean();
                jobDetailFactoryBean.setGroup(xts.TRIGGER_GROUP_NAME);
                jobDetailFactoryBean.setName(simpleTrigger.getKey().getName());
                jobDetailFactoryBean.setJobClass(org.akaza.openclinica.web.job.XalanStatefulJob.class);
                jobDetailFactoryBean.setJobDataMap(simpleTrigger.getJobDataMap());
                // need durability?
                jobDetailFactoryBean.setDurability(true);
                try {
                    Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), simpleTrigger);
                    logger.info("== found job date: " + dateStart.toString());
                } catch (SchedulerException se) {
                    se.printStackTrace();
                }
            }
        } else if ("txt".equalsIgnoreCase(action)) {
            // generateReport =
            // dsdao.generateDataset(db,
            // ExtractBean.TXT_FORMAT,
            // currentStudy,
            // parentStudy);
            // eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            String TXTFileName = "";
            HashMap answerMap = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, db, currentstudyid, parentstudy, "", ub);
            // and of course DRY
            for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
                java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
                Object key = entry.getKey();
                Object value = entry.getValue();
                TXTFileName = (String) key;
                Integer fileID = (Integer) value;
                fId = fileID.intValue();
            }
            fileName = TXTFileName;
            request.setAttribute("generate", generalFileDir + TXTFileName);
            // finalTarget.setFileName(generalFileDir+TXTFileName);
            logger.debug("+++ set the following: " + generalFileDir + TXTFileName);
        } else if ("html".equalsIgnoreCase(action)) {
            // html based dataset browser
            TabReportBean answer = new TabReportBean();
            eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            eb.getMetadata();
            eb.computeReport(answer);
            request.setAttribute("dataset", db);
            request.setAttribute("extractBean", eb);
            finalTarget = Page.GENERATE_DATASET_HTML;
        } else if ("spss".equalsIgnoreCase(action)) {
            SPSSReportBean answer = new SPSSReportBean();
            // removed three lines here and put them in generate file
            // service, createSPSSFile method. tbh 01/2009
            eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            eb.getMetadata();
            eb.computeReport(answer);
            // System.out.println("*** isShowCRFversion:
            // "+db.isShowCRFversion());
            // TODO in the spirit of DRY, if this works we need to remove
            // lines 443-776 in this servlet, tbh 01/2009
            String DDLFileName = "";
            HashMap answerMap = generateFileService.createSPSSFile(db, eb, currentStudy, parentStudy, sysTimeBegin, generalFileDir, answer, "", ub);
            // hmm, DRY?
            for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
                java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
                Object key = entry.getKey();
                Object value = entry.getValue();
                DDLFileName = (String) key;
                Integer fileID = (Integer) value;
                fId = fileID.intValue();
            }
            request.setAttribute("generate", generalFileDir + DDLFileName);
            logger.debug("+++ set the following: " + generalFileDir + DDLFileName);
        } else if ("csv".equalsIgnoreCase(action)) {
            CommaReportBean answer = new CommaReportBean();
            eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            eb.getMetadata();
            eb.computeReport(answer);
            long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
            // logger.info("found data set: "+generateReport);
            String CSVFileName = db.getName() + "_comma.txt";
            fId = generateFileService.createFile(CSVFileName, generalFileDir, answer.toString(), db, sysTimeEnd, ExportFormatBean.CSVFILE, true, ub);
            fileName = CSVFileName;
            logger.info("just created csv file");
            request.setAttribute("generate", generalFileDir + CSVFileName);
        // finalTarget.setFileName(generalFileDir+CSVFileName);
        } else if ("excel".equalsIgnoreCase(action)) {
            // HSSFWorkbook excelReport = dsdao.generateExcelDataset(db,
            // ExtractBean.XLS_FORMAT,
            // currentStudy,
            // parentStudy);
            long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
            // TODO this will change and point to a created excel
            // spreadsheet, tbh
            String excelFileName = db.getName() + "_excel.xls";
            // fId = this.createFile(excelFileName,
            // generalFileDir,
            // excelReport,
            // db, sysTimeEnd,
            // ExportFormatBean.EXCELFILE);
            // logger.info("just created csv file, for excel output");
            // response.setHeader("Content-disposition","attachment;
            // filename="+CSVFileName);
            // logger.info("csv file name: "+CSVFileName);
            finalTarget = Page.GENERATE_EXCEL_DATASET;
            // response.setContentType("application/vnd.ms-excel");
            response.setHeader("Content-Disposition", "attachment; filename=" + db.getName() + "_excel.xls");
            request.setAttribute("generate", generalFileDir + excelFileName);
            logger.info("set 'generate' to :" + generalFileDir + excelFileName);
            fileName = excelFileName;
        // excelReport.write(stream);
        // stream.flush();
        // stream.close();
        // finalTarget.setFileName(WEB_DIR+db.getId()+"/"+excelFileName);
        }
        // <%@page contentType="application/vnd.ms-excel"%>
        if (!finalTarget.equals(Page.GENERATE_EXCEL_DATASET) && !finalTarget.equals(Page.GENERATE_DATASET_HTML)) {
            // to catch all the others and try to set a new path for file
            // capture
            // tbh, 4-18-05
            // request.setAttribute("generate",finalTarget.getFileName());
            // TODO changing path to show refresh page, then window with
            // link to download file, tbh 06-08-05
            // finalTarget.setFileName(
            // "/WEB-INF/jsp/extract/generatedFileDataset.jsp");
            finalTarget.setFileName("" + "/WEB-INF/jsp/extract/generateMetadataCore.jsp");
            // also set up table here???
            asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
            ArchivedDatasetFileBean asdfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(fId);
            // *** do we need this below? tbh
            ArrayList newFileList = new ArrayList();
            newFileList.add(asdfBean);
            // request.setAttribute("filelist",newFileList);
            ArrayList filterRows = ArchivedDatasetFileRow.generateRowsFromBeans(newFileList);
            EntityBeanTable table = fp.getEntityBeanTable();
            // sort by date
            table.setSortingIfNotExplicitlySet(3, false);
            String[] columns = { resword.getString("file_name"), resword.getString("run_time"), resword.getString("file_size"), resword.getString("created_date"), resword.getString("created_by") };
            table.setColumns(new ArrayList(Arrays.asList(columns)));
            table.hideColumnLink(0);
            table.hideColumnLink(1);
            table.hideColumnLink(2);
            table.hideColumnLink(3);
            table.hideColumnLink(4);
            // table.setQuery("ExportDataset?datasetId=" +db.getId(), new
            // HashMap());
            // trying to continue...
            // session.setAttribute("newDataset",db);
            request.setAttribute("dataset", db);
            request.setAttribute("file", asdfBean);
            table.setRows(filterRows);
            table.computeDisplay();
            request.setAttribute("table", table);
        // *** do we need this above? tbh
        }
        logger.info("set first part of 'generate' to :" + generalFileDir);
        logger.info("found file name: " + finalTarget.getFileName());
        //            String del = CoreResources.getField("dataset_file_delete");
        //            if (del.equalsIgnoreCase("true") || del.equals("")) {
        //                File deleteFile = new File(generalFileDir + fileName);
        //                deleteFile.delete();
        //            }
        forwardPage(finalTarget);
    }
}
Also used : ArchivedDatasetFileDAO(org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO) SchedulerException(org.quartz.SchedulerException) HashMap(java.util.HashMap) Date(java.util.Date) EntityBeanTable(org.akaza.openclinica.web.bean.EntityBeanTable) DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean) ArrayList(java.util.ArrayList) Page(org.akaza.openclinica.view.Page) CommaReportBean(org.akaza.openclinica.bean.extract.CommaReportBean) ZipEntry(java.util.zip.ZipEntry) Iterator(java.util.Iterator) TabReportBean(org.akaza.openclinica.bean.extract.TabReportBean) SimpleTrigger(org.quartz.SimpleTrigger) StudyDAO(org.akaza.openclinica.dao.managestudy.StudyDAO) ArchivedDatasetFileBean(org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean) SPSSReportBean(org.akaza.openclinica.bean.extract.SPSSReportBean) GenerateExtractFileService(org.akaza.openclinica.service.extract.GenerateExtractFileService) FormProcessor(org.akaza.openclinica.control.form.FormProcessor) StudyBean(org.akaza.openclinica.bean.managestudy.StudyBean) DatasetDAO(org.akaza.openclinica.dao.extract.DatasetDAO) XalanTriggerService(org.akaza.openclinica.web.job.XalanTriggerService) ExtractBean(org.akaza.openclinica.bean.extract.ExtractBean) ZipFile(java.util.zip.ZipFile) File(java.io.File) SimpleDateFormat(java.text.SimpleDateFormat) JobDetailFactoryBean(org.springframework.scheduling.quartz.JobDetailFactoryBean)
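
Both this servlet and the scheduled jobs write each extract into a fresh, timestamp-named directory (yyyy/MM/dd/HHmmssSSS) so repeated runs never overwrite earlier files. Below is a minimal sketch of that directory pattern, with an assumed base path standing in for DATASET_DIR plus the dataset id.

import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;

public class ExtractDirSketch {

    public static void main(String[] args) {
        // Placeholder base path; in the servlet this is DATASET_DIR + datasetId.
        String basePath = "/tmp/datasets/123";

        // Same pattern as the examples: one directory per extract run.
        String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd"
                + File.separator + "HHmmssSSS" + File.separator;
        SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
        String generalFileDir = basePath + File.separator + sdfDir.format(new Date());

        // The jobs create the directory before writing into it (see Example 1).
        File output = new File(generalFileDir);
        if (!output.isDirectory()) {
            output.mkdirs();
        }
        System.out.println("extract directory: " + output.getAbsolutePath());
    }
}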

Example 4 with ExtractBean

use of org.akaza.openclinica.bean.extract.ExtractBean in project OpenClinica by OpenClinica.

In the class ExampleSpringJob, the method executeInternal:

@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    // need to generate a Locale so that user beans and other things will
    // generate normally
    Locale locale = new Locale("en", "US");
    ResourceBundleProvider.updateLocale(locale);
    ResourceBundle pageMessages = ResourceBundleProvider.getPageMessagesBundle();
    // logger.debug("--");
    // logger.debug("-- executing a job " + message + " at " + new
    // java.util.Date().toString());
    JobDataMap dataMap = context.getMergedJobDataMap();
    SimpleTrigger trigger = (SimpleTrigger) context.getTrigger();
    try {
        ApplicationContext appContext = (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
        String studySubjectNumber = ((CoreResources) appContext.getBean("coreResources")).getField("extract.number");
        coreResources = (CoreResources) appContext.getBean("coreResources");
        ruleSetRuleDao = (RuleSetRuleDao) appContext.getBean("ruleSetRuleDao");
        dataSource = (DataSource) appContext.getBean("dataSource");
        mailSender = (OpenClinicaMailSender) appContext.getBean("openClinicaMailSender");
        AuditEventDAO auditEventDAO = new AuditEventDAO(dataSource);
        // Scheduler scheduler = context.getScheduler();
        // JobDetail detail = context.getJobDetail();
        // jobDetailBean = (JobDetailBean) detail;
        /*
             * data map here should coincide with the job data map found in
             * CreateJobExportServlet, with the following code: jobDataMap = new
             * JobDataMap(); jobDataMap.put(DATASET_ID, datasetId);
             * jobDataMap.put(PERIOD, period); jobDataMap.put(EMAIL, email);
             * jobDataMap.put(TAB, tab); jobDataMap.put(CDISC, cdisc);
             * jobDataMap.put(SPSS, spss);
             */
        String alertEmail = dataMap.getString(EMAIL);
        String localeStr = dataMap.getString(LOCALE);
        if (localeStr != null) {
            locale = new Locale(localeStr);
            ResourceBundleProvider.updateLocale(locale);
            pageMessages = ResourceBundleProvider.getPageMessagesBundle();
        }
        int dsId = dataMap.getInt(DATASET_ID);
        String tab = dataMap.getString(TAB);
        String cdisc = dataMap.getString(CDISC);
        String cdisc12 = dataMap.getString(CDISC12);
        if (cdisc12 == null) {
            cdisc12 = "0";
        }
        String cdisc13 = dataMap.getString(CDISC13);
        if (cdisc13 == null) {
            cdisc13 = "0";
        }
        String cdisc13oc = dataMap.getString(CDISC13OC);
        if (cdisc13oc == null) {
            cdisc13oc = "0";
        }
        String spss = dataMap.getString(SPSS);
        int userId = dataMap.getInt(USER_ID);
        int studyId = dataMap.getInt(STUDY_ID);
        // String datasetId = dataMap.getString(DATASET_ID);
        // int dsId = new Integer(datasetId).intValue();
        // String userAcctId = dataMap.getString(USER_ID);
        // int userId = new Integer(userAcctId).intValue();
        // why the flip-flop? if one property is set to 'true' we can
        // see jobs in another screen but all properties have to be
        // strings
        logger.debug("-- found the job: " + dsId + " dataset id");
        // for (Iterator it = dataMap.entrySet().iterator(); it.hasNext();)
        // {
        // java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
        // Object key = entry.getKey();
        // Object value = entry.getValue();
        // // logger.debug("-- found datamap property: " + key.toString() +
        // // " : " + value.toString());
        // }
        HashMap fileName = new HashMap<String, Integer>();
        if (dsId > 0) {
            // trying to not throw an error if there's no dataset id
            DatasetDAO dsdao = new DatasetDAO(dataSource);
            DatasetBean datasetBean = (DatasetBean) dsdao.findByPK(dsId);
            StudyDAO studyDao = new StudyDAO(dataSource);
            UserAccountDAO userAccountDAO = new UserAccountDAO(dataSource);
            // hmm, three lines in the if block DRY?
            String generalFileDir = "";
            String generalFileDirCopy = "";
            String exportFilePath = SQLInitServlet.getField("exportFilePath");
            String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            generalFileDir = DATASET_DIR + datasetBean.getId() + File.separator + sdfDir.format(new java.util.Date());
            if (!"".equals(exportFilePath)) {
                generalFileDirCopy = SQLInitServlet.getField("filePath") + exportFilePath + File.separator;
            }
            // logger.debug("-- created the following dir: " +
            // generalFileDir);
            long sysTimeBegin = System.currentTimeMillis();
            // set up the user bean here, tbh
            // logger.debug("-- gen tab file 00");
            userBean = (UserAccountBean) userAccountDAO.findByPK(userId);
            // needs to also be captured by the servlet, tbh
            // logger.debug("-- gen tab file 00");
            generateFileService = new GenerateExtractFileService(dataSource, coreResources, ruleSetRuleDao);
            // logger.debug("-- gen tab file 00");
            // tbh #5796 - covers a bug when the user changes studies, 10/2010
            StudyBean activeStudy = (StudyBean) studyDao.findByPK(studyId);
            StudyBean parentStudy = new StudyBean();
            logger.debug("active study: " + studyId + " parent study: " + activeStudy.getParentStudyId());
            if (activeStudy.getParentStudyId() > 0) {
                // StudyDAO sdao = new StudyDAO(sm.getDataSource());
                parentStudy = (StudyBean) studyDao.findByPK(activeStudy.getParentStudyId());
            } else {
                parentStudy = activeStudy;
            // covers a bug in tab file creation, tbh 01/2009
            }
            logger.debug("-- found extract bean ");
            ExtractBean eb = generateFileService.generateExtractBean(datasetBean, activeStudy, parentStudy);
            MessageFormat mf = new MessageFormat("");
            StringBuffer message = new StringBuffer();
            StringBuffer auditMessage = new StringBuffer();
            // use resource bundle page messages to generate the email, tbh
            // 02/2009
            // message.append(pageMessages.getString("html_email_header_1")
            // + " " + alertEmail +
            // pageMessages.getString("html_email_header_2") + "<br/>");
            message.append("<p>" + pageMessages.getString("email_header_1") + " " + EmailEngine.getAdminEmail() + " " + pageMessages.getString("email_header_2") + " Job Execution " + pageMessages.getString("email_header_3") + "</p>");
            message.append("<P>Dataset: " + datasetBean.getName() + "</P>");
            message.append("<P>Study: " + activeStudy.getName() + "</P>");
            message.append("<p>" + pageMessages.getString("html_email_body_1") + datasetBean.getName() + pageMessages.getString("html_email_body_2") + SQLInitServlet.getField("sysURL") + pageMessages.getString("html_email_body_3") + "</p>");
            // logger.debug("-- gen tab file 00");
            if ("1".equals(tab)) {
                logger.debug("-- gen tab file 01");
                fileName = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, datasetBean, activeStudy.getId(), parentStudy.getId(), generalFileDirCopy, userBean);
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // MessageFormat mf = new MessageFormat("");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_tab_delimited"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // auditMessage.append(
                // "You can access your tab-delimited file <a href='AccessFile?fileId="
                // + getFileIdInt(fileName) + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_tab_delimited") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            if ("1".equals(cdisc)) {
                String odmVersion = "oc1.2";
                fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
                logger.debug("-- gen odm file");
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // MessageFormat mf = new MessageFormat("");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_odm_12"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // auditMessage.append(
                // "You can access your ODM 1.2 w/OpenClinica Extension XML file <a href='AccessFile?fileId="
                // + getFileIdInt(fileName)
                // + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_odm_12") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            if ("1".equals(cdisc12)) {
                String odmVersion = "1.2";
                fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
                logger.debug("-- gen odm file 1.2 default");
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_odm_12_xml"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // // auditMessage.append(
                // "You can access your ODM 1.2 XML file <a href='AccessFile?fileId="
                // + getFileIdInt(fileName) + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_odm_12_xml") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            if ("1".equals(cdisc13)) {
                String odmVersion = "1.3";
                fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
                logger.debug("-- gen odm file 1.3");
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // MessageFormat mf = new MessageFormat("");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_odm_13"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // auditMessage.append(
                // "You can access your ODM 1.3 XML file <a href='AccessFile?fileId="
                // + getFileIdInt(fileName) + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_odm_13") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            if ("1".equals(cdisc13oc)) {
                String odmVersion = "oc1.3";
                fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
                logger.debug("-- gen odm file 1.3 oc");
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // MessageFormat mf = new MessageFormat("");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_odm_13_xml"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // auditMessage.append(
                // "You can access your ODM 1.3 w/OpenClinica Extension XML file <a href='AccessFile?fileId="
                // + getFileIdInt(fileName)
                // + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_odm_13_xml") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            if ("1".equals(spss)) {
                SPSSReportBean answer = new SPSSReportBean();
                fileName = generateFileService.createSPSSFile(datasetBean, eb, activeStudy, parentStudy, sysTimeBegin, generalFileDir, answer, generalFileDirCopy, userBean);
                logger.debug("-- gen spss file");
                message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
                // MessageFormat mf = new MessageFormat("");
                // mf.applyPattern(pageMessages.getString(
                // "you_can_access_spss"));
                // Object[] arguments = { getFileIdInt(fileName) };
                // auditMessage.append(mf.format(arguments));
                // auditMessage.append(
                // "You can access your SPSS files <a href='AccessFile?fileId="
                // + getFileIdInt(fileName) + "'>here</a>.<br/>");
                auditMessage.append(pageMessages.getString("you_can_access_spss") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
            }
            // wrap up the message, and send the email
            message.append("<p>" + pageMessages.getString("html_email_body_5") + "</P><P>" + pageMessages.getString("email_footer"));
            try {
                mailSender.sendEmail(alertEmail.trim(), pageMessages.getString("job_ran_for") + " " + datasetBean.getName(), message.toString(), true);
            } catch (OpenClinicaSystemException ose) {
            // Do Nothing, In the future we might want to have an email
            // status added to system.
            }
            TriggerBean triggerBean = new TriggerBean();
            triggerBean.setDataset(datasetBean);
            triggerBean.setUserAccount(userBean);
            triggerBean.setFullName(trigger.getKey().getName());
            auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, auditMessage.toString());
        } else {
            TriggerBean triggerBean = new TriggerBean();
            // triggerBean.setDataset(datasetBean);
            triggerBean.setUserAccount(userBean);
            triggerBean.setFullName(trigger.getKey().getName());
            auditEventDAO.createRowForExtractDataJobFailure(triggerBean);
        // logger.debug("-- made it here for some reason, ds id: "
        // + dsId);
        }
    // logger.debug("-- generated file: " + fileNameStr);
    // dataSource.
    } catch (Exception e) {
        // TODO: ideally this should also record a failure audit row and notify the user, tbh 02/2009
        logger.error("-- found exception while running the extract job: " + e.getMessage(), e);
    }
}
Also used : Locale(java.util.Locale) HashMap(java.util.HashMap) CoreResources(org.akaza.openclinica.dao.core.CoreResources) DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean) ApplicationContext(org.springframework.context.ApplicationContext) SimpleTrigger(org.quartz.SimpleTrigger) StudyDAO(org.akaza.openclinica.dao.managestudy.StudyDAO) SPSSReportBean(org.akaza.openclinica.bean.extract.SPSSReportBean) JobDataMap(org.quartz.JobDataMap) GenerateExtractFileService(org.akaza.openclinica.service.extract.GenerateExtractFileService) TriggerBean(org.akaza.openclinica.bean.admin.TriggerBean) MessageFormat(java.text.MessageFormat) StudyBean(org.akaza.openclinica.bean.managestudy.StudyBean) AuditEventDAO(org.akaza.openclinica.dao.admin.AuditEventDAO) OpenClinicaSystemException(org.akaza.openclinica.exception.OpenClinicaSystemException) DatasetDAO(org.akaza.openclinica.dao.extract.DatasetDAO) UserAccountDAO(org.akaza.openclinica.dao.login.UserAccountDAO) JobExecutionException(org.quartz.JobExecutionException) OpenClinicaSystemException(org.akaza.openclinica.exception.OpenClinicaSystemException) ExtractBean(org.akaza.openclinica.bean.extract.ExtractBean) ResourceBundle(java.util.ResourceBundle) SimpleDateFormat(java.text.SimpleDateFormat)
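
The success branch above builds the notification paragraph by concatenating resource-bundle fragments around the AccessFile link, and the commented-out lines hint at an earlier MessageFormat-based variant. A minimal standalone sketch of that variant follows; the class name, pattern text, and sample values are hypothetical illustrations, not OpenClinica code.

import java.text.MessageFormat;

// Hypothetical helper (not part of OpenClinica) showing the MessageFormat-based
// alternative that the commented-out lines above allude to.
public class AccessFileLinkBuilder {

    // Placeholder pattern; in OpenClinica the surrounding text would come from
    // the page-messages resource bundle rather than a hard-coded string.
    private static final String PATTERN =
            "<p>Your extract file {0} is ready. You can download it at {1}AccessFile?fileId={2}.</p>";

    public static String buildParagraph(String fileName, String sysUrlBase, int fileId) {
        // Pass the id as a String so MessageFormat does not apply locale-specific digit grouping.
        return MessageFormat.format(PATTERN, fileName, sysUrlBase, String.valueOf(fileId));
    }

    // Minimal usage example with made-up values.
    public static void main(String[] args) {
        System.out.println(buildParagraph("my_dataset_ODM.zip", "http://localhost:8080/OpenClinica/", 42));
    }
}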

Example 5 with ExtractBean

use of org.akaza.openclinica.bean.extract.ExtractBean in project OpenClinica by OpenClinica.

the class StudyInfoPanel method setData.

/**
     * setData, the externally called method that populates the side info panel
     * with the data it should reflect for the given page.
     *
     * @param page the page currently being rendered
     * @param session the active HTTP session
     * @param request the current HTTP request
     */
public void setData(Page page, HttpSession session, HttpServletRequest request) {
    Locale locale = request.getLocale();
    resword = ResourceBundleProvider.getWordsBundle();
    local_sdf = new SimpleDateFormat(ResourceBundleProvider.getFormatBundle(locale).getString("date_format_string"));
    try {
        // defaults, can be reset by mistake by running through one page,
        // tbh
        this.setStudyInfoShown(true);
        this.setOrderedData(false);
        // try to avoid errors, tbh
        if (page.equals(Page.CREATE_DATASET_1)) {
            this.reset();
        // this.setData("Number of Steps", "5");
        } else if (page.equals(Page.CREATE_DATASET_2) || page.equals(Page.CREATE_DATASET_EVENT_ATTR) || page.equals(Page.CREATE_DATASET_SUB_ATTR) || page.equals(Page.CREATE_DATASET_CRF_ATTR) || page.equals(Page.CREATE_DATASET_GROUP_ATTR) || page.equals(Page.CREATE_DATASET_VIEW_SELECTED)) {
            HashMap eventlist = (HashMap) request.getAttribute("eventlist");
            ArrayList displayData = generateEventTree(eventlist);
            this.reset();
            this.setUserOrderedData(displayData);
            this.setStudyInfoShown(false);
            this.setOrderedData(true);
            this.setCreateDataset(true);
            this.setSubmitDataModule(false);
            this.setExtractData(false);
        } else if (page.equals(Page.CREATE_DATASET_3)) {
            this.reset();
            this.setStudyInfoShown(false);
            this.setOrderedData(false);
            this.setCreateDataset(true);
            this.setSubmitDataModule(false);
            this.setExtractData(false);
            DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
            int ev_count = dsb.getItemIds().size();
            this.setData(resword.getString("items_selected"), new Integer(ev_count).toString());
        } else if (page.equals(Page.CREATE_DATASET_4)) {
            this.reset();
            this.setStudyInfoShown(false);
            this.setOrderedData(false);
            this.setCreateDataset(true);
            this.setSubmitDataModule(false);
            this.setExtractData(false);
            this.removeData(resword.getString("beginning_date"));
            this.removeData(resword.getString("ending_date"));
            DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
            int ev_count = dsb.getItemIds().size();
            this.setData(resword.getString("items_selected"), new Integer(ev_count).toString());
            if ("01/01/1900".equals(english_sdf.format(dsb.getDateStart()))) {
                this.setData(resword.getString("beginning_date"), resword.getString("not_specified"));
            } else {
                this.setData(resword.getString("beginning_date"), local_sdf.format(dsb.getDateStart()));
            }
            if ("12/31/2100".equals(english_sdf.format(dsb.getDateEnd()))) {
                this.setData(resword.getString("ending_date"), resword.getString("not_specified"));
            } else {
                this.setData(resword.getString("ending_date"), local_sdf.format(dsb.getDateEnd()));
            }
            FilterBean fb = (FilterBean) session.getAttribute("newFilter");
            if (fb != null) {
                this.setData("Added Filter", fb.getName());
            }
        } else if (page.equals(Page.APPLY_FILTER)) {
            DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
            this.setData(resword.getString("beginning_date"), local_sdf.format(dsb.getDateStart()));
            this.setData(resword.getString("ending_date"), local_sdf.format(dsb.getDateEnd()));
        } else if (page.equals(Page.CONFIRM_DATASET)) {
            this.reset();
            this.setStudyInfoShown(false);
            this.setOrderedData(false);
            this.setCreateDataset(true);
            this.setSubmitDataModule(false);
            this.setExtractData(false);
            DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
            this.setData(resword.getString("dataset_name"), dsb.getName());
            this.setData(resword.getString("dataset_description"), dsb.getDescription());
            int ev_count = dsb.getItemIds().size();
            this.setData(resword.getString("items_selected"), new Integer(ev_count).toString());
            if ("01/01/1900".equals(english_sdf.format(dsb.getDateStart()))) {
                this.setData(resword.getString("beginning_date"), resword.getString("not_specified"));
            } else {
                this.setData(resword.getString("beginning_date"), local_sdf.format(dsb.getDateStart()));
            }
            if ("12/31/2100".equals(english_sdf.format(dsb.getDateEnd()))) {
                this.setData(resword.getString("ending_date"), resword.getString("not_specified"));
            } else {
                this.setData(resword.getString("ending_date"), local_sdf.format(dsb.getDateEnd()));
            }
            FilterBean fb = (FilterBean) session.getAttribute("newFilter");
            if (fb != null) {
                this.setData(resword.getString("added_filter"), fb.getName());
            }
        } else if (page.equals(Page.CREATE_FILTER_SCREEN_3_1)) {
            CRFVersionBean cvBean = (CRFVersionBean) session.getAttribute("cvBean");
            this.setData(resword.getString("CRF_version_selected"), cvBean.getName());
        } else if (page.equals(Page.CREATE_FILTER_SCREEN_3_2)) {
            SectionBean secBean = (SectionBean) session.getAttribute("secBean");
            this.setData(resword.getString("section_selected"), secBean.getName());
            Collection metadatas = (Collection) request.getAttribute("metadatas");
            this.setData(resword.getString("number_of_questions"), new Integer(metadatas.size()).toString());
        } else if (page.equals(Page.CREATE_FILTER_SCREEN_4)) {
        } else if (page.equals(Page.CREATE_FILTER_SCREEN_5)) {
        // blank here to prevent data reset, tbh
        } else if (page.equals(Page.ADMIN_SYSTEM)) {
        // blank here , info set in servlet itself
        } else if (page.equals(Page.VIEW_STUDY_SUBJECT) || page.equals(Page.LIST_EVENTS_FOR_SUBJECT)) {
            // special case, unlocks study name, subject name, and
            // visits
            // TODO set all this up, tbh
            /*
                 * set up the side info panel to create the following upon entry
                 * from the ViewStudyServlet Study X Subject Y StudyEventDef Z1
                 * StudyEventDef Z2 <status-tag> CRF A1 <status-tag> CRF A2 Z1
                 * should be collapsible/expandible, etc.
                 *
                 * We can pull things from the session and the request:
                 */
            /*
                 * StudyBean study = (StudyBean) request.getAttribute("study");
                 * StudySubjectBean studySubject = (StudySubjectBean)
                 * request.getAttribute("studySub"); EntityBeanTable table =
                 * (EntityBeanTable) request.getAttribute("table"); EventCRFBean
                 * ecb = (EventCRFBean)request.getAttribute("eventCRF");
                 * this.reset(); ArrayList rows = table.getRows(); ArrayList
                 * beans = DisplayStudyEventBean.generateBeansFromRows(rows);
                 *
                 *
                 * addStudyEventTree(study, studySubject, beans, ecb);
                 */
            // this.setIconInfoShown(false);
            // this.setManageSubject(true);
            this.reset();
            this.setStudyInfoShown(true);
            this.setOrderedData(true);
            this.setExtractData(false);
            this.setSubmitDataModule(false);
            this.setCreateDataset(false);
            this.setIconInfoShown(false);
            this.setManageSubject(true);
            request.setAttribute("showDDEIcon", Boolean.TRUE);
        } else if (page.equals(Page.ENTER_DATA_FOR_STUDY_EVENT) || page.equals(Page.ENTER_DATA_FOR_STUDY_EVENT_SERVLET)) {
            StudyBean study = (StudyBean) session.getAttribute("study");
            StudySubjectBean studySubject = (StudySubjectBean) request.getAttribute("studySubject");
            ArrayList beans = (ArrayList) request.getAttribute("beans");
            EventCRFBean ecb = (EventCRFBean) request.getAttribute("eventCRF");
            this.reset();
            addStudyEventTree(study, studySubject, beans, ecb, true);
            this.setStudyInfoShown(false);
            this.setOrderedData(true);
            this.setSubmitDataModule(true);
            this.setExtractData(false);
            this.setCreateDataset(false);
            this.setIconInfoShown(false);
        } else if (page.equals(Page.INTERVIEWER) || page.equals(Page.TABLE_OF_CONTENTS) || page.equals(Page.TABLE_OF_CONTENTS_SERVLET) || page.equals(Page.INITIAL_DATA_ENTRY) || page.equals(Page.INITIAL_DATA_ENTRY_SERVLET) || page.equals(Page.DOUBLE_DATA_ENTRY) || page.equals(Page.DOUBLE_DATA_ENTRY_SERVLET) || page.equals(Page.ADMIN_EDIT) || page.equals(Page.ADMIN_EDIT_SERVLET)) {
            /*
                 * Pages designed to follow the same format as above; check whether
                 * the data is already in the session and, if so, do not refresh it.
                 * TODO refine and test
                 */
            StudyBean study = (StudyBean) session.getAttribute("study");
            StudySubjectBean studySubject = (StudySubjectBean) request.getAttribute("studySubject");
            ArrayList beans = (ArrayList) request.getAttribute("beans");
            EventCRFBean ecb = (EventCRFBean) request.getAttribute("eventCRF");
            this.reset();
            addStudyEventTree(study, studySubject, beans, ecb, false);
            this.setStudyInfoShown(false);
            this.setOrderedData(true);
            this.setSubmitDataModule(true);
            this.setExtractData(false);
            this.setCreateDataset(false);
            this.setIconInfoShown(true);
        } else if (page.equals(Page.EDIT_DATASET)) {
            this.reset();
            // HashMap eventlist = (HashMap)
            // request.getAttribute("eventlist");
            HashMap eventlist = (LinkedHashMap) session.getAttribute("eventsForCreateDataset");
            ArrayList displayData = generateEventTree(eventlist);
            this.setCreateDataset(true);
            this.setOrderedData(true);
            this.setUserOrderedData(displayData);
            this.setStudyInfoShown(true);
            this.setSubmitDataModule(false);
            this.setExtractData(false);
            DatasetBean dsb = (DatasetBean) request.getAttribute("dataset");
            this.setData(resword.getString("dataset_name"), dsb.getName());
            this.setData(resword.getString("date_created"), local_sdf.format(dsb.getCreatedDate()));
            this.setData(resword.getString("dataset_owner"), dsb.getOwner().getName());
            this.setData(resword.getString("date_last_run"), local_sdf.format(dsb.getDateLastRun()));
        } else if (page.equals(Page.EXPORT_DATASETS)) {
            this.setCreateDataset(false);
        } else if (page.equals(Page.GENERATE_DATASET_HTML)) {
            DatasetBean db = (DatasetBean) request.getAttribute("dataset");
            ExtractBean exbean = (ExtractBean) request.getAttribute("extractBean");
            this.reset();
            ArrayList displayData = generateDatasetTree(exbean, db);
            this.setUserOrderedData(displayData);
            this.setStudyInfoShown(false);
            this.setOrderedData(true);
            this.setExtractData(true);
            this.setSubmitDataModule(false);
            this.setCreateDataset(false);
        } else if (page.equals(Page.LIST_STUDY_SUBJECT) || page.equals(Page.LIST_STUDY_SUBJECTS) || page.equals(Page.SUBMIT_DATA) || page.equals(Page.SUBMIT_DATA_SERVLET)) {
            this.reset();
            this.setStudyInfoShown(true);
            this.setOrderedData(true);
            this.setExtractData(false);
            this.setSubmitDataModule(false);
            this.setCreateDataset(false);
            this.setIconInfoShown(false);
            this.setManageSubject(true);
            // don't want to show DDE icon key for subject matrix page
            request.setAttribute("showDDEIcon", Boolean.FALSE);
        } else if (page.equals(Page.VIEW_SECTION_DATA_ENTRY) || page.equals(Page.VIEW_SECTION_DATA_ENTRY_SERVLET)) {
            this.reset();
            this.setStudyInfoShown(true);
            this.setOrderedData(true);
            this.setExtractData(false);
            this.setSubmitDataModule(false);
            this.setCreateDataset(false);
            this.setIconInfoShown(true);
            this.setManageSubject(false);
        } else if (page.equals(Page.CREATE_SUBJECT_GROUP_CLASS) || page.equals(Page.CREATE_SUBJECT_GROUP_CLASS_CONFIRM) || page.equals(Page.UPDATE_SUBJECT_GROUP_CLASS) || page.equals(Page.UPDATE_SUBJECT_GROUP_CLASS_CONFIRM)) {
            this.reset();
            this.setStudyInfoShown(true);
            this.setOrderedData(true);
            this.setExtractData(false);
            this.setSubmitDataModule(false);
            this.setCreateDataset(false);
            this.setIconInfoShown(true);
            this.setManageSubject(false);
        } else {
            // automatically reset if we don't know what's happening
            this.reset();
            this.setStudyInfoShown(true);
            this.setOrderedData(true);
            this.setExtractData(false);
            this.setSubmitDataModule(false);
            this.setCreateDataset(false);
            this.setIconInfoShown(true);
            this.setManageSubject(false);
        }
    } catch (Exception e) {
        // On any error, fall back to an empty, reset panel rather than breaking page rendering.
        this.reset();
    }
}
Also used : Locale(java.util.Locale) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean) StudyBean(org.akaza.openclinica.bean.managestudy.StudyBean) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) SectionBean(org.akaza.openclinica.bean.submit.SectionBean) EventCRFBean(org.akaza.openclinica.bean.submit.EventCRFBean) DisplayEventCRFBean(org.akaza.openclinica.bean.submit.DisplayEventCRFBean) StudySubjectBean(org.akaza.openclinica.bean.managestudy.StudySubjectBean) ExtractBean(org.akaza.openclinica.bean.extract.ExtractBean) Collection(java.util.Collection) CRFVersionBean(org.akaza.openclinica.bean.submit.CRFVersionBean) SimpleDateFormat(java.text.SimpleDateFormat) FilterBean(org.akaza.openclinica.bean.extract.FilterBean)
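
To show how this panel method is typically driven, here is a minimal calling sketch. Only StudyInfoPanel.setData(Page, HttpSession, HttpServletRequest) and Page.LIST_STUDY_SUBJECTS come from the example above; the caller class, the "panel" session attribute name, and the import paths are assumptions made for illustration.

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;

// Package paths assumed; adjust to where Page and StudyInfoPanel live in your checkout.
import org.akaza.openclinica.view.Page;
import org.akaza.openclinica.view.StudyInfoPanel;

// Hypothetical caller, not an OpenClinica servlet: it only demonstrates the setData contract shown above.
public class PanelRefreshExample {

    public void refreshPanel(HttpServletRequest request) {
        HttpSession session = request.getSession();
        // The "panel" attribute name is an assumption for this sketch.
        StudyInfoPanel panel = (StudyInfoPanel) session.getAttribute("panel");
        if (panel == null) {
            panel = new StudyInfoPanel();
            session.setAttribute("panel", panel);
        }
        // setData decides, per page, which summary entries and trees the panel displays;
        // here we refresh it for the subject matrix listing.
        panel.setData(Page.LIST_STUDY_SUBJECTS, session, request);
    }
}

Whichever page constant is passed, setData resets the panel first and then toggles the module flags, so a caller only needs this single call per request.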

Aggregations

ExtractBean (org.akaza.openclinica.bean.extract.ExtractBean): 5 usages
DatasetBean (org.akaza.openclinica.bean.extract.DatasetBean): 4 usages
StudyBean (org.akaza.openclinica.bean.managestudy.StudyBean): 4 usages
SimpleDateFormat (java.text.SimpleDateFormat): 3 usages
HashMap (java.util.HashMap): 3 usages
Locale (java.util.Locale): 3 usages
File (java.io.File): 2 usages
ArrayList (java.util.ArrayList): 2 usages
Date (java.util.Date): 2 usages
ResourceBundle (java.util.ResourceBundle): 2 usages
ZipEntry (java.util.zip.ZipEntry): 2 usages
TriggerBean (org.akaza.openclinica.bean.admin.TriggerBean): 2 usages
ArchivedDatasetFileBean (org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean): 2 usages
SPSSReportBean (org.akaza.openclinica.bean.extract.SPSSReportBean): 2 usages
DatasetDAO (org.akaza.openclinica.dao.extract.DatasetDAO): 2 usages
StudyDAO (org.akaza.openclinica.dao.managestudy.StudyDAO): 2 usages
OpenClinicaSystemException (org.akaza.openclinica.exception.OpenClinicaSystemException): 2 usages
GenerateExtractFileService (org.akaza.openclinica.service.extract.GenerateExtractFileService): 2 usages
JobDataMap (org.quartz.JobDataMap): 2 usages
SchedulerException (org.quartz.SchedulerException): 2 usages