Example 1 with ArchivedDatasetFileDAO

Use of org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO in the project OpenClinica by OpenClinica.

From the class XsltTransformJob, method initDependencies:

/**
 * Initializes the dependencies of this job with components from the Spring application context.
 *
 * @param scheduler the Quartz scheduler whose SchedulerContext is expected to hold the Spring ApplicationContext under the key "applicationContext"
 */
private void initDependencies(Scheduler scheduler) {
    try {
        ApplicationContext ctx = (ApplicationContext) scheduler.getContext().get("applicationContext");
        DataSource dataSource = ctx.getBean(DataSource.class);
        mailSender = ctx.getBean(OpenClinicaMailSender.class);
        auditEventDAO = ctx.getBean(AuditEventDAO.class);
        datasetDao = ctx.getBean(DatasetDAO.class);
        userAccountDao = ctx.getBean(UserAccountDAO.class);
        studyDao = new StudyDAO(dataSource);
        archivedDatasetFileDao = ctx.getBean(ArchivedDatasetFileDAO.class);
        generateFileService = ctx.getBean(GenerateExtractFileService.class);
        odmFileCreation = ctx.getBean(OdmFileCreation.class);
    } catch (SchedulerException e) {
        throw new IllegalStateException("Could not load dependencies from scheduler context", e);
    }
}
Also used: ArchivedDatasetFileDAO (org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO), ApplicationContext (org.springframework.context.ApplicationContext), GenerateExtractFileService (org.akaza.openclinica.service.extract.GenerateExtractFileService), SchedulerException (org.quartz.SchedulerException), OpenClinicaMailSender (org.akaza.openclinica.core.OpenClinicaMailSender), AuditEventDAO (org.akaza.openclinica.dao.admin.AuditEventDAO), OdmFileCreation (org.akaza.openclinica.service.extract.OdmFileCreation), DatasetDAO (org.akaza.openclinica.dao.extract.DatasetDAO), UserAccountDAO (org.akaza.openclinica.dao.login.UserAccountDAO), StudyDAO (org.akaza.openclinica.dao.managestudy.StudyDAO), DataSource (javax.sql.DataSource)
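
A minimal wiring sketch: the lookup above only succeeds if something has published the Spring ApplicationContext into the Quartz SchedulerContext under the key "applicationContext". Assuming the scheduler is created through Spring's SchedulerFactoryBean, that key can be set as below (the configuration class and bean name are illustrative, not OpenClinica's actual setup).

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;

@Configuration
public class SchedulerConfig {

    @Bean
    public SchedulerFactoryBean schedulerFactoryBean() {
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        // Publishes the ApplicationContext into the Quartz SchedulerContext under
        // the key "applicationContext", which initDependencies() reads back via
        // scheduler.getContext().get("applicationContext").
        factory.setApplicationContextSchedulerContextKey("applicationContext");
        return factory;
    }
}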

Example 2 with ArchivedDatasetFileDAO

Use of org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO in the project OpenClinica by OpenClinica.

From the class GenerateExtractFileService, method createFileK:

public int createFileK(String name, String dir, String content, DatasetBean datasetBean, long time, ExportFormatBean efb, boolean saveToDB, boolean zipped, boolean deleteOld, UserAccountBean userBean) {
    ArchivedDatasetFileBean fbFinal = new ArchivedDatasetFileBean();
    // >> tbh 04/2010 #4915 replace all names' spaces with underscores
    name = name.replaceAll(" ", "_");
    fbFinal.setId(0);
    BufferedWriter w = null;
    try {
        File complete = new File(dir);
        if (!complete.isDirectory()) {
            complete.mkdirs();
        }
        // else  if(deleteOld)// so directory exists check if the files are there
        // {
        // deleteDirectory(complete);
        // }
        // File newFile = new File(complete, name);
        // newFile.setLastModified(System.currentTimeMillis());
        File oldFile = new File(complete, name);
        File newFile = null;
        if (oldFile.exists()) {
            newFile = oldFile;
            // guard: the list of previously generated files may be null or empty
            if (oldFiles != null && !oldFiles.isEmpty())
                oldFiles.remove(oldFile);
        } else {
            newFile = new File(complete, name);
        }
        // File
        newFile.setLastModified(System.currentTimeMillis());
        w = new BufferedWriter(new FileWriter(newFile, true));
        w.write(content);
        w.close();
        logger.info("finished writing the text file...");
        // set up the zip to go into the database
        if (saveToDB) {
            ArchivedDatasetFileBean fb = new ArchivedDatasetFileBean();
            if (zipped) {
                fb.setName(name + ".zip");
                fb.setFileReference(dir + name + ".zip");
            } else {
                fb.setName(name);
                fb.setFileReference(dir + name);
            }
            // logger.info("ODM filename: " + name + ".zip");
            // logger.info("ODM fileReference: " + dir + name + ".zip");
            // current location of the file on the system
            fb.setFileSize((int) newFile.length());
            // logger.info("ODM setFileSize: " + (int)newFile.length() );
            // set the above to compressed size?
            fb.setRunTime((int) time);
            // logger.info("ODM setRunTime: " + (int)time );
            // need to set this in milliseconds, get it passed from above
            // methods?
            fb.setDatasetId(datasetBean.getId());
            // logger.info("ODM setDatasetid: " + ds.getId() );
            fb.setExportFormatBean(efb);
            // logger.info("ODM setExportFormatBean: success" );
            fb.setExportFormatId(efb.getId());
            // logger.info("ODM setExportFormatId: " + efb.getId());
            fb.setOwner(userBean);
            // logger.info("ODM setOwner: " + sm.getUserBean());
            fb.setOwnerId(userBean.getId());
            // logger.info("ODM setOwnerId: " + sm.getUserBean().getId() );
            fb.setDateCreated(new Date(System.currentTimeMillis()));
            boolean write = true;
            ArchivedDatasetFileDAO asdfDAO = new ArchivedDatasetFileDAO(ds);
            // eliminating all checks so that we create multiple files, tbh 6-7
            if (write) {
                fbFinal = (ArchivedDatasetFileBean) asdfDAO.create(fb);
            } else {
                logger.info("duplicate found: " + fb.getName());
            }
        }
    // created in database!
    } catch (Exception e) {
        logger.error("Failed to write archived dataset file", e);
    } finally {
        if (w != null) {
            try {
                w.close();
            } catch (IOException e) {
                logger.error("Failed to close writer", e);
            }
        }
    }
    return fbFinal.getId();
}
Also used: ArchivedDatasetFileDAO (org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO), FileWriter (java.io.FileWriter), IOException (java.io.IOException), File (java.io.File), Date (java.util.Date), ArchivedDatasetFileBean (org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean), BufferedWriter (java.io.BufferedWriter)
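
A hedged usage sketch for createFileK. The constructor signature matches the one ExportDatasetServlet uses in Example 4; the file name, directory, content, and the parameters dataSource, coreResources, ruleSetRuleDao, dataset, and currentUser are illustrative assumptions.

// Illustrative caller, not OpenClinica code.
int archiveTabFile(DataSource dataSource, CoreResources coreResources,
        RuleSetRuleDao ruleSetRuleDao, DatasetBean dataset, UserAccountBean currentUser) {
    GenerateExtractFileService svc =
            new GenerateExtractFileService(dataSource, coreResources, ruleSetRuleDao);
    long start = System.currentTimeMillis();
    String content = "SubjectID\tVisit\n1001\tBaseline\n";
    int fileId = svc.createFileK("my dataset.txt", // spaces become underscores
            "/usr/local/oc/datasets/42/", content, dataset,
            System.currentTimeMillis() - start, ExportFormatBean.TXTFILE,
            true,   // saveToDB: archive a row via ArchivedDatasetFileDAO
            false,  // zipped: record the plain file name rather than name + ".zip"
            false,  // deleteOld
            currentUser);
    // createFileK swallows exceptions and returns 0 when no row was archived
    return fileId;
}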

Example 3 with ArchivedDatasetFileDAO

Use of org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO in the project OpenClinica by OpenClinica.

From the class OdmFileCreation, method createFileK:

public int createFileK(String name, String dir, String content, DatasetBean datasetBean, long time, ExportFormatBean efb, boolean saveToDB, boolean zipped, boolean deleteOld, UserAccountBean userBean) {
    ArchivedDatasetFileBean fbFinal = new ArchivedDatasetFileBean();
    // >> tbh 04/2010 #4915 replace all names' spaces with underscores
    name = name.replaceAll(" ", "_");
    fbFinal.setId(0);
    BufferedWriter w = null;
    try {
        File complete = new File(dir);
        if (!complete.isDirectory()) {
            complete.mkdirs();
        }
        // else  if(deleteOld)// so directory exists check if the files are there
        // {
        // deleteDirectory(complete);
        // }
        // File newFile = new File(complete, name);
        // newFile.setLastModified(System.currentTimeMillis());
        File oldFile = new File(complete, name);
        File newFile = null;
        if (oldFile.exists()) {
            newFile = oldFile;
            // guard: the list of previously generated files may be null or empty
            if (oldFiles != null && !oldFiles.isEmpty())
                oldFiles.remove(oldFile);
        } else {
            newFile = new File(complete, name);
        }
        // File
        newFile.setLastModified(System.currentTimeMillis());
        w = new BufferedWriter(new FileWriter(newFile, true));
        w.write(content);
        w.close();
        LOG.info("finished writing the text file...");
        // set up the zip to go into the database
        if (saveToDB) {
            ArchivedDatasetFileBean fb = new ArchivedDatasetFileBean();
            if (zipped) {
                fb.setName(name + ".zip");
                fb.setFileReference(dir + name + ".zip");
            } else {
                fb.setName(name);
                fb.setFileReference(dir + name);
            }
            // logger.info("ODM filename: " + name + ".zip");
            // logger.info("ODM fileReference: " + dir + name + ".zip");
            // current location of the file on the system
            fb.setFileSize((int) newFile.length());
            // logger.info("ODM setFileSize: " + (int)newFile.length() );
            // set the above to compressed size?
            fb.setRunTime((int) time);
            // logger.info("ODM setRunTime: " + (int)time );
            // need to set this in milliseconds, get it passed from above
            // methods?
            fb.setDatasetId(datasetBean.getId());
            // logger.info("ODM setDatasetid: " + ds.getId() );
            fb.setExportFormatBean(efb);
            // logger.info("ODM setExportFormatBean: success" );
            fb.setExportFormatId(efb.getId());
            // logger.info("ODM setExportFormatId: " + efb.getId());
            fb.setOwner(userBean);
            // logger.info("ODM setOwner: " + sm.getUserBean());
            fb.setOwnerId(userBean.getId());
            // logger.info("ODM setOwnerId: " + sm.getUserBean().getId() );
            fb.setDateCreated(new Date(System.currentTimeMillis()));
            boolean write = true;
            ArchivedDatasetFileDAO asdfDAO = new ArchivedDatasetFileDAO(dataSource);
            // eliminating all checks so that we create multiple files, tbh 6-7
            if (write) {
                fbFinal = (ArchivedDatasetFileBean) asdfDAO.create(fb);
            } else {
                LOG.info("duplicate found: " + fb.getName());
            }
        }
    // created in database!
    } catch (Exception e) {
        LOG.error("Failed to write archived dataset file", e);
    } finally {
        if (w != null) {
            try {
                w.close();
            } catch (IOException e) {
                LOG.error("Failed to close writer", e);
            }
        }
    }
    return fbFinal.getId();
}
Also used: ArchivedDatasetFileDAO (org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO), FileWriter (java.io.FileWriter), IOException (java.io.IOException), File (java.io.File), Date (java.util.Date), ArchivedDatasetFileBean (org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean), BufferedWriter (java.io.BufferedWriter)
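
Both createFileK variants close the BufferedWriter by hand in a finally block, a pattern that predates Java 7. A minimal sketch of the same append-and-write step using try-with-resources (an assumed refactoring, not code from the project):

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

// Hedged sketch: equivalent append-mode write with automatic resource cleanup.
static void appendContent(File target, String content) throws IOException {
    try (BufferedWriter w = new BufferedWriter(new FileWriter(target, true))) {
        w.write(content); // the writer is closed even if write() throws
    }
}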

Example 4 with ArchivedDatasetFileDAO

Use of org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO in the project OpenClinica by OpenClinica.

From the class ExportDatasetServlet, method processRequest:

@Override
public void processRequest() throws Exception {
    DatasetDAO dsdao = new DatasetDAO(sm.getDataSource());
    ArchivedDatasetFileDAO asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
    FormProcessor fp = new FormProcessor(request);
    GenerateExtractFileService generateFileService = new GenerateExtractFileService(sm.getDataSource(), (CoreResources) SpringServletAccess.getApplicationContext(context).getBean("coreResources"), (RuleSetRuleDao) SpringServletAccess.getApplicationContext(context).getBean("ruleSetRuleDao"));
    String action = fp.getString("action");
    int datasetId = fp.getInt("datasetId");
    int adfId = fp.getInt("adfId");
    if (datasetId == 0) {
        try {
            DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
            datasetId = dsb.getId();
            logger.info("dataset id was zero, trying session: " + datasetId);
        } catch (NullPointerException e) {
            // no dataset in the request or session; datasetId stays 0
            logger.info("tripped over null pointer exception", e);
        }
    }
    DatasetBean db = (DatasetBean) dsdao.findByPK(datasetId);
    StudyDAO sdao = new StudyDAO(sm.getDataSource());
    StudyBean study = (StudyBean) sdao.findByPK(db.getStudyId());
    checkRoleByUserAndStudy(ub, study.getParentStudyId(), study.getId());
    // Checks if the study is current study or child of current study
    if (study.getId() != currentStudy.getId() && study.getParentStudyId() != currentStudy.getId()) {
        addPageMessage(respage.getString("no_have_correct_privilege_current_study") + " " + respage.getString("change_active_study_or_contact"));
        forwardPage(Page.MENU_SERVLET);
        return;
    }
    /**
     * @vbc 08/06/2008 NEW EXTRACT DATA IMPLEMENTATION, get study_id and parentstudy_id:
     *      int currentstudyid = currentStudy.getId();
     *      int parentstudy = currentStudy.getParentStudyId();
     *      if (parentstudy > 0) { // is OK
     *      } else { // same
     *          parentstudy = currentstudyid;
     *      }
     */
    int currentstudyid = currentStudy.getId();
    // YW 11-09-2008 << modified logic here.
    int parentstudy = currentstudyid;
    // YW 11-09-2008 >>
    StudyBean parentStudy = new StudyBean();
    if (currentStudy.getParentStudyId() > 0) {
        // StudyDAO sdao = new StudyDAO(sm.getDataSource());
        parentStudy = (StudyBean) sdao.findByPK(currentStudy.getParentStudyId());
    }
    ExtractBean eb = generateFileService.generateExtractBean(db, currentStudy, parentStudy);
    if (StringUtil.isBlank(action)) {
        loadList(db, asdfdao, datasetId, fp, eb);
        forwardPage(Page.EXPORT_DATASETS);
    } else if ("delete".equalsIgnoreCase(action) && adfId > 0) {
        boolean success = false;
        ArchivedDatasetFileBean adfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(adfId);
        File file = new File(adfBean.getFileReference());
        if (!file.canWrite()) {
            addPageMessage(respage.getString("write_protected"));
        } else {
            success = file.delete();
            if (success) {
                asdfdao.deleteArchiveDataset(adfBean);
                addPageMessage(respage.getString("file_removed"));
            } else {
                addPageMessage(respage.getString("error_removing_file"));
            }
        }
        loadList(db, asdfdao, datasetId, fp, eb);
        forwardPage(Page.EXPORT_DATASETS);
    } else {
        logger.info("**** found action ****: " + action);
        String generateReport = "";
        // generate file, and show screen export
        // String generalFileDir = DATASET_DIR + db.getId() +
        // File.separator;
        // change this up, so that we don't overwrite anything
        String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
        SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
        String generalFileDir = DATASET_DIR + db.getId() + File.separator + sdfDir.format(new java.util.Date());
        String fileName = "";
        db.setName(db.getName().replaceAll(" ", "_"));
        Page finalTarget = Page.EXPORT_DATA_CUSTOM;
        // now display report according to format specified
        // TODO revise final target to set to fileReference????
        long sysTimeBegin = System.currentTimeMillis();
        int fId = 0;
        if ("sas".equalsIgnoreCase(action)) {
            // generateReport =
            // dsdao.generateDataset(db,
            // ExtractBean.SAS_FORMAT,
            // currentStudy,
            // parentStudy);
            long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
            String SASFileName = db.getName() + "_sas.sas";
            // logger.info("found data set: "+generateReport);
            generateFileService.createFile(SASFileName, generalFileDir, generateReport, db, sysTimeEnd, ExportFormatBean.TXTFILE, true, ub);
            logger.info("created sas file");
            request.setAttribute("generate", generalFileDir + SASFileName);
            finalTarget.setFileName(generalFileDir + SASFileName);
            fileName = SASFileName;
        // won't work since page creator is private
        } else if ("odm".equalsIgnoreCase(action)) {
            String odmVersion = fp.getString("odmVersion");
            String ODMXMLFileName = "";
            // DRY
            // HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "");
            HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "", eb, currentStudy.getId(), currentStudy.getParentStudyId(), "99", true, true, true, null, ub);
            for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
                java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
                Object key = entry.getKey();
                Object value = entry.getValue();
                ODMXMLFileName = (String) key;
                Integer fileID = (Integer) value;
                fId = fileID.intValue();
            }
            fileName = ODMXMLFileName;
            request.setAttribute("generate", generalFileDir + ODMXMLFileName);
            logger.debug("+++ set the following: " + generalFileDir + ODMXMLFileName);
            // send a link with the SQL file? put the generated SQL file with the dataset?
            if (fp.getString("xalan") != null) {
                XalanTriggerService xts = new XalanTriggerService();
                String propertiesPath = SQLInitServlet.getField("filePath");
                // the trick there, we need to open up the zipped file and get at the XML
                openZipFile(generalFileDir + ODMXMLFileName + ".zip");
                // need to find out how to copy this xml file from /bin to the generalFileDir
                SimpleTrigger simpleTrigger = xts.generateXalanTrigger(propertiesPath + File.separator + "ODMReportStylesheet.xsl", ODMXMLFileName, generalFileDir + "output.sql", db.getId());
                scheduler = getScheduler();
                JobDetailBean jobDetailBean = new JobDetailBean();
                jobDetailBean.setGroup(xts.TRIGGER_GROUP_NAME);
                jobDetailBean.setName(simpleTrigger.getName());
                jobDetailBean.setJobClass(org.akaza.openclinica.web.job.XalanStatefulJob.class);
                jobDetailBean.setJobDataMap(simpleTrigger.getJobDataMap());
                // need durability?
                jobDetailBean.setDurability(true);
                jobDetailBean.setVolatility(false);
                try {
                    Date dateStart = scheduler.scheduleJob(jobDetailBean, simpleTrigger);
                    logger.info("== found job date: " + dateStart.toString());
                } catch (SchedulerException se) {
                    se.printStackTrace();
                }
            }
        } else if ("txt".equalsIgnoreCase(action)) {
            // generateReport =
            // dsdao.generateDataset(db,
            // ExtractBean.TXT_FORMAT,
            // currentStudy,
            // parentStudy);
            // eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            String TXTFileName = "";
            HashMap answerMap = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, db, currentstudyid, parentstudy, "", ub);
            // and of course DRY
            for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
                java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
                Object key = entry.getKey();
                Object value = entry.getValue();
                TXTFileName = (String) key;
                Integer fileID = (Integer) value;
                fId = fileID.intValue();
            }
            fileName = TXTFileName;
            request.setAttribute("generate", generalFileDir + TXTFileName);
            // finalTarget.setFileName(generalFileDir+TXTFileName);
            logger.debug("+++ set the following: " + generalFileDir + TXTFileName);
        } else if ("html".equalsIgnoreCase(action)) {
            // html based dataset browser
            TabReportBean answer = new TabReportBean();
            eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            eb.getMetadata();
            eb.computeReport(answer);
            request.setAttribute("dataset", db);
            request.setAttribute("extractBean", eb);
            finalTarget = Page.GENERATE_DATASET_HTML;
        } else if ("spss".equalsIgnoreCase(action)) {
            SPSSReportBean answer = new SPSSReportBean();
            // removed three lines here and put them in generate file
            // service, createSPSSFile method. tbh 01/2009
            eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            eb.getMetadata();
            eb.computeReport(answer);
            // System.out.println("*** isShowCRFversion:
            // "+db.isShowCRFversion());
            // TODO in the spirit of DRY, if this works we need to remove
            // lines 443-776 in this servlet, tbh 01/2009
            String DDLFileName = "";
            HashMap answerMap = generateFileService.createSPSSFile(db, eb, currentStudy, parentStudy, sysTimeBegin, generalFileDir, answer, "", ub);
            // hmm, DRY?
            for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
                java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
                Object key = entry.getKey();
                Object value = entry.getValue();
                DDLFileName = (String) key;
                Integer fileID = (Integer) value;
                fId = fileID.intValue();
            }
            request.setAttribute("generate", generalFileDir + DDLFileName);
            logger.debug("+++ set the following: " + generalFileDir + DDLFileName);
        } else if ("csv".equalsIgnoreCase(action)) {
            CommaReportBean answer = new CommaReportBean();
            eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
            eb.getMetadata();
            eb.computeReport(answer);
            long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
            // logger.info("found data set: "+generateReport);
            String CSVFileName = db.getName() + "_comma.txt";
            fId = generateFileService.createFile(CSVFileName, generalFileDir, answer.toString(), db, sysTimeEnd, ExportFormatBean.CSVFILE, true, ub);
            fileName = CSVFileName;
            logger.info("just created csv file");
            request.setAttribute("generate", generalFileDir + CSVFileName);
        // finalTarget.setFileName(generalFileDir+CSVFileName);
        } else if ("excel".equalsIgnoreCase(action)) {
            // HSSFWorkbook excelReport = dsdao.generateExcelDataset(db,
            // ExtractBean.XLS_FORMAT,
            // currentStudy,
            // parentStudy);
            long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
            // TODO this will change and point to a created excel
            // spreadsheet, tbh
            String excelFileName = db.getName() + "_excel.xls";
            // fId = this.createFile(excelFileName,
            // generalFileDir,
            // excelReport,
            // db, sysTimeEnd,
            // ExportFormatBean.EXCELFILE);
            // logger.info("just created csv file, for excel output");
            // response.setHeader("Content-disposition","attachment;
            // filename="+CSVFileName);
            // logger.info("csv file name: "+CSVFileName);
            finalTarget = Page.GENERATE_EXCEL_DATASET;
            // response.setContentType("application/vnd.ms-excel");
            response.setHeader("Content-Disposition", "attachment; filename=" + db.getName() + "_excel.xls");
            request.setAttribute("generate", generalFileDir + excelFileName);
            logger.info("set 'generate' to :" + generalFileDir + excelFileName);
            fileName = excelFileName;
        // excelReport.write(stream);
        // stream.flush();
        // stream.close();
        // finalTarget.setFileName(WEB_DIR+db.getId()+"/"+excelFileName);
        }
        // <%@page contentType="application/vnd.ms-excel"%>
        if (!finalTarget.equals(Page.GENERATE_EXCEL_DATASET) && !finalTarget.equals(Page.GENERATE_DATASET_HTML)) {
            // to catch all the others and try to set a new path for file
            // capture
            // tbh, 4-18-05
            // request.setAttribute("generate",finalTarget.getFileName());
            // TODO changing path to show refresh page, then window with
            // link to download file, tbh 06-08-05
            // finalTarget.setFileName(
            // "/WEB-INF/jsp/extract/generatedFileDataset.jsp");
            finalTarget.setFileName("" + "/WEB-INF/jsp/extract/generateMetadataCore.jsp");
            // also set up table here???
            asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
            ArchivedDatasetFileBean asdfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(fId);
            // *** do we need this below? tbh
            ArrayList newFileList = new ArrayList();
            newFileList.add(asdfBean);
            // request.setAttribute("filelist",newFileList);
            ArrayList filterRows = ArchivedDatasetFileRow.generateRowsFromBeans(newFileList);
            EntityBeanTable table = fp.getEntityBeanTable();
            // sort by date
            table.setSortingIfNotExplicitlySet(3, false);
            String[] columns = { resword.getString("file_name"), resword.getString("run_time"), resword.getString("file_size"), resword.getString("created_date"), resword.getString("created_by") };
            table.setColumns(new ArrayList(Arrays.asList(columns)));
            table.hideColumnLink(0);
            table.hideColumnLink(1);
            table.hideColumnLink(2);
            table.hideColumnLink(3);
            table.hideColumnLink(4);
            // table.setQuery("ExportDataset?datasetId=" +db.getId(), new
            // HashMap());
            // trying to continue...
            // session.setAttribute("newDataset",db);
            request.setAttribute("dataset", db);
            request.setAttribute("file", asdfBean);
            table.setRows(filterRows);
            table.computeDisplay();
            request.setAttribute("table", table);
        // *** do we need this above? tbh
        }
        logger.info("set first part of 'generate' to :" + generalFileDir);
        logger.info("found file name: " + finalTarget.getFileName());
        // String del = CoreResources.getField("dataset_file_delete");
        // if (del.equalsIgnoreCase("true") || del.equals("")) {
        // File deleteFile = new File(generalFileDir + fileName);
        // deleteFile.delete();
        // }
        forwardPage(finalTarget);
    }
}
Also used: ArchivedDatasetFileDAO (org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO), SchedulerException (org.quartz.SchedulerException), HashMap (java.util.HashMap), Date (java.util.Date), EntityBeanTable (org.akaza.openclinica.web.bean.EntityBeanTable), DatasetBean (org.akaza.openclinica.bean.extract.DatasetBean), ArrayList (java.util.ArrayList), Page (org.akaza.openclinica.view.Page), CommaReportBean (org.akaza.openclinica.bean.extract.CommaReportBean), ZipEntry (java.util.zip.ZipEntry), Iterator (java.util.Iterator), TabReportBean (org.akaza.openclinica.bean.extract.TabReportBean), SimpleTrigger (org.quartz.SimpleTrigger), StudyDAO (org.akaza.openclinica.dao.managestudy.StudyDAO), ArchivedDatasetFileBean (org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean), SPSSReportBean (org.akaza.openclinica.bean.extract.SPSSReportBean), GenerateExtractFileService (org.akaza.openclinica.service.extract.GenerateExtractFileService), FormProcessor (org.akaza.openclinica.control.form.FormProcessor), StudyBean (org.akaza.openclinica.bean.managestudy.StudyBean), JobDetailBean (org.springframework.scheduling.quartz.JobDetailBean), DatasetDAO (org.akaza.openclinica.dao.extract.DatasetDAO), XalanTriggerService (org.akaza.openclinica.web.job.XalanTriggerService), ExtractBean (org.akaza.openclinica.bean.extract.ExtractBean), ZipFile (java.util.zip.ZipFile), File (java.io.File), SimpleDateFormat (java.text.SimpleDateFormat)
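
The "// and of course DRY" and "// hmm, DRY?" comments above flag three identical loops that pull the single file-name/file-id entry out of answerMap. A hypothetical helper (not part of OpenClinica) that each branch could call instead:

// Hedged sketch: createODMFile, createTabFile and createSPSSFile each return a
// map holding exactly one (file name -> file id) entry; this assumes that
// contract holds.
static java.util.Map.Entry<String, Integer> singleEntry(java.util.Map<String, Integer> answerMap) {
    return answerMap.entrySet().iterator().next();
}

Each branch would then collapse to:

java.util.Map.Entry<String, Integer> e = singleEntry(answerMap);
fileName = e.getKey();
fId = e.getValue();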

Example 5 with ArchivedDatasetFileDAO

Use of org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO in the project OpenClinica by OpenClinica.

From the class GenerateExtractFileService, method createFile:

public int createFile(String name, String dir, String content, DatasetBean datasetBean, long time, ExportFormatBean efb, boolean saveToDB, UserAccountBean userBean) {
    ArchivedDatasetFileBean fbFinal = new ArchivedDatasetFileBean();
    // >> tbh 04/2010 #4915 replace all names' spaces with underscores
    name = name.replaceAll(" ", "_");
    fbFinal.setId(0);
    try {
        File complete = new File(dir);
        if (!complete.isDirectory()) {
            complete.mkdirs();
        }
        File newFile = new File(complete, name);
        newFile.setLastModified(System.currentTimeMillis());
        BufferedWriter w = new BufferedWriter(new FileWriter(newFile));
        w.write(content);
        w.close();
        logger.info("finished writing the text file...");
        // now, we write the file to the zip file
        FileInputStream is = new FileInputStream(newFile);
        ZipOutputStream z = new ZipOutputStream(new FileOutputStream(new File(complete, name + ".zip")));
        logger.info("created zip output stream...");
        // we write over the content no matter what
        // we then check to make sure there are no duplicates
        // TODO need to change the above -- save all content!
        // z.write(content);
        z.putNextEntry(new java.util.zip.ZipEntry(name));
        // int length = (int) newFile.length();
        int bytesRead;
        byte[] buff = new byte[512];
        // while (-1 != (bytesRead = bis.read(buff, 0, buff.length))) {
        while ((bytesRead = is.read(buff)) != -1) {
            z.write(buff, 0, bytesRead);
        }
        logger.info("writing buffer...");
        // }
        z.closeEntry();
        z.finish();
        // w2.close();
        if (is != null) {
            try {
                is.close();
            } catch (java.io.IOException ie) {
                ie.printStackTrace();
            }
        }
        logger.info("finished zipping up file...");
        // set up the zip to go into the database
        if (saveToDB) {
            ArchivedDatasetFileBean fb = new ArchivedDatasetFileBean();
            fb.setName(name + ".zip");
            // logger.info("ODM filename: " + name + ".zip");
            fb.setFileReference(dir + name + ".zip");
            // logger.info("ODM fileReference: " + dir + name + ".zip");
            // current location of the file on the system
            fb.setFileSize((int) newFile.length());
            // logger.info("ODM setFileSize: " + (int)newFile.length() );
            // set the above to compressed size?
            fb.setRunTime((int) time);
            // logger.info("ODM setRunTime: " + (int)time );
            // need to set this in milliseconds, get it passed from above
            // methods?
            fb.setDatasetId(datasetBean.getId());
            // logger.info("ODM setDatasetid: " + ds.getId() );
            fb.setExportFormatBean(efb);
            // logger.info("ODM setExportFormatBean: success" );
            fb.setExportFormatId(efb.getId());
            // logger.info("ODM setExportFormatId: " + efb.getId());
            fb.setOwner(userBean);
            // logger.info("ODM setOwner: " + sm.getUserBean());
            fb.setOwnerId(userBean.getId());
            // logger.info("ODM setOwnerId: " + sm.getUserBean().getId() );
            fb.setDateCreated(new Date(System.currentTimeMillis()));
            boolean write = true;
            ArchivedDatasetFileDAO asdfDAO = new ArchivedDatasetFileDAO(ds);
            // eliminating all checks so that we create multiple files, tbh 6-7
            if (write) {
                fbFinal = (ArchivedDatasetFileBean) asdfDAO.create(fb);
            } else {
                logger.info("duplicate found: " + fb.getName());
            }
        }
    // created in database!
    } catch (Exception e) {
        logger.error("-- exception thrown at createFile", e);
    }
    return fbFinal.getId();
}
Also used: ArchivedDatasetFileDAO (org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO), FileWriter (java.io.FileWriter), IOException (java.io.IOException), FileInputStream (java.io.FileInputStream), Date (java.util.Date), BufferedWriter (java.io.BufferedWriter), ZipOutputStream (java.util.zip.ZipOutputStream), FileOutputStream (java.io.FileOutputStream), File (java.io.File), ArchivedDatasetFileBean (org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean)
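
createFile copies the freshly written file into the zip through a manual 512-byte buffer loop and closes the streams by hand. A compact sketch of the same zip step using java.nio (an assumed modernization, not the project's code):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

// Hedged sketch: zips a single file, closing both streams automatically.
static void zipSingleFile(Path source, Path zipFile) throws IOException {
    try (ZipOutputStream z = new ZipOutputStream(Files.newOutputStream(zipFile))) {
        z.putNextEntry(new ZipEntry(source.getFileName().toString()));
        Files.copy(source, z); // streams the file body into the current entry
        z.closeEntry();
    }
}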

Aggregations

ArchivedDatasetFileDAO (org.akaza.openclinica.dao.extract.ArchivedDatasetFileDAO): 10 uses
ArchivedDatasetFileBean (org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean): 9 uses
File (java.io.File): 5 uses
Date (java.util.Date): 5 uses
BufferedWriter (java.io.BufferedWriter): 4 uses
FileWriter (java.io.FileWriter): 4 uses
IOException (java.io.IOException): 4 uses
DatasetDAO (org.akaza.openclinica.dao.extract.DatasetDAO): 4 uses
DatasetBean (org.akaza.openclinica.bean.extract.DatasetBean): 3 uses
FormProcessor (org.akaza.openclinica.control.form.FormProcessor): 3 uses
StudyDAO (org.akaza.openclinica.dao.managestudy.StudyDAO): 3 uses
Page (org.akaza.openclinica.view.Page): 3 uses
FileInputStream (java.io.FileInputStream): 2 uses
FileOutputStream (java.io.FileOutputStream): 2 uses
ArrayList (java.util.ArrayList): 2 uses
ZipOutputStream (java.util.zip.ZipOutputStream): 2 uses
StudyBean (org.akaza.openclinica.bean.managestudy.StudyBean): 2 uses
GenerateExtractFileService (org.akaza.openclinica.service.extract.GenerateExtractFileService): 2 uses
EntityBeanTable (org.akaza.openclinica.web.bean.EntityBeanTable): 2 uses
SchedulerException (org.quartz.SchedulerException): 2 uses