
Example 6 with CoreResources

use of org.akaza.openclinica.dao.core.CoreResources in project OpenClinica by OpenClinica.

the class ExtractController method processSubmit.

/**
     * Process the page from whence you came, i.e. extract a dataset.
     * @param id the id of the extract properties bean, obtained from CoreResources
     * @param datasetId the id of the dataset, found through DatasetDAO
     * @param request the HTTP request
     * @param response the HTTP response, used to redirect if authentication fails
     * @return the model map; more importantly, this creates a Quartz job which runs right away and generates all output there
     */
@RequestMapping(method = RequestMethod.GET)
public ModelMap processSubmit(@RequestParam("id") String id, @RequestParam("datasetId") String datasetId, HttpServletRequest request, HttpServletResponse response) {
    if (!mayProceed(request)) {
        try {
            response.sendRedirect(request.getContextPath() + "/MainMenu?message=authentication_failed");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
    ModelMap map = new ModelMap();
    ResourceBundleProvider.updateLocale(LocaleResolver.getLocale(request));
    // String datasetId = (String)request.getAttribute("datasetId");
    // String id = (String)request.getAttribute("id");
    logger.debug("found both id " + id + " and dataset " + datasetId);
    ExtractUtils extractUtils = new ExtractUtils();
    // get extract id
    // get dataset id
    // if id is a number and dataset id is a number ...
    datasetDao = new DatasetDAO(dataSource);
    UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
    CoreResources cr = new CoreResources();
    ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(new Integer(id).intValue(), datasetId);
    DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(new Integer(datasetId).intValue());
    // set the job in motion
    String[] files = epBean.getFileName();
    String exportFileName;
    int fileSize = files.length;
    int cnt = 0;
    SimpleTrigger simpleTrigger = null;
    // TODO: if the files and export file name arrays are not the same size, throw an error
    dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
    String[] exportFiles = epBean.getExportFileName();
    String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
    SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
    int i = 0;
    String[] temp = new String[exportFiles.length];
    // JN: The following logic handles comma-separated values, to avoid the second file being treated as an old file and deleted.
    while (i < exportFiles.length) {
        temp[i] = resolveVars(exportFiles[i], dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
        i++;
    }
    epBean.setDoNotDelFiles(temp);
    epBean.setExportFileName(temp);
    XsltTriggerService xsltService = new XsltTriggerService();
    // TODO get a user bean somehow?
    String generalFileDir = SQLInitServlet.getField("filePath");
    generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
    exportFileName = epBean.getExportFileName()[cnt];
    // need to set the dataset path here, tbh
    logger.debug("found odm xml file path " + generalFileDir);
    // next, can already run jobs, translations, and then add a message to be notified later
    //JN all the properties need to have the variables...
    String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
    String endFilePath = epBean.getFileLocation();
    endFilePath = getEndFilePath(endFilePath, dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
    //  exportFileName = resolveVars(exportFileName,dsBean,sdfDir);
    if (epBean.getPostProcExportName() != null) {
        //String preProcExportPathName = getEndFilePath(epBean.getPostProcExportName(),dsBean,sdfDir);
        String preProcExportPathName = resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
        epBean.setPostProcExportName(preProcExportPathName);
    }
    if (epBean.getPostProcLocation() != null) {
        String prePocLoc = getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, SQLInitServlet.getField("filePath"), extractUtils);
        epBean.setPostProcLocation(prePocLoc);
    }
    setAllProps(epBean, dsBean, sdfDir, extractUtils);
    // also need to add the status fields discussed w/ cc:
    // result code, user message, optional URL, archive message, log file message
    // asdf table: sort most recent at top
    logger.debug("found xslt file name " + xsltPath);
    // String xmlFilePath = generalFileDir + ODMXMLFileName;
    simpleTrigger = xsltService.generateXsltTrigger(scheduler, xsltPath,
            generalFileDir, // xml_file_path
            endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean,
            LocaleResolver.getLocale(request).getLanguage(), cnt,
            SQLInitServlet.getField("filePath") + "xslt", this.TRIGGER_GROUP_NAME);
    // System.out.println("just set locale: " + LocaleResolver.getLocale(request).getLanguage());
    cnt++;
    ApplicationContext context = null;
    try {
        context = (ApplicationContext) scheduler.getContext().get("applicationContext");
    } catch (SchedulerException e) {
        e.printStackTrace();
    }
    //WebApplicationContext context = ContextLoader.getCurrentWebApplicationContext();
    JobDetailFactoryBean jobDetailFactoryBean = context.getBean(JobDetailFactoryBean.class, simpleTrigger, this.TRIGGER_GROUP_NAME);
    try {
        Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), simpleTrigger);
        logger.debug("== found job date: " + dateStart.toString());
    } catch (SchedulerException se) {
        se.printStackTrace();
    }
    request.setAttribute("datasetId", datasetId);
    // set the job name here in the user's session, so that we can ping the scheduler to pull it out later
    if (jobDetailFactoryBean != null)
        request.getSession().setAttribute("jobName", jobDetailFactoryBean.getObject().getKey().getName());
    if (simpleTrigger != null)
        request.getSession().setAttribute("groupName", this.TRIGGER_GROUP_NAME);
    request.getSession().setAttribute("datasetId", new Integer(dsBean.getId()));
    return map;
}
Also used : SchedulerException(org.quartz.SchedulerException) Date(java.util.Date) ModelMap(org.springframework.ui.ModelMap) CoreResources(org.akaza.openclinica.dao.core.CoreResources) DatasetBean(org.akaza.openclinica.bean.extract.DatasetBean) DatasetDAO(org.akaza.openclinica.dao.extract.DatasetDAO) WebApplicationContext(org.springframework.web.context.WebApplicationContext) ApplicationContext(org.springframework.context.ApplicationContext) ExtractUtils(org.akaza.openclinica.service.extract.ExtractUtils) UserAccountBean(org.akaza.openclinica.bean.login.UserAccountBean) ExtractPropertyBean(org.akaza.openclinica.bean.extract.ExtractPropertyBean) XsltTriggerService(org.akaza.openclinica.service.extract.XsltTriggerService) SimpleTrigger(org.quartz.SimpleTrigger) SimpleDateFormat(java.text.SimpleDateFormat) JobDetailFactoryBean(org.springframework.scheduling.quartz.JobDetailFactoryBean) RequestMapping(org.springframework.web.bind.annotation.RequestMapping)
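
For reference, a minimal sketch (not OpenClinica source) of the CoreResources lookup that processSubmit relies on: findExtractPropertyBeanById returns the ExtractPropertyBean whose XSLT and export file names drive the Quartz job above. The extractId and datasetId values below are hypothetical placeholders.

// Hedged sketch, not part of the OpenClinica codebase; the ids are placeholders.
CoreResources cr = new CoreResources();
int extractId = 1;                    // hypothetical: the "id" request parameter above
String datasetId = "42";              // hypothetical: the "datasetId" request parameter above
ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(extractId, datasetId);
if (epBean != null) {
    String[] xsltFiles = epBean.getFileName();           // XSLT file names, one per output
    String[] exportFiles = epBean.getExportFileName();   // export file name patterns
    String targetLocation = epBean.getFileLocation();    // end file location pattern
}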

Example 7 with CoreResources

use of org.akaza.openclinica.dao.core.CoreResources in project OpenClinica by OpenClinica.

the class SQLInitServlet method init.

@Override
public void init() throws ServletException {
    context = getServletContext();
    CoreResources cr = (CoreResources) SpringServletAccess.getApplicationContext(context).getBean("coreResources");
    params = cr.getDATAINFO();
    entParams = cr.getDATAINFO();
    //        params = (Properties) SpringServletAccess.getApplicationContext(context).getBean("dataInfo");
    //        entParams = (Properties) SpringServletAccess.getApplicationContext(context).getBean("enterpriseInfo");
    ConfigurationDao configurationDao = SpringServletAccess.getApplicationContext(context).getBean(ConfigurationDao.class);
    Role.COORDINATOR.setDescription(getField("coordinator"));
    Role.STUDYDIRECTOR.setDescription(getField("director"));
    Role.INVESTIGATOR.setDescription(getField("investigator"));
    Role.RESEARCHASSISTANT.setDescription(getField("ra"));
    Role.RESEARCHASSISTANT2.setDescription(getField("ra2"));
    Role.MONITOR.setDescription(getField("monitor"));
    Page.INITIAL_DATA_ENTRY_NW.getFileName();
    // The crf/original/ CRF Template will be created if it does not exist.
    String theDir = getField("filePath");
    String dir1 = "crf" + File.separator;
    String dir2 = "original" + File.separator;
    String dirRules = "rules";
    // Create the rules directory if it does not exist (Mantis issue 6584).
    if (!(new File(theDir)).isDirectory() || !(new File(dirRules)).isDirectory()) {
        (new File(theDir + dirRules)).mkdirs();
    }
    if (!(new File(theDir)).isDirectory() || !(new File(dir1)).isDirectory() || !(new File(dir2)).isDirectory()) {
        (new File(theDir + dir1 + dir2)).mkdirs();
        copyTemplate(theDir + dir1 + dir2 + DownloadVersionSpreadSheetServlet.CRF_VERSION_TEMPLATE);
    }
    theDir = theDir + dir1 + dir2;
    File excelFile = new File(theDir + DownloadVersionSpreadSheetServlet.CRF_VERSION_TEMPLATE);
    if (!excelFile.isFile()) {
        copyTemplate(theDir);
    }
    // 'passwd_expiration_time' and 'change_passwd_required' are now defined in the database
    // Here the values in the datainfo.properties file (if any) are overridden.
    overridePropertyFromDatabase(configurationDao, "pwd.expiration.days", params, "passwd_expiration_time");
    overridePropertyFromDatabase(configurationDao, "pwd.change.required", params, "change_passwd_required");
}
Also used : ConfigurationDao(org.akaza.openclinica.dao.hibernate.ConfigurationDao) CoreResources(org.akaza.openclinica.dao.core.CoreResources) File(java.io.File)
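
A hedged sketch of how the configuration loaded in init() is typically read elsewhere, assuming getField looks values up in the params Properties populated from CoreResources.getDATAINFO(); the two property keys are the ones already used in these examples.

// Hedged sketch; assumes getField reads the properties populated in init().
String filePath = SQLInitServlet.getField("filePath");                    // base directory for datasets, xslt, crf templates
String pwdExpiration = SQLInitServlet.getField("passwd_expiration_time"); // value overridden from the database in init()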

Example 8 with CoreResources

use of org.akaza.openclinica.dao.core.CoreResources in project OpenClinica by OpenClinica.

the class OCServletContextListener method contextInitialized.

@Override
public void contextInitialized(ServletContextEvent event) {
    logger.debug("OCServletContextListener -> contextInitialized");
    CoreResources cr = (CoreResources) SpringServletAccess.getApplicationContext(event.getServletContext()).getBean("coreResources");
    // @pgawade 25-March-2011 changes for sending usage statistics from
    // OpenClinica instance
    ServletContext context = event.getServletContext();
    // Save OpenClinica version to database
    getOpenClinicaVersionDAO(context).saveOCVersionToDB(CoreResources.getField(OpenClinicaVersion));
    // Fetch the OpenClinica started event details
    Map<String, String> OCStartEventDetails = getEventDetailsOCStart(context);
    // JsonLog usage statistics event: OpenClinica started
    LogUsageStatsService.logEventOCStart(OCStartEventDetails);
    // Save the OpenClinica start time into database
    getUsageStatsServiceDAO(context).saveOCStartTimeToDB();
}
Also used : CoreResources(org.akaza.openclinica.dao.core.CoreResources) ServletContext(javax.servlet.ServletContext)
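
To recap, a hedged sketch (assumed usage, not OpenClinica source) of the two CoreResources access patterns these examples show: fetching the managed bean from the Spring context (Examples 7 and 8) and reading a field through the static getField helper (Example 8). It assumes getDATAINFO() returns a java.util.Properties, as the commented-out casts in Example 7 suggest; OpenClinicaVersion is the constant referenced in contextInitialized above.

// Hedged sketch; servletContext would come from the servlet container.
CoreResources cr = (CoreResources) SpringServletAccess
        .getApplicationContext(servletContext).getBean("coreResources");  // managed bean lookup, as in Examples 7 and 8
java.util.Properties dataInfo = cr.getDATAINFO();                         // instance access, as in SQLInitServlet.init()
String version = CoreResources.getField(OpenClinicaVersion);              // static access, as in OCServletContextListener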

Aggregations

CoreResources (org.akaza.openclinica.dao.core.CoreResources) 8
SimpleDateFormat (java.text.SimpleDateFormat) 4
DatasetBean (org.akaza.openclinica.bean.extract.DatasetBean) 4
UserAccountBean (org.akaza.openclinica.bean.login.UserAccountBean) 4
DatasetDAO (org.akaza.openclinica.dao.extract.DatasetDAO) 4
ExtractPropertyBean (org.akaza.openclinica.bean.extract.ExtractPropertyBean) 3
StudyDAO (org.akaza.openclinica.dao.managestudy.StudyDAO) 3
ExtractUtils (org.akaza.openclinica.service.extract.ExtractUtils) 3
XsltTriggerService (org.akaza.openclinica.service.extract.XsltTriggerService) 3
SimpleTrigger (org.quartz.SimpleTrigger) 3
JobDetailFactoryBean (org.springframework.scheduling.quartz.JobDetailFactoryBean) 3
File (java.io.File) 2
StudyBean (org.akaza.openclinica.bean.managestudy.StudyBean) 2
FormProcessor (org.akaza.openclinica.control.form.FormProcessor) 2
TriggerService (org.akaza.openclinica.web.job.TriggerService) 2
SchedulerException (org.quartz.SchedulerException) 2
ApplicationContext (org.springframework.context.ApplicationContext) 2
FileOutputStream (java.io.FileOutputStream) 1
InputStream (java.io.InputStream) 1
OutputStream (java.io.OutputStream) 1