Use of org.akaza.openclinica.bean.admin.TriggerBean in project OpenClinica by OpenClinica.
The class XsltTransformJob, method executeInternal:
@Override
protected void executeInternal(JobExecutionContext context) {
logger.info("Job " + context.getJobDetail().getDescription() + " started.");
initDependencies(context.getScheduler());
// need to generate a Locale for emailing users with i18n
// TODO make dynamic?
Locale locale = new Locale("en", "US");
ResourceBundleProvider.updateLocale(locale);
ResourceBundle pageMessages = ResourceBundleProvider.getPageMessagesBundle();
List<File> markForDelete = new LinkedList<File>();
Boolean zipped = true;
Boolean deleteOld = true;
Boolean exceptions = false;
JobDataMap dataMap = context.getMergedJobDataMap();
String localeStr = dataMap.getString(LOCALE);
String[] doNotDeleteUntilExtract = new String[4];
int cnt = dataMap.getInt("count");
DatasetBean datasetBean = null;
if (localeStr != null) {
locale = new Locale(localeStr);
ResourceBundleProvider.updateLocale(locale);
pageMessages = ResourceBundleProvider.getPageMessagesBundle();
}
// get the file information from the job
String alertEmail = dataMap.getString(EMAIL);
java.io.InputStream in = null;
FileOutputStream endFileStream = null;
UserAccountBean userBean = null;
try {
// init all fields from the data map
int userAccountId = dataMap.getInt(USER_ID);
int studyId = dataMap.getInt(STUDY_ID);
String outputPath = dataMap.getString(POST_FILE_PATH);
// get all user info, generate xml
logger.debug("found output path: " + outputPath);
String generalFileDir = dataMap.getString(XML_FILE_PATH);
int dsId = dataMap.getInt(DATASET_ID);
// JN: changed from earlier versions: a static reference does not work here,
// because values such as datasetId differ for each dataset and
// must be loaded dynamically
ExtractPropertyBean epBean = (ExtractPropertyBean) dataMap.get(EP_BEAN);
File doNotDelDir = new File(generalFileDir);
if (doNotDelDir.isDirectory()) {
doNotDeleteUntilExtract = doNotDelDir.list();
}
zipped = epBean.getZipFormat();
deleteOld = epBean.getDeleteOld();
long sysTimeBegin = System.currentTimeMillis();
userBean = (UserAccountBean) userAccountDao.findByPK(userAccountId);
StudyBean currentStudy = (StudyBean) studyDao.findByPK(studyId);
StudyBean parentStudy = (StudyBean) studyDao.findByPK(currentStudy.getParentStudyId());
String successMsg = epBean.getSuccessMessage();
String failureMsg = epBean.getFailureMessage();
final long start = System.currentTimeMillis();
datasetBean = (DatasetBean) datasetDao.findByPK(dsId);
ExtractBean eb = generateFileService.generateExtractBean(datasetBean, currentStudy, parentStudy);
// generate file directory for file service
datasetBean.setName(datasetBean.getName().replaceAll(" ", "_"));
logger.debug("--> job starting: ");
HashMap<String, Integer> answerMap = odmFileCreation.createODMFile(epBean.getFormat(), sysTimeBegin, generalFileDir, datasetBean, currentStudy, "", eb, currentStudy.getId(), currentStudy.getParentStudyId(), "99", (Boolean) dataMap.get(ZIPPED), false, (Boolean) dataMap.get(DELETE_OLD), epBean.getOdmType(), userBean);
// we don't save a record of the XML to the db, and the file is not zipped,
// so that we can submit it for transformation; whether the export file
// itself is zipped is controlled by the extract properties
String ODMXMLFileName = "";
int fId = 0;
Iterator<Entry<String, Integer>> it = answerMap.entrySet().iterator();
while (it.hasNext()) {
JobTerminationMonitor.check();
Entry<String, Integer> entry = it.next();
String key = entry.getKey();
Integer value = entry.getValue();
// JN: since there is logic to delete all the intermediate files,
// this file could be a zip file
ODMXMLFileName = key;
Integer fileID = value;
fId = fileID.intValue();
logger.debug("found " + fId + " and " + ODMXMLFileName);
}
logger.info("Finished ODM generation of job " + context.getJobDetail().getDescription());
// create dirs
File output = new File(outputPath);
if (!output.isDirectory()) {
output.mkdirs();
}
int numXLS = epBean.getFileName().length;
int fileCntr = 0;
String xmlFilePath = new File(generalFileDir + ODMXMLFileName).toURI().toURL().toExternalForm();
String endFile = null;
File oldFilesPath = new File(generalFileDir);
while (fileCntr < numXLS) {
JobTerminationMonitor.check();
String xsltPath = dataMap.getString(XSLT_PATH) + File.separator + epBean.getFileName()[fileCntr];
in = new java.io.FileInputStream(xsltPath);
Transformer transformer = transformerFactory.newTransformer(new StreamSource(in));
endFile = outputPath + File.separator + epBean.getExportFileName()[fileCntr];
endFileStream = new FileOutputStream(endFile);
transformer.transform(new StreamSource(xmlFilePath), new StreamResult(endFileStream));
// JN: close the streams
in.close();
endFileStream.close();
fileCntr++;
JobTerminationMonitor.check();
}
if (oldFilesPath.isDirectory()) {
markForDelete = Arrays.asList(oldFilesPath.listFiles());
// logic to prevent deleting the file being created.
}
final double done = setFormat((System.currentTimeMillis() - start) / 1000.0);
logger.info("--> job completed in " + done + " s");
// run post processing
ProcessingFunction function = epBean.getPostProcessing();
String subject = "";
String jobName = dataMap.getString(XsltTriggerService.JOB_NAME);
StringBuffer emailBuffer = new StringBuffer("");
emailBuffer.append("<p>" + pageMessages.getString("email_header_1") + " " + EmailEngine.getAdminEmail() + " " + pageMessages.getString("email_header_2") + " Job Execution " + pageMessages.getString("email_header_3") + "</p>");
emailBuffer.append("<P>Dataset: " + datasetBean.getName() + "</P>");
emailBuffer.append("<P>Study: " + currentStudy.getName() + "</P>");
if (function != null && function.getClass().equals(org.akaza.openclinica.bean.service.SqlProcessingFunction.class)) {
String dbUrl = ((org.akaza.openclinica.bean.service.SqlProcessingFunction) function).getDatabaseUrl();
int lastIndex = dbUrl.lastIndexOf('/');
String schemaName = dbUrl.substring(lastIndex);
int hostIndex = dbUrl.substring(0, lastIndex).indexOf("//");
String host = dbUrl.substring(hostIndex, lastIndex);
emailBuffer.append("<P>Database: " + ((org.akaza.openclinica.bean.service.SqlProcessingFunction) function).getDatabaseType() + "</P>");
emailBuffer.append("<P>Schema: " + schemaName.replace("/", "") + "</P>");
emailBuffer.append("<P>Host: " + host.replace("//", "") + "</P>");
}
emailBuffer.append("<p>" + pageMessages.getString("html_email_body_1") + datasetBean.getName() + pageMessages.getString("html_email_body_2_2") + "</p>");
if (function != null) {
function.setTransformFileName(outputPath + File.separator + dataMap.getString(POST_FILE_NAME));
function.setODMXMLFileName(endFile);
function.setXslFileName(dataMap.getString(XSL_FILE_PATH));
function.setDeleteOld((Boolean) dataMap.get(POST_PROC_DELETE_OLD));
function.setZip((Boolean) dataMap.get(POST_PROC_ZIP));
function.setLocation(dataMap.getString(POST_PROC_LOCATION));
function.setExportFileName(dataMap.getString(POST_PROC_EXPORT_NAME));
File[] oldFiles = getOldFiles(outputPath, dataMap.getString(POST_PROC_LOCATION));
function.setOldFiles(oldFiles);
File[] intermediateFiles = getInterFiles(dataMap.getString(POST_FILE_PATH));
ProcessingResultType message = function.run();
// delete these files only when the post-processing did not fail
if (message.getCode().intValue() != 2) {
deleteOldFiles(intermediateFiles);
}
final long done2 = System.currentTimeMillis() - start;
logger.info("--> postprocessing completed in " + done2 + " ms, found result type " + message.getCode());
logger.info("--> postprocessing completed in " + done2 + " ms, found result type " + message.getCode());
if (!function.getClass().equals(org.akaza.openclinica.bean.service.SqlProcessingFunction.class)) {
String archivedFile = dataMap.getString(POST_FILE_NAME) + "." + function.getFileType();
// a zipping post-process adds a .zip suffix to the download
if (function.isZip()) {
archivedFile = archivedFile + ".zip";
}
// PDF post-processing names its own archived file
if (function.getClass().equals(org.akaza.openclinica.bean.service.PdfProcessingFunction.class)) {
archivedFile = function.getArchivedFileName();
}
ArchivedDatasetFileBean fbFinal = generateFileRecord(archivedFile, outputPath, datasetBean, done, new File(outputPath + File.separator + archivedFile).length(), ExportFormatBean.PDFFILE, userAccountId);
if (successMsg.contains("$linkURL")) {
successMsg = successMsg.replace("$linkURL", "<a href=\"" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + "\">" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + " </a>");
}
emailBuffer.append("<p>" + successMsg + "</p>");
logger.debug("System time begining.." + sysTimeBegin);
logger.debug("System time end.." + System.currentTimeMillis());
double sysTimeEnd = setFormat((System.currentTimeMillis() - sysTimeBegin) / 1000);
logger.debug("difference" + sysTimeEnd);
if (fbFinal != null) {
fbFinal.setFileSize((int) bytesToKilo(new File(archivedFile).length()));
fbFinal.setRunTime(sysTimeEnd);
}
}
// set the email subject according to the post-processing result code
if (message.getCode().intValue() == 1) {
if (jobName != null) {
subject = "Success: " + jobName;
} else {
subject = "Success: " + datasetBean.getName();
}
} else if (message.getCode().intValue() == 2) {
if (jobName != null) {
subject = "Failure: " + jobName;
} else {
subject = "Failure: " + datasetBean.getName();
}
if (failureMsg != null && !failureMsg.isEmpty()) {
emailBuffer.append(failureMsg);
}
emailBuffer.append("<P>").append(message.getDescription());
postErrorMessage(message.getDescription(), context);
} else if (message.getCode().intValue() == 3) {
if (jobName != null) {
subject = "Update: " + jobName;
} else {
subject = "Update: " + datasetBean.getName();
}
}
} else {
// extract ran but no post-processing - we send an email with
// success and url to link to
// generate archived dataset file bean here, and use the id to
// build the URL
String archivedFilename = dataMap.getString(POST_FILE_NAME);
// zipped output gets a .zip suffix
if (zipped) {
archivedFilename = dataMap.getString(POST_FILE_NAME) + ".zip";
}
// delete old files now
List<File> intermediateFiles = generateFileService.getOldFiles();
String[] dontDelFiles = epBean.getDoNotDelFiles();
// JN: the following zips up the files when more than one XSL is provided
if (dontDelFiles.length > 1 && zipped) {
logger.debug("count =====" + cnt + "dontDelFiles length==---" + dontDelFiles.length);
logger.debug("Entering this?" + cnt + "dontDelFiles" + dontDelFiles);
String path = outputPath + File.separator;
logger.debug("path = " + path);
logger.debug("zipName?? = " + epBean.getZipName());
String zipName = epBean.getZipName() == null || epBean.getZipName().isEmpty() ? endFile + ".zip" : path + epBean.getZipName() + ".zip";
archivedFilename = new File(zipName).getName();
zipAll(path, epBean.getDoNotDelFiles(), zipName);
String[] tempArray = { archivedFilename };
dontDelFiles = tempArray;
endFile = archivedFilename;
} else if (zipped) {
markForDelete = zipxmls(markForDelete, endFile);
endFile = endFile + ".zip";
String[] temp = new String[dontDelFiles.length];
int i = 0;
while (i < dontDelFiles.length) {
temp[i] = dontDelFiles[i] + ".zip";
i++;
}
dontDelFiles = temp;
// actually delete all the XML files that were produced, since they are zipped
FilenameFilter xmlFilter = new XMLFileFilter();
File tempFile = new File(generalFileDir);
deleteOldFiles(tempFile.listFiles(xmlFilter));
}
ArchivedDatasetFileBean fbFinal = generateFileRecord(archivedFilename, outputPath, datasetBean, done, new File(outputPath + File.separator + archivedFilename).length(), ExportFormatBean.TXTFILE, userAccountId);
if (jobName != null) {
subject = "Job Ran: " + jobName;
} else {
subject = "Job Ran: " + datasetBean.getName();
}
if (successMsg == null || successMsg.isEmpty()) {
logger.info("email buffer??" + emailBuffer);
} else {
if (successMsg.contains("$linkURL")) {
successMsg = successMsg.replace("$linkURL", "<a href=\"" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + "\">" + CoreResources.getField("sysURL.base") + "AccessFile?fileId=" + fbFinal.getId() + " </a>");
}
emailBuffer.append("<p>" + successMsg + "</p>");
}
if (deleteOld) {
deleteIntermFiles(intermediateFiles, endFile, dontDelFiles);
deleteIntermFiles(markForDelete, endFile, dontDelFiles);
}
}
// email the message to the user
emailBuffer.append("<p>" + pageMessages.getString("html_email_body_5") + "</p>");
try {
// @pgawade 19-April-2011 Log the event into audit_event table
if (null != dataMap.get("job_type") && ((String) dataMap.get("job_type")).equalsIgnoreCase("exportJob")) {
String extractName = (String) dataMap.get(XsltTriggerService.JOB_NAME);
TriggerBean triggerBean = new TriggerBean();
triggerBean.setDataset(datasetBean);
triggerBean.setUserAccount(userBean);
triggerBean.setFullName(extractName);
String actionMsg = "You may access the " + (String) dataMap.get(XsltTriggerService.EXPORT_FORMAT) + " file by changing your study/site to " + currentStudy.getName() + " and selecting the Export Data icon for " + datasetBean.getName() + " dataset on the View Datasets page.";
auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, actionMsg);
}
mailSender.sendEmail(alertEmail, EmailEngine.getAdminEmail(), subject, emailBuffer.toString(), true);
} catch (OpenClinicaSystemException ose) {
// do nothing; in the future we might want an email status added to the system
logger.error("exception sending mail: " + ose.getMessage(), ose);
}
logger.info("just sent email to " + alertEmail + ", from " + EmailEngine.getAdminEmail());
if (successMsg == null) {
successMsg = " ";
}
postSuccessMessage(successMsg, context);
} catch (JobInterruptedException e) {
logger.info("Job was cancelled by the user");
exceptions = true;
} catch (TransformerConfigurationException e) {
sendErrorEmail(e.getMessage(), context, alertEmail);
postErrorMessage(e.getMessage(), context);
logger.error("Error executing extract", e);
exceptions = true;
} catch (FileNotFoundException e) {
sendErrorEmail(e.getMessage(), context, alertEmail);
postErrorMessage(e.getMessage(), context);
logger.error("Error executing extract", e);
exceptions = true;
} catch (TransformerFactoryConfigurationError e) {
sendErrorEmail(e.getMessage(), context, alertEmail);
postErrorMessage(e.getMessage(), context);
logger.error("Error executing extract", e);
exceptions = true;
} catch (TransformerException e) {
sendErrorEmail(e.getMessage(), context, alertEmail);
postErrorMessage(e.getMessage(), context);
logger.error("Error executing extract", e);
exceptions = true;
} catch (Exception ee) {
sendErrorEmail(ee.getMessage(), context, alertEmail);
postErrorMessage(ee.getMessage(), context);
logger.error("Error executing extract", ee);
exceptions = true;
if (null != dataMap.get("job_type") && ((String) dataMap.get("job_type")).equalsIgnoreCase("exportJob")) {
TriggerBean triggerBean = new TriggerBean();
triggerBean.setUserAccount(userBean);
triggerBean.setFullName((String) dataMap.get(XsltTriggerService.JOB_NAME));
auditEventDAO.createRowForExtractDataJobFailure(triggerBean);
}
} finally {
if (in != null)
try {
in.close();
} catch (IOException e) {
logger.error("Error executing extract", e);
}
if (endFileStream != null)
try {
endFileStream.close();
} catch (IOException e) {
logger.error("Error executing extract", e);
}
if (exceptions) {
logger.debug("EXCEPTIONS... EVEN TEHN DELETING OFF OLD FILES");
String generalFileDir = dataMap.getString(XML_FILE_PATH);
File oldFilesPath = new File(generalFileDir);
if (oldFilesPath.isDirectory()) {
markForDelete = Arrays.asList(oldFilesPath.listFiles());
}
logger.debug("deleting the old files reference from archive dataset");
if (deleteOld) {
deleteIntermFiles(markForDelete, "", doNotDeleteUntilExtract);
}
}
if (datasetBean != null)
resetArchiveDataset(datasetBean.getId());
logger.info("Job " + context.getJobDetail().getDescription() + " finished.");
}
}
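The audit-logging pattern above — populate a TriggerBean with the job's dataset, user, and name, then record the outcome through AuditEventDAO — recurs in every job in this listing. Below is a minimal standalone sketch of that pattern; the helper class and method names are illustrative, and the import paths for DatasetBean, UserAccountBean, and AuditEventDAO are assumed from OpenClinica's usual package layout:

import org.akaza.openclinica.bean.admin.TriggerBean;
import org.akaza.openclinica.bean.extract.DatasetBean; // assumed package
import org.akaza.openclinica.bean.login.UserAccountBean; // assumed package
import org.akaza.openclinica.dao.admin.AuditEventDAO; // assumed package

// Illustrative helper, not part of OpenClinica itself.
final class ExtractAuditHelper {

    private final AuditEventDAO auditEventDAO;

    ExtractAuditHelper(AuditEventDAO auditEventDAO) {
        this.auditEventDAO = auditEventDAO;
    }

    // Records one audit row per job run, mirroring the success/failure branches above.
    void logExtractOutcome(String jobName, DatasetBean dataset, UserAccountBean user, boolean succeeded, String detailMessage) {
        TriggerBean triggerBean = new TriggerBean();
        triggerBean.setFullName(jobName);
        triggerBean.setDataset(dataset); // may be null on failure, as in the catch block above
        triggerBean.setUserAccount(user);
        if (succeeded) {
            auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, detailMessage);
        } else {
            auditEventDAO.createRowForExtractDataJobFailure(triggerBean);
        }
    }
}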
Use of org.akaza.openclinica.bean.admin.TriggerBean in project OpenClinica by OpenClinica.
The class ViewImportJobServlet, method processRequest:
@Override
protected void processRequest() throws Exception {
FormProcessor fp = new FormProcessor(request);
// First we must get a reference to a scheduler
scheduler = getScheduler();
// then we pull all the triggers in the IMPORT_TRIGGER group
Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.groupEquals(IMPORT_TRIGGER));
// the next bit goes out and processes all the triggers
ArrayList<TriggerBean> triggerBeans = new ArrayList<TriggerBean>();
for (TriggerKey triggerKey : triggerKeys) {
String triggerName = triggerKey.getName();
Trigger trigger = scheduler.getTrigger(triggerKey);
logger.debug("found trigger, full name: " + triggerName);
try {
logger.debug("prev fire time " + trigger.getPreviousFireTime().toString());
logger.debug("next fire time " + trigger.getNextFireTime().toString());
logger.debug("final fire time: " + trigger.getFinalFireTime().toString());
} catch (NullPointerException npe) {
// could be nulls in the dates, etc
}
TriggerBean triggerBean = new TriggerBean();
triggerBean.setFullName(triggerName);
triggerBean.setPreviousDate(trigger.getPreviousFireTime());
triggerBean.setNextDate(trigger.getNextFireTime());
if (trigger.getDescription() != null) {
triggerBean.setDescription(trigger.getDescription());
}
// this next bit of code looks at the job data map and pulls out
// specific items
JobDataMap dataMap = new JobDataMap();
if (trigger.getJobDataMap().size() > 0) {
dataMap = trigger.getJobDataMap();
triggerBean.setStudyName(dataMap.getString(ExampleSpringJob.STUDY_NAME));
String oid = dataMap.getString("study_oid");
}
// this next bit of code looks to see if the trigger is paused
logger.debug("Trigger Priority: " + triggerName + " " + trigger.getPriority());
if (scheduler.getTriggerState(triggerKey) == Trigger.TriggerState.PAUSED) {
triggerBean.setActive(false);
logger.debug("setting active to false for trigger: " + triggerName);
} else {
triggerBean.setActive(true);
logger.debug("setting active to TRUE for trigger: " + triggerName);
}
triggerBeans.add(triggerBean);
// our wrapper to show triggers
}
// set up the table here and get ready to send to the web page
ArrayList allRows = TriggerRow.generateRowsFromBeans(triggerBeans);
EntityBeanTable table = fp.getEntityBeanTable();
String[] columns = { resword.getString("name"), resword.getString("previous_fire_time"), resword.getString("next_fire_time"), resword.getString("description"), resword.getString("study"), resword.getString("actions") };
table.setColumns(new ArrayList(Arrays.asList(columns)));
table.hideColumnLink(3);
table.hideColumnLink(5);
table.setQuery("ViewImportJob", new HashMap());
// table.addLink("", "CreateUserAccount");
table.setSortingColumnInd(0);
table.setRows(allRows);
table.computeDisplay();
request.setAttribute("table", table);
forwardPage(Page.VIEW_IMPORT_JOB);
}
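The servlet's central step is converting each Quartz trigger into a display-oriented TriggerBean. Here is a standalone sketch of that conversion, assuming Quartz 2.x (TriggerKey, Trigger.TriggerState) as used above; the class and method names are illustrative:

import org.quartz.JobDataMap;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerKey;
import org.akaza.openclinica.bean.admin.TriggerBean;

final class TriggerBeanMapper {

    // Builds a display bean from a Quartz trigger, mirroring the loop in processRequest.
    static TriggerBean toTriggerBean(Scheduler scheduler, Trigger trigger, String studyNameKey) throws SchedulerException {
        TriggerBean bean = new TriggerBean();
        TriggerKey key = trigger.getKey();
        bean.setFullName(key.getName());
        bean.setPreviousDate(trigger.getPreviousFireTime()); // null if the trigger never fired
        bean.setNextDate(trigger.getNextFireTime()); // null if no further runs are scheduled
        if (trigger.getDescription() != null) {
            bean.setDescription(trigger.getDescription());
        }
        JobDataMap dataMap = trigger.getJobDataMap();
        if (dataMap.size() > 0) {
            bean.setStudyName(dataMap.getString(studyNameKey));
        }
        // a paused trigger shows up as inactive in the jobs table
        bean.setActive(scheduler.getTriggerState(key) != Trigger.TriggerState.PAUSED);
        return bean;
    }
}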
Use of org.akaza.openclinica.bean.admin.TriggerBean in project OpenClinica by OpenClinica.
The class ExampleSpringJob, method executeInternal:
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
// need to generate a Locale so that user beans and other things will
// generate normally
Locale locale = new Locale("en", "US");
ResourceBundleProvider.updateLocale(locale);
ResourceBundle pageMessages = ResourceBundleProvider.getPageMessagesBundle();
// logger.debug("--");
// logger.debug("-- executing a job " + message + " at " + new
// java.util.Date().toString());
JobDataMap dataMap = context.getMergedJobDataMap();
SimpleTrigger trigger = (SimpleTrigger) context.getTrigger();
try {
ApplicationContext appContext = (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
String studySubjectNumber = ((CoreResources) appContext.getBean("coreResources")).getField("extract.number");
coreResources = (CoreResources) appContext.getBean("coreResources");
ruleSetRuleDao = (RuleSetRuleDao) appContext.getBean("ruleSetRuleDao");
dataSource = (DataSource) appContext.getBean("dataSource");
mailSender = (OpenClinicaMailSender) appContext.getBean("openClinicaMailSender");
AuditEventDAO auditEventDAO = new AuditEventDAO(dataSource);
// Scheduler scheduler = context.getScheduler();
// JobDetail detail = context.getJobDetail();
// jobDetailBean = (JobDetailBean) detail;
/*
* data map here should coincide with the job data map found in
* CreateJobExportServlet, with the following code: jobDataMap = new
* JobDataMap(); jobDataMap.put(DATASET_ID, datasetId);
* jobDataMap.put(PERIOD, period); jobDataMap.put(EMAIL, email);
* jobDataMap.put(TAB, tab); jobDataMap.put(CDISC, cdisc);
* jobDataMap.put(SPSS, spss);
*/
String alertEmail = dataMap.getString(EMAIL);
String localeStr = dataMap.getString(LOCALE);
if (localeStr != null) {
locale = new Locale(localeStr);
ResourceBundleProvider.updateLocale(locale);
pageMessages = ResourceBundleProvider.getPageMessagesBundle();
}
int dsId = dataMap.getInt(DATASET_ID);
String tab = dataMap.getString(TAB);
String cdisc = dataMap.getString(CDISC);
String cdisc12 = dataMap.getString(CDISC12);
if (cdisc12 == null) {
cdisc12 = "0";
}
String cdisc13 = dataMap.getString(CDISC13);
if (cdisc13 == null) {
cdisc13 = "0";
}
String cdisc13oc = dataMap.getString(CDISC13OC);
if (cdisc13oc == null) {
cdisc13oc = "0";
}
String spss = dataMap.getString(SPSS);
int userId = dataMap.getInt(USER_ID);
int studyId = dataMap.getInt(STUDY_ID);
// String datasetId = dataMap.getString(DATASET_ID);
// int dsId = new Integer(datasetId).intValue();
// String userAcctId = dataMap.getString(USER_ID);
// int userId = new Integer(userAcctId).intValue();
// why the flip-flop? if one property is set to 'true' we can
// see jobs in another screen but all properties have to be
// strings
logger.debug("-- found the job: " + dsId + " dataset id");
// for (Iterator it = dataMap.entrySet().iterator(); it.hasNext();)
// {
// java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
// Object key = entry.getKey();
// Object value = entry.getValue();
// // logger.debug("-- found datamap property: " + key.toString() +
// // " : " + value.toString());
// }
HashMap<String, Integer> fileName = new HashMap<String, Integer>();
if (dsId > 0) {
// trying to not throw an error if there's no dataset id
DatasetDAO dsdao = new DatasetDAO(dataSource);
DatasetBean datasetBean = (DatasetBean) dsdao.findByPK(dsId);
StudyDAO studyDao = new StudyDAO(dataSource);
UserAccountDAO userAccountDAO = new UserAccountDAO(dataSource);
// hmm, three lines in the if block DRY?
String generalFileDir = "";
String generalFileDirCopy = "";
String exportFilePath = SQLInitServlet.getField("exportFilePath");
String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
generalFileDir = DATASET_DIR + datasetBean.getId() + File.separator + sdfDir.format(new java.util.Date());
if (!"".equals(exportFilePath)) {
generalFileDirCopy = SQLInitServlet.getField("filePath") + exportFilePath + File.separator;
}
// logger.debug("-- created the following dir: " +
// generalFileDir);
long sysTimeBegin = System.currentTimeMillis();
// set up the user bean here, tbh
// logger.debug("-- gen tab file 00");
userBean = (UserAccountBean) userAccountDAO.findByPK(userId);
// needs to also be captured by the servlet, tbh
// logger.debug("-- gen tab file 00");
generateFileService = new GenerateExtractFileService(dataSource, coreResources, ruleSetRuleDao);
// logger.debug("-- gen tab file 00");
// tbh #5796 - covers a bug when the user changes studies, 10/2010
StudyBean activeStudy = (StudyBean) studyDao.findByPK(studyId);
StudyBean parentStudy = new StudyBean();
logger.debug("active study: " + studyId + " parent study: " + activeStudy.getParentStudyId());
if (activeStudy.getParentStudyId() > 0) {
// StudyDAO sdao = new StudyDAO(sm.getDataSource());
parentStudy = (StudyBean) studyDao.findByPK(activeStudy.getParentStudyId());
} else {
parentStudy = activeStudy;
// covers a bug in tab file creation, tbh 01/2009
}
logger.debug("-- found extract bean ");
ExtractBean eb = generateFileService.generateExtractBean(datasetBean, activeStudy, parentStudy);
MessageFormat mf = new MessageFormat("");
StringBuffer message = new StringBuffer();
StringBuffer auditMessage = new StringBuffer();
// use resource bundle page messages to generate the email, tbh
// 02/2009
// message.append(pageMessages.getString("html_email_header_1")
// + " " + alertEmail +
// pageMessages.getString("html_email_header_2") + "<br/>");
message.append("<p>" + pageMessages.getString("email_header_1") + " " + EmailEngine.getAdminEmail() + " " + pageMessages.getString("email_header_2") + " Job Execution " + pageMessages.getString("email_header_3") + "</p>");
message.append("<P>Dataset: " + datasetBean.getName() + "</P>");
message.append("<P>Study: " + activeStudy.getName() + "</P>");
message.append("<p>" + pageMessages.getString("html_email_body_1") + datasetBean.getName() + pageMessages.getString("html_email_body_2") + SQLInitServlet.getField("sysURL") + pageMessages.getString("html_email_body_3") + "</p>");
// logger.debug("-- gen tab file 00");
if ("1".equals(tab)) {
logger.debug("-- gen tab file 01");
fileName = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, datasetBean, activeStudy.getId(), parentStudy.getId(), generalFileDirCopy, userBean);
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// MessageFormat mf = new MessageFormat("");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_tab_delimited"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// auditMessage.append(
// "You can access your tab-delimited file <a href='AccessFile?fileId="
// + getFileIdInt(fileName) + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_tab_delimited") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
if ("1".equals(cdisc)) {
String odmVersion = "oc1.2";
fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
logger.debug("-- gen odm file");
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// MessageFormat mf = new MessageFormat("");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_odm_12"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// auditMessage.append(
// "You can access your ODM 1.2 w/OpenClinica Extension XML file <a href='AccessFile?fileId="
// + getFileIdInt(fileName)
// + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_odm_12") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
if ("1".equals(cdisc12)) {
String odmVersion = "1.2";
fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
logger.debug("-- gen odm file 1.2 default");
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_odm_12_xml"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// // auditMessage.append(
// "You can access your ODM 1.2 XML file <a href='AccessFile?fileId="
// + getFileIdInt(fileName) + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_odm_12_xml") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
if ("1".equals(cdisc13)) {
String odmVersion = "1.3";
fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
logger.debug("-- gen odm file 1.3");
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// MessageFormat mf = new MessageFormat("");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_odm_13"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// auditMessage.append(
// "You can access your ODM 1.3 XML file <a href='AccessFile?fileId="
// + getFileIdInt(fileName) + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_odm_13") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
if ("1".equals(cdisc13oc)) {
String odmVersion = "oc1.3";
fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
logger.debug("-- gen odm file 1.3 oc");
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// MessageFormat mf = new MessageFormat("");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_odm_13_xml"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// auditMessage.append(
// "You can access your ODM 1.3 w/OpenClinica Extension XML file <a href='AccessFile?fileId="
// + getFileIdInt(fileName)
// + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_odm_13_xml") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
if ("1".equals(spss)) {
SPSSReportBean answer = new SPSSReportBean();
fileName = generateFileService.createSPSSFile(datasetBean, eb, activeStudy, parentStudy, sysTimeBegin, generalFileDir, answer, generalFileDirCopy, userBean);
logger.debug("-- gen spss file");
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// MessageFormat mf = new MessageFormat("");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_spss"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// auditMessage.append(
// "You can access your SPSS files <a href='AccessFile?fileId="
// + getFileIdInt(fileName) + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_spss") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
// wrap up the message, and send the email
message.append("<p>" + pageMessages.getString("html_email_body_5") + "</P><P>" + pageMessages.getString("email_footer"));
try {
mailSender.sendEmail(alertEmail.trim(), pageMessages.getString("job_ran_for") + " " + datasetBean.getName(), message.toString(), true);
} catch (OpenClinicaSystemException ose) {
// do nothing; in the future we might want an email status added to the system
}
TriggerBean triggerBean = new TriggerBean();
triggerBean.setDataset(datasetBean);
triggerBean.setUserAccount(userBean);
triggerBean.setFullName(trigger.getKey().getName());
auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, auditMessage.toString());
} else {
TriggerBean triggerBean = new TriggerBean();
// triggerBean.setDataset(datasetBean);
triggerBean.setUserAccount(userBean);
triggerBean.setFullName(trigger.getKey().getName());
auditEventDAO.createRowForExtractDataJobFailure(triggerBean);
// logger.debug("-- made it here for some reason, ds id: "
// + dsId);
}
// logger.debug("-- generated file: " + fileNameStr);
// dataSource.
} catch (Exception e) {
// TODO Auto-generated catch block -- ideally should generate a fail
// msg here, tbh 02/2009
logger.debug("-- found exception: " + e.getMessage());
e.printStackTrace();
}
}
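ExampleSpringJob does not receive its collaborators by injection; it pulls the Spring ApplicationContext out of the Quartz scheduler context and looks beans up by name. A minimal sketch of that lookup, assuming the scheduler was configured to expose the context under the key "applicationContext" (which the code above implies); the helper class is illustrative:

import javax.sql.DataSource;
import org.quartz.JobExecutionContext;
import org.quartz.SchedulerException;
import org.springframework.context.ApplicationContext;

final class SchedulerContextBeans {

    // Fetches the Spring context that the job above reads its collaborators from.
    static ApplicationContext springContext(JobExecutionContext context) throws SchedulerException {
        return (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
    }

    // Example of a by-name lookup, as done for dataSource, mailSender, and the DAOs above.
    static DataSource dataSource(JobExecutionContext context) throws SchedulerException {
        return (DataSource) springContext(context).getBean("dataSource");
    }
}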
Use of org.akaza.openclinica.bean.admin.TriggerBean in project OpenClinica by OpenClinica.
The class ImportSpringJob, method executeInternalInTransaction:
protected void executeInternalInTransaction(JobExecutionContext context) {
locale = new Locale("en", "US");
ResourceBundleProvider.updateLocale(locale);
respage = ResourceBundleProvider.getPageMessagesBundle();
resword = ResourceBundleProvider.getWordsBundle();
triggerService = new TriggerService();
JobDataMap dataMap = context.getMergedJobDataMap();
SimpleTrigger trigger = (SimpleTrigger) context.getTrigger();
TriggerBean triggerBean = new TriggerBean();
triggerBean.setFullName(trigger.getKey().getName());
String contactEmail = dataMap.getString(EMAIL);
logger.debug("=== starting to run trigger " + trigger.getKey().getName() + " ===");
try {
ApplicationContext appContext = (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
dataSource = (DataSource) appContext.getBean("dataSource");
mailSender = (OpenClinicaMailSender) appContext.getBean("openClinicaMailSender");
RuleSetServiceInterface ruleSetService = (RuleSetServiceInterface) appContext.getBean("ruleSetService");
itemDataDao = new ItemDataDAO(dataSource);
eventCrfDao = new EventCRFDAO(dataSource);
auditEventDAO = new AuditEventDAO(dataSource);
int userId = dataMap.getInt(USER_ID);
UserAccountDAO userAccountDAO = new UserAccountDAO(dataSource);
UserAccountBean ub = (UserAccountBean) userAccountDAO.findByPK(userId);
triggerBean.setUserAccount(ub);
String directory = dataMap.getString(DIRECTORY);
String studyName = dataMap.getString(STUDY_NAME);
String studyOid = dataMap.getString(STUDY_OID);
String localeStr = dataMap.getString(ExampleSpringJob.LOCALE);
if (localeStr != null) {
locale = new Locale(localeStr);
ResourceBundleProvider.updateLocale(locale);
respage = ResourceBundleProvider.getPageMessagesBundle();
resword = ResourceBundleProvider.getWordsBundle();
}
StudyDAO studyDAO = new StudyDAO(dataSource);
StudyBean studyBean;
if (studyOid != null) {
studyBean = studyDAO.findByOid(studyOid);
} else {
studyBean = (StudyBean) studyDAO.findByName(studyName);
}
// might also need study id here for the data service?
File fileDirectory = new File(SQLInitServlet.getField("filePath") + DIR_PATH + File.separator);
// File fileDirectory = new File(IMPORT_DIR);
if ("".equals(directory)) {
// avoid NPEs
// do nothing here?
} else {
// there is a separator at the end of IMPORT_DIR already...
// fileDirectory = new File(IMPORT_DIR + directory +
// File.separator);
fileDirectory = new File(SQLInitServlet.getField("filePath") + DIR_PATH + File.separator + directory + File.separator);
}
if (!fileDirectory.isDirectory()) {
fileDirectory.mkdirs();
}
// this is necessary the first time this is run, tbh
// File destDirectory = new File(IMPORT_DIR_2);
File destDirectory = new File(SQLInitServlet.getField("filePath") + DEST_DIR + File.separator);
if (!destDirectory.isDirectory()) {
destDirectory.mkdirs();
}
// look at directory, if there are new files, move them over and
// read them
// File fileDirectory = new File(directory);
String[] files = fileDirectory.list();
logger.debug("found " + files.length + " files under directory " + SQLInitServlet.getField("filePath") + DIR_PATH + File.separator + directory);
File[] target = new File[files.length];
File[] destination = new File[files.length];
for (int i = 0; i < files.length; i++) {
// hmm
if (!new File(fileDirectory + File.separator + files[i]).isDirectory()) {
File f = new File(fileDirectory + File.separator + files[i]);
if (f == null || f.getName() == null) {
logger.debug("found a null file");
} else if (f.getName().indexOf(".xml") < 0 && f.getName().indexOf(".XML") < 0) {
logger.debug("does not seem to be an xml file");
// we need a place holder to avoid 'gaps' in the file
// list
} else {
logger.debug("adding: " + f.getName());
// new File(IMPORT_DIR +
target[i] = f;
// directory +
// File.separator + files[i]);
// destination[i] = new File(IMPORT_DIR_2 + files[i]);
destination[i] = new File(SQLInitServlet.getField("filePath") + DEST_DIR + File.separator + files[i]);
}
}
}
if (target.length > 0 && destination.length > 0) {
cutAndPaste(target, destination);
// @pgawade 28-June-2012: Fix for issue #13964 - Remove the null
// elements from destination array of files
// which might be created because of presence of sub-directories
// or non-xml files under scheduled_data_import directory
// which are non-usable files for import.
destination = removeNullElements(destination);
// do everything else here with 'destination'
ArrayList<String> auditMessages = processData(destination, dataSource, respage, resword, ub, studyBean, destDirectory, triggerBean, ruleSetService);
auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, auditMessages.get(1));
try {
if (contactEmail != null && !"".equals(contactEmail)) {
mailSender.sendEmail(contactEmail, respage.getString("job_ran_for") + " " + triggerBean.getFullName(), generateMsg(auditMessages.get(0), contactEmail), true);
logger.debug("email body: " + auditMessages.get(1));
}
} catch (OpenClinicaSystemException e) {
// Do nothing
logger.error("=== throw an ocse === " + e.getMessage());
e.printStackTrace();
}
} else {
logger.debug("no real files found");
auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, respage.getString("job_ran_but_no_files"));
// no email here, tbh
}
// use the business logic to go through each one and import that
// data
// check to see if they were imported before?
// using the four methods:
// importCRFDataServce.validateStudyMetadata,
// service.lookupValidationErrors, service.fetchEventCRFBeans(?),
// and
// service.generateSummaryStatsBean(for the email we send out later)
} catch (Exception e) {
// more detailed reporting here
logger.error("found a fail exception: " + e.getMessage());
e.printStackTrace();
auditEventDAO.createRowForExtractDataJobFailure(triggerBean, e.getMessage());
try {
mailSender.sendEmail(contactEmail, respage.getString("job_failure_for") + " " + triggerBean.getFullName(), e.getMessage(), true);
} catch (OpenClinicaSystemException ose) {
// Do nothing
logger.error("=== throw an ocse: " + ose.getMessage());
}
}
}
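The directory scan above builds fixed-size target/destination arrays that can contain null "gaps" (sub-directories, non-XML files), which removeNullElements later strips out. A list-based sketch of the same selection logic that avoids the gaps entirely — offered as an alternative shape, not the project's code:

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

final class ImportFileScanner {

    // Returns the regular *.xml files directly under dir, skipping sub-directories.
    static List<File> findXmlFiles(File dir) {
        List<File> result = new ArrayList<File>();
        File[] entries = dir.listFiles();
        if (entries == null) {
            return result; // directory missing or unreadable
        }
        for (File f : entries) {
            if (f.isFile() && f.getName().toLowerCase(Locale.ENGLISH).endsWith(".xml")) {
                result.add(f);
            }
        }
        return result;
    }
}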
Use of org.akaza.openclinica.bean.admin.TriggerBean in project OpenClinica by OpenClinica.
The class TriggerRow, method compareColumn:
@Override
protected int compareColumn(Object row, int sortingColumn) {
if (!row.getClass().equals(TriggerRow.class)) {
return 0;
}
TriggerBean thisTrigger = (TriggerBean) bean;
TriggerBean argTrigger = (TriggerBean) ((TriggerRow) row).bean;
int answer = 0;
switch(sortingColumn) {
case COL_TRIGGER_NAME:
answer = thisTrigger.getFullName().toLowerCase().compareTo(argTrigger.getFullName().toLowerCase());
break;
case COL_LAST_FIRED_DATE:
answer = thisTrigger.getPreviousDate().compareTo(argTrigger.getPreviousDate());
break;
case COL_NEXT_FIRED_DATE:
answer = thisTrigger.getNextDate().compareTo(argTrigger.getNextDate());
break;
case COL_DESCRIPTION:
answer = thisTrigger.getDescription().compareTo(argTrigger.getDescription());
break;
case COL_PERIOD:
answer = thisTrigger.getPeriodToRun().compareTo(argTrigger.getPeriodToRun());
break;
case COL_DATASET_NAME:
answer = thisTrigger.getDatasetName().compareTo(argTrigger.getDatasetName());
break;
case COL_STUDY_NAME:
answer = thisTrigger.getStudyName().compareTo(argTrigger.getStudyName());
}
return answer;
}
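compareColumn assumes every compared field is non-null, but ViewImportJobServlet above copies getPreviousFireTime() and getNextFireTime() straight into the bean, and Quartz returns null for triggers that have never fired or have no further runs, so the date cases can throw NullPointerException. A null-safe comparison sketch (nulls sort first) that the date cases could delegate to; this is a suggested hardening, not the shipped code:

import java.util.Date;

final class NullSafeDates {

    // Compares two possibly-null dates, ordering nulls before non-null values.
    static int compare(Date a, Date b) {
        if (a == null && b == null) {
            return 0;
        }
        if (a == null) {
            return -1;
        }
        if (b == null) {
            return 1;
        }
        return a.compareTo(b);
    }
}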