Use of org.akaza.openclinica.web.bean.EntityBeanTable in project OpenClinica by OpenClinica.
The class SignStudySubjectServlet, method processRequest.
@Override
public void processRequest() throws Exception {
SubjectDAO sdao = new SubjectDAO(sm.getDataSource());
StudySubjectDAO subdao = new StudySubjectDAO(sm.getDataSource());
FormProcessor fp = new FormProcessor(request);
String action = fp.getString("action");
// studySubjectId
int studySubId = fp.getInt("id", true);
String module = fp.getString(MODULE);
request.setAttribute(MODULE, module);
if (studySubId == 0) {
addPageMessage(respage.getString("please_choose_a_subject_to_view"));
forwardPage(Page.LIST_STUDY_SUBJECTS);
return;
}
StudySubjectBean studySub = (StudySubjectBean) subdao.findByPK(studySubId);
if (!permitSign(studySub, sm.getDataSource())) {
addPageMessage(respage.getString("subject_event_cannot_signed"));
// changed from Page.SUBMIT_DATA_SERVLET, tbh 06/2009
forwardPage(Page.LIST_STUDY_SUBJECTS_SERVLET);
return;
}
if (action.equalsIgnoreCase("confirm")) {
String username = request.getParameter("j_user");
String password = request.getParameter("j_pass");
SecurityManager securityManager = ((SecurityManager) SpringServletAccess.getApplicationContext(context).getBean("securityManager"));
// String encodedUserPass = org.akaza.openclinica.core.SecurityManager.getInstance().encrytPassword(password);
UserAccountBean ub = (UserAccountBean) session.getAttribute("userBean");
if (securityManager.verifyPassword(password, getUserDetails()) && ub.getName().equals(username)) {
if (signSubjectEvents(studySub, sm.getDataSource(), ub)) {
// Mark the StudySubject as signed, since all of its events have been signed.
studySub.setStatus(Status.SIGNED);
studySub.setUpdater(ub);
subdao.update(studySub);
addPageMessage(respage.getString("subject_event_signed"));
// changed from Page.SUBMIT_DATA_SERVLET, tbh 06/2009
forwardPage(Page.LIST_STUDY_SUBJECTS_SERVLET);
return;
} else {
addPageMessage(respage.getString("errors_in_submission_see_below"));
forwardPage(Page.LIST_STUDY_SUBJECTS);
return;
}
} else {
request.setAttribute("id", new Integer(studySubId).toString());
addPageMessage(restext.getString("password_match"));
forwardPage(Page.LIST_STUDY_SUBJECTS);
return;
}
}
request.setAttribute("studySub", studySub);
int studyId = studySub.getStudyId();
int subjectId = studySub.getSubjectId();
SubjectBean subject = (SubjectBean) sdao.findByPK(subjectId);
if (currentStudy.getStudyParameterConfig().getCollectDob().equals("2")) {
Date dob = subject.getDateOfBirth();
if (dob != null) {
Calendar cal = Calendar.getInstance();
cal.setTime(dob);
int year = cal.get(Calendar.YEAR);
request.setAttribute("yearOfBirth", new Integer(year));
} else {
request.setAttribute("yearOfBirth", "");
}
}
request.setAttribute("subject", subject);
StudyDAO studydao = new StudyDAO(sm.getDataSource());
StudyBean study = (StudyBean) studydao.findByPK(studyId);
StudyParameterValueDAO spvdao = new StudyParameterValueDAO(sm.getDataSource());
study.getStudyParameterConfig().setCollectDob(spvdao.findByHandleAndStudy(studyId, "collectDob").getValue());
if (study.getParentStudyId() > 0) {
// this is a site,find parent
StudyBean parentStudy = (StudyBean) studydao.findByPK(study.getParentStudyId());
request.setAttribute("parentStudy", parentStudy);
} else {
request.setAttribute("parentStudy", new StudyBean());
}
ArrayList children = (ArrayList) sdao.findAllChildrenByPK(subjectId);
request.setAttribute("children", children);
// find study events
StudyEventDAO sedao = new StudyEventDAO(sm.getDataSource());
StudyEventDefinitionDAO seddao = new StudyEventDefinitionDAO(sm.getDataSource());
EventDefinitionCRFDAO edcdao = new EventDefinitionCRFDAO(sm.getDataSource());
// find all eventcrfs for each event
EventCRFDAO ecdao = new EventCRFDAO(sm.getDataSource());
ArrayList<DisplayStudyEventBean> displayEvents = getDisplayStudyEventsForStudySubject(study, studySub, sm.getDataSource(), ub, currentRole);
DiscrepancyNoteUtil discNoteUtil = new DiscrepancyNoteUtil();
// Don't filter for now; disc note beans are returned with eventCRFId set
discNoteUtil.injectParentDiscNotesIntoDisplayStudyEvents(displayEvents, new HashSet(), sm.getDataSource(), 0);
// All the displaystudyevents for one subject
request.setAttribute("displayStudyEvents", displayEvents);
// Set up a Map for the JSP view, mapping each eventCRFId to an inner Map from resolution status name to the number of notes for that eventCRF id, e.g. New --> 2
Map discNoteByEventCRFid = discNoteUtil.createDiscNoteMapByEventCRF(displayEvents);
request.setAttribute("discNoteByEventCRFid", discNoteByEventCRFid);
EntityBeanTable table = fp.getEntityBeanTable();
// sort by start date, descending
table.setSortingIfNotExplicitlySet(1, false);
ArrayList allEventRows = DisplayStudyEventRow.generateRowsFromBeans(displayEvents);
String[] columns = { resword.getString("event") + " (" + resword.getString("occurrence_number") + ")", resword.getString("start_date1"), resword.getString("location"), resword.getString("status"), resword.getString("actions"), resword.getString("CRFs_atrib") };
table.setColumns(new ArrayList(Arrays.asList(columns)));
table.hideColumnLink(4);
table.hideColumnLink(5);
if (!"removed".equalsIgnoreCase(studySub.getStatus().getName()) && !"auto-removed".equalsIgnoreCase(studySub.getStatus().getName())) {
table.addLink(resword.getString("add_new_event"), "CreateNewStudyEvent?" + CreateNewStudyEventServlet.INPUT_STUDY_SUBJECT_ID_FROM_VIEWSUBJECT + "=" + studySub.getId());
}
HashMap args = new HashMap();
args.put("id", new Integer(studySubId).toString());
table.setQuery("ViewStudySubject", args);
table.setRows(allEventRows);
table.computeDisplay();
request.setAttribute("table", table);
SubjectGroupMapDAO sgmdao = new SubjectGroupMapDAO(sm.getDataSource());
ArrayList groupMaps = (ArrayList) sgmdao.findAllByStudySubject(studySubId);
request.setAttribute("groups", groupMaps);
AuditEventDAO aedao = new AuditEventDAO(sm.getDataSource());
ArrayList logs = aedao.findEventStatusLogByStudySubject(studySubId);
UserAccountDAO udao = new UserAccountDAO(sm.getDataSource());
ArrayList eventLogs = new ArrayList();
for (int i = 0; i < logs.size(); i++) {
AuditEventBean avb = (AuditEventBean) logs.get(i);
StudyEventAuditBean sea = new StudyEventAuditBean();
sea.setAuditEvent(avb);
StudyEventBean se = (StudyEventBean) sedao.findByPK(avb.getEntityId());
StudyEventDefinitionBean sed = (StudyEventDefinitionBean) seddao.findByPK(se.getStudyEventDefinitionId());
sea.setDefinition(sed);
String old = avb.getOldValue().trim();
try {
if (!StringUtil.isBlank(old)) {
SubjectEventStatus oldStatus = SubjectEventStatus.get(new Integer(old).intValue());
sea.setOldSubjectEventStatus(oldStatus);
}
String newValue = avb.getNewValue().trim();
if (!StringUtil.isBlank(newValue)) {
SubjectEventStatus newStatus = SubjectEventStatus.get(new Integer(newValue).intValue());
sea.setNewSubjectEventStatus(newStatus);
}
} catch (NumberFormatException e) {
// old/new value was not a numeric status code; leave the corresponding status unset
e.printStackTrace();
}
UserAccountBean updater = (UserAccountBean) udao.findByPK(avb.getUserId());
sea.setUpdater(updater);
eventLogs.add(sea);
}
// logger.warning("^^^ finished iteration");
request.setAttribute("eventLogs", eventLogs);
forwardPage(Page.SIGN_STUDY_SUBJECT);
}
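The EntityBeanTable work in this method reduces to a recipe that recurs in every servlet on this page: get the table from the FormProcessor, turn the display beans into rows, set the columns and default sorting, hide the non-sortable columns, attach the query used for paging and sorting links, then computeDisplay and hand the table to the JSP. A condensed, illustrative sketch of that recipe follows; the helper name and parameter list are not part of OpenClinica, but every table call is one used above.

// illustrative helper distilling the table setup from processRequest above
private void showEventTable(FormProcessor fp, ArrayList displayEvents, StudySubjectBean studySub) {
    EntityBeanTable table = fp.getEntityBeanTable();
    // default sort: start date column, descending, unless the user already chose a sort
    table.setSortingIfNotExplicitlySet(1, false);
    ArrayList rows = DisplayStudyEventRow.generateRowsFromBeans(displayEvents);
    String[] columns = { resword.getString("event") + " (" + resword.getString("occurrence_number") + ")", resword.getString("start_date1"), resword.getString("location"), resword.getString("status"), resword.getString("actions"), resword.getString("CRFs_atrib") };
    table.setColumns(new ArrayList(Arrays.asList(columns)));
    // the actions and CRFs columns are display-only, so no sort links
    table.hideColumnLink(4);
    table.hideColumnLink(5);
    // paging and sorting requests stay pointed at this subject
    HashMap args = new HashMap();
    args.put("id", String.valueOf(studySub.getId()));
    table.setQuery("ViewStudySubject", args);
    table.setRows(rows);
    table.computeDisplay();
    request.setAttribute("table", table);
}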
Use of org.akaza.openclinica.web.bean.EntityBeanTable in project OpenClinica by OpenClinica.
The class ViewImportJobServlet, method processRequest.
@Override
protected void processRequest() throws Exception {
FormProcessor fp = new FormProcessor(request);
// First we must get a reference to a scheduler
scheduler = getScheduler();
// then we pull all the triggers that are specifically named IMPORT_TRIGGER
Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.groupEquals(IMPORT_TRIGGER));
// the next bit goes out and processes all the triggers
ArrayList<TriggerBean> triggerBeans = new ArrayList<TriggerBean>();
for (TriggerKey triggerKey : triggerKeys) {
String triggerName = triggerKey.getName();
Trigger trigger = scheduler.getTrigger(triggerKey);
logger.debug("found trigger, full name: " + triggerName);
try {
logger.debug("prev fire time " + trigger.getPreviousFireTime().toString());
logger.debug("next fire time " + trigger.getNextFireTime().toString());
logger.debug("final fire time: " + trigger.getFinalFireTime().toString());
} catch (NullPointerException npe) {
// could be nulls in the dates, etc
}
TriggerBean triggerBean = new TriggerBean();
triggerBean.setFullName(triggerName);
triggerBean.setPreviousDate(trigger.getPreviousFireTime());
triggerBean.setNextDate(trigger.getNextFireTime());
if (trigger.getDescription() != null) {
triggerBean.setDescription(trigger.getDescription());
}
// this next bit of code looks at the job data map and pulls out specific items
JobDataMap dataMap = new JobDataMap();
if (trigger.getJobDataMap().size() > 0) {
dataMap = trigger.getJobDataMap();
triggerBean.setStudyName(dataMap.getString(ExampleSpringJob.STUDY_NAME));
String oid = dataMap.getString("study_oid");
}
// this next bit of code looks to see if the trigger is paused
logger.debug("Trigger Priority: " + triggerName + " " + trigger.getPriority());
if (scheduler.getTriggerState(new TriggerKey(triggerName, IMPORT_TRIGGER)) == Trigger.TriggerState.PAUSED) {
triggerBean.setActive(false);
logger.debug("setting active to false for trigger: " + triggerName);
} else {
triggerBean.setActive(true);
logger.debug("setting active to TRUE for trigger: " + triggerName);
}
triggerBeans.add(triggerBean);
// our wrapper to show triggers
}
// set up the table here and get ready to send to the web page
ArrayList allRows = TriggerRow.generateRowsFromBeans(triggerBeans);
EntityBeanTable table = fp.getEntityBeanTable();
String[] columns = { resword.getString("name"), resword.getString("previous_fire_time"), resword.getString("next_fire_time"), resword.getString("description"), resword.getString("study"), resword.getString("actions") };
table.setColumns(new ArrayList(Arrays.asList(columns)));
table.hideColumnLink(3);
table.hideColumnLink(5);
table.setQuery("ViewImportJob", new HashMap());
// table.addLink("", "CreateUserAccount");
table.setSortingColumnInd(0);
table.setRows(allRows);
table.computeDisplay();
request.setAttribute("table", table);
forwardPage(Page.VIEW_IMPORT_JOB);
}
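The loop above catches a NullPointerException to tolerate triggers that have never fired or have no remaining fire times. A slightly tidier, illustrative variant of the same per-trigger wrapping step checks for nulls instead; this is a sketch rather than the project's code, and it only uses Quartz and TriggerBean calls already shown:

// sketch: wrap one Quartz trigger into the TriggerBean consumed by the table, null-safe on fire times
TriggerBean triggerBean = new TriggerBean();
triggerBean.setFullName(triggerKey.getName());
triggerBean.setPreviousDate(trigger.getPreviousFireTime()); // null if the trigger has never fired
triggerBean.setNextDate(trigger.getNextFireTime()); // null if the trigger has completed
if (trigger.getPreviousFireTime() != null) {
    logger.debug("prev fire time " + trigger.getPreviousFireTime());
}
if (trigger.getNextFireTime() != null) {
    logger.debug("next fire time " + trigger.getNextFireTime());
}
if (trigger.getDescription() != null) {
    triggerBean.setDescription(trigger.getDescription());
}
// paused triggers are displayed as inactive
triggerBean.setActive(scheduler.getTriggerState(triggerKey) != Trigger.TriggerState.PAUSED);
triggerBeans.add(triggerBean);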
Use of org.akaza.openclinica.web.bean.EntityBeanTable in project OpenClinica by OpenClinica.
The class ExportDatasetServlet, method processRequest.
@Override
public void processRequest() throws Exception {
DatasetDAO dsdao = new DatasetDAO(sm.getDataSource());
ArchivedDatasetFileDAO asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
FormProcessor fp = new FormProcessor(request);
GenerateExtractFileService generateFileService = new GenerateExtractFileService(sm.getDataSource(), (CoreResources) SpringServletAccess.getApplicationContext(context).getBean("coreResources"), (RuleSetRuleDao) SpringServletAccess.getApplicationContext(context).getBean("ruleSetRuleDao"));
String action = fp.getString("action");
int datasetId = fp.getInt("datasetId");
int adfId = fp.getInt("adfId");
if (datasetId == 0) {
try {
DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
datasetId = dsb.getId();
logger.info("dataset id was zero, trying session: " + datasetId);
} catch (NullPointerException e) {
e.printStackTrace();
logger.info("tripped over null pointer exception");
}
}
DatasetBean db = (DatasetBean) dsdao.findByPK(datasetId);
StudyDAO sdao = new StudyDAO(sm.getDataSource());
StudyBean study = (StudyBean) sdao.findByPK(db.getStudyId());
checkRoleByUserAndStudy(ub, study.getParentStudyId(), study.getId());
// Checks whether the study is the current study or a child of the current study
if (study.getId() != currentStudy.getId() && study.getParentStudyId() != currentStudy.getId()) {
addPageMessage(respage.getString("no_have_correct_privilege_current_study") + " " + respage.getString("change_active_study_or_contact"));
forwardPage(Page.MENU_SERVLET);
return;
}
/**
 * @vbc 08/06/2008 NEW EXTRACT DATA IMPLEMENTATION: get study_id and parentstudy_id.
 * int currentstudyid = currentStudy.getId();
 * int parentstudy = currentStudy.getParentStudyId();
 * if (parentstudy > 0) { // is OK } else { parentstudy = currentstudyid; // same }
 */
int currentstudyid = currentStudy.getId();
// YW 11-09-2008 << modified logic here.
int parentstudy = currentstudyid;
// YW 11-09-2008 >>
StudyBean parentStudy = new StudyBean();
if (currentStudy.getParentStudyId() > 0) {
//StudyDAO sdao = new StudyDAO(sm.getDataSource());
parentStudy = (StudyBean) sdao.findByPK(currentStudy.getParentStudyId());
}
ExtractBean eb = generateFileService.generateExtractBean(db, currentStudy, parentStudy);
if (StringUtil.isBlank(action)) {
loadList(db, asdfdao, datasetId, fp, eb);
forwardPage(Page.EXPORT_DATASETS);
} else if ("delete".equalsIgnoreCase(action) && adfId > 0) {
boolean success = false;
ArchivedDatasetFileBean adfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(adfId);
File file = new File(adfBean.getFileReference());
if (!file.canWrite()) {
addPageMessage(respage.getString("write_protected"));
} else {
success = file.delete();
if (success) {
asdfdao.deleteArchiveDataset(adfBean);
addPageMessage(respage.getString("file_removed"));
} else {
addPageMessage(respage.getString("error_removing_file"));
}
}
loadList(db, asdfdao, datasetId, fp, eb);
forwardPage(Page.EXPORT_DATASETS);
} else {
logger.info("**** found action ****: " + action);
String generateReport = "";
// generate file, and show screen export
// String generalFileDir = DATASET_DIR + db.getId() + File.separator;
// change this up, so that we don't overwrite anything
String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
String generalFileDir = DATASET_DIR + db.getId() + File.separator + sdfDir.format(new java.util.Date());
String fileName = "";
db.setName(db.getName().replaceAll(" ", "_"));
Page finalTarget = Page.GENERATE_DATASET;
finalTarget = Page.EXPORT_DATA_CUSTOM;
// now display report according to format specified
// TODO revise final target to set to fileReference????
long sysTimeBegin = System.currentTimeMillis();
int fId = 0;
if ("sas".equalsIgnoreCase(action)) {
// generateReport = dsdao.generateDataset(db, ExtractBean.SAS_FORMAT, currentStudy, parentStudy);
long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
String SASFileName = db.getName() + "_sas.sas";
// logger.info("found data set: "+generateReport);
generateFileService.createFile(SASFileName, generalFileDir, generateReport, db, sysTimeEnd, ExportFormatBean.TXTFILE, true, ub);
logger.info("created sas file");
request.setAttribute("generate", generalFileDir + SASFileName);
finalTarget.setFileName(generalFileDir + SASFileName);
fileName = SASFileName;
// won't work since page creator is private
} else if ("odm".equalsIgnoreCase(action)) {
String odmVersion = fp.getString("odmVersion");
String ODMXMLFileName = "";
// DRY
// HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "");
HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "", eb, currentStudy.getId(), currentStudy.getParentStudyId(), "99", true, true, true, null, ub);
for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
Object key = entry.getKey();
Object value = entry.getValue();
ODMXMLFileName = (String) key;
Integer fileID = (Integer) value;
fId = fileID.intValue();
}
fileName = ODMXMLFileName;
request.setAttribute("generate", generalFileDir + ODMXMLFileName);
logger.debug("+++ set the following: " + generalFileDir + ODMXMLFileName);
// send a link with the SQL file? put the generated SQL file with the dataset?
if (fp.getString("xalan") != null) {
XalanTriggerService xts = new XalanTriggerService();
String propertiesPath = SQLInitServlet.getField("filePath");
// the trick there, we need to open up the zipped file and get at the XML
openZipFile(generalFileDir + ODMXMLFileName + ".zip");
// need to find out how to copy this xml file from /bin to the generalFileDir
SimpleTrigger simpleTrigger = xts.generateXalanTrigger(propertiesPath + File.separator + "ODMReportStylesheet.xsl", ODMXMLFileName, generalFileDir + "output.sql", db.getId());
scheduler = getScheduler();
JobDetailFactoryBean jobDetailFactoryBean = new JobDetailFactoryBean();
jobDetailFactoryBean.setGroup(xts.TRIGGER_GROUP_NAME);
jobDetailFactoryBean.setName(simpleTrigger.getKey().getName());
jobDetailFactoryBean.setJobClass(org.akaza.openclinica.web.job.XalanStatefulJob.class);
jobDetailFactoryBean.setJobDataMap(simpleTrigger.getJobDataMap());
// need durability?
jobDetailFactoryBean.setDurability(true);
try {
Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), simpleTrigger);
logger.info("== found job date: " + dateStart.toString());
} catch (SchedulerException se) {
se.printStackTrace();
}
}
} else if ("txt".equalsIgnoreCase(action)) {
// generateReport = dsdao.generateDataset(db, ExtractBean.TXT_FORMAT, currentStudy, parentStudy);
// eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
String TXTFileName = "";
HashMap answerMap = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, db, currentstudyid, parentstudy, "", ub);
// and of course DRY
for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
Object key = entry.getKey();
Object value = entry.getValue();
TXTFileName = (String) key;
Integer fileID = (Integer) value;
fId = fileID.intValue();
}
fileName = TXTFileName;
request.setAttribute("generate", generalFileDir + TXTFileName);
// finalTarget.setFileName(generalFileDir+TXTFileName);
logger.debug("+++ set the following: " + generalFileDir + TXTFileName);
} else if ("html".equalsIgnoreCase(action)) {
// html based dataset browser
TabReportBean answer = new TabReportBean();
eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
eb.getMetadata();
eb.computeReport(answer);
request.setAttribute("dataset", db);
request.setAttribute("extractBean", eb);
finalTarget = Page.GENERATE_DATASET_HTML;
} else if ("spss".equalsIgnoreCase(action)) {
SPSSReportBean answer = new SPSSReportBean();
// removed three lines here and put them in generate file service, createSPSSFile method. tbh 01/2009
eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
eb.getMetadata();
eb.computeReport(answer);
// System.out.println("*** isShowCRFversion: " + db.isShowCRFversion());
// TODO in the spirit of DRY, if this works we need to remove lines 443-776 in this servlet, tbh 01/2009
String DDLFileName = "";
HashMap answerMap = generateFileService.createSPSSFile(db, eb, currentStudy, parentStudy, sysTimeBegin, generalFileDir, answer, "", ub);
// hmm, DRY?
for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
Object key = entry.getKey();
Object value = entry.getValue();
DDLFileName = (String) key;
Integer fileID = (Integer) value;
fId = fileID.intValue();
}
request.setAttribute("generate", generalFileDir + DDLFileName);
logger.debug("+++ set the following: " + generalFileDir + DDLFileName);
} else if ("csv".equalsIgnoreCase(action)) {
CommaReportBean answer = new CommaReportBean();
eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
eb.getMetadata();
eb.computeReport(answer);
long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
// logger.info("found data set: "+generateReport);
String CSVFileName = db.getName() + "_comma.txt";
fId = generateFileService.createFile(CSVFileName, generalFileDir, answer.toString(), db, sysTimeEnd, ExportFormatBean.CSVFILE, true, ub);
fileName = CSVFileName;
logger.info("just created csv file");
request.setAttribute("generate", generalFileDir + CSVFileName);
// finalTarget.setFileName(generalFileDir+CSVFileName);
} else if ("excel".equalsIgnoreCase(action)) {
// HSSFWorkbook excelReport = dsdao.generateExcelDataset(db, ExtractBean.XLS_FORMAT, currentStudy, parentStudy);
long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
// TODO this will change and point to a created excel spreadsheet, tbh
String excelFileName = db.getName() + "_excel.xls";
// fId = this.createFile(excelFileName, generalFileDir, excelReport, db, sysTimeEnd, ExportFormatBean.EXCELFILE);
// logger.info("just created csv file, for excel output");
// response.setHeader("Content-disposition", "attachment; filename=" + CSVFileName);
// logger.info("csv file name: " + CSVFileName);
finalTarget = Page.GENERATE_EXCEL_DATASET;
// response.setContentType("application/vnd.ms-excel");
response.setHeader("Content-Disposition", "attachment; filename=" + db.getName() + "_excel.xls");
request.setAttribute("generate", generalFileDir + excelFileName);
logger.info("set 'generate' to :" + generalFileDir + excelFileName);
fileName = excelFileName;
// excelReport.write(stream);
// stream.flush();
// stream.close();
// finalTarget.setFileName(WEB_DIR+db.getId()+"/"+excelFileName);
}
// <%@page contentType="application/vnd.ms-excel"%>
if (!finalTarget.equals(Page.GENERATE_EXCEL_DATASET) && !finalTarget.equals(Page.GENERATE_DATASET_HTML)) {
// to catch all the others and try to set a new path for file capture, tbh 4-18-05
// request.setAttribute("generate", finalTarget.getFileName());
// TODO changing path to show refresh page, then window with link to download file, tbh 06-08-05
// finalTarget.setFileName("/WEB-INF/jsp/extract/generatedFileDataset.jsp");
finalTarget.setFileName("/WEB-INF/jsp/extract/generateMetadataCore.jsp");
// also set up table here???
asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
ArchivedDatasetFileBean asdfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(fId);
// *** do we need this below? tbh
ArrayList newFileList = new ArrayList();
newFileList.add(asdfBean);
// request.setAttribute("filelist",newFileList);
ArrayList filterRows = ArchivedDatasetFileRow.generateRowsFromBeans(newFileList);
EntityBeanTable table = fp.getEntityBeanTable();
// sort by date
table.setSortingIfNotExplicitlySet(3, false);
String[] columns = { resword.getString("file_name"), resword.getString("run_time"), resword.getString("file_size"), resword.getString("created_date"), resword.getString("created_by") };
table.setColumns(new ArrayList(Arrays.asList(columns)));
table.hideColumnLink(0);
table.hideColumnLink(1);
table.hideColumnLink(2);
table.hideColumnLink(3);
table.hideColumnLink(4);
// table.setQuery("ExportDataset?datasetId=" + db.getId(), new HashMap());
// trying to continue...
// session.setAttribute("newDataset",db);
request.setAttribute("dataset", db);
request.setAttribute("file", asdfBean);
table.setRows(filterRows);
table.computeDisplay();
request.setAttribute("table", table);
// *** do we need this above? tbh
}
logger.info("set first part of 'generate' to :" + generalFileDir);
logger.info("found file name: " + finalTarget.getFileName());
// String del = CoreResources.getField("dataset_file_delete");
// if (del.equalsIgnoreCase("true") || del.equals("")) {
// File deleteFile = new File(generalFileDir + fileName);
// deleteFile.delete();
// }
forwardPage(finalTarget);
}
}
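Three branches above (odm, txt, spss) repeat the same iteration over the HashMap returned by the GenerateExtractFileService create*File methods, and the inline comments flag it as a DRY violation. A hedged sketch of a small shared helper follows; the class and method names are hypothetical, and it assumes, as the loops above do, that the map holds a single entry mapping the generated file name to its archived-file id:

// hypothetical value holder and helper to avoid repeating the answer-map loop in each format branch
static class GeneratedFile {
    final String name;
    final int id;
    GeneratedFile(String name, int id) {
        this.name = name;
        this.id = id;
    }
}

private static GeneratedFile extractGeneratedFile(HashMap answerMap) {
    String name = "";
    int id = 0;
    for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
        java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
        // key is the generated file name, value is the archived dataset file id
        name = (String) entry.getKey();
        id = ((Integer) entry.getValue()).intValue();
    }
    return new GeneratedFile(name, id);
}

// usage in the "txt" branch above would then reduce to, roughly:
// GeneratedFile gf = extractGeneratedFile(generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, db, currentstudyid, parentstudy, "", ub));
// fileName = gf.name; fId = gf.id;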
Use of org.akaza.openclinica.web.bean.EntityBeanTable in project OpenClinica by OpenClinica.
The class CreateDatasetServlet, method processRequest.
@Override
public void processRequest() throws Exception {
FormProcessor fp = new FormProcessor(request);
String action = fp.getString("action");
if (StringUtil.isBlank(action)) {
// step 1 -- instructions, and continue button
session.setAttribute("newDataset", new DatasetBean());
session.setAttribute("allItems", new ArrayList());
session.setAttribute("crf", new CRFBean());
session.setAttribute("allSelectedItems", new ArrayList());
forwardPage(Page.CREATE_DATASET_1);
} else {
StudyBean studyWithEventDefs = currentStudy;
if (currentStudy.getParentStudyId() > 0) {
studyWithEventDefs = new StudyBean();
studyWithEventDefs.setId(currentStudy.getParentStudyId());
}
if ("begin".equalsIgnoreCase(action)) {
// step 2 -- select study events/crfs
StudyEventDAO sedao = new StudyEventDAO(sm.getDataSource());
StudyEventDefinitionDAO seddao = new StudyEventDefinitionDAO(sm.getDataSource());
EventCRFDAO ecdao = new EventCRFDAO(sm.getDataSource());
StudyBean studyWithEventDefinitions = currentStudy;
if (currentStudy.getParentStudyId() > 0) {
studyWithEventDefinitions = new StudyBean();
studyWithEventDefinitions.setId(currentStudy.getParentStudyId());
}
ArrayList seds = seddao.findAllActiveByStudy(studyWithEventDefinitions);
CRFDAO crfdao = new CRFDAO(sm.getDataSource());
HashMap events = new LinkedHashMap();
for (int i = 0; i < seds.size(); i++) {
StudyEventDefinitionBean sed = (StudyEventDefinitionBean) seds.get(i);
ArrayList<CRFBean> crfs = (ArrayList<CRFBean>) crfdao.findAllActiveByDefinition(sed);
if (currentStudy.getParentStudyId() > 0) {
// sift through these CRFs and see which ones are hidden
HideCRFManager hideCRFs = HideCRFManager.createHideCRFManager();
crfs = hideCRFs.removeHiddenCRFBeans(studyWithEventDefinitions, sed, crfs, sm.getDataSource());
}
if (!crfs.isEmpty()) {
events.put(sed, crfs);
}
}
// datasets
if (events.isEmpty()) {
addPageMessage(respage.getString("not_have_study_definitions_assigned"));
forwardPage(Page.CREATE_DATASET_1);
} else {
crfdao = new CRFDAO(sm.getDataSource());
ItemDAO idao = new ItemDAO(sm.getDataSource());
ArrayList sedItemIds = CreateDatasetServlet.allSedItemIdsInStudy(events, crfdao, idao);
session.setAttribute("numberOfStudyItems", Integer.toString(sedItemIds.size()));
request.setAttribute("eventlist", events);
session.setAttribute(EVENTS_FOR_CREATE_DATASET, events);
session.setAttribute("newDataset", new DatasetBean());
session.setAttribute("allItems", new ArrayList());
session.setAttribute("crf", new CRFBean());
forwardPage(Page.CREATE_DATASET_2);
}
} else if ("beginsubmit".equalsIgnoreCase(action)) {
String saveItems = fp.getString(SAVE_BUTTON);
String saveContinue = fp.getString(SAVE_CONTINUE_BUTTON);
DatasetBean db = (DatasetBean) session.getAttribute("newDataset");
if (db == null) {
db = new DatasetBean();
}
extractIdsFromForm(db);
extractEventIds(db);
session.setAttribute("newDataset", db);
// +"");
if (!StringUtil.isBlank(saveItems)) {
request.setAttribute("eventlist", session.getAttribute(EVENTS_FOR_CREATE_DATASET));
// BWP 3095>>
String summary = respage.getString("you_have_selected") + " " + db.getItemIds().size() + " " + respage.getString("items_so_far");
summary += genAttMsg(db);
addPageMessage(summary);
int crfId = fp.getInt("crfId");
if (crfId > 0) {
// user choose a crf and select items
forwardPage(Page.CREATE_DATASET_2);
} else {
ArrayList sgclasses = (ArrayList) session.getAttribute("allSelectedGroups");
if (sgclasses == null || sgclasses.size() == 0) {
sgclasses = setUpStudyGroups();
}
session.setAttribute("allSelectedGroups", sgclasses);
request.setAttribute("allSelectedGroups", sgclasses);
// TODO push out list of subject groups here???
// form submitted from "view selected item" or attribute page, so forward back to the "view selected item" page
forwardPage(Page.CREATE_DATASET_VIEW_SELECTED);
}
} else {
if (db.getItemIds().size() == 0) {
request.setAttribute("eventlist", session.getAttribute(EVENTS_FOR_CREATE_DATASET));
addPageMessage(respage.getString("should_select_one_item_to_create_dataset"));
forwardPage(Page.CREATE_DATASET_2);
} else {
String summary = respage.getString("you_have_selected") + " " + db.getItemIds().size() + " " + respage.getString("items_totally_for_this_dataset");
summary += genAttMsg(db);
addPageMessage(summary);
// 0 means using the default month
fp.addPresetValue("firstmonth", 0);
fp.addPresetValue("firstyear", 1900);
fp.addPresetValue("lastmonth", 0);
fp.addPresetValue("lastyear", 2100);
setPresetValues(fp.getPresetValues());
logger.warn("found preset values while setting date: " + fp.getPresetValues().toString());
request.setAttribute(BEAN_MONTHS, getMonths());
request.setAttribute(BEAN_YEARS, getYears());
forwardPage(Page.CREATE_DATASET_3);
}
}
} else if ("scopesubmit".equalsIgnoreCase(action)) {
ArrayList months = getMonths();
ArrayList years = getYears();
int firstMonth = fp.getInt("firstmonth");
int firstYear = fp.getInt("firstyear");
int lastMonth = fp.getInt("lastmonth");
int lastYear = fp.getInt("lastyear");
if (fp.getInt("firstmonth") == 0) {
// default value
firstMonth = 1;
}
if (fp.getInt("lastmonth") == 0) {
// default value
lastMonth = 12;
}
errors = new HashMap();
if (fp.getInt("firstmonth") > 0 && firstYear == 1900) {
Validator.addError(errors, "firstmonth", restext.getString("if_specify_month_also_specify_year"));
}
if (fp.getInt("lastmonth") > 0 && lastYear == 2100) {
Validator.addError(errors, "lastmonth", restext.getString("if_specify_month_also_specify_year"));
}
Date dateStart = getFirstDayOfMonth(firstYear, firstMonth);
Date dateEnd = getLastDayOfMonth(lastYear, lastMonth);
if (dateEnd.compareTo(dateStart) < 0) {
Validator.addError(errors, "firstmonth", restext.getString("the_from_should_be_come_before_to"));
}
if (!errors.isEmpty()) {
String[] fieldNames = { "firstmonth", "firstyear", "lastmonth", "lastyear" };
fp.setCurrentIntValuesAsPreset(fieldNames);
setInputMessages(errors);
addPageMessage(respage.getString("errors_in_submission_see_below"));
setPresetValues(fp.getPresetValues());
request.setAttribute(BEAN_MONTHS, getMonths());
request.setAttribute(BEAN_YEARS, getYears());
forwardPage(Page.CREATE_DATASET_3);
} else {
DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
dsb.setDateStart(dateStart);
dsb.setDateEnd(dateEnd);
session.setAttribute("newDataset", dsb);
if (fp.getString("submit").equals(resword.getString("continue_to_apply_filter"))) {
// FilterDAO fdao = new FilterDAO(sm.getDataSource());
// Collection filters = fdao.findAll();
// TODO make findAllByProject
// request.setAttribute("filters",filters);
EntityBeanTable table = getFilterTable();
session.setAttribute("partOfCreateDataset", new Integer(1));
// to be used in createFiltersThree servlet, tbh
request.setAttribute("table", table);
forwardPage(Page.APPLY_FILTER);
} else {
request.setAttribute("statuses", Status.toActiveArrayList());
// YW, 2-20-2008 << these attributes will show on the CREATE_DATASET_4 page
String temp = dsb.getODMMetaDataVersionOid();
request.setAttribute("mdvOID", temp != null && temp.length() > 0 ? temp : "v1.0.0");
temp = dsb.getODMMetaDataVersionName();
request.setAttribute("mdvName", temp != null && temp.length() > 0 ? temp : "MetaDataVersion_v1.0.0");
request.setAttribute("mdvPrevStudy", dsb.getODMPriorStudyOid());
request.setAttribute("mdvPrevOID", dsb.getODMPriorMetaDataVersionOid());
// YW >>
forwardPage(Page.CREATE_DATASET_4);
}
}
} else if ("specifysubmit".equalsIgnoreCase(action)) {
Validator v = new Validator(request);
v.addValidation("dsName", Validator.NO_BLANKS);
v.addValidation("dsName", Validator.NO_SEMI_COLONS_OR_COLONS);
v.addValidation("dsDesc", Validator.NO_BLANKS);
v.addValidation("dsStatus", Validator.IS_VALID_TERM, TermType.STATUS);
v.addValidation("dsName", Validator.LENGTH_NUMERIC_COMPARISON, NumericComparisonOperator.LESS_THAN_OR_EQUAL_TO, 255);
v.addValidation("dsDesc", Validator.LENGTH_NUMERIC_COMPARISON, NumericComparisonOperator.LESS_THAN_OR_EQUAL_TO, 2000);
String mdvOID = fp.getString("mdvOID");
String mdvName = fp.getString("mdvName");
String mdvPrevStudy = fp.getString("mdvPrevStudy");
if (mdvPrevStudy != null && mdvPrevStudy.length() > 0) {
v.addValidation("mdvPrevOID", Validator.NO_BLANKS);
}
String mdvPrevOID = fp.getString("mdvPrevOID");
errors = v.validate();
String dsName = fp.getString("dsName");
if (!StringUtil.isBlank(dsName)) {
// YW, << 3-19-2008
if (dsName.contains("/") || dsName.contains("\\")) {
v.addError(errors, "dsName", restext.getString("slash_not_allowed"));
}
// 2-20-2008, no check for editing dataset
if (((DatasetBean) session.getAttribute("newDataset")).getId() <= 0) {
// YW >>
// logger.info("dsName" + fp.getString("dsName"));
DatasetDAO dsdao = new DatasetDAO(sm.getDataSource());
DatasetBean dsBean = (DatasetBean) dsdao.findByNameAndStudy(fp.getString("dsName").trim(), currentStudy);
if (dsBean.getId() > 0) {
Validator.addError(errors, "dsName", restext.getString("dataset_name_used_by_another_choose_unique"));
}
}
}
if (!errors.isEmpty()) {
String[] fieldNames = { "dsName", "dsDesc" };
fp.setCurrentStringValuesAsPreset(fieldNames);
fp.addPresetValue("dsStatusId", fp.getInt("dsStatus"));
fp.addPresetValue("mdvOID", mdvOID);
fp.addPresetValue("mdvName", mdvName);
fp.addPresetValue("mdvPrevStudy", mdvPrevStudy);
fp.addPresetValue("mdvPrevOID", mdvPrevOID);
addPageMessage(respage.getString("errors_in_submission_see_below"));
setInputMessages(errors);
setPresetValues(fp.getPresetValues());
request.setAttribute("statuses", Status.toActiveArrayList());
forwardPage(Page.CREATE_DATASET_4);
} else {
session.setAttribute("mdvOID", mdvOID);
session.setAttribute("mdvName", mdvName);
session.setAttribute("mdvPrevStudy", mdvPrevStudy);
session.setAttribute("mdvPrevOID", mdvPrevOID);
if (mdvPrevOID != null && mdvPrevOID.length() > 0 && (mdvPrevStudy == null || mdvPrevStudy.length() <= 0)) {
mdvPrevStudy = currentStudy.getId() + "";
}
DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
dsb.setSQLStatement(dsb.generateQuery());
String dbName = SQLInitServlet.getField("dataBase");
if ("oracle".equals(dbName)) {
dsb.setSQLStatement(dsb.generateOracleQuery());
}
// TODO set up oracle syntax for the query, grab the database from the session manager and feed it to the dataset bean; possibly done, tbh 1/4/2005
// TODO look for the filter here, re-create the sql statement and put it in here; possibly done, need to test, tbh 1/7/2005
FilterBean fb = (FilterBean) session.getAttribute("newFilter");
if (fb != null) {
// FilterDAO fDAO = new FilterDAO(sm.getDataSource());
dsb.setSQLStatement(dsb.getSQLStatement() + " " + fb.getSQLStatement());
}
// treat as a new dataset if the dataset name has been changed
if (dsb.getId() > 0 && !dsb.getName().equals(fp.getString("dsName"))) {
dsb.setId(0);
}
// YW >>
dsb.setODMMetaDataVersionName(mdvName);
dsb.setODMMetaDataVersionOid(mdvOID);
dsb.setODMPriorMetaDataVersionOid(mdvPrevOID);
dsb.setODMPriorStudyOid(mdvPrevStudy);
dsb.setName(fp.getString("dsName"));
dsb.setDescription(fp.getString("dsDesc"));
dsb.setStatus(Status.get(fp.getInt("dsStatus")));
dsb.setDatasetItemStatus(DatasetItemStatus.get(fp.getInt("itemStatus")));
session.removeAttribute("partOfCreateDataset");
Date ddate = new SimpleDateFormat("MM/dd/yyyy").parse("01/01/1900");
// done to remove the set up of going to get the filter, tbh
// set up dataset here, grab primary key???!!!???
// added by jxu
request.setAttribute("defaultStart", local_df.parse(local_df.format(ddate)));
request.setAttribute("defaultEnd", getLastDayOfMonth(2100, 12));
session.setAttribute("newDataset", dsb);
forwardPage(Page.CONFIRM_DATASET);
}
} else if ("confirmall".equalsIgnoreCase(action)) {
String submit = fp.getString("btnSubmit");
logger.info("reached confirm all");
if (!resword.getString("confirm_and_save").equalsIgnoreCase(submit)) {
// we're going back, so we should not destroy the data we've created, tbh
// session.removeAttribute("newDataset");
// session.removeAttribute("newFilter");
forwardPage(Page.CREATE_DATASET_4);
} else {
DatasetDAO ddao = new DatasetDAO(sm.getDataSource());
DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
dsb.setStudyId(this.currentStudy.getId());
dsb.setOwner(ub);
// dsb.setOwnerId(ub.getId());
// at this point, dataset itemId will still be kept uniquely
dsb = finalUpateDatasetBean(dsb);
if (dsb.getId() == 0) {
// if the bean hasn't been created already that is...
logger.info("*** about to create the dataset bean");
dsb = (DatasetBean) ddao.create(dsb);
logger.info("created dataset bean: " + dsb.getId() + ", name: " + dsb.getName());
if (!dsb.isActive()) {
addPageMessage(restext.getString("problem_creating_dataset_try_again"));
forwardPage(Page.EXTRACT_DATASETS_MAIN);
}
} else // YW, 2-20-2008 << for editing existing dataset
if (dsb.getId() > 0) {
dsb = (DatasetBean) ddao.updateAll(dsb);
if (!dsb.isActive()) {
addPageMessage(restext.getString("problem_creating_dataset_try_again"));
forwardPage(Page.EXTRACT_DATASETS_MAIN);
}
dsb = (DatasetBean) ddao.updateGroupMap(dsb);
if (!dsb.isActive()) {
addPageMessage(restext.getString("problem_updating_subject_group_class_when_updating_dataset"));
forwardPage(Page.EXTRACT_DATASETS_MAIN);
}
}
// YW >>
logger.info("setting data set id here");
// may be easier to just set the dataset bean back into the session?
request.setAttribute("dataset", dsb);
forwardPage(Page.EXPORT_DATASETS);
}
} else {
// refine this bit to catch errors, hopefully
addPageMessage(restext.getString("creating_new_dataset_cancelled"));
forwardPage(Page.CREATE_DATASET_1);
}
}
}
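In the "scopesubmit" branch above, month 0 and the sentinel years 1900 and 2100 stand for "not specified", and the from/to order is validated after the defaults are applied. An illustrative helper distilling just that normalization follows; the helper name is hypothetical, the month-without-year checks would stay with the form processing, and only calls already used in the branch (getFirstDayOfMonth, getLastDayOfMonth, Validator.addError) appear:

// hypothetical helper condensing the date-scope handling of the "scopesubmit" branch
private Date[] resolveDateScope(int firstMonth, int firstYear, int lastMonth, int lastYear, HashMap errors) {
    if (firstMonth == 0) {
        // month not chosen: default to January
        firstMonth = 1;
    }
    if (lastMonth == 0) {
        // month not chosen: default to December
        lastMonth = 12;
    }
    Date dateStart = getFirstDayOfMonth(firstYear, firstMonth);
    Date dateEnd = getLastDayOfMonth(lastYear, lastMonth);
    if (dateEnd.compareTo(dateStart) < 0) {
        Validator.addError(errors, "firstmonth", restext.getString("the_from_should_be_come_before_to"));
    }
    return new Date[] { dateStart, dateEnd };
}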
Use of org.akaza.openclinica.web.bean.EntityBeanTable in project OpenClinica by OpenClinica.
The class CreateFiltersOneServlet, method processRequest.
// ResourceBundle restext, resword, respage, resexception;
@Override
public void processRequest() throws Exception {
// clean up the previous setup, if necessary
session.removeAttribute("newExp");
// removes the new explanation for setting up the create dataset;
// covers the case where you cancel out of a process and then want to get in again, tbh
String action = request.getParameter("action");
if (StringUtil.isBlank(action)) {
// our start page:
// note that this is now set up to accept the tabling classes created in View
FormProcessor fp = new FormProcessor(request);
FilterDAO fdao = new FilterDAO(sm.getDataSource());
EntityBeanTable table = fp.getEntityBeanTable();
ArrayList filters = new ArrayList();
if (ub.isSysAdmin()) {
filters = (ArrayList) fdao.findAllAdmin();
} else {
filters = (ArrayList) fdao.findAll();
}
ArrayList filterRows = FilterRow.generateRowsFromBeans(filters);
String[] columns = { resword.getString("filter_name"), resword.getString("description"), resword.getString("created_by"), resword.getString("created_date"), resword.getString("status"), resword.getString("actions") };
table.setColumns(new ArrayList(Arrays.asList(columns)));
table.hideColumnLink(5);
table.addLink(resword.getString("create_new_filter"), "CreateFiltersOne?action=begin");
table.setQuery("CreateFiltersOne", new HashMap());
table.setRows(filterRows);
table.computeDisplay();
request.setAttribute("table", table);
// the code above replaces the following line:
// request.setAttribute("filters",filters);
forwardPage(Page.CREATE_FILTER_SCREEN_1);
} else if ("begin".equalsIgnoreCase(action)) {
forwardPage(Page.CREATE_FILTER_SCREEN_2);
}
}
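One convention worth noting across all the snippets on this page: the rows handed to an EntityBeanTable always come from a per-entity Row class exposing a static generateRowsFromBeans factory, and every table finishes with setRows, computeDisplay, and request.setAttribute("table", table). Collected side by side for comparison, reusing the variable names from the snippets above:

// row factories used on this page, one per listed entity type
ArrayList eventRows = DisplayStudyEventRow.generateRowsFromBeans(displayEvents); // SignStudySubjectServlet
ArrayList triggerRows = TriggerRow.generateRowsFromBeans(triggerBeans); // ViewImportJobServlet
ArrayList fileRows = ArchivedDatasetFileRow.generateRowsFromBeans(newFileList); // ExportDatasetServlet
ArrayList filterRows = FilterRow.generateRowsFromBeans(filters); // CreateFiltersOneServlet
// each list then goes through the same tail:
// table.setRows(rows); table.computeDisplay(); request.setAttribute("table", table);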