Use of org.akaza.openclinica.bean.extract.DatasetBean in project OpenClinica by OpenClinica.
In the class DatasetDAO, the method create:
public EntityBean create(EntityBean eb) {
/*
 * INSERT INTO DATASET (STUDY_ID, STATUS_ID, NAME, DESCRIPTION,
 * SQL_STATEMENT, OWNER_ID, DATE_CREATED, DATE_LAST_RUN, NUM_RUNS,
 * DATE_START, DATE_END,
 * SHOW_EVENT_LOCATION, SHOW_EVENT_START, SHOW_EVENT_END,
 * SHOW_SUBJECT_DOB, SHOW_SUBJECT_GENDER)
 * VALUES (?,?,?,?,?,?,NOW(),NOW(),?,NOW(),'2005-11-15',?,?,?,?,?)
 *
 * ADDED THE COLUMNS 7-2007, TBH:
 * ALTER TABLE dataset ADD COLUMN show_event_status bool DEFAULT false;
 * ALTER TABLE dataset ADD COLUMN show_subject_status bool DEFAULT false;
 * ALTER TABLE dataset ADD COLUMN show_subject_unique_id bool DEFAULT false;
 * ALTER TABLE dataset ADD COLUMN show_subject_age_at_event bool DEFAULT false;
 * ALTER TABLE dataset ADD COLUMN show_crf_status bool DEFAULT false;
 * ALTER TABLE dataset ADD COLUMN show_crf_version bool DEFAULT false;
 * ALTER TABLE dataset ADD COLUMN show_crf_int_name bool DEFAULT false;
 * ALTER TABLE dataset ADD COLUMN show_crf_int_date bool DEFAULT false;
 * ALTER TABLE dataset ADD COLUMN show_group_info bool DEFAULT false;
 * ALTER TABLE dataset ADD COLUMN show_disc_info bool DEFAULT false;
 *
 * Added table mapping dataset id to study group classes id, tbh.
 */
DatasetBean db = (DatasetBean) eb;
HashMap<Integer, Object> variables = new HashMap<Integer, Object>();
HashMap nullVars = new HashMap();
variables.put(Integer.valueOf(1), Integer.valueOf(db.getStudyId()));
variables.put(Integer.valueOf(2), Integer.valueOf(db.getStatus().getId()));
variables.put(Integer.valueOf(3), db.getName());
variables.put(Integer.valueOf(4), db.getDescription());
variables.put(Integer.valueOf(5), db.getSQLStatement());
variables.put(Integer.valueOf(6), Integer.valueOf(db.getOwnerId()));
variables.put(Integer.valueOf(7), Integer.valueOf(db.getNumRuns()));
variables.put(Integer.valueOf(8), new Boolean(db.isShowEventLocation()));
variables.put(Integer.valueOf(9), new Boolean(db.isShowEventStart()));
variables.put(Integer.valueOf(10), new Boolean(db.isShowEventEnd()));
variables.put(Integer.valueOf(11), new Boolean(db.isShowSubjectDob()));
variables.put(Integer.valueOf(12), new Boolean(db.isShowSubjectGender()));
variables.put(Integer.valueOf(13), new Boolean(db.isShowEventStatus()));
variables.put(Integer.valueOf(14), new Boolean(db.isShowSubjectStatus()));
variables.put(Integer.valueOf(15), new Boolean(db.isShowSubjectUniqueIdentifier()));
variables.put(Integer.valueOf(16), new Boolean(db.isShowSubjectAgeAtEvent()));
variables.put(Integer.valueOf(17), new Boolean(db.isShowCRFstatus()));
variables.put(Integer.valueOf(18), new Boolean(db.isShowCRFversion()));
variables.put(Integer.valueOf(19), new Boolean(db.isShowCRFinterviewerName()));
variables.put(Integer.valueOf(20), new Boolean(db.isShowCRFinterviewerDate()));
variables.put(Integer.valueOf(21), new Boolean(db.isShowSubjectGroupInformation()));
// variables.put(Integer.valueOf(22), new
// Boolean(db.isShowDiscrepancyInformation()));
variables.put(Integer.valueOf(22), new Boolean(false));
// currently not changing structure to allow for disc notes to be added
// in the future
variables.put(Integer.valueOf(23), db.getODMMetaDataVersionName());
variables.put(Integer.valueOf(24), db.getODMMetaDataVersionOid());
variables.put(Integer.valueOf(25), db.getODMPriorStudyOid());
variables.put(Integer.valueOf(26), db.getODMPriorMetaDataVersionOid());
variables.put(Integer.valueOf(27), db.isShowSubjectSecondaryId());
variables.put(Integer.valueOf(28), db.getDatasetItemStatus().getId());
this.executeWithPK(digester.getQuery("create"), variables, nullVars);
if (isQuerySuccessful()) {
eb.setId(getLatestPK());
if (db.isShowSubjectGroupInformation()) {
// add additional information here
for (int i = 0; i < db.getSubjectGroupIds().size(); i++) {
createGroupMap(eb.getId(), ((Integer) db.getSubjectGroupIds().get(i)).intValue(), nullVars);
}
}
}
return eb;
}
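For orientation, here is a minimal sketch of how calling code might hand a populated DatasetBean to this create method. It is illustrative only: the setters are assumed counterparts of the getters read above, package names other than the bean's own are written from memory of the OpenClinica source tree, and fields not set here (such as the dataset item status read at position 28) are assumed to have sensible defaults on the bean.

// Hypothetical usage sketch of DatasetDAO.create; setters and DAO package assumed.
import javax.sql.DataSource;

import org.akaza.openclinica.bean.core.Status;
import org.akaza.openclinica.bean.extract.DatasetBean;
import org.akaza.openclinica.dao.extract.DatasetDAO;

public class DatasetCreateExample {

    public DatasetBean createDataset(DataSource dataSource, int studyId, int ownerId) {
        DatasetBean db = new DatasetBean();
        db.setStudyId(studyId);
        db.setOwnerId(ownerId);
        db.setStatus(Status.AVAILABLE);
        db.setName("demographics_extract");
        db.setDescription("Baseline demographics dataset");
        // the extraction SQL is normally assembled by the dataset wizard; placeholder here
        db.setSQLStatement("SELECT ...");
        db.setShowSubjectDob(true);
        db.setShowSubjectGender(true);
        DatasetDAO dsdao = new DatasetDAO(dataSource);
        // create() runs the insert and, on success, sets the new primary key on the bean
        return (DatasetBean) dsdao.create(db);
    }
}

On success the returned bean carries the generated primary key, because create calls setId(getLatestPK()) before returning.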
Use of org.akaza.openclinica.bean.extract.DatasetBean in project OpenClinica by OpenClinica.
In the class ExampleSpringJob, the method executeInternal:
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
// need to generate a Locale so that user beans and other things will
// generate normally
Locale locale = new Locale("en-US");
ResourceBundleProvider.updateLocale(locale);
ResourceBundle pageMessages = ResourceBundleProvider.getPageMessagesBundle();
// logger.debug("--");
// logger.debug("-- executing a job " + message + " at " + new
// java.util.Date().toString());
JobDataMap dataMap = context.getMergedJobDataMap();
SimpleTrigger trigger = (SimpleTrigger) context.getTrigger();
try {
ApplicationContext appContext = (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
String studySubjectNumber = ((CoreResources) appContext.getBean("coreResources")).getField("extract.number");
coreResources = (CoreResources) appContext.getBean("coreResources");
ruleSetRuleDao = (RuleSetRuleDao) appContext.getBean("ruleSetRuleDao");
dataSource = (DataSource) appContext.getBean("dataSource");
mailSender = (OpenClinicaMailSender) appContext.getBean("openClinicaMailSender");
AuditEventDAO auditEventDAO = new AuditEventDAO(dataSource);
// Scheduler scheduler = context.getScheduler();
// JobDetail detail = context.getJobDetail();
// jobDetailBean = (JobDetailBean) detail;
/*
* data map here should coincide with the job data map found in
* CreateJobExportServlet, with the following code: jobDataMap = new
* JobDataMap(); jobDataMap.put(DATASET_ID, datasetId);
* jobDataMap.put(PERIOD, period); jobDataMap.put(EMAIL, email);
* jobDataMap.put(TAB, tab); jobDataMap.put(CDISC, cdisc);
* jobDataMap.put(SPSS, spss);
*/
String alertEmail = dataMap.getString(EMAIL);
String localeStr = dataMap.getString(LOCALE);
if (localeStr != null) {
locale = new Locale(localeStr);
ResourceBundleProvider.updateLocale(locale);
pageMessages = ResourceBundleProvider.getPageMessagesBundle();
}
int dsId = dataMap.getInt(DATASET_ID);
String tab = dataMap.getString(TAB);
String cdisc = dataMap.getString(CDISC);
String cdisc12 = dataMap.getString(CDISC12);
if (cdisc12 == null) {
cdisc12 = "0";
}
String cdisc13 = dataMap.getString(CDISC13);
if (cdisc13 == null) {
cdisc13 = "0";
}
String cdisc13oc = dataMap.getString(CDISC13OC);
if (cdisc13oc == null) {
cdisc13oc = "0";
}
String spss = dataMap.getString(SPSS);
int userId = dataMap.getInt(USER_ID);
int studyId = dataMap.getInt(STUDY_ID);
// String datasetId = dataMap.getString(DATASET_ID);
// int dsId = new Integer(datasetId).intValue();
// String userAcctId = dataMap.getString(USER_ID);
// int userId = new Integer(userAcctId).intValue();
// why the flip-flop? if one property is set to 'true' we can
// see jobs in another screen but all properties have to be
// strings
logger.debug("-- found the job: " + dsId + " dataset id");
// for (Iterator it = dataMap.entrySet().iterator(); it.hasNext();)
// {
// java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
// Object key = entry.getKey();
// Object value = entry.getValue();
// // logger.debug("-- found datamap property: " + key.toString() +
// // " : " + value.toString());
// }
HashMap fileName = new HashMap<String, Integer>();
if (dsId > 0) {
// trying to not throw an error if there's no dataset id
DatasetDAO dsdao = new DatasetDAO(dataSource);
DatasetBean datasetBean = (DatasetBean) dsdao.findByPK(dsId);
StudyDAO studyDao = new StudyDAO(dataSource);
UserAccountDAO userAccountDAO = new UserAccountDAO(dataSource);
// hmm, three lines in the if block DRY?
String generalFileDir = "";
String generalFileDirCopy = "";
String exportFilePath = SQLInitServlet.getField("exportFilePath");
String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
generalFileDir = DATASET_DIR + datasetBean.getId() + File.separator + sdfDir.format(new java.util.Date());
if (!"".equals(exportFilePath)) {
generalFileDirCopy = SQLInitServlet.getField("filePath") + exportFilePath + File.separator;
}
// logger.debug("-- created the following dir: " +
// generalFileDir);
long sysTimeBegin = System.currentTimeMillis();
// set up the user bean here, tbh
// logger.debug("-- gen tab file 00");
userBean = (UserAccountBean) userAccountDAO.findByPK(userId);
// needs to also be captured by the servlet, tbh
// logger.debug("-- gen tab file 00");
generateFileService = new GenerateExtractFileService(dataSource, coreResources, ruleSetRuleDao);
// logger.debug("-- gen tab file 00");
// tbh #5796 - covers a bug when the user changes studies, 10/2010
StudyBean activeStudy = (StudyBean) studyDao.findByPK(studyId);
StudyBean parentStudy = new StudyBean();
logger.debug("active study: " + studyId + " parent study: " + activeStudy.getParentStudyId());
if (activeStudy.getParentStudyId() > 0) {
// StudyDAO sdao = new StudyDAO(sm.getDataSource());
parentStudy = (StudyBean) studyDao.findByPK(activeStudy.getParentStudyId());
} else {
parentStudy = activeStudy;
// covers a bug in tab file creation, tbh 01/2009
}
logger.debug("-- found extract bean ");
ExtractBean eb = generateFileService.generateExtractBean(datasetBean, activeStudy, parentStudy);
MessageFormat mf = new MessageFormat("");
StringBuffer message = new StringBuffer();
StringBuffer auditMessage = new StringBuffer();
// use resource bundle page messages to generate the email, tbh
// 02/2009
// message.append(pageMessages.getString("html_email_header_1")
// + " " + alertEmail +
// pageMessages.getString("html_email_header_2") + "<br/>");
message.append("<p>" + pageMessages.getString("email_header_1") + " " + EmailEngine.getAdminEmail() + " " + pageMessages.getString("email_header_2") + " Job Execution " + pageMessages.getString("email_header_3") + "</p>");
message.append("<P>Dataset: " + datasetBean.getName() + "</P>");
message.append("<P>Study: " + activeStudy.getName() + "</P>");
message.append("<p>" + pageMessages.getString("html_email_body_1") + datasetBean.getName() + pageMessages.getString("html_email_body_2") + SQLInitServlet.getField("sysURL") + pageMessages.getString("html_email_body_3") + "</p>");
// logger.debug("-- gen tab file 00");
if ("1".equals(tab)) {
logger.debug("-- gen tab file 01");
fileName = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, datasetBean, activeStudy.getId(), parentStudy.getId(), generalFileDirCopy, userBean);
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// MessageFormat mf = new MessageFormat("");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_tab_delimited"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// auditMessage.append(
// "You can access your tab-delimited file <a href='AccessFile?fileId="
// + getFileIdInt(fileName) + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_tab_delimited") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
if ("1".equals(cdisc)) {
String odmVersion = "oc1.2";
fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
logger.debug("-- gen odm file");
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// MessageFormat mf = new MessageFormat("");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_odm_12"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// auditMessage.append(
// "You can access your ODM 1.2 w/OpenClinica Extension XML file <a href='AccessFile?fileId="
// + getFileIdInt(fileName)
// + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_odm_12") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
if ("1".equals(cdisc12)) {
String odmVersion = "1.2";
fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
logger.debug("-- gen odm file 1.2 default");
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_odm_12_xml"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// // auditMessage.append(
// "You can access your ODM 1.2 XML file <a href='AccessFile?fileId="
// + getFileIdInt(fileName) + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_odm_12_xml") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
if ("1".equals(cdisc13)) {
String odmVersion = "1.3";
fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
logger.debug("-- gen odm file 1.3");
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// MessageFormat mf = new MessageFormat("");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_odm_13"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// auditMessage.append(
// "You can access your ODM 1.3 XML file <a href='AccessFile?fileId="
// + getFileIdInt(fileName) + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_odm_13") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
if ("1".equals(cdisc13oc)) {
String odmVersion = "oc1.3";
fileName = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, datasetBean, activeStudy, generalFileDirCopy, eb, activeStudy.getId(), parentStudy.getId(), studySubjectNumber, true, true, true, null, userBean);
logger.debug("-- gen odm file 1.3 oc");
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// MessageFormat mf = new MessageFormat("");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_odm_13_xml"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// auditMessage.append(
// "You can access your ODM 1.3 w/OpenClinica Extension XML file <a href='AccessFile?fileId="
// + getFileIdInt(fileName)
// + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_odm_13_xml") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
if ("1".equals(spss)) {
SPSSReportBean answer = new SPSSReportBean();
fileName = generateFileService.createSPSSFile(datasetBean, eb, activeStudy, parentStudy, sysTimeBegin, generalFileDir, answer, generalFileDirCopy, userBean);
logger.debug("-- gen spss file");
message.append("<p>" + pageMessages.getString("html_email_body_4") + " " + getFileNameStr(fileName) + pageMessages.getString("html_email_body_4_5") + SQLInitServlet.getField("sysURL.base") + "AccessFile?fileId=" + getFileIdInt(fileName) + pageMessages.getString("html_email_body_3") + "</p>");
// MessageFormat mf = new MessageFormat("");
// mf.applyPattern(pageMessages.getString(
// "you_can_access_spss"));
// Object[] arguments = { getFileIdInt(fileName) };
// auditMessage.append(mf.format(arguments));
// auditMessage.append(
// "You can access your SPSS files <a href='AccessFile?fileId="
// + getFileIdInt(fileName) + "'>here</a>.<br/>");
auditMessage.append(pageMessages.getString("you_can_access_spss") + getFileIdInt(fileName) + pageMessages.getString("access_end"));
}
// wrap up the message, and send the email
message.append("<p>" + pageMessages.getString("html_email_body_5") + "</P><P>" + pageMessages.getString("email_footer"));
try {
mailSender.sendEmail(alertEmail.trim(), pageMessages.getString("job_ran_for") + " " + datasetBean.getName(), message.toString(), true);
} catch (OpenClinicaSystemException ose) {
// Do Nothing, In the future we might want to have an email
// status added to system.
}
TriggerBean triggerBean = new TriggerBean();
triggerBean.setDataset(datasetBean);
triggerBean.setUserAccount(userBean);
triggerBean.setFullName(trigger.getKey().getName());
auditEventDAO.createRowForExtractDataJobSuccess(triggerBean, auditMessage.toString());
} else {
TriggerBean triggerBean = new TriggerBean();
// triggerBean.setDataset(datasetBean);
triggerBean.setUserAccount(userBean);
triggerBean.setFullName(trigger.getKey().getName());
auditEventDAO.createRowForExtractDataJobFailure(triggerBean);
// logger.debug("-- made it here for some reason, ds id: "
// + dsId);
}
// logger.debug("-- generated file: " + fileNameStr);
// dataSource.
} catch (Exception e) {
// TODO Auto-generated catch block -- ideally should generate a fail
// msg here, tbh 02/2009
logger.debug("-- found exception: " + e.getMessage());
e.printStackTrace();
}
}
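The keys read from the merged JobDataMap above must match what the scheduling side stores, as the comment referring to CreateJobExportServlet points out. Below is a hedged sketch of that scheduling side in plain Quartz 2: the literal key strings, group names, and the 24-hour repeat are illustrative stand-ins rather than the servlet's actual constants, and the job's package is assumed.

// Sketch of scheduling ExampleSpringJob with the kind of data map it reads back above.
import static org.quartz.JobBuilder.newJob;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
import static org.quartz.TriggerBuilder.newTrigger;

import org.akaza.openclinica.web.job.ExampleSpringJob; // package assumed
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;

public class ExtractJobScheduler {

    public void scheduleExtract(Scheduler scheduler, int datasetId, int userId, int studyId, String email) throws SchedulerException {
        JobDataMap jobDataMap = new JobDataMap();
        jobDataMap.put("dsId", datasetId);     // read back via dataMap.getInt(DATASET_ID)
        jobDataMap.put("contactEmail", email); // read back via dataMap.getString(EMAIL)
        jobDataMap.put("tab", "1");            // "1" turns on the tab-delimited extract
        jobDataMap.put("cdisc", "1");          // "1" turns on ODM 1.2 with OpenClinica extensions
        jobDataMap.put("spss", "0");
        jobDataMap.put("userId", userId);
        jobDataMap.put("studyId", studyId);
        jobDataMap.put("locale", "en");

        JobDetail jobDetail = newJob(ExampleSpringJob.class)
                .withIdentity("exportJob_" + datasetId, "DEFAULT")
                .usingJobData(jobDataMap)
                .build();
        // executeInternal casts context.getTrigger() to SimpleTrigger, so keep the schedule simple
        Trigger trigger = newTrigger()
                .withIdentity("exportTrigger_" + datasetId, "DEFAULT")
                .startNow()
                .withSchedule(simpleSchedule().withIntervalInHours(24).repeatForever())
                .build();
        scheduler.scheduleJob(jobDetail, trigger);
    }
}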
Use of org.akaza.openclinica.bean.extract.DatasetBean in project OpenClinica by OpenClinica.
In the class RemoveSiteServlet, the method processRequest:
@Override
public void processRequest() throws Exception {
StudyDAO sdao = new StudyDAO(sm.getDataSource());
String idString = request.getParameter("id");
logger.info("site id:" + idString);
int siteId = Integer.valueOf(idString.trim()).intValue();
StudyBean study = (StudyBean) sdao.findByPK(siteId);
if (currentStudy.getId() != study.getParentStudyId()) {
addPageMessage(respage.getString("no_have_correct_privilege_current_study") + " " + respage.getString("change_active_study_or_contact"));
forwardPage(Page.MENU_SERVLET);
return;
}
// find all user and roles
UserAccountDAO udao = new UserAccountDAO(sm.getDataSource());
ArrayList userRoles = udao.findAllByStudyId(siteId);
// find all subjects
StudySubjectDAO ssdao = new StudySubjectDAO(sm.getDataSource());
ArrayList subjects = ssdao.findAllByStudy(study);
// find all events
StudyEventDefinitionDAO sefdao = new StudyEventDefinitionDAO(sm.getDataSource());
ArrayList definitions = sefdao.findAllByStudy(study);
String action = request.getParameter("action");
if (StringUtil.isBlank(idString)) {
addPageMessage(respage.getString("please_choose_a_site_to_remove"));
forwardPage(Page.SITE_LIST_SERVLET);
} else {
if ("confirm".equalsIgnoreCase(action)) {
request.setAttribute("siteToRemove", study);
request.setAttribute("userRolesToRemove", userRoles);
request.setAttribute("subjectsToRemove", subjects);
forwardPage(Page.REMOVE_SITE);
} else {
logger.info("submit to remove the site");
// change all statuses to unavailable
StudyDAO studao = new StudyDAO(sm.getDataSource());
study.setOldStatus(study.getStatus());
study.setStatus(Status.DELETED);
study.setUpdater(ub);
study.setUpdatedDate(new Date());
studao.update(study);
// remove all users and roles
for (int i = 0; i < userRoles.size(); i++) {
StudyUserRoleBean role = (StudyUserRoleBean) userRoles.get(i);
if (!role.getStatus().equals(Status.DELETED)) {
role.setStatus(Status.AUTO_DELETED);
role.setUpdater(ub);
role.setUpdatedDate(new Date());
// YW << So study_user_role table status_id field can be
// updated
udao.updateStudyUserRole(role, role.getUserName());
}
// YW 06-18-2007 >>
}
// YW << bug fix that current active study has been deleted
if (study.getId() == currentStudy.getId()) {
currentStudy.setStatus(Status.DELETED);
// currentRole.setRole(Role.INVALID);
currentRole.setStatus(Status.DELETED);
}
// remove all subjects
for (int i = 0; i < subjects.size(); i++) {
StudySubjectBean subject = (StudySubjectBean) subjects.get(i);
}
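// note: the loop above is effectively a no-op; the subjects themselves are
// marked AUTO_DELETED in the event-removal loop further below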
// remove all study_group
StudyGroupDAO sgdao = new StudyGroupDAO(sm.getDataSource());
SubjectGroupMapDAO sgmdao = new SubjectGroupMapDAO(sm.getDataSource());
ArrayList groups = sgdao.findAllByStudy(study);
for (int i = 0; i < groups.size(); i++) {
StudyGroupBean group = (StudyGroupBean) groups.get(i);
if (!group.getStatus().equals(Status.DELETED)) {
group.setStatus(Status.AUTO_DELETED);
group.setUpdater(ub);
group.setUpdatedDate(new Date());
sgdao.update(group);
// all subject_group_map
ArrayList subjectGroupMaps = sgmdao.findAllByStudyGroupId(group.getId());
for (int j = 0; j < subjectGroupMaps.size(); j++) {
SubjectGroupMapBean sgMap = (SubjectGroupMapBean) subjectGroupMaps.get(j);
if (!sgMap.getStatus().equals(Status.DELETED)) {
sgMap.setStatus(Status.AUTO_DELETED);
sgMap.setUpdater(ub);
sgMap.setUpdatedDate(new Date());
sgmdao.update(sgMap);
}
}
}
}
// remove all events
EventDefinitionCRFDAO edcdao = new EventDefinitionCRFDAO(sm.getDataSource());
StudyEventDAO sedao = new StudyEventDAO(sm.getDataSource());
for (int i = 0; i < subjects.size(); i++) {
StudySubjectBean subject = (StudySubjectBean) subjects.get(i);
if (!subject.getStatus().equals(Status.DELETED)) {
subject.setStatus(Status.AUTO_DELETED);
subject.setUpdater(ub);
subject.setUpdatedDate(new Date());
ssdao.update(subject);
ArrayList events = sedao.findAllByStudySubject(subject);
EventCRFDAO ecdao = new EventCRFDAO(sm.getDataSource());
for (int j = 0; j < events.size(); j++) {
StudyEventBean event = (StudyEventBean) events.get(j);
if (!event.getStatus().equals(Status.DELETED)) {
event.setStatus(Status.AUTO_DELETED);
event.setUpdater(ub);
event.setUpdatedDate(new Date());
sedao.update(event);
ArrayList eventCRFs = ecdao.findAllByStudyEvent(event);
ItemDataDAO iddao = new ItemDataDAO(sm.getDataSource());
for (int k = 0; k < eventCRFs.size(); k++) {
EventCRFBean eventCRF = (EventCRFBean) eventCRFs.get(k);
if (!eventCRF.getStatus().equals(Status.DELETED)) {
eventCRF.setOldStatus(eventCRF.getStatus());
eventCRF.setStatus(Status.AUTO_DELETED);
eventCRF.setUpdater(ub);
eventCRF.setUpdatedDate(new Date());
ecdao.update(eventCRF);
ArrayList itemDatas = iddao.findAllByEventCRFId(eventCRF.getId());
for (int a = 0; a < itemDatas.size(); a++) {
ItemDataBean item = (ItemDataBean) itemDatas.get(a);
if (!item.getStatus().equals(Status.DELETED)) {
item.setOldStatus(item.getStatus());
item.setStatus(Status.AUTO_DELETED);
item.setUpdater(ub);
item.setUpdatedDate(new Date());
iddao.update(item);
}
}
}
}
}
}
}
}
// for subjects
DatasetDAO datadao = new DatasetDAO(sm.getDataSource());
ArrayList dataset = datadao.findAllByStudyId(study.getId());
for (int i = 0; i < dataset.size(); i++) {
DatasetBean data = (DatasetBean) dataset.get(i);
if (!data.getStatus().equals(Status.DELETED)) {
data.setStatus(Status.AUTO_DELETED);
data.setUpdater(ub);
data.setUpdatedDate(new Date());
datadao.update(data);
}
}
addPageMessage(respage.getString("this_site_has_been_removed_succesfully"));
String fromListSite = (String) session.getAttribute("fromListSite");
if (fromListSite != null && fromListSite.equals("yes") && currentRole.getRole().equals(Role.STUDYDIRECTOR)) {
session.removeAttribute("fromListSite");
forwardPage(Page.SITE_LIST_SERVLET);
} else {
session.removeAttribute("fromListSite");
if (currentRole.getRole().equals(Role.ADMIN)) {
forwardPage(Page.STUDY_LIST_SERVLET);
} else {
forwardPage(Page.SITE_LIST_SERVLET);
}
}
}
}
}
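Every branch of the removal cascade above applies the same stamp: skip rows that are already DELETED, set AUTO_DELETED, record the updater and the update date, then persist through the matching DAO. Here is a hedged sketch of that pattern factored into a helper. It assumes AuditableEntityBean, with the setStatus, setUpdater and setUpdatedDate accessors used above, is the shared supertype of these beans, and it takes the persistence step as a callback so that no common DAO signature is claimed.

// Illustrative helper for the auto-delete stamp repeated throughout RemoveSiteServlet.
import java.util.Date;
import java.util.function.Consumer;

import org.akaza.openclinica.bean.core.AuditableEntityBean; // shared supertype assumed
import org.akaza.openclinica.bean.core.Status;
import org.akaza.openclinica.bean.login.UserAccountBean;

public final class RemovalCascadeHelper {

    private RemovalCascadeHelper() {
    }

    // Skips rows that are already hard-DELETED, otherwise marks the bean AUTO_DELETED,
    // stamps who changed it and when, and hands it back to the caller's DAO to persist.
    public static <T extends AuditableEntityBean> void autoDelete(T bean, UserAccountBean updater, Consumer<T> persist) {
        if (bean.getStatus().equals(Status.DELETED)) {
            return;
        }
        bean.setStatus(Status.AUTO_DELETED);
        bean.setUpdater(updater);
        bean.setUpdatedDate(new Date());
        persist.accept(bean);
    }
}

With a helper like this, the dataset loop above would reduce to a single call per bean, for example autoDelete(data, ub, datadao::update), assuming each DAO's update method accepts the corresponding bean type.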
Use of org.akaza.openclinica.bean.extract.DatasetBean in project OpenClinica by OpenClinica.
In the class RestoreSiteServlet, the method processRequest:
@Override
public void processRequest() throws Exception {
StudyDAO sdao = new StudyDAO(sm.getDataSource());
String idString = request.getParameter("id");
logger.info("site id:" + idString);
int siteId = Integer.valueOf(idString.trim()).intValue();
StudyBean study = (StudyBean) sdao.findByPK(siteId);
// find all user and roles
UserAccountDAO udao = new UserAccountDAO(sm.getDataSource());
ArrayList userRoles = udao.findAllByStudyId(siteId);
// find all subjects
StudySubjectDAO ssdao = new StudySubjectDAO(sm.getDataSource());
ArrayList subjects = ssdao.findAllByStudy(study);
// find all events
StudyEventDefinitionDAO sefdao = new StudyEventDefinitionDAO(sm.getDataSource());
ArrayList definitions = sefdao.findAllByStudy(study);
String action = request.getParameter("action");
if (StringUtil.isBlank(idString)) {
addPageMessage(respage.getString("please_choose_a_site_to_restore"));
forwardPage(Page.SITE_LIST_SERVLET);
} else {
if ("confirm".equalsIgnoreCase(action)) {
// site can be restored when its parent study is not "removed"
// -- YW -6-21-2007
StudyBean parentstudy = (StudyBean) sdao.findByPK(study.getParentStudyId());
if (!"removed".equals(parentstudy.getStatus().getName())) {
request.setAttribute("siteToRestore", study);
request.setAttribute("userRolesToRestore", userRoles);
request.setAttribute("subjectsToRestore", subjects);
// request.setAttribute("definitionsToRestore",
// definitions);
} else {
MessageFormat mf = new MessageFormat("");
mf.applyPattern(respage.getString("choosen_site_cannot_restored"));
Object[] arguments = { study.getName(), parentstudy.getName() };
addPageMessage(mf.format(arguments));
forwardPage(Page.STUDY_LIST_SERVLET);
}
forwardPage(Page.RESTORE_SITE);
} else {
logger.info("submit to restore the site");
// change all statuses to unavailable
StudyDAO studao = new StudyDAO(sm.getDataSource());
study.setStatus(study.getOldStatus());
study.setUpdater(ub);
study.setUpdatedDate(new Date());
studao.update(study);
// restore all users and roles
for (int i = 0; i < userRoles.size(); i++) {
StudyUserRoleBean role = (StudyUserRoleBean) userRoles.get(i);
if (role.getStatus().equals(Status.AUTO_DELETED)) {
role.setStatus(Status.AVAILABLE);
role.setUpdater(ub);
role.setUpdatedDate(new Date());
// YW << So study_user_role table status_id field can be
// updated
udao.updateStudyUserRole(role, role.getUserName());
}
// YW 06-18-2007 >>
}
// study
if (study.getId() == currentStudy.getId()) {
currentStudy.setStatus(Status.AVAILABLE);
StudyUserRoleBean r = (new UserAccountDAO(sm.getDataSource())).findRoleByUserNameAndStudyId(ub.getName(), currentStudy.getId());
StudyUserRoleBean rInParent = (new UserAccountDAO(sm.getDataSource())).findRoleByUserNameAndStudyId(ub.getName(), currentStudy.getParentStudyId());
// according to logic in SecureController.java: inherited
// role from parent study, pick the higher role
currentRole.setRole(Role.max(r.getRole(), rInParent.getRole()));
}
// YW >>
// restore all study_group
StudyGroupDAO sgdao = new StudyGroupDAO(sm.getDataSource());
SubjectGroupMapDAO sgmdao = new SubjectGroupMapDAO(sm.getDataSource());
ArrayList groups = sgdao.findAllByStudy(study);
for (int i = 0; i < groups.size(); i++) {
StudyGroupBean group = (StudyGroupBean) groups.get(i);
if (group.getStatus().equals(Status.AUTO_DELETED)) {
group.setStatus(Status.AVAILABLE);
group.setUpdater(ub);
group.setUpdatedDate(new Date());
sgdao.update(group);
// all subject_group_map
ArrayList subjectGroupMaps = sgmdao.findAllByStudyGroupId(group.getId());
for (int j = 0; j < subjectGroupMaps.size(); j++) {
SubjectGroupMapBean sgMap = (SubjectGroupMapBean) subjectGroupMaps.get(j);
if (sgMap.getStatus().equals(Status.AUTO_DELETED)) {
sgMap.setStatus(Status.AVAILABLE);
sgMap.setUpdater(ub);
sgMap.setUpdatedDate(new Date());
sgmdao.update(sgMap);
}
}
}
}
// restore all events with subjects
EventDefinitionCRFDAO edcdao = new EventDefinitionCRFDAO(sm.getDataSource());
StudyEventDAO sedao = new StudyEventDAO(sm.getDataSource());
for (int i = 0; i < subjects.size(); i++) {
StudySubjectBean subject = (StudySubjectBean) subjects.get(i);
if (subject.getStatus().equals(Status.AUTO_DELETED)) {
subject.setStatus(Status.AVAILABLE);
subject.setUpdater(ub);
subject.setUpdatedDate(new Date());
ssdao.update(subject);
ArrayList events = sedao.findAllByStudySubject(subject);
EventCRFDAO ecdao = new EventCRFDAO(sm.getDataSource());
for (int j = 0; j < events.size(); j++) {
StudyEventBean event = (StudyEventBean) events.get(j);
if (event.getStatus().equals(Status.AUTO_DELETED)) {
event.setStatus(Status.AVAILABLE);
event.setUpdater(ub);
event.setUpdatedDate(new Date());
sedao.update(event);
ArrayList eventCRFs = ecdao.findAllByStudyEvent(event);
ItemDataDAO iddao = new ItemDataDAO(sm.getDataSource());
for (int k = 0; k < eventCRFs.size(); k++) {
// YW << fix broken page for storing site
EventCRFBean eventCRF = (EventCRFBean) eventCRFs.get(k);
// >> YW
if (eventCRF.getStatus().equals(Status.AUTO_DELETED)) {
eventCRF.setStatus(eventCRF.getOldStatus());
eventCRF.setUpdater(ub);
eventCRF.setUpdatedDate(new Date());
ecdao.update(eventCRF);
ArrayList itemDatas = iddao.findAllByEventCRFId(eventCRF.getId());
for (int a = 0; a < itemDatas.size(); a++) {
ItemDataBean item = (ItemDataBean) itemDatas.get(a);
if (item.getStatus().equals(Status.AUTO_DELETED)) {
item.setStatus(item.getOldStatus());
item.setUpdater(ub);
item.setUpdatedDate(new Date());
iddao.update(item);
}
}
}
}
}
}
}
}
// for subjects
DatasetDAO datadao = new DatasetDAO(sm.getDataSource());
ArrayList dataset = datadao.findAllByStudyId(study.getId());
for (int i = 0; i < dataset.size(); i++) {
DatasetBean data = (DatasetBean) dataset.get(i);
data.setStatus(Status.AVAILABLE);
data.setUpdater(ub);
data.setUpdatedDate(new Date());
datadao.update(data);
}
addPageMessage(respage.getString("this_site_has_been_restored_succesfully"));
String fromListSite = (String) session.getAttribute("fromListSite");
if (fromListSite != null && fromListSite.equals("yes") && currentRole.getRole().equals(Role.STUDYDIRECTOR)) {
session.removeAttribute("fromListSite");
forwardPage(Page.SITE_LIST_SERVLET);
} else {
session.removeAttribute("fromListSite");
if (currentRole.getRole().equals(Role.ADMIN)) {
forwardPage(Page.STUDY_LIST_SERVLET);
} else {
forwardPage(Page.SITE_LIST_SERVLET);
}
}
}
}
}
Use of org.akaza.openclinica.bean.extract.DatasetBean in project OpenClinica by OpenClinica.
In the class CreateJobExportServlet, the method processRequest:
@Override
protected void processRequest() throws Exception {
// TODO multi stage servlet which will create export jobs
// will accept, create, and return the ViewJob servlet
FormProcessor fp = new FormProcessor(request);
TriggerService triggerService = new TriggerService();
scheduler = getScheduler();
String action = fp.getString("action");
ExtractUtils extractUtils = new ExtractUtils();
if (StringUtil.isBlank(action)) {
// set up list of data sets
// select by ... active study
setUpServlet();
forwardPage(Page.CREATE_JOB_EXPORT);
} else if ("confirmall".equalsIgnoreCase(action)) {
// collect form information
Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals("DEFAULT"));
String[] triggerNames = triggerKeys.stream().map(TriggerKey::getName).toArray(String[]::new);
HashMap errors = validateForm(fp, request, triggerNames, "");
if (!errors.isEmpty()) {
// set errors to request
request.setAttribute("formMessages", errors);
logger.info("has validation errors in the first section");
logger.info("errors found: " + errors.toString());
setUpServlet();
forwardPage(Page.CREATE_JOB_EXPORT);
} else {
logger.info("found no validation errors, continuing");
StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
CoreResources cr = new CoreResources();
int datasetId = fp.getInt(DATASET_ID);
String period = fp.getString(PERIOD);
String email = fp.getString(EMAIL);
String jobName = fp.getString(JOB_NAME);
String jobDesc = fp.getString(JOB_DESC);
Date startDateTime = fp.getDateTime(DATE_START_JOB);
Integer exportFormatId = fp.getInt(FORMAT_ID);
ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(exportFormatId, "" + datasetId);
DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(new Integer(datasetId).intValue());
// set the job in motion
String[] files = epBean.getFileName();
String exportFileName;
int fileSize = files.length;
int cnt = 0;
dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
String[] exportFiles = epBean.getExportFileName();
String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
int i = 0;
String[] temp = new String[exportFiles.length];
//JN: The following logic is for comma-separated variables, to avoid the second file being treated as an old file and deleted.
String datasetFilePath = SQLInitServlet.getField("filePath") + "datasets";
while (i < exportFiles.length) {
temp[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
i++;
}
epBean.setDoNotDelFiles(temp);
epBean.setExportFileName(temp);
XsltTriggerService xsltService = new XsltTriggerService();
String generalFileDir = SQLInitServlet.getField("filePath");
generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
exportFileName = epBean.getExportFileName()[cnt];
// need to set the dataset path here, tbh
// next, can already run jobs, translations, and then add a message to be notified later
//JN all the properties need to have the variables...
String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
String endFilePath = epBean.getFileLocation();
endFilePath = extractUtils.getEndFilePath(endFilePath, dsBean, sdfDir, datasetFilePath);
// exportFileName = resolveVars(exportFileName,dsBean,sdfDir);
if (epBean.getPostProcExportName() != null) {
//String preProcExportPathName = getEndFilePath(epBean.getPostProcExportName(),dsBean,sdfDir);
String preProcExportPathName = extractUtils.resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, datasetFilePath);
epBean.setPostProcExportName(preProcExportPathName);
}
if (epBean.getPostProcLocation() != null) {
String prePocLoc = extractUtils.getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, datasetFilePath);
epBean.setPostProcLocation(prePocLoc);
}
extractUtils.setAllProps(epBean, dsBean, sdfDir, datasetFilePath);
SimpleTrigger trigger = null;
trigger = xsltService.generateXsltTrigger(scheduler, xsltPath, // xml_file_path
generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean, LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", xsltService.getTriggerGroupNameForExportJobs());
//Updating the original trigger with user given inputs
trigger = (SimpleTrigger) trigger.getTriggerBuilder().withSchedule(simpleSchedule().withRepeatCount(64000).withIntervalInSeconds(new Integer(period).intValue()).withMisfireHandlingInstructionNextWithExistingCount()).startAt(startDateTime).forJob(jobName).withDescription(jobDesc).build();
trigger.getJobDataMap().put(XsltTriggerService.EMAIL, email);
trigger.getJobDataMap().put(XsltTriggerService.PERIOD, period);
trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT, epBean.getFiledescription());
trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT_ID, exportFormatId);
trigger.getJobDataMap().put(XsltTriggerService.JOB_NAME, jobName);
trigger.getJobDataMap().put("job_type", "exportJob");
JobDetailFactoryBean jobDetailFactoryBean = new JobDetailFactoryBean();
jobDetailFactoryBean.setGroup(xsltService.getTriggerGroupNameForExportJobs());
jobDetailFactoryBean.setName(trigger.getKey().getName());
jobDetailFactoryBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
jobDetailFactoryBean.setJobDataMap(trigger.getJobDataMap());
// need durability?
jobDetailFactoryBean.setDurability(true);
// set to the scheduler
try {
Date dateStart = scheduler.scheduleJob(jobDetailFactoryBean.getObject(), trigger);
logger.info("== found job date: " + dateStart.toString());
// set a success message here
} catch (SchedulerException se) {
se.printStackTrace();
setUpServlet();
addPageMessage("Error creating Job.");
forwardPage(Page.VIEW_JOB_SERVLET);
return;
}
setUpServlet();
addPageMessage("You have successfully created a new job: " + jobName + " which is now set to run at the time you specified.");
forwardPage(Page.VIEW_JOB_SERVLET);
}
} else {
forwardPage(Page.ADMIN_SYSTEM);
// forward to form
// should we even get to this part?
}
}
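Once scheduler.scheduleJob returns, the new export job is registered under the trigger group used above. A minimal sketch of reading those triggers back from the same Quartz scheduler, assuming the group name obtained from xsltService.getTriggerGroupNameForExportJobs() is passed in:

// Sketch: listing export triggers scheduled by CreateJobExportServlet.
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerKey;
import org.quartz.impl.matchers.GroupMatcher;

public class ExportJobLister {

    public void listExportJobs(Scheduler scheduler, String exportTriggerGroup) throws SchedulerException {
        for (TriggerKey key : scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals(exportTriggerGroup))) {
            Trigger trigger = scheduler.getTrigger(key);
            System.out.println(key.getName() + " next fires at " + trigger.getNextFireTime());
        }
    }
}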