Use of org.quartz.SimpleTrigger in project camel by apache.
The class QuartzEndpoint, method hasTriggerChanged.
private boolean hasTriggerChanged(Trigger oldTrigger, Trigger newTrigger) {
    if (newTrigger instanceof CronTrigger && oldTrigger instanceof CronTrigger) {
        CronTrigger newCron = (CronTrigger) newTrigger;
        CronTrigger oldCron = (CronTrigger) oldTrigger;
        return !newCron.getCronExpression().equals(oldCron.getCronExpression());
    } else if (newTrigger instanceof SimpleTrigger && oldTrigger instanceof SimpleTrigger) {
        SimpleTrigger newSimple = (SimpleTrigger) newTrigger;
        SimpleTrigger oldSimple = (SimpleTrigger) oldTrigger;
        return newSimple.getRepeatInterval() != oldSimple.getRepeatInterval()
                || newSimple.getRepeatCount() != oldSimple.getRepeatCount();
    } else {
        // different trigger classes, or same class but unequal state
        return !newTrigger.getClass().equals(oldTrigger.getClass()) || !newTrigger.equals(oldTrigger);
    }
}
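For context, a minimal standalone sketch (not from the Camel source; the identity and schedule values are made up) showing how two SimpleTrigger instances built with Quartz 2's TriggerBuilder would differ under the SimpleTrigger check above:

import org.quartz.SimpleTrigger;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
import static org.quartz.TriggerBuilder.newTrigger;

public class TriggerChangeSketch {
    public static void main(String[] args) {
        // same identity, different repeat interval
        SimpleTrigger before = (SimpleTrigger) newTrigger()
                .withIdentity("myName", "myGroup")
                .withSchedule(simpleSchedule().withIntervalInMilliseconds(1000).withRepeatCount(3))
                .build();
        SimpleTrigger after = (SimpleTrigger) newTrigger()
                .withIdentity("myName", "myGroup")
                .withSchedule(simpleSchedule().withIntervalInMilliseconds(2000).withRepeatCount(3))
                .build();
        // the same comparison hasTriggerChanged applies to SimpleTriggers
        boolean changed = after.getRepeatInterval() != before.getRepeatInterval()
                || after.getRepeatCount() != before.getRepeatCount();
        System.out.println(changed); // prints true
    }
}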
Use of org.quartz.SimpleTrigger in project camel by apache.
The class QuartzEndpointConfigureTest, method testConfigureGroupAndName.
@Test
public void testConfigureGroupAndName() throws Exception {
    QuartzEndpoint endpoint = resolveMandatoryEndpoint("quartz2://myGroup/myName?trigger.repeatCount=3&trigger.repeatInterval=1000");
    Scheduler scheduler = endpoint.getComponent().getScheduler();
    TriggerKey triggerKey = endpoint.getTriggerKey();
    Trigger trigger = scheduler.getTrigger(triggerKey);
    JobDetail jobDetail = scheduler.getJobDetail(JobKey.jobKey(triggerKey.getName(), triggerKey.getGroup()));
    assertEquals("getName()", "myName", triggerKey.getName());
    assertEquals("getGroup()", "myGroup", triggerKey.getGroup());
    assertEquals("getJobName", "myName", jobDetail.getKey().getName());
    assertEquals("getJobGroup", "myGroup", jobDetail.getKey().getGroup());
    SimpleTrigger simpleTrigger = assertIsInstanceOf(SimpleTrigger.class, trigger);
    assertEquals("getRepeatCount()", 3, simpleTrigger.getRepeatCount());
}
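As a usage sketch (an assumed route, not part of the test above), the same endpoint options would typically be consumed from a Camel route; trigger.repeatCount=3 with trigger.repeatInterval=1000 fires the route four times in total, one second apart:

import org.apache.camel.builder.RouteBuilder;

public class QuartzRepeatRoute extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        // initial firing plus 3 repeats, 1000 ms apart
        from("quartz2://myGroup/myName?trigger.repeatCount=3&trigger.repeatInterval=1000")
                .log("Quartz trigger fired");
    }
}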
Use of org.quartz.SimpleTrigger in project camel by apache.
The class SpringQuartzPersistentStoreRestartAppChangeOptionsTest, method testRestartAppChangeTriggerType.
@Test
public void testRestartAppChangeTriggerType() throws Exception {
    // Test creates the application context twice: first with a cron trigger, then with a
    // simple trigger, configured in the two XML files. Both times it reads the trigger
    // back from the scheduler (so, using the value stored in the DB), and asserts that
    // the trigger type has changed after the restart.
    // load spring app
    app = new ClassPathXmlApplicationContext("org/apache/camel/component/quartz2/SpringQuartzPersistentStoreRestartAppChangeCronExpressionTest1.xml");
    app.start();
    CamelContext camel = app.getBean("camelContext", CamelContext.class);
    assertNotNull(camel);
    assertTrue(getTrigger(camel, "quartzRoute") instanceof CronTrigger);
    app.stop();
    log.info("Restarting ...");
    log.info("Restarting ...");
    log.info("Restarting ...");
    // load spring app
    AbstractXmlApplicationContext app2 = new ClassPathXmlApplicationContext("org/apache/camel/component/quartz2/SpringQuartzPersistentStoreRestartAppChangeOptionsTest2.xml");
    app2.start();
    CamelContext camel2 = app2.getBean("camelContext", CamelContext.class);
    assertNotNull(camel2);
    assertTrue(getTrigger(camel2, "quartzRoute") instanceof SimpleTrigger);
    app2.stop();
    // we're done so let's properly close the application contexts, but close
    // the second app before the first one so that the quartz scheduler running
    // inside it can be properly shut down
    IOHelper.close(app2, app);
}
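The test calls a getTrigger helper that is not shown in this snippet; a plausible sketch (an assumed implementation, not the verbatim Camel helper) that resolves the scheduled trigger through the route's QuartzEndpoint:

import org.apache.camel.CamelContext;
import org.apache.camel.component.quartz2.QuartzEndpoint;
import org.quartz.Trigger;

// Assumed helper: look up the trigger scheduled for the quartz2 endpoint of the given route.
private Trigger getTrigger(CamelContext camel, String routeId) throws Exception {
    QuartzEndpoint endpoint = (QuartzEndpoint) camel.getRoute(routeId).getEndpoint();
    return endpoint.getComponent().getScheduler().getTrigger(endpoint.getTriggerKey());
}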
Use of org.quartz.SimpleTrigger in project OpenClinica by OpenClinica.
The class XsltTriggerService, method generateXsltTrigger.
public SimpleTrigger generateXsltTrigger(Scheduler scheduler, String xslFile, String xmlFile, String endFilePath, String endFile, int datasetId, ExtractPropertyBean epBean, UserAccountBean userAccountBean, String locale, int cnt, String xsltPath, String triggerGroupName) {
    //Date startDateTime = new Date(System.currentTimeMillis());
    String jobName = datasetId + "_" + epBean.getExportFileName()[0];
    if (triggerGroupName != null) {
        TRIGGER_GROUP_NAME = triggerGroupName;
    }
    // pull the Spring context that was stashed in the Quartz scheduler context
    //WebApplicationContext context = ContextLoader.getCurrentWebApplicationContext();
    ApplicationContext context = null;
    try {
        context = (ApplicationContext) scheduler.getContext().get("applicationContext");
    } catch (SchedulerException e) {
        e.printStackTrace();
    }
    // getBean with arguments implies a prototype-scoped factory bean that builds the trigger from them
    SimpleTriggerFactoryBean triggerFactoryBean = context.getBean(SimpleTriggerFactoryBean.class, xslFile, xmlFile, endFilePath, endFile, datasetId, epBean, userAccountBean, locale, cnt, xsltPath);
    return triggerFactoryBean.getObject();
}
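generateXsltTrigger assumes the Spring ApplicationContext was placed into the Quartz SchedulerContext beforehand; a minimal sketch of that wiring (one possible setup, with the key name taken from the lookup above):

import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.springframework.context.ApplicationContext;

// Typically done once at startup; Spring's SchedulerFactoryBean can achieve the same
// via setApplicationContextSchedulerContextKey("applicationContext").
void exposeContextToQuartz(Scheduler scheduler, ApplicationContext applicationContext) throws SchedulerException {
    scheduler.getContext().put("applicationContext", applicationContext);
}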
Use of org.quartz.SimpleTrigger in project OpenClinica by OpenClinica.
The class ExportDatasetServlet, method processRequest.
@Override
public void processRequest() throws Exception {
    DatasetDAO dsdao = new DatasetDAO(sm.getDataSource());
    ArchivedDatasetFileDAO asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
    FormProcessor fp = new FormProcessor(request);
    GenerateExtractFileService generateFileService = new GenerateExtractFileService(sm.getDataSource(), (CoreResources) SpringServletAccess.getApplicationContext(context).getBean("coreResources"), (RuleSetRuleDao) SpringServletAccess.getApplicationContext(context).getBean("ruleSetRuleDao"));
    String action = fp.getString("action");
    int datasetId = fp.getInt("datasetId");
    int adfId = fp.getInt("adfId");
    if (datasetId == 0) {
        try {
            DatasetBean dsb = (DatasetBean) session.getAttribute("newDataset");
            datasetId = dsb.getId();
            logger.info("dataset id was zero, trying session: " + datasetId);
        } catch (NullPointerException e) {
            e.printStackTrace();
            logger.info("tripped over null pointer exception");
        }
    }
    DatasetBean db = (DatasetBean) dsdao.findByPK(datasetId);
    StudyDAO sdao = new StudyDAO(sm.getDataSource());
    StudyBean study = (StudyBean) sdao.findByPK(db.getStudyId());
    checkRoleByUserAndStudy(ub, study.getParentStudyId(), study.getId());
    // Checks if the study is the current study or a child of the current study
    if (study.getId() != currentStudy.getId() && study.getParentStudyId() != currentStudy.getId()) {
        addPageMessage(respage.getString("no_have_correct_privilege_current_study") + " " + respage.getString("change_active_study_or_contact"));
        forwardPage(Page.MENU_SERVLET);
        return;
    }
    /**
     * @vbc 08/06/2008 NEW EXTRACT DATA IMPLEMENTATION get study_id and
     * parentstudy_id int currentstudyid = currentStudy.getId(); int
     * parentstudy = currentStudy.getParentStudyId(); if (parentstudy >
     * 0) { // is OK } else { // same parentstudy = currentstudyid; } //
     */
    int currentstudyid = currentStudy.getId();
    // YW 11-09-2008 << modified logic here.
    int parentstudy = currentstudyid;
    // YW 11-09-2008 >>
    StudyBean parentStudy = new StudyBean();
    if (currentStudy.getParentStudyId() > 0) {
        //StudyDAO sdao = new StudyDAO(sm.getDataSource());
        parentStudy = (StudyBean) sdao.findByPK(currentStudy.getParentStudyId());
    }
    ExtractBean eb = generateFileService.generateExtractBean(db, currentStudy, parentStudy);
    if (StringUtil.isBlank(action)) {
        loadList(db, asdfdao, datasetId, fp, eb);
        forwardPage(Page.EXPORT_DATASETS);
    } else if ("delete".equalsIgnoreCase(action) && adfId > 0) {
        boolean success = false;
        ArchivedDatasetFileBean adfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(adfId);
        File file = new File(adfBean.getFileReference());
        if (!file.canWrite()) {
            addPageMessage(respage.getString("write_protected"));
        } else {
            success = file.delete();
            if (success) {
                asdfdao.deleteArchiveDataset(adfBean);
                addPageMessage(respage.getString("file_removed"));
            } else {
                addPageMessage(respage.getString("error_removing_file"));
            }
        }
        loadList(db, asdfdao, datasetId, fp, eb);
        forwardPage(Page.EXPORT_DATASETS);
    } else {
        logger.info("**** found action ****: " + action);
        String generateReport = "";
        // generate file, and show screen export
        // String generalFileDir = DATASET_DIR + db.getId() + File.separator;
        // change this up, so that we don't overwrite anything
        String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
        SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
        String generalFileDir = DATASET_DIR + db.getId() + File.separator + sdfDir.format(new java.util.Date());
        String fileName = "";
        db.setName(db.getName().replaceAll(" ", "_"));
        Page finalTarget = Page.GENERATE_DATASET;
        finalTarget = Page.EXPORT_DATA_CUSTOM;
        // now display report according to format specified
        // TODO revise final target to set to fileReference????
        long sysTimeBegin = System.currentTimeMillis();
        int fId = 0;
if ("sas".equalsIgnoreCase(action)) {
// generateReport =
// dsdao.generateDataset(db,
// ExtractBean.SAS_FORMAT,
// currentStudy,
// parentStudy);
long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
String SASFileName = db.getName() + "_sas.sas";
// logger.info("found data set: "+generateReport);
generateFileService.createFile(SASFileName, generalFileDir, generateReport, db, sysTimeEnd, ExportFormatBean.TXTFILE, true, ub);
logger.info("created sas file");
request.setAttribute("generate", generalFileDir + SASFileName);
finalTarget.setFileName(generalFileDir + SASFileName);
fileName = SASFileName;
// won't work since page creator is private
} else if ("odm".equalsIgnoreCase(action)) {
String odmVersion = fp.getString("odmVersion");
String ODMXMLFileName = "";
// DRY
// HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "");
HashMap answerMap = generateFileService.createODMFile(odmVersion, sysTimeBegin, generalFileDir, db, this.currentStudy, "", eb, currentStudy.getId(), currentStudy.getParentStudyId(), "99", true, true, true, null, ub);
for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
Object key = entry.getKey();
Object value = entry.getValue();
ODMXMLFileName = (String) key;
Integer fileID = (Integer) value;
fId = fileID.intValue();
}
fileName = ODMXMLFileName;
request.setAttribute("generate", generalFileDir + ODMXMLFileName);
logger.debug("+++ set the following: " + generalFileDir + ODMXMLFileName);
// send a link with the SQL file? put the generated SQL file with the dataset?
if (fp.getString("xalan") != null) {
XalanTriggerService xts = new XalanTriggerService();
String propertiesPath = SQLInitServlet.getField("filePath");
// the trick there, we need to open up the zipped file and get at the XML
openZipFile(generalFileDir + ODMXMLFileName + ".zip");
// need to find out how to copy this xml file from /bin to the generalFileDir
SimpleTrigger simpleTrigger = xts.generateXalanTrigger(propertiesPath + File.separator + "ODMReportStylesheet.xsl", ODMXMLFileName, generalFileDir + "output.sql", db.getId());
scheduler = getScheduler();
JobDetailFactoryBean JobDetailFactoryBean = new JobDetailFactoryBean();
JobDetailFactoryBean.setGroup(xts.TRIGGER_GROUP_NAME);
JobDetailFactoryBean.setName(simpleTrigger.getKey().getName());
JobDetailFactoryBean.setJobClass(org.akaza.openclinica.web.job.XalanStatefulJob.class);
JobDetailFactoryBean.setJobDataMap(simpleTrigger.getJobDataMap());
// need durability?
JobDetailFactoryBean.setDurability(true);
try {
Date dateStart = scheduler.scheduleJob(JobDetailFactoryBean.getObject(), simpleTrigger);
logger.info("== found job date: " + dateStart.toString());
} catch (SchedulerException se) {
se.printStackTrace();
}
}
} else if ("txt".equalsIgnoreCase(action)) {
// generateReport =
// dsdao.generateDataset(db,
// ExtractBean.TXT_FORMAT,
// currentStudy,
// parentStudy);
// eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
String TXTFileName = "";
HashMap answerMap = generateFileService.createTabFile(eb, sysTimeBegin, generalFileDir, db, currentstudyid, parentstudy, "", ub);
// and of course DRY
for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
Object key = entry.getKey();
Object value = entry.getValue();
TXTFileName = (String) key;
Integer fileID = (Integer) value;
fId = fileID.intValue();
}
fileName = TXTFileName;
request.setAttribute("generate", generalFileDir + TXTFileName);
// finalTarget.setFileName(generalFileDir+TXTFileName);
logger.debug("+++ set the following: " + generalFileDir + TXTFileName);
} else if ("html".equalsIgnoreCase(action)) {
// html based dataset browser
TabReportBean answer = new TabReportBean();
eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
eb.getMetadata();
eb.computeReport(answer);
request.setAttribute("dataset", db);
request.setAttribute("extractBean", eb);
finalTarget = Page.GENERATE_DATASET_HTML;
} else if ("spss".equalsIgnoreCase(action)) {
SPSSReportBean answer = new SPSSReportBean();
// removed three lines here and put them in generate file
// service, createSPSSFile method. tbh 01/2009
eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
eb.getMetadata();
eb.computeReport(answer);
// System.out.println("*** isShowCRFversion:
// "+db.isShowCRFversion());
// TODO in the spirit of DRY, if this works we need to remove
// lines 443-776 in this servlet, tbh 01/2009
String DDLFileName = "";
HashMap answerMap = generateFileService.createSPSSFile(db, eb, currentStudy, parentStudy, sysTimeBegin, generalFileDir, answer, "", ub);
// hmm, DRY?
for (Iterator it = answerMap.entrySet().iterator(); it.hasNext(); ) {
java.util.Map.Entry entry = (java.util.Map.Entry) it.next();
Object key = entry.getKey();
Object value = entry.getValue();
DDLFileName = (String) key;
Integer fileID = (Integer) value;
fId = fileID.intValue();
}
request.setAttribute("generate", generalFileDir + DDLFileName);
logger.debug("+++ set the following: " + generalFileDir + DDLFileName);
} else if ("csv".equalsIgnoreCase(action)) {
CommaReportBean answer = new CommaReportBean();
eb = dsdao.getDatasetData(eb, currentstudyid, parentstudy);
eb.getMetadata();
eb.computeReport(answer);
long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
// logger.info("found data set: "+generateReport);
String CSVFileName = db.getName() + "_comma.txt";
fId = generateFileService.createFile(CSVFileName, generalFileDir, answer.toString(), db, sysTimeEnd, ExportFormatBean.CSVFILE, true, ub);
fileName = CSVFileName;
logger.info("just created csv file");
request.setAttribute("generate", generalFileDir + CSVFileName);
// finalTarget.setFileName(generalFileDir+CSVFileName);
} else if ("excel".equalsIgnoreCase(action)) {
// HSSFWorkbook excelReport = dsdao.generateExcelDataset(db,
// ExtractBean.XLS_FORMAT,
// currentStudy,
// parentStudy);
long sysTimeEnd = System.currentTimeMillis() - sysTimeBegin;
// TODO this will change and point to a created excel
// spreadsheet, tbh
String excelFileName = db.getName() + "_excel.xls";
// fId = this.createFile(excelFileName,
// generalFileDir,
// excelReport,
// db, sysTimeEnd,
// ExportFormatBean.EXCELFILE);
// logger.info("just created csv file, for excel output");
// response.setHeader("Content-disposition","attachment;
// filename="+CSVFileName);
// logger.info("csv file name: "+CSVFileName);
finalTarget = Page.GENERATE_EXCEL_DATASET;
// response.setContentType("application/vnd.ms-excel");
response.setHeader("Content-Disposition", "attachment; filename=" + db.getName() + "_excel.xls");
request.setAttribute("generate", generalFileDir + excelFileName);
logger.info("set 'generate' to :" + generalFileDir + excelFileName);
fileName = excelFileName;
// excelReport.write(stream);
// stream.flush();
// stream.close();
// finalTarget.setFileName(WEB_DIR+db.getId()+"/"+excelFileName);
}
        // <%@page contentType="application/vnd.ms-excel"%>
        if (!finalTarget.equals(Page.GENERATE_EXCEL_DATASET) && !finalTarget.equals(Page.GENERATE_DATASET_HTML)) {
            // to catch all the others and try to set a new path for file capture
            // tbh, 4-18-05
            // request.setAttribute("generate", finalTarget.getFileName());
            // TODO changing path to show refresh page, then window with
            // link to download file, tbh 06-08-05
            // finalTarget.setFileName("/WEB-INF/jsp/extract/generatedFileDataset.jsp");
            finalTarget.setFileName("/WEB-INF/jsp/extract/generateMetadataCore.jsp");
            // also set up table here???
            asdfdao = new ArchivedDatasetFileDAO(sm.getDataSource());
            ArchivedDatasetFileBean asdfBean = (ArchivedDatasetFileBean) asdfdao.findByPK(fId);
            // *** do we need this below? tbh
            ArrayList newFileList = new ArrayList();
            newFileList.add(asdfBean);
            // request.setAttribute("filelist", newFileList);
            ArrayList filterRows = ArchivedDatasetFileRow.generateRowsFromBeans(newFileList);
            EntityBeanTable table = fp.getEntityBeanTable();
            // sort by date
            table.setSortingIfNotExplicitlySet(3, false);
            String[] columns = { resword.getString("file_name"), resword.getString("run_time"), resword.getString("file_size"), resword.getString("created_date"), resword.getString("created_by") };
            table.setColumns(new ArrayList(Arrays.asList(columns)));
            table.hideColumnLink(0);
            table.hideColumnLink(1);
            table.hideColumnLink(2);
            table.hideColumnLink(3);
            table.hideColumnLink(4);
            // table.setQuery("ExportDataset?datasetId=" + db.getId(), new HashMap());
            // trying to continue...
            // session.setAttribute("newDataset", db);
            request.setAttribute("dataset", db);
            request.setAttribute("file", asdfBean);
            table.setRows(filterRows);
            table.computeDisplay();
            request.setAttribute("table", table);
            // *** do we need this above? tbh
        }
        logger.info("set first part of 'generate' to :" + generalFileDir);
        logger.info("found file name: " + finalTarget.getFileName());
        // String del = CoreResources.getField("dataset_file_delete");
        // if (del.equalsIgnoreCase("true") || del.equals("")) {
        //     File deleteFile = new File(generalFileDir + fileName);
        //     deleteFile.delete();
        // }
        forwardPage(finalTarget);
    }
}
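A short hedged sketch (helper name and arguments assumed, not part of the servlet) of verifying what the xalan branch above scheduled, by reading the SimpleTrigger back from the scheduler:

import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleTrigger;
import org.quartz.Trigger;
import org.quartz.TriggerKey;

// Assumed group name matching XalanTriggerService.TRIGGER_GROUP_NAME in the snippet above.
static void verifyScheduled(Scheduler scheduler, String name, String group) throws SchedulerException {
    Trigger trigger = scheduler.getTrigger(TriggerKey.triggerKey(name, group));
    if (trigger instanceof SimpleTrigger) {
        SimpleTrigger st = (SimpleTrigger) trigger;
        System.out.println("next fire: " + st.getNextFireTime()
                + ", repeats: " + st.getRepeatCount()
                + ", interval ms: " + st.getRepeatInterval());
    }
}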