Example usage of org.quartz.JobDataMap in the openhab1-addons project (by openhab):
the execute method of the SonosBinding class.
/**
 * Periodic binding task. On first run it starts UPnP discovery for configured
 * and unknown Sonos devices; on every run it registers statically configured
 * zone players found in the item bindings and schedules one Quartz polling job
 * per pollable Sonos command type (skipping jobs that already exist).
 */
@Override
protected void execute() {
    if (isProperlyConfigured()) {
        if (!bindingStarted) {
            // This will create necessary network resources for UPnP right away
            upnpService = new UpnpServiceImpl(new SonosUpnpServiceConfiguration(), listener);
            try {
                Iterator<SonosZonePlayer> it = sonosZonePlayerCache.iterator();
                while (it.hasNext()) {
                    SonosZonePlayer aPlayer = it.next();
                    if (aPlayer.getDevice() == null) {
                        logger.info("Querying the network for a predefined Sonos device with UDN {}", aPlayer.getUdn());
                        upnpService.getControlPoint().search(new UDNHeader(aPlayer.getUdn()));
                    }
                }
                logger.info("Querying the network for any other Sonos device");
                final UDAServiceType udaType = new UDAServiceType("AVTransport");
                upnpService.getControlPoint().search(new UDAServiceTypeHeader(udaType));
            } catch (Exception e) {
                // Fix: the original format string had no '{}' placeholder, so the message
                // argument was silently dropped; pass the exception to keep the stack trace.
                logger.warn("An exception occurred while searching the network for Sonos devices", e);
            }
            bindingStarted = true;
        }
        Scheduler sched = null;
        try {
            sched = StdSchedulerFactory.getDefaultScheduler();
        } catch (SchedulerException e) {
            logger.error("An exception occurred while getting a reference to the Quartz Scheduler", e);
        }
        // Fix: compile the UDN pattern once instead of once per sonosID in the loop below.
        Pattern sonosUdnPattern = Pattern.compile("RINCON_(\\w{17})");
        // Cycle through the Items and setup sonos zone players if required
        for (SonosBindingProvider provider : providers) {
            for (String itemName : provider.getItemNames()) {
                for (String sonosID : provider.getSonosID(itemName)) {
                    if (!sonosZonePlayerCache.contains(sonosID)) {
                        // the device is not yet discovered on the network or not defined in the .cfg
                        // Verify that the sonosID has the format of a valid UDN
                        Matcher matcher = sonosUdnPattern.matcher(sonosID);
                        if (matcher.matches()) {
                            // Add device to the cached Configs
                            SonosZonePlayer thePlayer = new SonosZonePlayer(sonosID, self);
                            thePlayer.setUdn(new UDN(sonosID));
                            sonosZonePlayerCache.add(thePlayer);
                            // Query the network for this device
                            logger.info("Querying the network for a predefined Sonos device with UDN '{}'", thePlayer.getUdn());
                            upnpService.getControlPoint().search(new UDNHeader(thePlayer.getUdn()));
                        }
                    }
                }
            }
        }
        if (sched == null) {
            // Fix: the original dereferenced 'sched' below even when the factory threw,
            // which ended in an uncaught NullPointerException. Without a scheduler we
            // cannot set up polling jobs, so stop here.
            return;
        }
        // Cycle through the item binding configuration that define polling criteria
        for (SonosCommandType sonosCommandType : SonosCommandType.getPolling()) {
            for (SonosBindingProvider provider : providers) {
                for (String itemName : provider.getItemNames(sonosCommandType.getSonosCommand())) {
                    for (Command aCommand : provider.getCommands(itemName, sonosCommandType.getSonosCommand())) {
                        // We are dealing with a valid device
                        SonosZonePlayer thePlayer = sonosZonePlayerCache.getById(provider.getSonosID(itemName, aCommand));
                        if (thePlayer == null) {
                            continue;
                        }
                        RemoteDevice theDevice = thePlayer.getDevice();
                        // Not all Sonos devices have the same capabilities
                        if (theDevice == null
                                || theDevice.findService(new UDAServiceId(sonosCommandType.getService())) == null) {
                            continue;
                        }
                        // Job identity used both for the existence check and for scheduling.
                        String jobName = provider.getSonosID(itemName, aCommand) + "-"
                                + sonosCommandType.getJobClass().toString();
                        String jobGroup = "Sonos-" + provider.toString();
                        boolean jobExists = false;
                        // enumerate each job group / each job to find an already-scheduled job
                        try {
                            search:
                            for (String group : sched.getJobGroupNames()) {
                                for (JobKey jobKey : sched.getJobKeys(jobGroupEquals(group))) {
                                    if (jobKey.getName().equals(jobName)) {
                                        jobExists = true;
                                        // Fix: the original 'break' only left the inner loop;
                                        // the labeled break stops the whole search.
                                        break search;
                                    }
                                }
                            }
                        } catch (SchedulerException e1) {
                            logger.error("An exception occurred while querying the Quartz Scheduler ({})", e1.getMessage());
                        }
                        if (!jobExists) {
                            // set up the Quartz jobs
                            JobDataMap map = new JobDataMap();
                            map.put("Player", thePlayer);
                            JobDetail job = newJob(sonosCommandType.getJobClass())
                                    .withIdentity(jobName, jobGroup)
                                    .usingJobData(map)
                                    .build();
                            Trigger trigger = newTrigger()
                                    .withIdentity(jobName, jobGroup)
                                    .startNow()
                                    .withSchedule(simpleSchedule().repeatForever().withIntervalInMilliseconds(pollingPeriod))
                                    .build();
                            try {
                                sched.scheduleJob(job, trigger);
                            } catch (SchedulerException e) {
                                logger.error("An exception occurred while scheduling a Quartz Job ({})", e.getMessage());
                            }
                        }
                    }
                }
            }
        }
    }
}
Example usage of org.quartz.JobDataMap in the openhab1-addons project (by openhab):
the execute method of the WeatherJob class.
/**
 * {@inheritDoc}
 */
@Override
public void execute(JobExecutionContext jobContext) throws JobExecutionException {
    // The location to refresh is carried in the job's data map.
    final String locationId = jobContext.getJobDetail().getJobDataMap().getString("locationId");
    logger.debug("Starting Weather job for location '{}'", locationId);
    try {
        // Resolve the configured provider for this location, fetch fresh weather
        // data, cache it in the context, and notify subscribers.
        final LocationConfig config = context.getConfig().getLocationConfig(locationId);
        final WeatherProvider provider = WeatherProviderFactory.createWeatherProvider(config.getProviderName());
        context.setWeather(locationId, provider.getWeather(config));
        weatherPublisher.publish(locationId);
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
        throw new JobExecutionException(ex.getMessage(), ex);
    }
}
Example usage of org.quartz.JobDataMap in the OpenClinica project (by OpenClinica):
the processRequest method of the ViewJobServlet class.
/**
 * Lists all export-job triggers registered in the Quartz scheduler, converts
 * each one into a {@code TriggerBean} (fire times, description, dataset and
 * study resolved from the job data map), and renders them as a paged table
 * on the view-jobs JSP page.
 */
@Override
protected void processRequest() throws Exception {
    // TODO single stage servlet where we get the list of jobs
    // and push them out to the JSP page
    // related classes will be required to generate the table rows
    // and eventually links to view and edit the jobs as well
    FormProcessor fp = new FormProcessor(request);
    // First we must get a reference to a scheduler
    scheduler = getScheduler();
    XsltTriggerService xsltTriggerSrvc = new XsltTriggerService();
    String exportGroup = xsltTriggerSrvc.getTriggerGroupNameForExportJobs();
    Set<TriggerKey> triggerKeys = scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals(exportGroup));
    // Fix: the original called triggerKeys.stream().toArray(String[]::new) on a
    // Stream<TriggerKey>, which throws ArrayStoreException at runtime; extract the
    // trigger names before collecting into a String[].
    String[] triggerNames = triggerKeys.stream().map(TriggerKey::getName).toArray(String[]::new);
    ArrayList<TriggerBean> triggerBeans = new ArrayList<TriggerBean>();
    for (String triggerName : triggerNames) {
        Trigger trigger = scheduler.getTrigger(TriggerKey.triggerKey(triggerName, exportGroup));
        try {
            logger.debug("prev fire time " + trigger.getPreviousFireTime().toString());
            logger.debug("next fire time " + trigger.getNextFireTime().toString());
            logger.debug("final fire time: " + trigger.getFinalFireTime().toString());
        } catch (NullPointerException npe) {
            // intentional best-effort debug logging: any of the fire times may be null
        }
        TriggerBean triggerBean = new TriggerBean();
        triggerBean.setFullName(trigger.getKey().getName());
        triggerBean.setPreviousDate(trigger.getPreviousFireTime());
        triggerBean.setNextDate(trigger.getNextFireTime());
        if (trigger.getDescription() != null) {
            triggerBean.setDescription(trigger.getDescription());
        }
        // setting: frequency, dataset name
        JobDataMap dataMap = new JobDataMap();
        DatasetDAO datasetDAO = new DatasetDAO(sm.getDataSource());
        StudyDAO studyDao = new StudyDAO(sm.getDataSource());
        if (trigger.getJobDataMap().size() > 0) {
            dataMap = trigger.getJobDataMap();
            int dsId = dataMap.getInt(ExampleSpringJob.DATASET_ID);
            String periodToRun = dataMap.getString(ExampleSpringJob.PERIOD);
            triggerBean.setPeriodToRun(periodToRun);
            DatasetBean dataset = (DatasetBean) datasetDAO.findByPK(dsId);
            triggerBean.setDataset(dataset);
            triggerBean.setDatasetName(dataset.getName());
            StudyBean study = (StudyBean) studyDao.findByPK(dataset.getStudyId());
            triggerBean.setStudyName(study.getName());
        }
        logger.debug("Trigger Priority: " + trigger.getKey().getName() + " " + trigger.getPriority());
        // Fix: the state lookup previously used XsltTriggerService.TRIGGER_GROUP_NAME even
        // though the trigger was fetched from the export-jobs group, so PAUSED export
        // triggers were never detected as inactive. Query the same group consistently.
        if (scheduler.getTriggerState(TriggerKey.triggerKey(triggerName, exportGroup)) == Trigger.TriggerState.PAUSED) {
            triggerBean.setActive(false);
            logger.debug("setting active to false for trigger: " + trigger.getKey().getName());
        } else {
            triggerBean.setActive(true);
            logger.debug("setting active to TRUE for trigger: " + trigger.getKey().getName());
        }
        // our wrapper to show triggers
        triggerBeans.add(triggerBean);
    }
    ArrayList allRows = TriggerRow.generateRowsFromBeans(triggerBeans);
    EntityBeanTable table = fp.getEntityBeanTable();
    String[] columns = { resword.getString("name"), resword.getString("previous_fire_time"), resword.getString("next_fire_time"), resword.getString("description"), resword.getString("period_to_run"), resword.getString("dataset"), resword.getString("study"), resword.getString("actions") };
    table.setColumns(new ArrayList(Arrays.asList(columns)));
    table.hideColumnLink(3);
    table.hideColumnLink(7);
    table.setQuery("ViewJob", new HashMap());
    table.setSortingColumnInd(0);
    table.setRows(allRows);
    table.computeDisplay();
    request.setAttribute("table", table);
    forwardPage(Page.VIEW_JOB);
}
Example usage of org.quartz.JobDataMap in the OpenClinica project (by OpenClinica):
the processRequest method of the ViewSingleJobServlet class.
/**
 * Shows the details of a single scheduled job. The trigger is looked up by
 * name ("tname") and group code ("gname": "" or "0" selects the export group,
 * anything else the import group, with a fallback retry on the export group),
 * its job data map is unpacked into a {@code TriggerBean}, and the trigger's
 * audit-event history is rendered as a table.
 */
@Override
protected void processRequest() throws Exception {
    FormProcessor fp = new FormProcessor(request);
    // changes to this servlet, we now look at group name too, tbh 05/2009
    String triggerName = fp.getString("tname");
    String gName = fp.getString("gname");
    String groupName = "";
    if (gName.equals("") || gName.equals("0")) {
        groupName = XsltTriggerService.TRIGGER_GROUP_NAME;
    } else {
        // if (gName.equals("1")) {
        groupName = TRIGGER_IMPORT_GROUP;
    }
    // << tbh 09/03/2009 #4143
    scheduler = getScheduler();
    Trigger trigger = scheduler.getTrigger(new TriggerKey(triggerName, groupName));
    if (trigger == null) {
        // Fallback: retry in the export group with a trimmed name.
        groupName = XsltTriggerService.TRIGGER_GROUP_NAME;
        trigger = scheduler.getTrigger(new TriggerKey(triggerName.trim(), groupName));
    }
    // << tbh 09/03/2009 #4143
    // above is a hack, if we add more trigger groups this will have
    // to be redone
    logger.debug("found trigger name: " + triggerName);
    logger.debug("found group name: " + groupName);
    TriggerBean triggerBean = new TriggerBean();
    JobDataMap dataMap = new JobDataMap();
    AuditEventDAO auditEventDAO = new AuditEventDAO(sm.getDataSource());
    try {
        triggerBean.setFullName(trigger.getKey().getName());
        triggerBean.setPreviousDate(trigger.getPreviousFireTime());
        triggerBean.setNextDate(trigger.getNextFireTime());
        // >> set active here, tbh 10/08/2009
        if (scheduler.getTriggerState(new TriggerKey(triggerName, groupName)) == Trigger.TriggerState.PAUSED) {
            triggerBean.setActive(false);
            logger.debug("setting active to false for trigger: " + trigger.getKey().getName());
        } else {
            triggerBean.setActive(true);
            logger.debug("setting active to TRUE for trigger: " + trigger.getKey().getName());
        }
        // <<
        if (trigger.getDescription() != null) {
            triggerBean.setDescription(trigger.getDescription());
        }
        if (trigger.getJobDataMap().size() > 0) {
            dataMap = trigger.getJobDataMap();
            String contactEmail = dataMap.getString(XsltTriggerService.EMAIL);
            logger.debug("found email: " + contactEmail);
            // Export-group jobs carry extra settings (format, period, dataset).
            if (gName.equals("") || gName.equals("0")) {
                String exportFormat = dataMap.getString(XsltTriggerService.EXPORT_FORMAT);
                String periodToRun = dataMap.getString(ExampleSpringJob.PERIOD);
                int dsId = dataMap.getInt(ExampleSpringJob.DATASET_ID);
                triggerBean.setExportFormat(exportFormat);
                triggerBean.setPeriodToRun(periodToRun);
                DatasetDAO datasetDAO = new DatasetDAO(sm.getDataSource());
                DatasetBean dataset = (DatasetBean) datasetDAO.findByPK(dsId);
                triggerBean.setDataset(dataset);
            }
            int userId = dataMap.getInt(ExampleSpringJob.USER_ID);
            // need to set information, extract bean, user account bean
            UserAccountDAO userAccountDAO = new UserAccountDAO(sm.getDataSource());
            triggerBean.setContactEmail(contactEmail);
            UserAccountBean userAccount = (UserAccountBean) userAccountDAO.findByPK(userId);
            triggerBean.setUserAccount(userAccount);
            ArrayList<AuditEventBean> triggerLogs = auditEventDAO.findAllByAuditTable(trigger.getKey().getName());
            // set the table for the audit event beans here
            ArrayList allRows = AuditEventRow.generateRowsFromBeans(triggerLogs);
            EntityBeanTable table = fp.getEntityBeanTable();
            String[] columns = { resword.getString("date_and_time"), resword.getString("action_message"), resword.getString("entity_operation"), // resword.getString("study_subject_ID"),
            resword.getString("changes_and_additions"), resword.getString("actions") };
            table.setColumns(new ArrayList(Arrays.asList(columns)));
            table.setAscendingSort(false);
            table.hideColumnLink(1);
            table.hideColumnLink(3);
            table.hideColumnLink(4);
            table.setQuery("ViewSingleJob?tname=" + triggerName + "&gname=" + gName, new HashMap());
            table.setRows(allRows);
            table.computeDisplay();
            request.setAttribute("table", table);
        }
    } catch (NullPointerException e) {
        // trigger may still be null after both lookups; render the page with the
        // (mostly empty) triggerBean rather than failing the request.
        // Fix: replaced e.printStackTrace() with proper logging.
        logger.debug(" found NPE " + e.getMessage());
        logger.error("NullPointerException while rendering job details", e);
    }
    // need to show the extract for which this runs, which files, etc
    // in other words the job data map
    request.setAttribute("triggerBean", triggerBean);
    request.setAttribute("groupName", groupName);
    forwardPage(Page.VIEW_SINGLE_JOB);
}
Example usage of org.quartz.JobDataMap in the OpenClinica project (by OpenClinica):
the simpleTriggerFactoryBean method of the QuartzConfiguration class.
// Prototype-scoped factory for a one-shot SimpleTrigger that carries all
// extract-job parameters in its JobDataMap. Fires exactly once (repeatCount 0,
// interval 1 ms) and fires immediately if its start time was missed.
@Bean
@Scope("prototype")
@Lazy
public SimpleTriggerFactoryBean simpleTriggerFactoryBean(String xslFile, String xmlFile, String endFilePath, String endFile, int datasetId, ExtractPropertyBean epBean, UserAccountBean userAccountBean, String locale, int cnt, String xsltPath) {
    SimpleTriggerFactoryBean triggerFactoryBean = new SimpleTriggerFactoryBean();
    triggerFactoryBean.setBeanName("trigger1");
    triggerFactoryBean.setGroup("group1");
    // Fire a single time: one repeat interval of 1 ms with zero repeats.
    triggerFactoryBean.setRepeatInterval(1);
    triggerFactoryBean.setRepeatCount(0);
    //triggerFactoryBean.setStartTime(startDateTime);
    // If the trigger misfires (e.g. scheduler was down), run it immediately.
    triggerFactoryBean.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_FIRE_NOW);
    // set job data map
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put(XSL_FILE_PATH, xslFile);
    // NOTE(review): XML_FILE_PATH is populated with endFilePath while the
    // 'xmlFile' parameter is never used — looks suspicious; confirm against
    // the job that consumes this map before changing.
    jobDataMap.put(XML_FILE_PATH, endFilePath);
    jobDataMap.put(POST_FILE_PATH, endFilePath);
    jobDataMap.put(POST_FILE_NAME, endFile);
    jobDataMap.put(EXTRACT_PROPERTY, epBean.getId());
    jobDataMap.put(USER_ID, userAccountBean.getId());
    jobDataMap.put(STUDY_ID, userAccountBean.getActiveStudyId());
    jobDataMap.put(LOCALE, locale);
    jobDataMap.put(DATASET_ID, datasetId);
    jobDataMap.put(EMAIL, userAccountBean.getEmail());
    jobDataMap.put(ZIPPED, epBean.getZipFormat());
    jobDataMap.put(DELETE_OLD, epBean.getDeleteOld());
    jobDataMap.put(SUCCESS_MESSAGE, epBean.getSuccessMessage());
    jobDataMap.put(FAILURE_MESSAGE, epBean.getFailureMessage());
    jobDataMap.put(POST_PROC_DELETE_OLD, epBean.getPostProcDeleteOld());
    jobDataMap.put(POST_PROC_ZIP, epBean.getPostProcZip());
    jobDataMap.put(POST_PROC_LOCATION, epBean.getPostProcLocation());
    jobDataMap.put(POST_PROC_EXPORT_NAME, epBean.getPostProcExportName());
    jobDataMap.put(COUNT, cnt);
    jobDataMap.put(XSLT_PATH, xsltPath);
    // jobDataMap.put(DIRECTORY, directory);
    // jobDataMap.put(ExampleSpringJob.LOCALE, locale);
    jobDataMap.put(EP_BEAN, epBean);
    triggerFactoryBean.setJobDataMap(jobDataMap);
    return triggerFactoryBean;
}
Aggregations