Use of org.quartz.JobDataMap in project openhab1-addons by openhab.
The class JobScheduler, method startAndScheduleDailyJob:
/**
* Schedules a daily job at midnight for astro calculation and starts it
* immediately too.
*/
public void startAndScheduleDailyJob() {
    String jobName = DailyJob.class.getSimpleName();

    CronTrigger cronTrigger = newTrigger().withIdentity(jobName + "-Trigger", JOB_GROUP).startNow()
            .withSchedule(CronScheduleBuilder.cronSchedule("0 0 0 * * ?")).build();
    schedule(jobName, DailyJob.class, cronTrigger, new JobDataMap());
    logger.info("Scheduled a daily job at midnight for astro calculation");

    Trigger trigger = newTrigger().withIdentity(jobName + "-StartupTrigger", JOB_GROUP).startNow().build();
    schedule(jobName + "-Startup", DailyJob.class, trigger, new JobDataMap());
}
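The schedule(...) helper called above is not part of this snippet. Below is a minimal sketch of what such a helper might look like, assuming a scheduler field, the JOB_GROUP constant and the logger from the surrounding class, plus the standard Quartz 2.x JobBuilder API; the method body is an illustration, not the actual openHAB implementation.

import static org.quartz.JobBuilder.newJob;

import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.SchedulerException;
import org.quartz.Trigger;

// Hypothetical helper: wraps the job class and JobDataMap into a JobDetail
// and hands it to the scheduler together with the prepared trigger.
private void schedule(String jobName, Class<? extends Job> jobClass, Trigger trigger, JobDataMap jobDataMap) {
    try {
        JobDetail jobDetail = newJob(jobClass)
                .withIdentity(jobName, JOB_GROUP)
                .usingJobData(jobDataMap)
                .build();
        scheduler.scheduleJob(jobDetail, trigger);
    } catch (SchedulerException ex) {
        logger.error("Could not schedule job {}: {}", jobName, ex.getMessage());
    }
}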
Use of org.quartz.JobDataMap in project openhab1-addons by openhab.
The class OceanicBinding, method execute:
@Override
protected void execute() {
    if (isProperlyConfigured()) {

        Scheduler sched = null;
        try {
            sched = StdSchedulerFactory.getDefaultScheduler();
        } catch (SchedulerException e) {
            logger.error("An exception occurred while getting a reference to the Quartz Scheduler");
        }

        // reset the contextMap before rebuilding it
        for (String serialPort : serialDevices.keySet()) {
            Set<String> itemNames = contextMap.get(serialPort);
            if (itemNames != null) {
                contextMap.clear();
            }
        }

        for (OceanicBindingProvider provider : providers) {
            for (String itemName : provider.getItemNames()) {

                String serialPort = provider.getSerialPort(itemName);
                SerialDevice serialDevice = serialDevices.get(serialPort);

                boolean serialDeviceReady = true;
                if (serialDevice == null) {
                    serialDevice = new SerialDevice(serialPort);
                    try {
                        serialDevice.initialize();
                    } catch (InitializationException e) {
                        logger.error("Could not open serial port " + serialPort + ": " + e.getMessage());
                        serialDeviceReady = false;
                    } catch (Throwable e) {
                        logger.error("Could not open serial port " + serialPort + ": " + e.getMessage());
                        serialDeviceReady = false;
                    }

                    if (serialDeviceReady) {
                        serialDevice.setEventPublisher(eventPublisher);
                        serialDevices.put(serialPort, serialDevice);
                    }
                }

                Set<String> itemNames = contextMap.get(serialPort);
                if (itemNames == null) {
                    itemNames = new HashSet<String>();
                    contextMap.put(serialPort, itemNames);
                }
                itemNames.add(itemName);

                if (serialDeviceReady) {

                    // set up the polling jobs
                    boolean jobExists = false;

                    // enumerate each job group
                    try {
                        for (String group : sched.getJobGroupNames()) {
                            // enumerate each job in group
                            if (group.equals("Oceanic-" + provider.toString())) {
                                for (JobKey jobKey : sched.getJobKeys(jobGroupEquals(group))) {
                                    if (jobKey.getName().equals(
                                            itemName + "-" + provider.getValueSelector(itemName).toString())) {
                                        jobExists = true;
                                        break;
                                    }
                                }
                            }
                        }
                    } catch (SchedulerException e1) {
                        logger.error("An exception occurred while querying the Quartz Scheduler ({})", e1.getMessage());
                    }

                    if (!jobExists && OceanicValueSelector.getValueSelector(provider.getValueSelector(itemName),
                            ValueSelectorType.GET) != null) {
                        // set up the Quartz jobs
                        JobDataMap map = new JobDataMap();
                        map.put("SerialPort", serialPort);
                        map.put("ValueSelector", OceanicValueSelector.getValueSelector(
                                provider.getValueSelector(itemName), ValueSelectorType.GET));
                        map.put("Binding", this);

                        JobDetail job = newJob(OceanicBinding.PollJob.class)
                                .withIdentity(itemName + "-" + provider.getValueSelector(itemName).toString(),
                                        "Oceanic-" + provider.toString())
                                .usingJobData(map)
                                .build();

                        Trigger trigger = newTrigger()
                                .withIdentity(itemName + "-" + provider.getValueSelector(itemName).toString(),
                                        "Oceanic-" + provider.toString())
                                .startNow()
                                .withSchedule(simpleSchedule().repeatForever()
                                        .withIntervalInSeconds(provider.getPollingInterval(itemName)))
                                .build();

                        try {
                            logger.debug("Adding a poll job {} for {}", job.getKey(), itemName);
                            sched.scheduleJob(job, trigger);
                        } catch (SchedulerException e) {
                            logger.error("An exception occurred while scheduling a Quartz Job");
                        }
                    }

                    // kill the Quartz jobs that we do not need anymore
                    try {
                        for (String group : sched.getJobGroupNames()) {
                            // enumerate each job in group
                            if (group.equals("Oceanic-" + provider.toString())) {
                                for (JobKey jobKey : sched.getJobKeys(jobGroupEquals(group))) {
                                    if (findFirstMatchingBindingProvider(jobKey.getName().split("-")[0]) == null) {
                                        logger.debug("Removing a poll job {} for {}", jobKey, itemName);
                                        sched.deleteJob(jobKey);
                                    }
                                }
                            }
                        }
                    } catch (SchedulerException e1) {
                        logger.error("An exception occurred while querying the Quartz Scheduler ({})", e1.getMessage());
                    }
                }
            }
        }

        // close down the serial ports that do not have any Items anymore associated to them
        for (String serialPort : serialDevices.keySet()) {
            SerialDevice serialDevice = serialDevices.get(serialPort);
            Set<String> itemNames = contextMap.get(serialPort);
            if (itemNames == null || itemNames.size() == 0) {
                contextMap.remove(serialPort);
                logger.debug("Closing the serial port {}", serialPort);
                serialDevice.close();
                serialDevices.remove(serialPort);
            }
        }
    }
}
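The JobDataMap built above is consumed inside OceanicBinding.PollJob, which is not shown here. Below is a minimal sketch of how such a job could read the entries back out in execute(), assuming only the key names used when the map was built; the class body is illustrative, not the actual openHAB implementation.

import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public static class PollJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // read the entries back under the same keys used when the JobDataMap was built
        JobDataMap dataMap = context.getMergedJobDataMap();
        String serialPort = dataMap.getString("SerialPort");
        Object valueSelector = dataMap.get("ValueSelector"); // whatever getValueSelector(...) returned
        OceanicBinding binding = (OceanicBinding) dataMap.get("Binding");

        // a real implementation would now ask the binding to query the device on
        // serialPort for the given value selector and publish the result
    }
}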
Use of org.quartz.JobDataMap in project openhab1-addons by openhab.
The class WeatherJobScheduler, method scheduleIntervalJob:
/**
* Schedules the WeatherJob with the specified interval and starts it
* immediately.
*/
public void scheduleIntervalJob(LocationConfig locationConfig) {
    String jobName = "weatherJob-" + locationConfig.getLocationId();
    int interval = locationConfig.getUpdateInterval() * 60;

    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("locationId", locationConfig.getLocationId());

    try {
        Trigger trigger = newTrigger().withIdentity(jobName + "-Trigger", JOB_GROUP).startNow()
                .withSchedule(simpleSchedule().repeatForever().withIntervalInSeconds(interval)).build();
        JobDetail jobDetail = newJob(WeatherJob.class).withIdentity(jobName, JOB_GROUP)
                .usingJobData(jobDataMap).build();
        scheduler.scheduleJob(jobDetail, trigger);
        logger.info("Starting and scheduling {} with interval of {} minutes", jobName,
                locationConfig.getUpdateInterval());
    } catch (SchedulerException ex) {
        logger.error(ex.getMessage(), ex);
    }
}
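On the consuming side, WeatherJob can read the locationId back from the merged JobDataMap in its execute() method. Below is a minimal sketch, assuming the job only needs the location id; the class body is illustrative, not the actual openHAB implementation.

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class WeatherJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // the key must match the one used in scheduleIntervalJob(...)
        String locationId = context.getMergedJobDataMap().getString("locationId");

        // a real implementation would now fetch the weather data for this
        // location and publish the result
    }
}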
Use of org.quartz.JobDataMap in project cachecloud by sohutv.
The class InspectorJob, method action:
@Override
public void action(JobExecutionContext context) {
    try {
        long start = System.currentTimeMillis();
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        ApplicationContext applicationContext = (ApplicationContext) schedulerContext.get(APPLICATION_CONTEXT_KEY);

        // application-related: choose the InspectHandler based on the inspectorType entry in the JobDataMap
        InspectHandler inspectHandler;
        JobDataMap jobDataMap = context.getMergedJobDataMap();
        String inspectorType = MapUtils.getString(jobDataMap, "inspectorType");
        if (StringUtils.isBlank(inspectorType)) {
            logger.error("=====================InspectorJob:inspectorType is null=====================");
            return;
        } else if (inspectorType.equals("host")) {
            inspectHandler = applicationContext.getBean("hostInspectHandler", InspectHandler.class);
        } else if (inspectorType.equals("app")) {
            inspectHandler = applicationContext.getBean("appInspectHandler", InspectHandler.class);
        } else {
            logger.error("=====================InspectorJob:inspectorType not match:{}=====================", inspectorType);
            return;
        }
        inspectHandler.handle();

        long end = System.currentTimeMillis();
        logger.info("=====================InspectorJob {} Done! cost={} ms=====================",
                inspectHandler.getClass().getSimpleName(), (end - start));
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        throw new RuntimeException(e);
    }
}
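Since the job branches on the inspectorType entry of the merged JobDataMap, that entry has to be supplied when the job is registered with the scheduler. Below is a minimal sketch of scheduling an hourly host inspection with that key set, using the standard Quartz 2.x builders; the method name, job and trigger identities, group and interval are assumptions for illustration, not taken from the cachecloud source.

import static org.quartz.JobBuilder.newJob;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
import static org.quartz.TriggerBuilder.newTrigger;

import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;

// Hypothetical registration: the "inspectorType" key is what InspectorJob.action(...) reads.
public void scheduleHostInspection(Scheduler scheduler) throws SchedulerException {
    JobDataMap jobDataMap = new JobDataMap();
    jobDataMap.put("inspectorType", "host");

    JobDetail jobDetail = newJob(InspectorJob.class)
            .withIdentity("hostInspectorJob", "inspectorGroup")
            .usingJobData(jobDataMap)
            .build();
    Trigger trigger = newTrigger()
            .withIdentity("hostInspectorTrigger", "inspectorGroup")
            .startNow()
            .withSchedule(simpleSchedule().withIntervalInHours(1).repeatForever())
            .build();
    scheduler.scheduleJob(jobDetail, trigger);
}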
Use of org.quartz.JobDataMap in project cachecloud by sohutv.
The class MachineMonitorJob, method action:
@Override
public void action(JobExecutionContext context) {
    try {
        JobDataMap dataMap = context.getMergedJobDataMap();
        String ip = dataMap.getString(ConstUtils.HOST_KEY);
        long hostId = dataMap.getLong(ConstUtils.HOST_ID_KEY);

        SchedulerContext schedulerContext = context.getScheduler().getContext();
        ApplicationContext applicationContext = (ApplicationContext) schedulerContext.get(APPLICATION_CONTEXT_KEY);
        MachineCenter machineCenter = applicationContext.getBean("machineCenter", MachineCenter.class);

        machineCenter.asyncMonitorMachineStats(hostId, ip);
    } catch (SchedulerException e) {
        logger.error(e.getMessage(), e);
    }
}
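The ip and host id the job reads must be placed in the JobDataMap under the same ConstUtils keys when the monitoring job is registered. Below is a minimal sketch of that registration; the method name, job and trigger identities, group and cron expression are assumptions for illustration, not taken from the cachecloud source.

import static org.quartz.CronScheduleBuilder.cronSchedule;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.TriggerBuilder.newTrigger;

import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;

// Hypothetical registration: the keys must match what MachineMonitorJob.action(...) reads back out.
public void scheduleMachineMonitor(Scheduler scheduler, long hostId, String ip) throws SchedulerException {
    JobDataMap dataMap = new JobDataMap();
    dataMap.put(ConstUtils.HOST_KEY, ip);        // read back via getString(...)
    dataMap.put(ConstUtils.HOST_ID_KEY, hostId); // read back via getLong(...)

    JobDetail jobDetail = newJob(MachineMonitorJob.class)
            .withIdentity("machineMonitorJob-" + hostId, "machineGroup")
            .usingJobData(dataMap)
            .build();
    // illustrative one-minute cron schedule; the schedule cachecloud actually uses may differ
    Trigger trigger = newTrigger()
            .withIdentity("machineMonitorTrigger-" + hostId, "machineGroup")
            .withSchedule(cronSchedule("0 * * * * ?"))
            .build();
    scheduler.scheduleJob(jobDetail, trigger);
}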