Example usage of org.quartz.JobKey from the project cachecloud (by sohutv),
in class MachineCenterImpl, method deployServerCollection.
@Override
public boolean deployServerCollection(long hostId, String ip) {
    Assert.hasText(ip);
    // Payload handed to the server-collection job: the target host's IP.
    Map<String, Object> jobData = new HashMap<String, Object>();
    jobData.put(ConstUtils.HOST_KEY, ip);
    // One shared job identity; the trigger is made unique per host by using the IP
    // as the trigger name and suffixing the group with the IP as well.
    JobKey serverJob = JobKey.jobKey(ConstUtils.SERVER_JOB_NAME, ConstUtils.SERVER_JOB_GROUP);
    TriggerKey serverTrigger = TriggerKey.triggerKey(ip, ConstUtils.SERVER_TRIGGER_GROUP + ip);
    String cron = ScheduleUtil.getFiveMinuteCronByHostId(hostId);
    return schedulerCenter.deployJobByCron(serverJob, serverTrigger, jobData, cron, false);
}
Example usage of org.quartz.JobKey from the project cachecloud (by sohutv),
in class MachineCenterImpl, method deployMachineCollection.
/**
 * Creates and deploys the Quartz trigger that collects stats for one machine.
 *
 * @param hostId machine id (must be positive)
 * @param ip     machine ip (must be non-blank)
 * @return true when the job was deployed successfully, false otherwise
 */
@Override
public boolean deployMachineCollection(final long hostId, final String ip) {
    Assert.isTrue(hostId > 0);
    Assert.hasText(ip);
    // Payload for the machine-stats job: the host's ip plus its numeric id.
    Map<String, Object> jobData = new HashMap<String, Object>();
    jobData.put(ConstUtils.HOST_KEY, ip);
    jobData.put(ConstUtils.HOST_ID_KEY, hostId);
    // Shared job identity; the trigger name is the ip and its group is
    // suffixed with the host id, keeping one trigger per machine.
    JobKey machineJob = JobKey.jobKey(ConstUtils.MACHINE_JOB_NAME, ConstUtils.MACHINE_JOB_GROUP);
    TriggerKey machineTrigger = TriggerKey.triggerKey(ip, ConstUtils.MACHINE_TRIGGER_GROUP + hostId);
    String cron = ScheduleUtil.getMachineStatsCron(hostId);
    return schedulerCenter.deployJobByCron(machineJob, machineTrigger, jobData, cron, false);
}
Example usage of org.quartz.JobKey from the project cachecloud (by sohutv),
in class RedisCenterImpl, method deployRedisCollection.
@Override
public boolean deployRedisCollection(long appId, String host, int port) {
    Assert.isTrue(appId > 0);
    Assert.hasText(host);
    Assert.isTrue(port > 0);
    // Payload for the redis-collection job: instance address plus owning app id.
    Map<String, Object> jobData = new HashMap<String, Object>();
    jobData.put(ConstUtils.HOST_KEY, host);
    jobData.put(ConstUtils.PORT_KEY, port);
    jobData.put(ConstUtils.APP_KEY, appId);
    JobKey redisJob = JobKey.jobKey(ConstUtils.REDIS_JOB_NAME, ConstUtils.REDIS_JOB_GROUP);
    // Trigger name "host:port" keeps one trigger per redis instance inside the
    // per-app trigger group.
    String instanceName = ObjectConvert.linkIpAndPort(host, port);
    TriggerKey redisTrigger = TriggerKey.triggerKey(instanceName, ConstUtils.REDIS_TRIGGER_GROUP + appId);
    String cron = ScheduleUtil.getMinuteCronByAppId(appId);
    return schedulerCenter.deployJobByCron(redisJob, redisTrigger, jobData, cron, false);
}
Example usage of org.quartz.JobKey from the project cachecloud (by sohutv),
in class RedisCenterImpl, method deployRedisSlowLogCollection.
@Override
public boolean deployRedisSlowLogCollection(long appId, String host, int port) {
    Assert.isTrue(appId > 0);
    Assert.hasText(host);
    Assert.isTrue(port > 0);
    // Payload for the slow-log collection job: instance address plus owning app id.
    Map<String, Object> jobData = new HashMap<String, Object>();
    jobData.put(ConstUtils.HOST_KEY, host);
    jobData.put(ConstUtils.PORT_KEY, port);
    jobData.put(ConstUtils.APP_KEY, appId);
    JobKey slowLogJob = JobKey.jobKey(ConstUtils.REDIS_SLOWLOG_JOB_NAME, ConstUtils.REDIS_SLOWLOG_JOB_GROUP);
    // Trigger name "host:port" keeps one trigger per redis instance inside the
    // per-app slow-log trigger group.
    String instanceName = ObjectConvert.linkIpAndPort(host, port);
    TriggerKey slowLogTrigger = TriggerKey.triggerKey(instanceName, ConstUtils.REDIS_SLOWLOG_TRIGGER_GROUP + appId);
    String cron = ScheduleUtil.getRedisSlowLogCron(appId);
    return schedulerCenter.deployJobByCron(slowLogJob, slowLogTrigger, jobData, cron, false);
}
Example usage of org.quartz.JobKey from the project OpenClinica (by OpenClinica),
in class SecureController, method pingJobServer.
/**
 * Checks the state of the extract-data job tracked in the HTTP session and, once the
 * job's trigger has finished (state NONE or COMPLETE), surfaces its outcome to the user
 * as a page message and clears the tracking attributes from the session.
 *
 * Reads session attributes "jobName", "groupName" and "datasetId"; does nothing when
 * the first two are absent. Scheduler errors are logged and swallowed so a ping never
 * breaks the surrounding request.
 *
 * @param request current HTTP request whose session tracks the extract job
 */
private void pingJobServer(HttpServletRequest request) {
    String jobName = (String) request.getSession().getAttribute("jobName");
    String groupName = (String) request.getSession().getAttribute("groupName");
    Integer datasetId = (Integer) request.getSession().getAttribute("datasetId");
    try {
        if (jobName != null && groupName != null) {
            Trigger.TriggerState triggerState = getScheduler(request).getTriggerState(new TriggerKey(jobName, groupName));
            org.quartz.JobDetail details = getScheduler(request).getJobDetail(new JobKey(jobName, groupName));
            org.quartz.JobDataMap dataMap = details.getJobDataMap();
            String failMessage = dataMap.getString("failMessage");
            if (triggerState == Trigger.TriggerState.NONE || triggerState == Trigger.TriggerState.COMPLETE) {
                // TODO i18n
                if (failMessage != null) {
                    // The extract data job failed with the message:
                    // ERROR: relation "demographics" already exists
                    // More information may be available in the log files.
                    addPageMessage("The extract data job failed with the message: <br/><br/>" + failMessage + "<br/><br/>More information may be available in the log files.");
                    clearExtractJobSessionAttributes(request);
                } else {
                    String successMsg = dataMap.getString("SUCCESS_MESSAGE");
                    String success = dataMap.getString("successMsg");
                    if (success != null) {
                        // BUGFIX: guard successMsg before dereferencing — previously
                        // successMsg.contains("$linkURL") threw a NullPointerException
                        // when the data map carried "successMsg" but no "SUCCESS_MESSAGE".
                        if (successMsg != null && successMsg.contains("$linkURL")) {
                            successMsg = decodeLINKURL(successMsg, datasetId);
                        }
                        if (successMsg != null && !successMsg.isEmpty()) {
                            addPageMessage(successMsg);
                        } else {
                            addPageMessage("Your Extract is now completed. Please go to review them at <a href='ExportDataset?datasetId=" + datasetId + "'> Here </a>.");
                        }
                        clearExtractJobSessionAttributes(request);
                    }
                }
            }
        }
    } catch (SchedulerException se) {
        // NOTE(review): no logger is visible in this chunk; keeping the original
        // printStackTrace rather than introducing an unresolved logging dependency.
        se.printStackTrace();
    }
}

/** Removes the extract-job tracking attributes from the HTTP session. */
private void clearExtractJobSessionAttributes(HttpServletRequest request) {
    request.getSession().removeAttribute("jobName");
    request.getSession().removeAttribute("groupName");
    request.getSession().removeAttribute("datasetId");
}
Aggregations