Use of org.quartz.JobExecutionException in project head by mifos.
The class MifosBatchJob, method executeInternal.
@Override
public void executeInternal(JobExecutionContext context) throws JobExecutionException {
    try {
        String jobName = context.getJobDetail().getName();
        Job job = jobLocator.getJob(jobName);
        catchUpMissedLaunches(job, context);
        checkAndLaunchJob(job, getJobParametersFromContext(context), 0);
    } catch (Exception ex) {
        throw new JobExecutionException(ex);
    }
}
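The pattern above, catching any checked exception and rethrowing it as org.quartz.JobExecutionException, is the standard way to signal job failure to the Quartz scheduler. A minimal, self-contained sketch of the same pattern (the job class, its work, and the refire choice are hypothetical, not part of the mifos code):

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

// Hypothetical job: wraps a checked exception in JobExecutionException so Quartz
// records the execution as failed and can decide whether to re-fire the trigger.
public class ReportExportJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            // any work that may throw a checked exception
            exportReport(context.getJobDetail().getKey().getName());
        } catch (Exception ex) {
            JobExecutionException jee = new JobExecutionException(ex);
            // ask Quartz not to re-run the job immediately on the same trigger
            jee.setRefireImmediately(false);
            throw jee;
        }
    }

    private void exportReport(String jobName) throws java.io.IOException {
        // placeholder for the actual export work
    }
}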
Use of org.quartz.JobExecutionException in project pinot by linkedin.
The class AlertTaskRunner, method sendFailureEmail.
private void sendFailureEmail(Throwable t) throws JobExecutionException {
    HtmlEmail email = new HtmlEmail();
    String collection = alertConfig.getCollection();
    String metric = alertConfig.getMetric();
    String subject = String.format("[ThirdEye Anomaly Detector] FAILED ALERT ID=%d (%s:%s)", alertConfig.getId(), collection, metric);
    String textBody = String.format("%s%n%nException:%s", alertConfig.toString(), ExceptionUtils.getStackTrace(t));
    try {
        EmailHelper.sendEmailWithTextBody(email, thirdeyeConfig.getSmtpConfiguration(), subject, textBody,
                thirdeyeConfig.getFailureFromAddress(), thirdeyeConfig.getFailureToAddress());
    } catch (EmailException e) {
        throw new JobExecutionException(e);
    }
}
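sendFailureEmail delegates the actual delivery to ThirdEye's EmailHelper; the essential behaviour is that a failed send (Commons Email's checked EmailException) is converted into a JobExecutionException so the Quartz-driven alert job is recorded as failed. A rough standalone sketch of that conversion using plain Apache Commons Email (the helper name, SMTP host, addresses, and message text are made up for illustration):

import org.apache.commons.mail.EmailException;
import org.apache.commons.mail.HtmlEmail;
import org.quartz.JobExecutionException;

// Hypothetical helper: sends a plain-text failure notice and maps the
// checked EmailException onto Quartz's JobExecutionException.
static void notifyFailure(Throwable t) throws JobExecutionException {
    try {
        HtmlEmail email = new HtmlEmail();
        email.setHostName("smtp.example.org");       // assumed SMTP host
        email.setFrom("thirdeye@example.org");       // assumed sender
        email.addTo("oncall@example.org");           // assumed recipient
        email.setSubject("[ThirdEye Anomaly Detector] FAILED ALERT");
        email.setTextMsg("Alert job failed:\n" + t); // stack trace omitted for brevity
        email.send();
    } catch (EmailException e) {
        throw new JobExecutionException(e);
    }
}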
Use of org.quartz.JobExecutionException in project pinot by linkedin.
The class EmailHelper, method getData.
/**
* Generate and send request to retrieve chart data. If the request window is too small, the graph
* data retrieved has default window sizes based on time granularity and ending at the defined
* endpoint: <br/> <ul> <li>DAYS: 7</li> <li>HOURS: 24</li> </ul>
*
* @param bucketGranularity
*
* @throws JobExecutionException
*/
public static TimeOnTimeComparisonResponse getData(TimeOnTimeComparisonHandler timeOnTimeComparisonHandler,
        EmailConfigurationDTO config, DateTime start, final DateTime end, long baselinePeriodMillis,
        TimeGranularity bucketGranularity) throws JobExecutionException {
    start = calculateGraphDataStart(start, end, bucketGranularity);
    try {
        TimeOnTimeComparisonRequest comparisonRequest = new TimeOnTimeComparisonRequest();
        comparisonRequest.setCollectionName(config.getCollection());
        comparisonRequest.setBaselineStart(start.minus(baselinePeriodMillis));
        comparisonRequest.setBaselineEnd(end.minus(baselinePeriodMillis));
        comparisonRequest.setCurrentStart(start);
        comparisonRequest.setCurrentEnd(end);
        comparisonRequest.setEndDateInclusive(true);
        List<MetricExpression> metricExpressions = new ArrayList<>();
        MetricExpression expression = new MetricExpression(config.getMetric());
        metricExpressions.add(expression);
        comparisonRequest.setMetricExpressions(metricExpressions);
        comparisonRequest.setAggregationTimeGranularity(bucketGranularity);
        LOG.debug("Starting...");
        TimeOnTimeComparisonResponse response = timeOnTimeComparisonHandler.handle(comparisonRequest);
        LOG.debug("Done!");
        return response;
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
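The Javadoc's default-window rule (7 buckets for DAYS granularity, 24 for HOURS) is applied by calculateGraphDataStart, whose body is not shown here. A hypothetical sketch of that widening logic, written against Joda-Time and a plain java.util.concurrent.TimeUnit instead of ThirdEye's TimeGranularity (the helper name, signature, and exact widening rule are assumptions for illustration):

import java.util.concurrent.TimeUnit;
import org.joda.time.DateTime;

// Hypothetical: widen the requested window to the documented minimum
// (7 days for DAYS granularity, 24 hours for HOURS granularity).
static DateTime widenGraphDataStart(DateTime start, DateTime end, TimeUnit unit) {
    DateTime minimumStart;
    if (unit == TimeUnit.DAYS) {
        minimumStart = end.minusDays(7);
    } else if (unit == TimeUnit.HOURS) {
        minimumStart = end.minusHours(24);
    } else {
        return start; // other granularities: leave the window untouched
    }
    // only widen, never shrink, the caller's window
    return start.isAfter(minimumStart) ? minimumStart : start;
}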
Use of org.quartz.JobExecutionException in project pinot by linkedin.
The class TimeSeriesUtil, method getTimeSeriesByDimension.
/**
* Returns the metric time series that were given to the anomaly function for anomaly detection. If the dimension to
* retrieve is OTHER, this method retrieves all combinations of dimensions and calculates the metric time series for
* the OTHER dimension on the fly.
*
* @param anomalyFunctionSpec spec of the anomaly function
* @param startEndTimeRanges the time ranges to retrieve the data for constructing the time series
* @param dimensionMap a dimension map that is used to construct the filter for retrieving the corresponding data
* that was used to detect the anomaly
* @param timeGranularity time granularity of the time series
* @param endTimeInclusive set to true if the end time should be inclusive; mainly used by the query for UI
* @return the time series in the same format as those used by the given anomaly function for anomaly detection
*
* @throws JobExecutionException
* @throws ExecutionException
*/
public static MetricTimeSeries getTimeSeriesByDimension(AnomalyFunctionDTO anomalyFunctionSpec,
        List<Pair<Long, Long>> startEndTimeRanges, DimensionMap dimensionMap, TimeGranularity timeGranularity,
        boolean endTimeInclusive) throws JobExecutionException, ExecutionException {
    // Get the original filter
    Multimap<String, String> filters;
    String filterString = anomalyFunctionSpec.getFilters();
    if (StringUtils.isNotBlank(filterString)) {
        filters = ThirdEyeUtils.getFilterSet(filterString);
    } else {
        filters = HashMultimap.create();
    }
    // Decorate filters according to dimensionMap
    filters = ThirdEyeUtils.getFilterSetFromDimensionMap(dimensionMap, filters);
    boolean hasOTHERDimensionName = false;
    for (String dimensionValue : dimensionMap.values()) {
        if (dimensionValue.equalsIgnoreCase(ResponseParserUtils.OTHER)) {
            hasOTHERDimensionName = true;
            break;
        }
    }
    // groupByDimensions (i.e., exploreDimensions) is empty by default because the query for getting the time series
    // will have the decorated filters according to the anomalies' explore dimensions.
    // However, if any dimension has the value "OTHER", then we need to honor the original groupBy in order to
    // construct the data for OTHER.
    List<String> groupByDimensions = Collections.emptyList();
    if (hasOTHERDimensionName && StringUtils.isNotBlank(anomalyFunctionSpec.getExploreDimensions().trim())) {
        groupByDimensions = Arrays.asList(anomalyFunctionSpec.getExploreDimensions().trim().split(","));
    }
    TimeSeriesResponse response = getTimeSeriesResponseImpl(anomalyFunctionSpec, startEndTimeRanges, timeGranularity,
            filters, groupByDimensions, endTimeInclusive);
    try {
        Map<DimensionKey, MetricTimeSeries> metricTimeSeriesMap = TimeSeriesResponseConverter.toMap(response,
                Utils.getSchemaDimensionNames(anomalyFunctionSpec.getCollection()));
        return extractMetricTimeSeriesByDimension(metricTimeSeriesMap);
    } catch (Exception e) {
        LOG.warn("Unable to get schema dimension name for retrieving metric time series: {}", e.toString());
        return null;
    }
}
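The filter handling above relies on Guava's Multimap: the anomaly function's configured filters are parsed into a Multimap and then overlaid with the concrete dimension values from the anomaly's DimensionMap. A simplified, self-contained sketch of that decoration step (the dimension names are invented, and the overlay rule shown here, dimension values replacing any configured filter on the same key, is an assumption about the intent, not ThirdEyeUtils' exact code):

import java.util.Map;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

// Hypothetical illustration of decorating configured filters with an anomaly's
// dimension values: the dimension value wins over any pre-configured filter
// on the same dimension name.
static Multimap<String, String> decorateFilters(Multimap<String, String> configuredFilters,
        Map<String, String> dimensionMap) {
    Multimap<String, String> decorated = HashMultimap.create(configuredFilters);
    for (Map.Entry<String, String> dimension : dimensionMap.entrySet()) {
        decorated.removeAll(dimension.getKey());                  // drop configured values for this dimension
        decorated.put(dimension.getKey(), dimension.getValue());  // pin it to the anomaly's value
    }
    return decorated;
}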
Use of org.quartz.JobExecutionException in project openhab1-addons by openhab.
The class EventReloaderJob, method execute.
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    final String config = context.getJobDetail().getJobDataMap().getString(KEY_CONFIG);
    CalendarRuntime eventRuntime = EventStorage.getInstance().getEventCache().get(config);
    log.debug("running EventReloaderJob for config : {}", config);
    // reload cached events (if necessary)
    if (!cachedEventsLoaded.containsKey(config)) {
        try {
            log.debug("reload cached events for config: {}", eventRuntime.getConfig().getKey());
            for (File fileCalendarKeys : new File(CalDavLoaderImpl.CACHE_PATH).listFiles()) {
                if (!eventRuntime.getConfig().getKey().equals(Util.getFilename(fileCalendarKeys.getName()))) {
                    log.trace("not our config : {}", Util.getFilename(fileCalendarKeys.getName()));
                    continue;
                }
                log.trace("found our config : {}", Util.getFilename(fileCalendarKeys.getName()));
                final Collection<File> icsFiles = FileUtils.listFiles(fileCalendarKeys, new String[] { "ics" }, false);
                for (File icsFile : icsFiles) {
                    try {
                        FileInputStream fis = new FileInputStream(icsFile);
                        log.debug("loading events from file : {}", icsFile);
                        loadEvents(Util.getFilename(icsFile.getAbsolutePath()),
                                new org.joda.time.DateTime(icsFile.lastModified()), fis, eventRuntime.getConfig(),
                                new ArrayList<String>(), true);
                    } catch (IOException e) {
                        log.error("cannot load events for file: " + icsFile, e);
                    } catch (ParserException e) {
                        log.error("cannot load events for file: " + icsFile, e);
                    }
                }
                break;
            }
        } catch (Throwable e) {
            log.error("cannot load events", e);
        } finally {
            cachedEventsLoaded.put(config, true);
        }
    }
    try {
        log.debug("loading events for config: " + config);
        List<String> oldEventIds = new ArrayList<String>();
        for (EventContainer eventContainer : eventRuntime.getEventMap().values()) {
            oldEventIds.add(eventContainer.getFilename());
            log.debug("old eventcontainer -- id : {} -- filename : {} -- calcuntil : {} -- lastchanged : {} -- ishistoric : {}",
                    eventContainer.getEventId(), eventContainer.getFilename(), eventContainer.getCalculatedUntil(),
                    eventContainer.getLastChanged(), eventContainer.isHistoricEvent());
            if (log.isDebugEnabled()) {
                for (int i = 0; i < eventContainer.getEventList().size(); i++) {
                    CalDavEvent elem = eventContainer.getEventList().get(i);
                    log.debug("old eventlist contains event : {} -- start : {} -- end : {} -- lastchanged {}",
                            elem.getName(), elem.getStart(), elem.getEnd(), elem.getLastChanged());
                }
            }
        }
        loadEvents(eventRuntime, oldEventIds);
        // stop all events in oldMap
        removeDeletedEvents(config, oldEventIds);
        for (EventNotifier notifier : CalDavLoaderImpl.instance.getEventListenerList()) {
            try {
                notifier.calendarReloaded(config);
            } catch (Exception e) {
                log.error("error while invoking listener", e);
            }
        }
        // print all scheduled jobs
        if (log.isDebugEnabled()) {
            log.debug("jobs scheduled : ");
            Scheduler scheduler = CalDavLoaderImpl.instance.getScheduler();
            for (String groupName : CalDavLoaderImpl.instance.getScheduler().getJobGroupNames()) {
                for (JobKey jobKey : scheduler.getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {
                    String jobName = jobKey.getName();
                    String jobGroup = jobKey.getGroup();
                    // get the job's trigger
                    List<Trigger> triggers = (List<Trigger>) scheduler.getTriggersOfJob(jobKey);
                    Date nextFireTime = triggers.get(0).getNextFireTime();
                    log.debug("[job] : {} - [groupName] : {} - {}", jobName, jobGroup, nextFireTime);
                }
            }
        }
    } catch (SardineException e) {
        log.error("error while loading calendar entries: {} ({} - {} )", e.getMessage(), e.getStatusCode(),
                e.getResponsePhrase(), e);
        throw new JobExecutionException("error while loading calendar entries", e, false);
    } catch (Exception e) {
        log.error("error while loading calendar entries: {}", e.getMessage(), e);
        throw new JobExecutionException("error while loading calendar entries", e, false);
    }
}
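EventReloaderJob reads its calendar key from the JobDataMap and signals failure with the three-argument JobExecutionException constructor (message, cause, refireImmediately = false), so Quartz does not retry the load right away. A rough sketch of how such a job could be wired up with the standard Quartz 2.x builder API (the scheduler setup, identities, schedule, and config value are illustrative, not openHAB's actual wiring):

import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleScheduleBuilder;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;

// Hypothetical wiring: run an EventReloaderJob-like job every 10 minutes and pass
// the calendar config key through the JobDataMap (read back in execute() via
// getJobDetail().getJobDataMap().getString(KEY_CONFIG)).
public static void main(String[] args) throws SchedulerException {
    Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
    scheduler.start();

    JobDetail job = JobBuilder.newJob(EventReloaderJob.class)
            .withIdentity("reload-myCalendar", "caldav")   // assumed job identity
            .usingJobData("config", "myCalendar")          // assumed key name and calendar id
            .build();

    Trigger trigger = TriggerBuilder.newTrigger()
            .withIdentity("reload-myCalendar-trigger", "caldav")
            .startNow()
            .withSchedule(SimpleScheduleBuilder.repeatMinutelyForever(10))
            .build();

    scheduler.scheduleJob(job, trigger);
}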