Use of org.quartz.JobExecutionException in project pinot by linkedin.
The class AlertTaskRunner, method sendAlertForAnomalies.
private void sendAlertForAnomalies(String collectionAlias, List<MergedAnomalyResultDTO> results,
    Map<DimensionMap, List<MergedAnomalyResultDTO>> groupedResults) throws JobExecutionException {
  long anomalyStartMillis = 0;
  long anomalyEndMillis = 0;
  int anomalyResultSize = 0;
  if (CollectionUtils.isNotEmpty(results)) {
    anomalyResultSize = results.size();
    anomalyStartMillis = results.get(0).getStartTime();
    anomalyEndMillis = results.get(0).getEndTime();
    for (MergedAnomalyResultDTO mergedAnomalyResultDTO : results) {
      if (mergedAnomalyResultDTO.getStartTime() < anomalyStartMillis) {
        anomalyStartMillis = mergedAnomalyResultDTO.getStartTime();
      }
      if (mergedAnomalyResultDTO.getEndTime() > anomalyEndMillis) {
        anomalyEndMillis = mergedAnomalyResultDTO.getEndTime();
      }
    }
  }
  DateTimeZone timeZone = DateTimeZone.forTimeZone(DEFAULT_TIME_ZONE);
  DataReportHelper.DateFormatMethod dateFormatMethod = new DataReportHelper.DateFormatMethod(timeZone);
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try (Writer out = new OutputStreamWriter(baos, CHARSET)) {
    Configuration freemarkerConfig = new Configuration(Configuration.VERSION_2_3_21);
    freemarkerConfig.setClassForTemplateLoading(getClass(), "/com/linkedin/thirdeye/detector/");
    freemarkerConfig.setDefaultEncoding(CHARSET);
    freemarkerConfig.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
    Map<String, Object> templateData = new HashMap<>();
    String metric = alertConfig.getMetric();
    String windowUnit = alertConfig.getWindowUnit().toString();
    templateData.put("groupedAnomalyResults", DataReportHelper.convertToStringKeyBasedMap(groupedResults));
    templateData.put("anomalyCount", anomalyResultSize);
    templateData.put("startTime", anomalyStartMillis);
    templateData.put("endTime", anomalyEndMillis);
    templateData.put("reportGenerationTimeMillis", System.currentTimeMillis());
    templateData.put("dateFormat", dateFormatMethod);
    templateData.put("timeZone", timeZone);
    templateData.put("collection", collectionAlias);
    templateData.put("metric", metric);
    templateData.put("windowUnit", windowUnit);
    templateData.put("dashboardHost", thirdeyeConfig.getDashboardHost());
    if (alertConfig.isReportEnabled() && alertConfig.getDimensions() != null) {
      long reportStartTs = 0;
      List<MetricDimensionReport> metricDimensionValueReports;
      List<ContributorViewResponse> reports = new ArrayList<>();
      for (String dimension : alertConfig.getDimensions()) {
        ContributorViewResponse report = EmailHelper.getContributorDataForDataReport(collectionAlias,
            alertConfig.getMetric(), Arrays.asList(dimension));
        if (report != null) {
          reports.add(report);
        }
      }
      // Guard against an empty report list before indexing into it.
      if (CollectionUtils.isNotEmpty(reports)) {
        reportStartTs = reports.get(0).getTimeBuckets().get(0).getCurrentStart();
        metricDimensionValueReports = DataReportHelper.getInstance().getDimensionReportList(reports);
        templateData.put("metricDimensionValueReports", metricDimensionValueReports);
        templateData.put("reportStartDateTime", reportStartTs);
      }
    }
    Template template = freemarkerConfig.getTemplate("anomaly-report.ftl");
    template.process(templateData, out);
  } catch (Exception e) {
    throw new JobExecutionException(e);
  }
  // Send email
  try {
    String alertEmailSubject;
    if (CollectionUtils.isNotEmpty(results)) {
      String anomalyString = (results.size() == 1) ? "anomaly" : "anomalies";
      alertEmailSubject = String.format("Thirdeye: %s: %s - %d %s detected", alertConfig.getMetric(),
          collectionAlias, results.size(), anomalyString);
    } else {
      alertEmailSubject = String.format("Thirdeye data report: %s: %s", alertConfig.getMetric(), collectionAlias);
    }
    HtmlEmail email = new HtmlEmail();
    String alertEmailHtml = new String(baos.toByteArray(), CHARSET);
    EmailHelper.sendEmailWithHtml(email, thirdeyeConfig.getSmtpConfiguration(), alertEmailSubject, alertEmailHtml,
        alertConfig.getFromAddress(), alertConfig.getToAddresses());
  } catch (Exception e) {
    throw new JobExecutionException(e);
  }
  // Once the email is sent, update the last merged anomaly id as the watermark in the email config.
  long anomalyId = 0;
  for (MergedAnomalyResultDTO anomalyResultDTO : results) {
    if (anomalyResultDTO.getId() > anomalyId) {
      anomalyId = anomalyResultDTO.getId();
    }
  }
  alertConfig.setLastNotifiedAnomalyId(anomalyId);
  emailConfigurationDAO.update(alertConfig);
  LOG.info("Sent email with {} anomalies! {}", results.size(), alertConfig);
}
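The FreeMarker rendering inside the try-with-resources block above is self-contained enough to lift out. A minimal sketch of the same flow follows; the /templates/ directory, report.ftl, and the anomalyCount model value are placeholders, not names from the original code.

import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateExceptionHandler;
import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.HashMap;
import java.util.Map;

public class TemplateRenderSketch {
  public static String render() throws Exception {
    Configuration cfg = new Configuration(Configuration.VERSION_2_3_21);
    // Load templates from the classpath, as sendAlertForAnomalies does.
    cfg.setClassForTemplateLoading(TemplateRenderSketch.class, "/templates/"); // placeholder path
    cfg.setDefaultEncoding("UTF-8");
    cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
    Map<String, Object> data = new HashMap<>();
    data.put("anomalyCount", 3); // placeholder model value
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (Writer out = new OutputStreamWriter(baos, "UTF-8")) {
      Template template = cfg.getTemplate("report.ftl"); // placeholder template name
      template.process(data, out);
    }
    return new String(baos.toByteArray(), "UTF-8");
  }
}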
Use of org.quartz.JobExecutionException in project pinot by linkedin.
The class EmailHelper, method writeTimeSeriesChart.
public static String writeTimeSeriesChart(final EmailConfigurationDTO config,
    TimeOnTimeComparisonHandler timeOnTimeComparisonHandler, final DateTime now, final DateTime then,
    final String collection, final Map<RawAnomalyResultDTO, String> anomaliesWithLabels)
    throws JobExecutionException {
  try {
    int windowSize = config.getWindowSize();
    TimeUnit windowUnit = config.getWindowUnit();
    long windowMillis = windowUnit.toMillis(windowSize);
    // TODO provide a way for email reports to specify the desired graph granularity.
    DatasetConfigManager datasetConfigDAO = DAO_REGISTRY.getDatasetConfigDAO();
    DatasetConfigDTO datasetConfig = datasetConfigDAO.findByDataset(collection);
    TimeSpec timespec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
    TimeGranularity dataGranularity = timespec.getDataGranularity();
    TimeOnTimeComparisonResponse chartData =
        getData(timeOnTimeComparisonHandler, config, then, now, WEEK_MILLIS, dataGranularity);
    AnomalyGraphGenerator anomalyGraphGenerator = AnomalyGraphGenerator.getInstance();
    JFreeChart chart = anomalyGraphGenerator.createChart(chartData, dataGranularity, windowMillis, anomaliesWithLabels);
    String chartFilePath = EMAIL_REPORT_CHART_PREFIX + config.getId() + PNG;
    LOG.info("Writing chart to {}", chartFilePath);
    anomalyGraphGenerator.writeChartToFile(chart, chartFilePath);
    return chartFilePath;
  } catch (Exception e) {
    throw new JobExecutionException(e);
  }
}
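writeChartToFile is ThirdEye-specific, but persisting a JFreeChart as a PNG is typically one call to the library's own utilities. A sketch under that assumption; the chart contents, the file name, and the 800x400 dimensions are placeholders.

import java.io.File;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartUtilities;
import org.jfree.chart.JFreeChart;
import org.jfree.data.time.TimeSeriesCollection;

public class ChartFileSketch {
  public static void main(String[] args) throws Exception {
    // An empty dataset is enough to demonstrate the write path.
    JFreeChart chart = ChartFactory.createTimeSeriesChart(
        "anomalies", "time", "metric", new TimeSeriesCollection(), true, false, false);
    // Placeholder file name and pixel dimensions.
    ChartUtilities.saveChartAsPNG(new File("email-report-chart.png"), chart, 800, 400);
  }
}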
Use of org.quartz.JobExecutionException in project pinot by linkedin.
The class TimeSeriesUtil, method getTimeSeriesResponseImpl.
private static TimeSeriesResponse getTimeSeriesResponseImpl(AnomalyFunctionDTO anomalyFunctionSpec,
    List<Pair<Long, Long>> startEndTimeRanges, TimeGranularity timeGranularity, Multimap<String, String> filters,
    List<String> groupByDimensions, boolean endTimeInclusive) throws JobExecutionException, ExecutionException {
  TimeSeriesHandler timeSeriesHandler = new TimeSeriesHandler(ThirdEyeCacheRegistry.getInstance().getQueryCache());
  // Seed request with top-level...
  TimeSeriesRequest request = new TimeSeriesRequest();
  request.setCollectionName(anomalyFunctionSpec.getCollection());
  // TODO: Check low level support for multiple metrics retrieval
  String metricsToRetrieve = String.join(",", anomalyFunctionSpec.getMetrics());
  List<MetricExpression> metricExpressions = Utils.convertToMetricExpressions(metricsToRetrieve,
      anomalyFunctionSpec.getMetricFunction(), anomalyFunctionSpec.getCollection());
  request.setMetricExpressions(metricExpressions);
  request.setAggregationTimeGranularity(timeGranularity);
  request.setFilterSet(filters);
  request.setGroupByDimensions(groupByDimensions);
  request.setEndDateInclusive(endTimeInclusive);
LOG.info("Found [{}] time ranges to fetch data", startEndTimeRanges.size());
for (Pair<Long, Long> timeRange : startEndTimeRanges) {
LOG.info("Start Time [{}], End Time [{}] for anomaly analysis", new DateTime(timeRange.getFirst()), new DateTime(timeRange.getSecond()));
}
Set<TimeSeriesRow> timeSeriesRowSet = new HashSet<>();
// TODO : replace this with Pinot MultiQuery Request
for (Pair<Long, Long> startEndInterval : startEndTimeRanges) {
DateTime startTime = new DateTime(startEndInterval.getFirst());
DateTime endTime = new DateTime(startEndInterval.getSecond());
request.setStart(startTime);
request.setEnd(endTime);
LOG.info("Fetching data with startTime: [{}], endTime: [{}], metricExpressions: [{}], timeGranularity: [{}]", startTime, endTime, metricExpressions, timeGranularity);
try {
LOG.debug("Executing {}", request);
TimeSeriesResponse response = timeSeriesHandler.handle(request);
timeSeriesRowSet.addAll(response.getRows());
} catch (Exception e) {
throw new JobExecutionException(e);
}
}
List<TimeSeriesRow> timeSeriesRows = new ArrayList<>();
timeSeriesRows.addAll(timeSeriesRowSet);
return new TimeSeriesResponse(timeSeriesRows);
}
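The loop above issues one request per time range and funnels the rows through a HashSet, so overlapping ranges cannot contribute duplicate rows (which relies on the row type implementing equals/hashCode). The same merge pattern in isolation; fetchAndMerge and the long[] range encoding are stand-ins for the handler call and the project's Pair<Long, Long>.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;

public class MergeRangesSketch {
  // 'fetch' stands in for timeSeriesHandler.handle(request); each long[] holds {start, end} millis.
  static <R> List<R> fetchAndMerge(List<long[]> ranges, Function<long[], List<R>> fetch) {
    Set<R> merged = new HashSet<>();
    for (long[] range : ranges) {
      merged.addAll(fetch.apply(range)); // duplicates from overlapping ranges collapse here
    }
    return new ArrayList<>(merged);
  }
}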
Use of org.quartz.JobExecutionException in project pinot by linkedin.
The class TimeSeriesUtil, method getTimeSeriesForAnomalyDetection.
/**
 * Returns the set of metric time series that the given anomaly function needs for detecting anomalies.
 *
 * The time granularity is the granularity of the function's collection, i.e., the buckets are not aggregated,
 * in order to increase the accuracy of anomaly detection.
 *
 * @param anomalyFunctionSpec spec of the anomaly function
 * @param startEndTimeRanges the time ranges for which to retrieve data when constructing the time series
 *
 * @return the data that the anomaly function needs for detecting anomalies
 * @throws JobExecutionException
 * @throws ExecutionException
 */
public static Map<DimensionKey, MetricTimeSeries> getTimeSeriesForAnomalyDetection(
    AnomalyFunctionDTO anomalyFunctionSpec, List<Pair<Long, Long>> startEndTimeRanges)
    throws JobExecutionException, ExecutionException {
  String filterString = anomalyFunctionSpec.getFilters();
  Multimap<String, String> filters;
  if (StringUtils.isNotBlank(filterString)) {
    filters = ThirdEyeUtils.getFilterSet(filterString);
  } else {
    filters = HashMultimap.create();
  }
  List<String> groupByDimensions;
  String exploreDimensionString = anomalyFunctionSpec.getExploreDimensions();
  if (StringUtils.isNotBlank(exploreDimensionString)) {
    groupByDimensions = Arrays.asList(exploreDimensionString.trim().split(","));
  } else {
    groupByDimensions = Collections.emptyList();
  }
  TimeGranularity timeGranularity =
      new TimeGranularity(anomalyFunctionSpec.getBucketSize(), anomalyFunctionSpec.getBucketUnit());
  TimeSeriesResponse timeSeriesResponse = getTimeSeriesResponseImpl(anomalyFunctionSpec, startEndTimeRanges,
      timeGranularity, filters, groupByDimensions, false);
  try {
    Map<DimensionKey, MetricTimeSeries> dimensionKeyMetricTimeSeriesMap = TimeSeriesResponseConverter
        .toMap(timeSeriesResponse, Utils.getSchemaDimensionNames(anomalyFunctionSpec.getCollection()));
    return dimensionKeyMetricTimeSeriesMap;
  } catch (Exception e) {
    LOG.warn("Failed to get schema dimensions for constructing dimension keys", e);
    return Collections.emptyMap();
  }
}
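A caller sketch, assuming an AnomalyFunctionManager DAO with a findById lookup and a Pair type constructed from (first, second); the function id and the seven-day window are hypothetical.

public static Map<DimensionKey, MetricTimeSeries> fetchLastWeek(AnomalyFunctionManager anomalyFunctionDAO,
    long functionId) throws JobExecutionException, ExecutionException {
  AnomalyFunctionDTO spec = anomalyFunctionDAO.findById(functionId); // hypothetical lookup
  long windowEnd = System.currentTimeMillis();
  long windowStart = windowEnd - TimeUnit.DAYS.toMillis(7); // placeholder seven-day window
  List<Pair<Long, Long>> ranges = Collections.singletonList(new Pair<>(windowStart, windowEnd));
  // One MetricTimeSeries per explored dimension combination.
  return TimeSeriesUtil.getTimeSeriesForAnomalyDetection(spec, ranges);
}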
Use of org.quartz.JobExecutionException in project openhab1-addons by openhab.
The class EventJob, method execute.
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
  try {
    final String config = context.getJobDetail().getJobDataMap().getString(KEY_CONFIG);
    final String eventId = context.getJobDetail().getJobDataMap().getString(KEY_EVENT);
    final int recIndex = context.getJobDetail().getJobDataMap().getInt(KEY_REC_INDEX);
    final EventTrigger eventTrigger =
        EventTrigger.valueOf(context.getJobDetail().getJobDataMap().getString(KEY_EVENT_TRIGGER));
    CalendarRuntime calendarRuntime = EventStorage.getInstance().getEventCache().get(config);
    if (calendarRuntime == null) {
      throw new JobExecutionException("cannot get runtime for config: " + config, false);
    }
    EventContainer eventContainer = calendarRuntime.getEventMap().get(eventId);
    if (eventContainer == null) {
      throw new JobExecutionException("cannot get event-container for config: " + config
          + " and eventId: " + eventId, false);
    }
    if (eventContainer.getEventList().size() <= recIndex) {
      throw new JobExecutionException("cannot get recurrence-event for config: " + config
          + " and eventId: " + eventId + " and occurrence: " + recIndex, false);
    }
    CalDavEvent event = eventContainer.getEventList().get(recIndex);
    log.info("event {} for: {}", eventTrigger, event.getShortName());
    for (EventNotifier notifier : CalDavLoaderImpl.instance.getEventListenerList()) {
      try {
        if (eventTrigger == EventTrigger.BEGIN) {
          notifier.eventBegins(event);
        } else if (eventTrigger == EventTrigger.END) {
          notifier.eventEnds(event);
        } else {
          throw new IllegalStateException("not implemented event trigger: " + eventTrigger);
        }
      } catch (Exception e) {
        log.error("error while invoking listener", e);
      }
    }
    if (eventTrigger == EventTrigger.END) {
      // If the event has ended, remove it from the map.
      calendarRuntime.getEventMap().remove(eventContainer.getEventId());
    }
  } catch (Exception e) {
    log.error("error executing event job", e);
    throw new JobExecutionException("error executing event job", e, false);
  }
}
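All of the snippets above follow the same Quartz convention: catch whatever fails and rethrow it as a JobExecutionException, whose trailing boolean tells the scheduler whether to refire the job immediately. A minimal, self-contained job showing the convention; the "payload" job-data key and doWork are hypothetical.

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class MinimalJob implements Job {
  @Override
  public void execute(JobExecutionContext context) throws JobExecutionException {
    try {
      // "payload" is a hypothetical job-data key.
      doWork(context.getJobDetail().getJobDataMap().getString("payload"));
    } catch (Exception e) {
      // Wrap the cause; the final 'false' tells Quartz not to refire the job immediately.
      throw new JobExecutionException("error executing job", e, false);
    }
  }

  private void doWork(String payload) {
    // hypothetical work
  }
}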