Use of org.quartz.JobExecutionException in project pinot by LinkedIn.
The class AlertTaskRunnerV2, method sendScheduledDataReport:
private void sendScheduledDataReport() throws Exception {
  AlertConfigBean.ReportConfigCollection reportConfigCollection = alertConfig.getReportConfigCollection();
  if (reportConfigCollection != null && reportConfigCollection.isEnabled()) {
    if (reportConfigCollection.getReportMetricConfigs() != null && reportConfigCollection.getReportMetricConfigs().size() > 0) {
      List<MetricDimensionReport> metricDimensionValueReports;
      // Used later to provide collection for a metric to help build the url link in report
      Map<String, MetricConfigDTO> metricMap = new HashMap<>();
      List<ContributorViewResponse> reports = new ArrayList<>();
      for (int i = 0; i < reportConfigCollection.getReportMetricConfigs().size(); i++) {
        AlertConfigBean.ReportMetricConfig reportMetricConfig = reportConfigCollection.getReportMetricConfigs().get(i);
        MetricConfigDTO metricConfig = metricConfigManager.findById(reportMetricConfig.getMetricId());
        List<String> dimensions = reportMetricConfig.getDimensions();
        if (dimensions != null && dimensions.size() > 0) {
          for (String dimension : dimensions) {
            ContributorViewResponse report = EmailHelper.getContributorDataForDataReport(metricConfig.getDataset(), metricConfig.getName(), Arrays.asList(dimension), reportMetricConfig.getCompareMode(), alertConfig.getReportConfigCollection().getDelayOffsetMillis(), alertConfig.getReportConfigCollection().isIntraDay());
            if (report != null) {
              metricMap.put(metricConfig.getName(), metricConfig);
              reports.add(report);
            }
          }
        }
      }
      if (reports.size() == 0) {
        LOG.warn("Could not fetch report data for " + alertConfig.getName());
        return;
      }
      long reportStartTs = reports.get(0).getTimeBuckets().get(0).getCurrentStart();
      metricDimensionValueReports = DataReportHelper.getInstance().getDimensionReportList(reports);
      for (int i = 0; i < metricDimensionValueReports.size(); i++) {
        MetricDimensionReport report = metricDimensionValueReports.get(i);
        report.setDataset(metricMap.get(report.getMetricName()).getDataset());
        long metricId = metricMap.get(report.getMetricName()).getId();
        report.setMetricId(metricId);
        for (AlertConfigBean.ReportMetricConfig reportMetricConfig : reportConfigCollection.getReportMetricConfigs()) {
          if (reportMetricConfig.getMetricId() == metricId) {
            metricDimensionValueReports.get(i).setCompareMode(reportMetricConfig.getCompareMode().name());
          }
        }
      }
      Configuration freemarkerConfig = new Configuration(Configuration.VERSION_2_3_21);
      freemarkerConfig.setClassForTemplateLoading(getClass(), "/com/linkedin/thirdeye/detector/");
      freemarkerConfig.setDefaultEncoding(CHARSET);
      freemarkerConfig.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
      Map<String, Object> templateData = new HashMap<>();
      DateTimeZone timeZone = DateTimeZone.forTimeZone(DEFAULT_TIME_ZONE);
      DataReportHelper.DateFormatMethod dateFormatMethod = new DataReportHelper.DateFormatMethod(timeZone);
      templateData.put("timeZone", timeZone);
      templateData.put("dateFormat", dateFormatMethod);
      templateData.put("dashboardHost", thirdeyeConfig.getDashboardHost());
      templateData.put("fromEmail", alertConfig.getFromAddress());
      templateData.put("contactEmail", alertConfig.getReportConfigCollection().getContactEmail());
      templateData.put("reportStartDateTime", reportStartTs);
      templateData.put("metricDimensionValueReports", metricDimensionValueReports);
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      try (Writer out = new OutputStreamWriter(baos, CHARSET)) {
        Template template = freemarkerConfig.getTemplate("data-report-by-metric-dimension.ftl");
        template.process(templateData, out);
        // Send email
        HtmlEmail email = new HtmlEmail();
        String alertEmailSubject = String.format("Thirdeye data report : %s", alertConfig.getName());
        String alertEmailHtml = new String(baos.toByteArray(), CHARSET);
        EmailHelper.sendEmailWithHtml(email, thirdeyeConfig.getSmtpConfiguration(), alertEmailSubject, alertEmailHtml, alertConfig.getFromAddress(), alertConfig.getRecipients());
      } catch (Exception e) {
        throw new JobExecutionException(e);
      }
    }
  }
}
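The template-rendering step above (a FreeMarker Configuration, classpath template loading, then process into a Writer over a ByteArrayOutputStream) is a reusable pattern. Below is a minimal, self-contained sketch of it; the ReportRenderSketch class name and the /templates/ classpath folder are illustrative, not part of ThirdEye, and an explicit "UTF-8" stands in for the CHARSET constant.

import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateExceptionHandler;
import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Map;

public class ReportRenderSketch {
  // Renders a classpath FreeMarker template into an HTML string,
  // mirroring the render-then-email pattern in sendScheduledDataReport.
  public static String render(String templateName, Map<String, Object> model) throws Exception {
    Configuration cfg = new Configuration(Configuration.VERSION_2_3_21);
    cfg.setClassForTemplateLoading(ReportRenderSketch.class, "/templates/"); // illustrative folder
    cfg.setDefaultEncoding("UTF-8"); // stands in for CHARSET
    cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (Writer out = new OutputStreamWriter(baos, StandardCharsets.UTF_8)) {
      Template template = cfg.getTemplate(templateName);
      template.process(model, out);
    }
    // The try-with-resources block has closed (and flushed) the writer by this point.
    return new String(baos.toByteArray(), StandardCharsets.UTF_8);
  }
}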
Use of org.quartz.JobExecutionException in project pinot by LinkedIn.
The class AlertTaskRunner, method sendAlertForAnomalies:
private void sendAlertForAnomalies(String collectionAlias, List<MergedAnomalyResultDTO> results, Map<DimensionMap, List<MergedAnomalyResultDTO>> groupedResults) throws JobExecutionException {
  long anomalyStartMillis = 0;
  long anomalyEndMillis = 0;
  int anomalyResultSize = 0;
  if (CollectionUtils.isNotEmpty(results)) {
    anomalyResultSize = results.size();
    anomalyStartMillis = results.get(0).getStartTime();
    anomalyEndMillis = results.get(0).getEndTime();
    for (MergedAnomalyResultDTO mergedAnomalyResultDTO : results) {
      if (mergedAnomalyResultDTO.getStartTime() < anomalyStartMillis) {
        anomalyStartMillis = mergedAnomalyResultDTO.getStartTime();
      }
      if (mergedAnomalyResultDTO.getEndTime() > anomalyEndMillis) {
        anomalyEndMillis = mergedAnomalyResultDTO.getEndTime();
      }
    }
  }
  DateTimeZone timeZone = DateTimeZone.forTimeZone(DEFAULT_TIME_ZONE);
  DataReportHelper.DateFormatMethod dateFormatMethod = new DataReportHelper.DateFormatMethod(timeZone);
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try (Writer out = new OutputStreamWriter(baos, CHARSET)) {
    Configuration freemarkerConfig = new Configuration(Configuration.VERSION_2_3_21);
    freemarkerConfig.setClassForTemplateLoading(getClass(), "/com/linkedin/thirdeye/detector/");
    freemarkerConfig.setDefaultEncoding(CHARSET);
    freemarkerConfig.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
    Map<String, Object> templateData = new HashMap<>();
    String metric = alertConfig.getMetric();
    String windowUnit = alertConfig.getWindowUnit().toString();
    templateData.put("groupedAnomalyResults", DataReportHelper.convertToStringKeyBasedMap(groupedResults));
    templateData.put("anomalyCount", anomalyResultSize);
    templateData.put("startTime", anomalyStartMillis);
    templateData.put("endTime", anomalyEndMillis);
    templateData.put("reportGenerationTimeMillis", System.currentTimeMillis());
    templateData.put("dateFormat", dateFormatMethod);
    templateData.put("timeZone", timeZone);
    templateData.put("collection", collectionAlias);
    templateData.put("metric", metric);
    templateData.put("windowUnit", windowUnit);
    templateData.put("dashboardHost", thirdeyeConfig.getDashboardHost());
    if (alertConfig.isReportEnabled() && alertConfig.getDimensions() != null) {
      long reportStartTs = 0;
      List<MetricDimensionReport> metricDimensionValueReports;
      List<ContributorViewResponse> reports = new ArrayList<>();
      for (String dimension : alertConfig.getDimensions()) {
        ContributorViewResponse report = EmailHelper.getContributorDataForDataReport(collectionAlias, alertConfig.getMetric(), Arrays.asList(dimension));
        if (report != null) {
          reports.add(report);
        }
      }
      // Note: this assumes at least one dimension report was fetched; an empty list would throw here.
      reportStartTs = reports.get(0).getTimeBuckets().get(0).getCurrentStart();
      metricDimensionValueReports = DataReportHelper.getInstance().getDimensionReportList(reports);
      templateData.put("metricDimensionValueReports", metricDimensionValueReports);
      templateData.put("reportStartDateTime", reportStartTs);
    }
    Template template = freemarkerConfig.getTemplate("anomaly-report.ftl");
    template.process(templateData, out);
  } catch (Exception e) {
    throw new JobExecutionException(e);
  }
  // Send email
  try {
    String alertEmailSubject;
    if (results.size() > 0) {
      String anomalyString = (results.size() == 1) ? "anomaly" : "anomalies";
      alertEmailSubject = String.format("Thirdeye: %s: %s - %d %s detected", alertConfig.getMetric(), collectionAlias, results.size(), anomalyString);
    } else {
      alertEmailSubject = String.format("Thirdeye data report : %s: %s", alertConfig.getMetric(), collectionAlias);
    }
    HtmlEmail email = new HtmlEmail();
    String alertEmailHtml = new String(baos.toByteArray(), CHARSET);
    EmailHelper.sendEmailWithHtml(email, thirdeyeConfig.getSmtpConfiguration(), alertEmailSubject, alertEmailHtml, alertConfig.getFromAddress(), alertConfig.getToAddresses());
  } catch (Exception e) {
    throw new JobExecutionException(e);
  }
  // once email is sent, update the last merged anomaly id as watermark in email config
  long anomalyId = 0;
  for (MergedAnomalyResultDTO anomalyResultDTO : results) {
    if (anomalyResultDTO.getId() > anomalyId) {
      anomalyId = anomalyResultDTO.getId();
    }
  }
  alertConfig.setLastNotifiedAnomalyId(anomalyId);
  emailConfigurationDAO.update(alertConfig);
  LOG.info("Sent email with {} anomalies! {}", results.size(), alertConfig);
}
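Both task runners follow the standard Quartz contract: the job body may only declare org.quartz.JobExecutionException, so any checked failure is wrapped before it escapes. A minimal sketch of that contract, with a hypothetical AlertJobSketch class standing in for the real task runner:

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class AlertJobSketch implements Job {
  @Override
  public void execute(JobExecutionContext context) throws JobExecutionException {
    try {
      runAlertTask(); // stand-in for work such as sendAlertForAnomalies(...)
    } catch (Exception e) {
      // Job.execute may only throw JobExecutionException, so wrap everything else.
      throw new JobExecutionException(e);
    }
  }

  private void runAlertTask() throws Exception {
    // hypothetical task body
  }
}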
Use of org.quartz.JobExecutionException in project pinot by LinkedIn.
The class EmailHelper, method writeTimeSeriesChart:
public static String writeTimeSeriesChart(final EmailConfigurationDTO config, TimeOnTimeComparisonHandler timeOnTimeComparisonHandler, final DateTime now, final DateTime then, final String collection, final Map<RawAnomalyResultDTO, String> anomaliesWithLabels) throws JobExecutionException {
  try {
    int windowSize = config.getWindowSize();
    TimeUnit windowUnit = config.getWindowUnit();
    long windowMillis = windowUnit.toMillis(windowSize);
    // TODO provide a way for email reports to specify desired graph granularity.
    DatasetConfigManager datasetConfigDAO = DAO_REGISTRY.getDatasetConfigDAO();
    DatasetConfigDTO datasetConfig = datasetConfigDAO.findByDataset(collection);
    TimeSpec timespec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
    TimeGranularity dataGranularity = timespec.getDataGranularity();
    TimeOnTimeComparisonResponse chartData = getData(timeOnTimeComparisonHandler, config, then, now, WEEK_MILLIS, dataGranularity);
    AnomalyGraphGenerator anomalyGraphGenerator = AnomalyGraphGenerator.getInstance();
    JFreeChart chart = anomalyGraphGenerator.createChart(chartData, dataGranularity, windowMillis, anomaliesWithLabels);
    String chartFilePath = EMAIL_REPORT_CHART_PREFIX + config.getId() + PNG;
    LOG.info("Writing chart to {}", chartFilePath);
    anomalyGraphGenerator.writeChartToFile(chart, chartFilePath);
    return chartFilePath;
  } catch (Exception e) {
    throw new JobExecutionException(e);
  }
}
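The returned chartFilePath is intended to travel with an alert email. One way a caller might attach it, using the same Apache Commons Email library that appears in the snippets above; the host and addresses below are placeholders:

import org.apache.commons.mail.EmailAttachment;
import org.apache.commons.mail.HtmlEmail;

public class ChartEmailSketch {
  // Attaches the PNG written by writeTimeSeriesChart to an HTML email.
  public static void sendChart(String chartFilePath) throws Exception {
    EmailAttachment attachment = new EmailAttachment();
    attachment.setPath(chartFilePath);
    attachment.setDisposition(EmailAttachment.ATTACHMENT);
    attachment.setDescription("anomaly time series chart");

    HtmlEmail email = new HtmlEmail();
    email.setHostName("smtp.example.com"); // placeholder SMTP host
    email.setFrom("thirdeye@example.com"); // placeholder sender
    email.addTo("oncall@example.com"); // placeholder recipient
    email.setSubject("ThirdEye anomaly chart");
    email.setHtmlMsg("<html><body>See the attached chart.</body></html>");
    email.attach(attachment);
    email.send();
  }
}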
Use of org.quartz.JobExecutionException in project pinot by LinkedIn.
The class TimeSeriesUtil, method getTimeSeriesResponseImpl:
private static TimeSeriesResponse getTimeSeriesResponseImpl(AnomalyFunctionDTO anomalyFunctionSpec, List<Pair<Long, Long>> startEndTimeRanges, TimeGranularity timeGranularity, Multimap<String, String> filters, List<String> groupByDimensions, boolean endTimeInclusive) throws JobExecutionException, ExecutionException {
  TimeSeriesHandler timeSeriesHandler = new TimeSeriesHandler(ThirdEyeCacheRegistry.getInstance().getQueryCache());
  // Seed request with top-level...
  TimeSeriesRequest request = new TimeSeriesRequest();
  request.setCollectionName(anomalyFunctionSpec.getCollection());
  // TODO: Check low level support for multiple metrics retrieval
  String metricsToRetrieve = String.join(",", anomalyFunctionSpec.getMetrics());
  List<MetricExpression> metricExpressions = Utils.convertToMetricExpressions(metricsToRetrieve, anomalyFunctionSpec.getMetricFunction(), anomalyFunctionSpec.getCollection());
  request.setMetricExpressions(metricExpressions);
  request.setAggregationTimeGranularity(timeGranularity);
  request.setFilterSet(filters);
  request.setGroupByDimensions(groupByDimensions);
  request.setEndDateInclusive(endTimeInclusive);
LOG.info("Found [{}] time ranges to fetch data", startEndTimeRanges.size());
for (Pair<Long, Long> timeRange : startEndTimeRanges) {
LOG.info("Start Time [{}], End Time [{}] for anomaly analysis", new DateTime(timeRange.getFirst()), new DateTime(timeRange.getSecond()));
}
Set<TimeSeriesRow> timeSeriesRowSet = new HashSet<>();
// TODO : replace this with Pinot MultiQuery Request
for (Pair<Long, Long> startEndInterval : startEndTimeRanges) {
DateTime startTime = new DateTime(startEndInterval.getFirst());
DateTime endTime = new DateTime(startEndInterval.getSecond());
request.setStart(startTime);
request.setEnd(endTime);
LOG.info("Fetching data with startTime: [{}], endTime: [{}], metricExpressions: [{}], timeGranularity: [{}]", startTime, endTime, metricExpressions, timeGranularity);
try {
LOG.debug("Executing {}", request);
TimeSeriesResponse response = timeSeriesHandler.handle(request);
timeSeriesRowSet.addAll(response.getRows());
} catch (Exception e) {
throw new JobExecutionException(e);
}
}
List<TimeSeriesRow> timeSeriesRows = new ArrayList<>();
timeSeriesRows.addAll(timeSeriesRowSet);
return new TimeSeriesResponse(timeSeriesRows);
}
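Callers pass startEndTimeRanges as [start, end) millisecond pairs, and the method issues one fetch per range. A sketch of how such ranges might be produced by chunking a large window into day-sized pieces; the local Pair class is a stand-in for the project's pair type, which exposes getFirst() and getSecond():

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

public class TimeRangeSketch {
  // Minimal stand-in for the Pair type consumed by getTimeSeriesResponseImpl.
  public static class Pair<A, B> {
    private final A first;
    private final B second;
    public Pair(A first, B second) { this.first = first; this.second = second; }
    public A getFirst() { return first; }
    public B getSecond() { return second; }
  }

  // Splits [windowStart, windowEnd) into day-sized [start, end) ranges.
  public static List<Pair<Long, Long>> chunkByDay(long windowStart, long windowEnd) {
    long dayMillis = TimeUnit.DAYS.toMillis(1);
    List<Pair<Long, Long>> ranges = new ArrayList<>();
    for (long start = windowStart; start < windowEnd; start += dayMillis) {
      ranges.add(new Pair<>(start, Math.min(start + dayMillis, windowEnd)));
    }
    return ranges;
  }
}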
Use of org.quartz.JobExecutionException in project pinot by LinkedIn.
The class TimeSeriesUtil, method getTimeSeriesForAnomalyDetection:
/**
 * Returns the set of metric time series that are needed by the given anomaly function for detecting anomalies.
 *
 * The time granularity is the granularity of the function's collection, i.e., the buckets are not aggregated,
 * in order to increase the accuracy of anomaly detection.
 *
 * @param anomalyFunctionSpec spec of the anomaly function
 * @param startEndTimeRanges the time ranges for which to retrieve data when constructing the time series
 *
 * @return the data that is needed by the anomaly function for detecting anomalies
 * @throws JobExecutionException
 * @throws ExecutionException
 */
public static Map<DimensionKey, MetricTimeSeries> getTimeSeriesForAnomalyDetection(AnomalyFunctionDTO anomalyFunctionSpec, List<Pair<Long, Long>> startEndTimeRanges) throws JobExecutionException, ExecutionException {
  String filterString = anomalyFunctionSpec.getFilters();
  Multimap<String, String> filters;
  if (StringUtils.isNotBlank(filterString)) {
    filters = ThirdEyeUtils.getFilterSet(filterString);
  } else {
    filters = HashMultimap.create();
  }
  List<String> groupByDimensions;
  String exploreDimensionString = anomalyFunctionSpec.getExploreDimensions();
  if (StringUtils.isNotBlank(exploreDimensionString)) {
    groupByDimensions = Arrays.asList(exploreDimensionString.trim().split(","));
  } else {
    groupByDimensions = Collections.emptyList();
  }
  TimeGranularity timeGranularity = new TimeGranularity(anomalyFunctionSpec.getBucketSize(), anomalyFunctionSpec.getBucketUnit());
  TimeSeriesResponse timeSeriesResponse = getTimeSeriesResponseImpl(anomalyFunctionSpec, startEndTimeRanges, timeGranularity, filters, groupByDimensions, false);
  try {
    Map<DimensionKey, MetricTimeSeries> dimensionKeyMetricTimeSeriesMap = TimeSeriesResponseConverter.toMap(timeSeriesResponse, Utils.getSchemaDimensionNames(anomalyFunctionSpec.getCollection()));
    return dimensionKeyMetricTimeSeriesMap;
  } catch (Exception e) {
    LOG.info("Failed to get schema dimensions for constructing dimension keys: {}", e.toString());
    return Collections.emptyMap();
  }
}
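The filter handling above reduces to: a blank spec yields an empty Guava multimap, anything else is parsed. A rough sketch of such a parser, assuming a simple key=value;key=value encoding; the actual format accepted by ThirdEyeUtils.getFilterSet may differ:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import org.apache.commons.lang3.StringUtils;

public class FilterParseSketch {
  // Parses e.g. "country=US;country=CA;device=mobile" into a multimap.
  // A blank input yields an empty multimap, matching the guard above.
  public static Multimap<String, String> parseFilters(String filterString) {
    Multimap<String, String> filters = HashMultimap.create();
    if (StringUtils.isNotBlank(filterString)) {
      for (String clause : filterString.split(";")) {
        String[] kv = clause.split("=", 2);
        if (kv.length == 2) {
          filters.put(kv[0].trim(), kv[1].trim());
        }
      }
    }
    return filters;
  }
}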