Example 21 with JobExecutionException

Use of org.quartz.JobExecutionException in project pinot by linkedin.

Class AlertTaskRunner, method sendAlertForAnomalies:

private void sendAlertForAnomalies(String collectionAlias, List<MergedAnomalyResultDTO> results, Map<DimensionMap, List<MergedAnomalyResultDTO>> groupedResults) throws JobExecutionException {
    long anomalyStartMillis = 0;
    long anomalyEndMillis = 0;
    int anomalyResultSize = 0;
    if (CollectionUtils.isNotEmpty(results)) {
        anomalyResultSize = results.size();
        anomalyStartMillis = results.get(0).getStartTime();
        anomalyEndMillis = results.get(0).getEndTime();
        for (MergedAnomalyResultDTO mergedAnomalyResultDTO : results) {
            if (mergedAnomalyResultDTO.getStartTime() < anomalyStartMillis) {
                anomalyStartMillis = mergedAnomalyResultDTO.getStartTime();
            }
            if (mergedAnomalyResultDTO.getEndTime() > anomalyEndMillis) {
                anomalyEndMillis = mergedAnomalyResultDTO.getEndTime();
            }
        }
    }
    DateTimeZone timeZone = DateTimeZone.forTimeZone(DEFAULT_TIME_ZONE);
    DataReportHelper.DateFormatMethod dateFormatMethod = new DataReportHelper.DateFormatMethod(timeZone);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (Writer out = new OutputStreamWriter(baos, CHARSET)) {
        Configuration freemarkerConfig = new Configuration(Configuration.VERSION_2_3_21);
        freemarkerConfig.setClassForTemplateLoading(getClass(), "/com/linkedin/thirdeye/detector/");
        freemarkerConfig.setDefaultEncoding(CHARSET);
        freemarkerConfig.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
        Map<String, Object> templateData = new HashMap<>();
        String metric = alertConfig.getMetric();
        String windowUnit = alertConfig.getWindowUnit().toString();
        templateData.put("groupedAnomalyResults", DataReportHelper.convertToStringKeyBasedMap(groupedResults));
        templateData.put("anomalyCount", anomalyResultSize);
        templateData.put("startTime", anomalyStartMillis);
        templateData.put("endTime", anomalyEndMillis);
        templateData.put("reportGenerationTimeMillis", System.currentTimeMillis());
        templateData.put("dateFormat", dateFormatMethod);
        templateData.put("timeZone", timeZone);
        templateData.put("collection", collectionAlias);
        templateData.put("metric", metric);
        templateData.put("windowUnit", windowUnit);
        templateData.put("dashboardHost", thirdeyeConfig.getDashboardHost());
        if (alertConfig.isReportEnabled() && alertConfig.getDimensions() != null) {
            long reportStartTs = 0;
            List<MetricDimensionReport> metricDimensionValueReports;
            List<ContributorViewResponse> reports = new ArrayList<>();
            for (String dimension : alertConfig.getDimensions()) {
                ContributorViewResponse report = EmailHelper.getContributorDataForDataReport(collectionAlias, alertConfig.getMetric(), Arrays.asList(dimension));
                if (report != null) {
                    reports.add(report);
                }
            }
            reportStartTs = reports.get(0).getTimeBuckets().get(0).getCurrentStart();
            metricDimensionValueReports = DataReportHelper.getInstance().getDimensionReportList(reports);
            templateData.put("metricDimensionValueReports", metricDimensionValueReports);
            templateData.put("reportStartDateTime", reportStartTs);
        }
        Template template = freemarkerConfig.getTemplate("anomaly-report.ftl");
        template.process(templateData, out);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
    // Send email
    try {
        String alertEmailSubject;
        if (results.size() > 0) {
            String anomalyString = (results.size() == 1) ? "anomaly" : "anomalies";
            alertEmailSubject = String.format("Thirdeye: %s: %s - %d %s detected", alertConfig.getMetric(), collectionAlias, results.size(), anomalyString);
        } else {
            alertEmailSubject = String.format("Thirdeye data report : %s: %s", alertConfig.getMetric(), collectionAlias);
        }
        HtmlEmail email = new HtmlEmail();
        String alertEmailHtml = new String(baos.toByteArray(), CHARSET);
        EmailHelper.sendEmailWithHtml(email, thirdeyeConfig.getSmtpConfiguration(), alertEmailSubject, alertEmailHtml, alertConfig.getFromAddress(), alertConfig.getToAddresses());
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
    // once email is sent, update the last merged anomaly id as watermark in email config
    long anomalyId = 0;
    for (MergedAnomalyResultDTO anomalyResultDTO : results) {
        if (anomalyResultDTO.getId() > anomalyId) {
            anomalyId = anomalyResultDTO.getId();
        }
    }
    alertConfig.setLastNotifiedAnomalyId(anomalyId);
    emailConfigurationDAO.update(alertConfig);
    LOG.info("Sent email with {} anomalies! {}", results.size(), alertConfig);
}
Also used: DataReportHelper(com.linkedin.thirdeye.anomaly.alert.util.DataReportHelper), ThirdEyeAnomalyConfiguration(com.linkedin.thirdeye.anomaly.ThirdEyeAnomalyConfiguration), Configuration(freemarker.template.Configuration), HashMap(java.util.HashMap), ArrayList(java.util.ArrayList), HtmlEmail(org.apache.commons.mail.HtmlEmail), ByteArrayOutputStream(java.io.ByteArrayOutputStream), DateTimeZone(org.joda.time.DateTimeZone), JobExecutionException(org.quartz.JobExecutionException), EmailException(org.apache.commons.mail.EmailException), Template(freemarker.template.Template), ContributorViewResponse(com.linkedin.thirdeye.dashboard.views.contributor.ContributorViewResponse), MergedAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO), OutputStreamWriter(java.io.OutputStreamWriter), MetricDimensionReport(com.linkedin.thirdeye.anomaly.alert.template.pojo.MetricDimensionReport), Writer(java.io.Writer)
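
The example above follows a common pattern: render a FreeMarker template into an in-memory buffer and convert any template or I/O failure into a JobExecutionException so that Quartz records the job run as failed. Below is a minimal, self-contained sketch of that pattern, not the ThirdEye code itself; the template directory /templates/, the template name report.ftl, the class ReportRenderer, and the use of UTF-8 in place of the original CHARSET constant are illustrative assumptions.

import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateExceptionHandler;
import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.quartz.JobExecutionException;

public class ReportRenderer {

    // Renders the given template with the supplied model and returns the result as HTML.
    // Any FreeMarker or I/O problem is wrapped in a JobExecutionException, mirroring the example above.
    public static String renderReport(Map<String, Object> model) throws JobExecutionException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (Writer out = new OutputStreamWriter(baos, StandardCharsets.UTF_8)) {
            Configuration cfg = new Configuration(Configuration.VERSION_2_3_21);
            // Load templates from the classpath; the package path here is an assumption.
            cfg.setClassForTemplateLoading(ReportRenderer.class, "/templates/");
            cfg.setDefaultEncoding("UTF-8");
            cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
            Template template = cfg.getTemplate("report.ftl");  // hypothetical template name
            template.process(model, out);
        } catch (Exception e) {
            // Quartz treats a JobExecutionException as a failed (but handled) job execution.
            throw new JobExecutionException(e);
        }
        return new String(baos.toByteArray(), StandardCharsets.UTF_8);
    }
}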

Example 22 with JobExecutionException

Use of org.quartz.JobExecutionException in project pinot by linkedin.

Class EmailHelper, method writeTimeSeriesChart:

public static String writeTimeSeriesChart(final EmailConfigurationDTO config, TimeOnTimeComparisonHandler timeOnTimeComparisonHandler, final DateTime now, final DateTime then, final String collection, final Map<RawAnomalyResultDTO, String> anomaliesWithLabels) throws JobExecutionException {
    try {
        int windowSize = config.getWindowSize();
        TimeUnit windowUnit = config.getWindowUnit();
        long windowMillis = windowUnit.toMillis(windowSize);
        // TODO provide a way for email reports to specify desired graph granularity.
        DatasetConfigManager datasetConfigDAO = DAO_REGISTRY.getDatasetConfigDAO();
        DatasetConfigDTO datasetConfig = datasetConfigDAO.findByDataset(collection);
        TimeSpec timespec = ThirdEyeUtils.getTimeSpecFromDatasetConfig(datasetConfig);
        TimeGranularity dataGranularity = timespec.getDataGranularity();
        TimeOnTimeComparisonResponse chartData = getData(timeOnTimeComparisonHandler, config, then, now, WEEK_MILLIS, dataGranularity);
        AnomalyGraphGenerator anomalyGraphGenerator = AnomalyGraphGenerator.getInstance();
        JFreeChart chart = anomalyGraphGenerator.createChart(chartData, dataGranularity, windowMillis, anomaliesWithLabels);
        String chartFilePath = EMAIL_REPORT_CHART_PREFIX + config.getId() + PNG;
        LOG.info("Writing chart to {}", chartFilePath);
        anomalyGraphGenerator.writeChartToFile(chart, chartFilePath);
        return chartFilePath;
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
Also used: JFreeChart(org.jfree.chart.JFreeChart), MalformedURLException(java.net.MalformedURLException), JobExecutionException(org.quartz.JobExecutionException), EmailException(org.apache.commons.mail.EmailException), TimeSpec(com.linkedin.thirdeye.api.TimeSpec), DatasetConfigDTO(com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO), TimeOnTimeComparisonResponse(com.linkedin.thirdeye.client.comparison.TimeOnTimeComparisonResponse), DatasetConfigManager(com.linkedin.thirdeye.datalayer.bao.DatasetConfigManager), TimeUnit(java.util.concurrent.TimeUnit), TimeGranularity(com.linkedin.thirdeye.api.TimeGranularity), AnomalyGraphGenerator(com.linkedin.thirdeye.detector.email.AnomalyGraphGenerator)
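
The helper above delegates chart creation to the project-specific AnomalyGraphGenerator. As a rough, generic sketch of the same idea using plain JFreeChart 1.0.x (where ChartUtilities.saveChartAsPNG is available), the snippet below builds a time-series chart, writes it to a PNG, and wraps any failure in a JobExecutionException; the class ChartWriter, the method name writeChart, the axis labels, and the 800x400 size are placeholders, not part of the ThirdEye code.

import java.io.File;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartUtilities;
import org.jfree.chart.JFreeChart;
import org.jfree.data.time.TimeSeriesCollection;
import org.quartz.JobExecutionException;

public class ChartWriter {

    // Builds a simple time-series chart from the given dataset and writes it to a PNG file.
    // Failures are converted to JobExecutionException, as in the EmailHelper example above.
    public static String writeChart(TimeSeriesCollection dataset, String filePath) throws JobExecutionException {
        try {
            JFreeChart chart = ChartFactory.createTimeSeriesChart(
                    "Metric over time",   // chart title (placeholder)
                    "Time",               // x-axis label
                    "Value",              // y-axis label
                    dataset,
                    true,                 // legend
                    false,                // tooltips
                    false);               // URLs
            ChartUtilities.saveChartAsPNG(new File(filePath), chart, 800, 400);
            return filePath;
        } catch (Exception e) {
            throw new JobExecutionException(e);
        }
    }
}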

Example 23 with JobExecutionException

Use of org.quartz.JobExecutionException in project pinot by linkedin.

Class TimeSeriesUtil, method getTimeSeriesResponseImpl:

private static TimeSeriesResponse getTimeSeriesResponseImpl(AnomalyFunctionDTO anomalyFunctionSpec, List<Pair<Long, Long>> startEndTimeRanges, TimeGranularity timeGranularity, Multimap<String, String> filters, List<String> groupByDimensions, boolean endTimeInclusive) throws JobExecutionException, ExecutionException {
    TimeSeriesHandler timeSeriesHandler = new TimeSeriesHandler(ThirdEyeCacheRegistry.getInstance().getQueryCache());
    // Seed request with top-level...
    TimeSeriesRequest request = new TimeSeriesRequest();
    request.setCollectionName(anomalyFunctionSpec.getCollection());
    // TODO: Check low level support for multiple metrics retrieval
    String metricsToRetrieve = String.join(",", anomalyFunctionSpec.getMetrics());
    List<MetricExpression> metricExpressions = Utils.convertToMetricExpressions(metricsToRetrieve, anomalyFunctionSpec.getMetricFunction(), anomalyFunctionSpec.getCollection());
    request.setMetricExpressions(metricExpressions);
    request.setAggregationTimeGranularity(timeGranularity);
    request.setFilterSet(filters);
    request.setGroupByDimensions(groupByDimensions);
    request.setEndDateInclusive(endTimeInclusive);
    LOG.info("Found [{}] time ranges to fetch data", startEndTimeRanges.size());
    for (Pair<Long, Long> timeRange : startEndTimeRanges) {
        LOG.info("Start Time [{}], End Time [{}] for anomaly analysis", new DateTime(timeRange.getFirst()), new DateTime(timeRange.getSecond()));
    }
    Set<TimeSeriesRow> timeSeriesRowSet = new HashSet<>();
    // TODO : replace this with Pinot MultiQuery Request
    for (Pair<Long, Long> startEndInterval : startEndTimeRanges) {
        DateTime startTime = new DateTime(startEndInterval.getFirst());
        DateTime endTime = new DateTime(startEndInterval.getSecond());
        request.setStart(startTime);
        request.setEnd(endTime);
        LOG.info("Fetching data with startTime: [{}], endTime: [{}], metricExpressions: [{}], timeGranularity: [{}]", startTime, endTime, metricExpressions, timeGranularity);
        try {
            LOG.debug("Executing {}", request);
            TimeSeriesResponse response = timeSeriesHandler.handle(request);
            timeSeriesRowSet.addAll(response.getRows());
        } catch (Exception e) {
            throw new JobExecutionException(e);
        }
    }
    List<TimeSeriesRow> timeSeriesRows = new ArrayList<>();
    timeSeriesRows.addAll(timeSeriesRowSet);
    return new TimeSeriesResponse(timeSeriesRows);
}
Also used: TimeSeriesRow(com.linkedin.thirdeye.client.timeseries.TimeSeriesRow), TimeSeriesResponse(com.linkedin.thirdeye.client.timeseries.TimeSeriesResponse), ArrayList(java.util.ArrayList), MetricExpression(com.linkedin.thirdeye.client.MetricExpression), DateTime(org.joda.time.DateTime), ExecutionException(java.util.concurrent.ExecutionException), JobExecutionException(org.quartz.JobExecutionException), TimeSeriesHandler(com.linkedin.thirdeye.client.timeseries.TimeSeriesHandler), TimeSeriesRequest(com.linkedin.thirdeye.client.timeseries.TimeSeriesRequest), HashSet(java.util.HashSet)

Example 24 with JobExecutionException

Use of org.quartz.JobExecutionException in project pinot by linkedin.

Class TimeSeriesUtil, method getTimeSeriesForAnomalyDetection:

/**
   * Returns the set of metric time series that are needed by the given anomaly function for detecting anomalies.
   *
   * The time granularity is the granularity of the function's collection, i.e., the buckets are not aggregated,
   * in order to increase the accuracy for detecting anomalies.
   *
   * @param anomalyFunctionSpec spec of the anomaly function
   * @param startEndTimeRanges the time ranges to retrieve the data for constructing the time series
   *
   * @return the data that is needed by the anomaly function for detecting anomalies.
   * @throws JobExecutionException
   * @throws ExecutionException
   */
public static Map<DimensionKey, MetricTimeSeries> getTimeSeriesForAnomalyDetection(AnomalyFunctionDTO anomalyFunctionSpec, List<Pair<Long, Long>> startEndTimeRanges) throws JobExecutionException, ExecutionException {
    String filterString = anomalyFunctionSpec.getFilters();
    Multimap<String, String> filters;
    if (StringUtils.isNotBlank(filterString)) {
        filters = ThirdEyeUtils.getFilterSet(filterString);
    } else {
        filters = HashMultimap.create();
    }
    List<String> groupByDimensions;
    String exploreDimensionString = anomalyFunctionSpec.getExploreDimensions();
    if (StringUtils.isNotBlank(exploreDimensionString)) {
        groupByDimensions = Arrays.asList(exploreDimensionString.trim().split(","));
    } else {
        groupByDimensions = Collections.emptyList();
    }
    TimeGranularity timeGranularity = new TimeGranularity(anomalyFunctionSpec.getBucketSize(), anomalyFunctionSpec.getBucketUnit());
    TimeSeriesResponse timeSeriesResponse = getTimeSeriesResponseImpl(anomalyFunctionSpec, startEndTimeRanges, timeGranularity, filters, groupByDimensions, false);
    try {
        Map<DimensionKey, MetricTimeSeries> dimensionKeyMetricTimeSeriesMap = TimeSeriesResponseConverter.toMap(timeSeriesResponse, Utils.getSchemaDimensionNames(anomalyFunctionSpec.getCollection()));
        return dimensionKeyMetricTimeSeriesMap;
    } catch (Exception e) {
        LOG.info("Failed to get schema dimensions for constructing dimension keys:", e.toString());
        return Collections.emptyMap();
    }
}
Also used: DimensionKey(com.linkedin.thirdeye.api.DimensionKey), TimeSeriesResponse(com.linkedin.thirdeye.client.timeseries.TimeSeriesResponse), TimeGranularity(com.linkedin.thirdeye.api.TimeGranularity), MetricTimeSeries(com.linkedin.thirdeye.api.MetricTimeSeries), ExecutionException(java.util.concurrent.ExecutionException), JobExecutionException(org.quartz.JobExecutionException)

Example 25 with JobExecutionException

Use of org.quartz.JobExecutionException in project openhab1-addons by openhab.

Class EventJob, method execute:

@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    try {
        final String config = context.getJobDetail().getJobDataMap().getString(KEY_CONFIG);
        final String eventId = context.getJobDetail().getJobDataMap().getString(KEY_EVENT);
        final int recIndex = context.getJobDetail().getJobDataMap().getInt(KEY_REC_INDEX);
        final EventTrigger eventTrigger = EventTrigger.valueOf(context.getJobDetail().getJobDataMap().getString(KEY_EVENT_TRIGGER));
        CalendarRuntime calendarRuntime = EventStorage.getInstance().getEventCache().get(config);
        if (calendarRuntime == null) {
            throw new JobExecutionException("cannot get runtime for config: " + config, false);
        }
        EventContainer eventContainer = calendarRuntime.getEventMap().get(eventId);
        if (eventContainer == null) {
            throw new JobExecutionException("cannot get event-container for config: " + config + " and eventId: " + eventId, false);
        }
        if (eventContainer.getEventList().size() <= recIndex) {
            throw new JobExecutionException("cannot get recurence-event for config: " + config + " and eventId: " + eventId + " and occurence: " + recIndex, false);
        }
        CalDavEvent event = eventContainer.getEventList().get(recIndex);
        log.info("event {} for: {}", eventTrigger, event.getShortName());
        for (EventNotifier notifier : CalDavLoaderImpl.instance.getEventListenerList()) {
            try {
                if (eventTrigger == EventTrigger.BEGIN) {
                    notifier.eventBegins(event);
                } else if (eventTrigger == EventTrigger.END) {
                    notifier.eventEnds(event);
                } else {
                    throw new IllegalStateException("not implemented event trigger: " + eventTrigger);
                }
            } catch (Exception e) {
                log.error("error while invoking listener", e);
            }
        }
        if (eventTrigger == EventTrigger.END) {
            // if event is ended, remove it from the map
            calendarRuntime.getEventMap().remove(eventContainer.getEventId());
        }
    } catch (Exception e) {
        log.error("error executing event job", e);
        throw new JobExecutionException("error executing event job", e, false);
    }
}
Also used: JobExecutionException(org.quartz.JobExecutionException), EventContainer(org.openhab.io.caldav.internal.EventStorage.EventContainer), CalDavEvent(org.openhab.io.caldav.CalDavEvent), EventNotifier(org.openhab.io.caldav.EventNotifier), CalendarRuntime(org.openhab.io.caldav.internal.EventStorage.CalendarRuntime)
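
Putting the pieces together, a job like EventJob is typically registered with a Quartz scheduler through a JobDetail whose parameters travel in a JobDataMap, and a JobExecutionException constructed with refireImmediately = false (as in all three throws above) tells the scheduler to record the failure without re-running the job. The following standalone sketch shows that wiring with a trivial job of its own; the class names QuartzWiringSketch and SampleJob, the JobDataMap key "config", its value, and the job/trigger identities are illustrative only and not taken from the openHAB code.

import org.quartz.Job;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.Scheduler;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;

public class QuartzWiringSketch {

    // A minimal job that reads its configuration from the JobDataMap and wraps failures
    // in JobExecutionException with refireImmediately = false, like the EventJob above.
    public static class SampleJob implements Job {
        @Override
        public void execute(JobExecutionContext context) throws JobExecutionException {
            try {
                String config = context.getJobDetail().getJobDataMap().getString("config");
                if (config == null) {
                    throw new JobExecutionException("missing 'config' entry in JobDataMap", false);
                }
                // ... do the actual work here ...
            } catch (JobExecutionException e) {
                throw e;
            } catch (Exception e) {
                throw new JobExecutionException("error executing sample job", e, false);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        JobDetail job = JobBuilder.newJob(SampleJob.class)
                .withIdentity("sampleJob", "examples")
                .usingJobData("config", "my-calendar")   // hypothetical key/value
                .build();
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("sampleTrigger", "examples")
                .startNow()
                .build();
        scheduler.scheduleJob(job, trigger);
        scheduler.start();
    }
}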

Aggregations

JobExecutionException (org.quartz.JobExecutionException): 33 usages
SchedulerException (org.quartz.SchedulerException): 9 usages
EmailException (org.apache.commons.mail.EmailException): 6 usages
JobDataMap (org.quartz.JobDataMap): 6 usages
ArrayList (java.util.ArrayList): 5 usages
HashMap (java.util.HashMap): 5 usages
SchedulerContext (org.quartz.SchedulerContext): 4 usages
TimeSeriesResponse (com.linkedin.thirdeye.client.timeseries.TimeSeriesResponse): 3 usages
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 3 usages
ExecutionException (java.util.concurrent.ExecutionException): 3 usages
CamelContext (org.apache.camel.CamelContext): 3 usages
Route (org.apache.camel.Route): 3 usages
HtmlEmail (org.apache.commons.mail.HtmlEmail): 3 usages
ThirdEyeAnomalyConfiguration (com.linkedin.thirdeye.anomaly.ThirdEyeAnomalyConfiguration): 2 usages
MetricDimensionReport (com.linkedin.thirdeye.anomaly.alert.template.pojo.MetricDimensionReport): 2 usages
DataReportHelper (com.linkedin.thirdeye.anomaly.alert.util.DataReportHelper): 2 usages
DimensionKey (com.linkedin.thirdeye.api.DimensionKey): 2 usages
MetricTimeSeries (com.linkedin.thirdeye.api.MetricTimeSeries): 2 usages
TimeGranularity (com.linkedin.thirdeye.api.TimeGranularity): 2 usages
MetricExpression (com.linkedin.thirdeye.client.MetricExpression): 2 usages