
Example 1 with AnomalyDetails

Use of com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomalyDetails in project pinot by linkedin.

From the class AnomaliesResource, method constructAnomalyDetails.

/**
   * Constructs anomaly details from the data fetched by the time series and anomaly calls.
   *
   * @param metricName name of the metric the anomaly was detected on
   * @param dataset dataset the metric belongs to
   * @param datasetConfig dataset configuration, used for date formatting
   * @param mergedAnomaly merged anomaly to describe
   * @param anomalyFunction anomaly function that produced the anomaly
   * @param currentStartTime start of the requested window, inclusive
   * @param currentEndTime end of the requested window, inclusive
   * @param anomalyTimelinesView current and baseline time series for the anomaly window
   * @param timeSeriesDateFormatter formatter for time series bucket dates
   * @param startEndDateFormatterHours formatter for start/end dates at hourly granularity
   * @param startEndDateFormatterDays formatter for start/end dates at daily granularity
   * @param externalUrl external link associated with the anomaly
   * @return the populated AnomalyDetails
   * @throws JSONException
   */
private AnomalyDetails constructAnomalyDetails(String metricName, String dataset, DatasetConfigDTO datasetConfig, MergedAnomalyResultDTO mergedAnomaly, AnomalyFunctionDTO anomalyFunction, long currentStartTime, long currentEndTime, AnomalyTimelinesView anomalyTimelinesView, DateTimeFormatter timeSeriesDateFormatter, DateTimeFormatter startEndDateFormatterHours, DateTimeFormatter startEndDateFormatterDays, String externalUrl) throws JSONException {
    MetricConfigDTO metricConfigDTO = metricConfigDAO.findByMetricAndDataset(metricName, dataset);
    AnomalyDetails anomalyDetails = new AnomalyDetails();
    anomalyDetails.setMetric(metricName);
    anomalyDetails.setDataset(dataset);
    if (metricConfigDTO != null) {
        anomalyDetails.setMetricId(metricConfigDTO.getId());
    }
    // The filter ensures that the returned time series from anomalies function only includes the values that are
    // located inside the request windows (i.e., between currentStartTime and currentEndTime, inclusive).
    List<TimeBucket> timeBuckets = anomalyTimelinesView.getTimeBuckets();
    int timeStartIndex = -1;
    int timeEndIndex = -1;
    for (int i = 0; i < timeBuckets.size(); ++i) {
        long currentTimeStamp = timeBuckets.get(i).getCurrentStart();
        if (timeStartIndex < 0 && currentTimeStamp >= currentStartTime) {
            timeStartIndex = i;
            timeEndIndex = i + 1;
        } else if (currentTimeStamp <= currentEndTime) {
            timeEndIndex = i + 1;
        } else if (currentTimeStamp > currentEndTime) {
            break;
        }
    }
    if (timeStartIndex < 0 || timeEndIndex < 0) {
        timeStartIndex = 0;
        timeEndIndex = 0;
    }
    // Time series data (dates, baseline, and current values) from the time series calls, clipped to the requested window
    List<String> dateValues = getDateFromTimeSeriesObject(timeBuckets.subList(timeStartIndex, timeEndIndex), timeSeriesDateFormatter);
    anomalyDetails.setDates(dateValues);
    anomalyDetails.setCurrentEnd(getFormattedDateTime(currentEndTime, datasetConfig, startEndDateFormatterHours, startEndDateFormatterDays));
    anomalyDetails.setCurrentStart(getFormattedDateTime(currentStartTime, datasetConfig, startEndDateFormatterHours, startEndDateFormatterDays));
    List<String> baselineValues = getDataFromTimeSeriesObject(anomalyTimelinesView.getBaselineValues().subList(timeStartIndex, timeEndIndex));
    anomalyDetails.setBaselineValues(baselineValues);
    List<String> currentValues = getDataFromTimeSeriesObject(anomalyTimelinesView.getCurrentValues().subList(timeStartIndex, timeEndIndex));
    anomalyDetails.setCurrentValues(currentValues);
    // Fields taken from the anomaly function and the merged anomaly
    anomalyDetails.setAnomalyId(mergedAnomaly.getId());
    anomalyDetails.setAnomalyRegionStart(timeSeriesDateFormatter.print(mergedAnomaly.getStartTime()));
    anomalyDetails.setAnomalyRegionEnd(timeSeriesDateFormatter.print(mergedAnomaly.getEndTime()));
    Map<String, String> messageDataMap = getAnomalyMessageDataMap(mergedAnomaly.getMessage());
    anomalyDetails.setCurrent(messageDataMap.get(ANOMALY_CURRENT_VAL_KEY));
    anomalyDetails.setBaseline(messageDataMap.get(ANOMALY_BASELINE_VAL_KEY));
    anomalyDetails.setAnomalyFunctionId(anomalyFunction.getId());
    anomalyDetails.setAnomalyFunctionName(anomalyFunction.getFunctionName());
    anomalyDetails.setAnomalyFunctionType(anomalyFunction.getType());
    anomalyDetails.setAnomalyFunctionProps(anomalyFunction.getProperties());
    anomalyDetails.setAnomalyFunctionDimension(mergedAnomaly.getDimensions().toString());
    if (mergedAnomaly.getFeedback() != null) {
        anomalyDetails.setAnomalyFeedback(AnomalyDetails.getFeedbackStringFromFeedbackType(mergedAnomaly.getFeedback().getFeedbackType()));
    }
    anomalyDetails.setExternalUrl(externalUrl);
    return anomalyDetails;
}
Also used: MetricConfigDTO (com.linkedin.thirdeye.datalayer.dto.MetricConfigDTO), TimeBucket (com.linkedin.thirdeye.dashboard.views.TimeBucket), AnomalyDetails (com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomalyDetails)
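
The index search in this method clips the anomaly time series to the requested window, with both window bounds inclusive and a subList end index that is exclusive. Below is a minimal, self-contained sketch of that clipping logic, using plain epoch-millis longs and hypothetical values in place of TimeBucket objects.

import java.util.Arrays;
import java.util.List;

public class WindowClipSketch {

    public static void main(String[] args) {
        // Hypothetical bucket start times standing in for TimeBucket.getCurrentStart().
        List<Long> bucketStarts = Arrays.asList(0L, 100L, 200L, 300L, 400L);
        // Inclusive window bounds.
        long windowStart = 100L;
        long windowEnd = 300L;
        int startIndex = -1;
        int endIndex = -1;
        for (int i = 0; i < bucketStarts.size(); i++) {
            long ts = bucketStarts.get(i);
            if (startIndex < 0 && ts >= windowStart) {
                startIndex = i;
                endIndex = i + 1;
            } else if (ts <= windowEnd) {
                endIndex = i + 1;
            } else {
                // Past the window; buckets are in ascending order, so stop.
                break;
            }
        }
        if (startIndex < 0 || endIndex < 0) {
            // Nothing inside the window: fall back to an empty sublist.
            startIndex = 0;
            endIndex = 0;
        }
        // subList's end index is exclusive, which is why endIndex is set to i + 1 above.
        System.out.println(bucketStarts.subList(startIndex, endIndex)); // prints [100, 200, 300]
    }
}

In constructAnomalyDetails the same start and end indices are applied to the dates, baseline, and current value lists, so all three stay aligned with the clipped time buckets.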

Example 2 with AnomalyDetails

Use of com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomalyDetails in project pinot by linkedin.

From the class AnomaliesResource, method getAnomalyDetails.

/**
   * Generates the anomaly details for a single merged anomaly.
   *
   * @param mergedAnomaly merged anomaly to describe
   * @param datasetConfig configuration of the anomaly's dataset
   * @param timeSeriesDateFormatter formatter for time series bucket dates
   * @param startEndDateFormatterHours formatter for start/end dates at hourly granularity
   * @param startEndDateFormatterDays formatter for start/end dates at daily granularity
   * @param externalUrl external link associated with the anomaly
   * @return the populated AnomalyDetails, or null if the details could not be constructed
   */
private AnomalyDetails getAnomalyDetails(MergedAnomalyResultDTO mergedAnomaly, DatasetConfigDTO datasetConfig, DateTimeFormatter timeSeriesDateFormatter, DateTimeFormatter startEndDateFormatterHours, DateTimeFormatter startEndDateFormatterDays, String externalUrl) throws Exception {
    String dataset = datasetConfig.getDataset();
    String metricName = mergedAnomaly.getMetric();
    AnomalyFunctionDTO anomalyFunctionSpec = anomalyFunctionDAO.findById(mergedAnomaly.getFunctionId());
    BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
    String aggGranularity = constructAggGranularity(datasetConfig);
    long anomalyStartTime = mergedAnomaly.getStartTime();
    long anomalyEndTime = mergedAnomaly.getEndTime();
    TimeRange range = getTimeseriesOffsetedTimes(anomalyStartTime, anomalyEndTime, datasetConfig);
    long currentStartTime = range.getStart();
    long currentEndTime = range.getEnd();
    DimensionMap dimensions = mergedAnomaly.getDimensions();
    TimeGranularity timeGranularity = Utils.getAggregationTimeGranularity(aggGranularity, anomalyFunctionSpec.getCollection());
    long bucketMillis = timeGranularity.toMillis();
    AnomalyDetails anomalyDetails = null;
    try {
        AnomalyDetectionInputContext adInputContext = TimeBasedAnomalyMerger.fetchDataByDimension(currentStartTime, currentEndTime, dimensions, anomalyFunction, mergedAnomalyResultDAO, overrideConfigDAO, true);
        MetricTimeSeries metricTimeSeries = adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
        // Transform time series with scaling factor
        List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();
        if (CollectionUtils.isNotEmpty(scalingFactors)) {
            Properties properties = anomalyFunction.getProperties();
            MetricTransfer.rescaleMetric(metricTimeSeries, currentStartTime, scalingFactors, anomalyFunctionSpec.getTopicMetric(), properties);
        }
        List<MergedAnomalyResultDTO> knownAnomalies = adInputContext.getKnownMergedAnomalies().get(dimensions);
        // Known anomalies are ignored (the null parameter) because 1. we can reduce users' waiting time and 2. presentation
        // data does not need to be as accurate as the one used for detecting anomalies
        AnomalyTimelinesView anomalyTimelinesView = anomalyFunction.getTimeSeriesView(metricTimeSeries, bucketMillis, anomalyFunctionSpec.getTopicMetric(), currentStartTime, currentEndTime, knownAnomalies);
        anomalyDetails = constructAnomalyDetails(metricName, dataset, datasetConfig, mergedAnomaly, anomalyFunctionSpec, currentStartTime, currentEndTime, anomalyTimelinesView, timeSeriesDateFormatter, startEndDateFormatterHours, startEndDateFormatterDays, externalUrl);
    } catch (Exception e) {
        LOG.error("Exception in constructing anomaly wrapper for anomaly {}", mergedAnomaly.getId(), e);
    }
    return anomalyDetails;
}
Also used: BaseAnomalyFunction (com.linkedin.thirdeye.detector.function.BaseAnomalyFunction), AnomalyDetails (com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomalyDetails), MetricTimeSeries (com.linkedin.thirdeye.api.MetricTimeSeries), ScalingFactor (com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor), AnomalyTimelinesView (com.linkedin.thirdeye.anomaly.views.AnomalyTimelinesView), Properties (java.util.Properties), TimeoutException (java.util.concurrent.TimeoutException), JSONException (org.json.JSONException), IOException (java.io.IOException), ExecutionException (java.util.concurrent.ExecutionException), TimeRange (com.linkedin.thirdeye.api.TimeRange), AnomalyDetectionInputContext (com.linkedin.thirdeye.anomaly.detection.AnomalyDetectionInputContext), MergedAnomalyResultDTO (com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO), TimeGranularity (com.linkedin.thirdeye.api.TimeGranularity), DimensionMap (com.linkedin.thirdeye.api.DimensionMap), AnomalyFunctionDTO (com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO)
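
getAnomalyDetails deliberately isolates per-anomaly failures: any exception while assembling one anomaly's view is logged and the method returns null, so a single bad anomaly cannot fail the whole listing (the caller in Example 3 filters the nulls out). Here is a minimal sketch of that pattern, with a hypothetical loadDetails helper standing in for the real data fetch.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class NullOnFailureSketch {

    // Hypothetical per-item loader: logs and returns null instead of propagating failures.
    static String loadDetails(int id) {
        try {
            if (id % 2 == 0) {
                throw new IllegalStateException("no data for " + id); // simulated failure
            }
            return "details-" + id;
        } catch (Exception e) {
            System.err.println("Skipping item " + id + ": " + e.getMessage());
            return null;
        }
    }

    public static void main(String[] args) {
        List<String> results = new ArrayList<>();
        for (int id : Arrays.asList(1, 2, 3)) {
            String details = loadDetails(id);
            if (details != null) { // the caller drops failed items, as Example 3 does
                results.add(details);
            }
        }
        System.out.println(results); // prints [details-1, details-3]
    }
}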

Example 3 with AnomalyDetails

Use of com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomalyDetails in project pinot by linkedin.

From the class AnomaliesResource, method constructAnomaliesWrapperFromMergedAnomalies.

/**
   * Constructs an AnomaliesWrapper from a list of merged anomalies, sorted by most recent end time and paginated.
   *
   * @param mergedAnomalies merged anomalies to page through
   * @param pageNumber 1-based page to return; out-of-range values are clamped to the valid range
   * @return the populated AnomaliesWrapper
   * @throws ExecutionException
   */
private AnomaliesWrapper constructAnomaliesWrapperFromMergedAnomalies(List<MergedAnomalyResultDTO> mergedAnomalies, int pageNumber) throws ExecutionException {
    AnomaliesWrapper anomaliesWrapper = new AnomaliesWrapper();
    anomaliesWrapper.setTotalAnomalies(mergedAnomalies.size());
    LOG.info("Total anomalies: {}", mergedAnomalies.size());
    // TODO: get page number and page size from client
    int pageSize = DEFAULT_PAGE_SIZE;
    int maxPageNumber = (mergedAnomalies.size() - 1) / pageSize + 1;
    if (pageNumber > maxPageNumber) {
        pageNumber = maxPageNumber;
    }
    if (pageNumber < 1) {
        pageNumber = 1;
    }
    int fromIndex = (pageNumber - 1) * pageSize;
    int toIndex = pageNumber * pageSize;
    if (toIndex > mergedAnomalies.size()) {
        toIndex = mergedAnomalies.size();
    }
    // Show the most recent anomalies first: sort by most recent end time, then by largest id.
    Collections.sort(mergedAnomalies, new MergedAnomalyEndTimeComparator().reversed());
    List<MergedAnomalyResultDTO> displayedAnomalies = mergedAnomalies.subList(fromIndex, toIndex);
    anomaliesWrapper.setNumAnomaliesOnPage(displayedAnomalies.size());
    LOG.info("Page number: {} Page size: {} Num anomalies on page: {}", pageNumber, pageSize, displayedAnomalies.size());
    // for each anomaly, create anomaly details
    List<Future<AnomalyDetails>> anomalyDetailsListFutures = new ArrayList<>();
    for (MergedAnomalyResultDTO mergedAnomaly : displayedAnomalies) {
        Callable<AnomalyDetails> callable = new Callable<AnomalyDetails>() {

            @Override
            public AnomalyDetails call() throws Exception {
                String dataset = mergedAnomaly.getCollection();
                DatasetConfigDTO datasetConfig = CACHE_REGISTRY.getDatasetConfigCache().get(dataset);
                DateTimeFormatter timeSeriesDateFormatter = DateTimeFormat.forPattern(TIME_SERIES_DATE_FORMAT).withZone(Utils.getDataTimeZone(dataset));
                DateTimeFormatter startEndDateFormatterDays = DateTimeFormat.forPattern(START_END_DATE_FORMAT_DAYS).withZone(Utils.getDataTimeZone(dataset));
                DateTimeFormatter startEndDateFormatterHours = DateTimeFormat.forPattern(START_END_DATE_FORMAT_HOURS).withZone(Utils.getDataTimeZone(dataset));
                return getAnomalyDetails(mergedAnomaly, datasetConfig, timeSeriesDateFormatter, startEndDateFormatterHours, startEndDateFormatterDays, getExternalURL(mergedAnomaly));
            }
        };
        anomalyDetailsListFutures.add(threadPool.submit(callable));
    }
    List<AnomalyDetails> anomalyDetailsList = new ArrayList<>();
    for (Future<AnomalyDetails> anomalyDetailsFuture : anomalyDetailsListFutures) {
        try {
            AnomalyDetails anomalyDetails = anomalyDetailsFuture.get(120, TimeUnit.SECONDS);
            if (anomalyDetails != null) {
                anomalyDetailsList.add(anomalyDetails);
            }
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            LOG.error("Exception in getting AnomalyDetails", e);
        }
    }
    anomaliesWrapper.setAnomalyDetailsList(anomalyDetailsList);
    return anomaliesWrapper;
}
Also used: ArrayList (java.util.ArrayList), AnomalyDetails (com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomalyDetails), Callable (java.util.concurrent.Callable), DatasetConfigDTO (com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO), MergedAnomalyResultDTO (com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO), Future (java.util.concurrent.Future), AnomaliesWrapper (com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomaliesWrapper), ExecutionException (java.util.concurrent.ExecutionException), DateTimeFormatter (org.joda.time.format.DateTimeFormatter), TimeoutException (java.util.concurrent.TimeoutException)
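
The paging logic in this method clamps the requested page into the range [1, maxPageNumber] and converts it into sublist bounds before the per-anomaly futures are submitted. Below is a standalone sketch of the same index arithmetic, with hypothetical totals and page sizes.

import java.util.Arrays;

public class PagingSketch {

    // Same clamping and index math as constructAnomaliesWrapperFromMergedAnomalies.
    static int[] pageBounds(int totalAnomalies, int pageSize, int pageNumber) {
        int maxPageNumber = (totalAnomalies - 1) / pageSize + 1;
        if (pageNumber > maxPageNumber) {
            pageNumber = maxPageNumber;
        }
        if (pageNumber < 1) {
            pageNumber = 1;
        }
        int fromIndex = (pageNumber - 1) * pageSize;
        int toIndex = Math.min(pageNumber * pageSize, totalAnomalies);
        return new int[] { fromIndex, toIndex };
    }

    public static void main(String[] args) {
        // 23 anomalies, 10 per page: page 3 covers indices [20, 23); out-of-range pages are clamped.
        System.out.println(Arrays.toString(pageBounds(23, 10, 3)));  // prints [20, 23]
        System.out.println(Arrays.toString(pageBounds(23, 10, 99))); // prints [20, 23]
        System.out.println(Arrays.toString(pageBounds(23, 10, 0)));  // prints [0, 10]
    }
}

Because the anomalies are sorted by most recent end time before the sublist is taken, page 1 always holds the newest anomalies.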

Aggregations

AnomalyDetails (com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomalyDetails): 3 usages
MergedAnomalyResultDTO (com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO): 2 usages
ExecutionException (java.util.concurrent.ExecutionException): 2 usages
TimeoutException (java.util.concurrent.TimeoutException): 2 usages
AnomalyDetectionInputContext (com.linkedin.thirdeye.anomaly.detection.AnomalyDetectionInputContext): 1 usage
AnomalyTimelinesView (com.linkedin.thirdeye.anomaly.views.AnomalyTimelinesView): 1 usage
DimensionMap (com.linkedin.thirdeye.api.DimensionMap): 1 usage
MetricTimeSeries (com.linkedin.thirdeye.api.MetricTimeSeries): 1 usage
TimeGranularity (com.linkedin.thirdeye.api.TimeGranularity): 1 usage
TimeRange (com.linkedin.thirdeye.api.TimeRange): 1 usage
AnomaliesWrapper (com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomaliesWrapper): 1 usage
TimeBucket (com.linkedin.thirdeye.dashboard.views.TimeBucket): 1 usage
AnomalyFunctionDTO (com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO): 1 usage
DatasetConfigDTO (com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO): 1 usage
MetricConfigDTO (com.linkedin.thirdeye.datalayer.dto.MetricConfigDTO): 1 usage
BaseAnomalyFunction (com.linkedin.thirdeye.detector.function.BaseAnomalyFunction): 1 usage
ScalingFactor (com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor): 1 usage
IOException (java.io.IOException): 1 usage
ArrayList (java.util.ArrayList): 1 usage
Properties (java.util.Properties): 1 usage