
Example 1 with ScalingFactor

Use of com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor in project pinot by linkedin.

In the class AnomalyMergeExecutor, the method updateMergedAnomalyWeight:

/**
   * Uses a function-specific method to re-compute the weight of the merged anomaly.
   *
   * @param anomalyMergedResult the merged anomaly to be updated
   * @param mergeConfig the merge configuration that was applied when merging the anomaly
   * @throws Exception if an error occurs when retrieving the time series for calculating the weight
   */
private void updateMergedAnomalyWeight(MergedAnomalyResultDTO anomalyMergedResult, AnomalyMergeConfig mergeConfig) throws Exception {
    AnomalyFunctionDTO anomalyFunctionSpec = anomalyMergedResult.getFunction();
    BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
    List<Pair<Long, Long>> startEndTimeRanges = anomalyFunction.getDataRangeIntervals(anomalyMergedResult.getStartTime(), anomalyMergedResult.getEndTime());
    TimeGranularity timeGranularity = new TimeGranularity(anomalyFunctionSpec.getBucketSize(), anomalyFunctionSpec.getBucketUnit());
    MetricTimeSeries metricTimeSeries = TimeSeriesUtil.getTimeSeriesByDimension(anomalyFunctionSpec, startEndTimeRanges, anomalyMergedResult.getDimensions(), timeGranularity, false);
    if (metricTimeSeries != null) {
        DateTime windowStart = new DateTime(anomalyMergedResult.getStartTime());
        DateTime windowEnd = new DateTime(anomalyMergedResult.getEndTime());
        List<MergedAnomalyResultDTO> knownAnomalies = Collections.emptyList();
        // Retrieve history merged anomalies
        if (anomalyFunction.useHistoryAnomaly()) {
            switch(mergeConfig.getMergeStrategy()) {
                case FUNCTION:
                    knownAnomalies = getHistoryMergedAnomalies(anomalyFunction, windowStart.getMillis(), windowEnd.getMillis());
                    break;
                case FUNCTION_DIMENSIONS:
                    knownAnomalies = getHistoryMergedAnomalies(anomalyFunction, windowStart.getMillis(), windowEnd.getMillis(), anomalyMergedResult.getDimensions());
                    break;
                default:
                    throw new IllegalArgumentException("Merge strategy " + mergeConfig.getMergeStrategy() + " not supported");
            }
            if (knownAnomalies.size() > 0) {
                LOG.info("Found {} history anomalies for computing the weight of current merged anomaly.", knownAnomalies.size());
                LOG.info("Checking if any known anomalies overlap with the monitoring window of anomaly detection, which could result in unwanted holes in current values.");
                AnomalyUtils.logAnomaliesOverlapWithWindow(windowStart, windowEnd, knownAnomalies);
            }
        }
        // Transform Time Series
        List<ScalingFactor> scalingFactors = OverrideConfigHelper.getTimeSeriesScalingFactors(overrideConfigDAO, anomalyFunctionSpec.getCollection(), anomalyFunctionSpec.getTopicMetric(), anomalyFunctionSpec.getId(), anomalyFunction.getDataRangeIntervals(windowStart.getMillis(), windowEnd.getMillis()));
        if (CollectionUtils.isNotEmpty(scalingFactors)) {
            Properties properties = anomalyFunction.getProperties();
            MetricTransfer.rescaleMetric(metricTimeSeries, windowStart.getMillis(), scalingFactors, anomalyFunctionSpec.getTopicMetric(), properties);
        }
        anomalyFunction.updateMergedAnomalyInfo(anomalyMergedResult, metricTimeSeries, windowStart, windowEnd, knownAnomalies);
    }
}
Also used: BaseAnomalyFunction(com.linkedin.thirdeye.detector.function.BaseAnomalyFunction) MetricTimeSeries(com.linkedin.thirdeye.api.MetricTimeSeries) ScalingFactor(com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor) Properties(java.util.Properties) DateTime(org.joda.time.DateTime) MergedAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO) TimeGranularity(com.linkedin.thirdeye.api.TimeGranularity) AnomalyFunctionDTO(com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO) Pair(com.linkedin.pinot.pql.parsers.utils.Pair)
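
Across the examples on this page, the scaling-factor handling follows one pattern: fetch the ScalingFactor list for the function's metric and data range, then rescale the fetched MetricTimeSeries before it is consumed. Below is a minimal sketch of that pattern using only the APIs visible above; the window bounds and the 1.2 factor are hypothetical values, and metricTimeSeries, anomalyFunction, and anomalyFunctionSpec are assumed to come from the surrounding method, as in updateMergedAnomalyWeight.

// Minimal sketch, assuming the ScalingFactor constructor and the
// MetricTransfer.rescaleMetric signature shown in the examples on this page.
// The window bounds and the 1.2 factor are hypothetical values.
long windowStartMillis = 1483228800000L;
long windowEndMillis = windowStartMillis + TimeUnit.DAYS.toMillis(1);
// Scale the metric by 1.2x inside the window.
List<ScalingFactor> scalingFactors =
    Collections.singletonList(new ScalingFactor(windowStartMillis, windowEndMillis, 1.2));
// rescaleMetric appears to mutate the time series in place: none of the
// call sites on this page use a return value.
MetricTransfer.rescaleMetric(metricTimeSeries, windowStartMillis, scalingFactors,
    anomalyFunctionSpec.getTopicMetric(), anomalyFunction.getProperties());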

Example 2 with ScalingFactor

Use of com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor in project pinot by linkedin.

In the class TimeBasedAnomalyMerger, the method fetchDataByDimension:

/**
   * Fetches the time series, known merged anomalies, and scaling factors for the specified dimension. Note that a
   * scaling factor carries no dimension information, so all scaling factors in the specified time range are retrieved.
   *
   * @param windowStartTime the start time for retrieving the data
   * @param windowEndTime the end time for retrieving the data
   * @param dimensions the dimension of the data
   * @param anomalyFunction the anomaly function that produces the anomaly
   * @param mergedResultDAO DAO for merged anomalies
   * @param overrideConfigDAO DAO for override configuration
   * @param endTimeInclusive set to true if the end time should be inclusive; mainly used by queries from the UI
   * @return an anomaly detection input context that contains all the retrieved data
   * @throws Exception if it fails to retrieve the time series from the DB.
   */
public static AnomalyDetectionInputContext fetchDataByDimension(long windowStartTime, long windowEndTime, DimensionMap dimensions, BaseAnomalyFunction anomalyFunction, MergedAnomalyResultManager mergedResultDAO, OverrideConfigManager overrideConfigDAO, boolean endTimeInclusive) throws Exception {
    AnomalyFunctionDTO functionSpec = anomalyFunction.getSpec();
    List<Pair<Long, Long>> startEndTimeRanges = anomalyFunction.getDataRangeIntervals(windowStartTime, windowEndTime);
    TimeGranularity timeGranularity = new TimeGranularity(functionSpec.getBucketSize(), functionSpec.getBucketUnit());
    AnomalyDetectionInputContext adInputContext = new AnomalyDetectionInputContext();
    // Retrieve Time Series
    MetricTimeSeries metricTimeSeries = TimeSeriesUtil.getTimeSeriesByDimension(functionSpec, startEndTimeRanges, dimensions, timeGranularity, endTimeInclusive);
    Map<DimensionMap, MetricTimeSeries> metricTimeSeriesMap = new HashMap<>();
    metricTimeSeriesMap.put(dimensions, metricTimeSeries);
    adInputContext.setDimensionKeyMetricTimeSeriesMap(metricTimeSeriesMap);
    // Retrieve historical anomaly
    if (anomalyFunction.useHistoryAnomaly()) {
        List<MergedAnomalyResultDTO> knownAnomalies = getBaselineKnownAnomaliesByDimension(anomalyFunction, windowStartTime, windowEndTime, dimensions, mergedResultDAO);
        ListMultimap<DimensionMap, MergedAnomalyResultDTO> mergedAnomalyMap = ArrayListMultimap.create();
        mergedAnomalyMap.putAll(dimensions, knownAnomalies);
        adInputContext.setKnownMergedAnomalies(mergedAnomalyMap);
        if (knownAnomalies.size() > 0) {
            LOG.info("Found {} history anomalies for computing the weight of current merged anomaly.", knownAnomalies.size());
        }
    }
    // Retrieve scaling factor
    List<ScalingFactor> scalingFactors = OverrideConfigHelper.getTimeSeriesScalingFactors(overrideConfigDAO, functionSpec.getCollection(), functionSpec.getTopicMetric(), functionSpec.getId(), anomalyFunction.getDataRangeIntervals(windowStartTime, windowEndTime));
    adInputContext.setScalingFactors(scalingFactors);
    return adInputContext;
}
Also used: HashMap(java.util.HashMap) MetricTimeSeries(com.linkedin.thirdeye.api.MetricTimeSeries) ScalingFactor(com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor) AnomalyDetectionInputContext(com.linkedin.thirdeye.anomaly.detection.AnomalyDetectionInputContext) MergedAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO) TimeGranularity(com.linkedin.thirdeye.api.TimeGranularity) DimensionMap(com.linkedin.thirdeye.api.DimensionMap) AnomalyFunctionDTO(com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO) Pair(com.linkedin.pinot.pql.parsers.utils.Pair)
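
For context, here is a hedged usage sketch of fetchDataByDimension, which is public and static per the signature above. The window bounds and the empty DimensionMap are hypothetical (assuming DimensionMap has a default constructor), anomalyFunction is assumed to be a BaseAnomalyFunction built via the function factory as in the other examples, and the two DAOs are assumed to be wired in from the application's DAO registry.

// Sketch only: the values below are hypothetical, and mergedResultDAO and
// overrideConfigDAO are assumed to come from the application's DAO registry.
DimensionMap dimensions = new DimensionMap();
long windowStartTime = 1483228800000L;
long windowEndTime = windowStartTime + TimeUnit.DAYS.toMillis(1);
AnomalyDetectionInputContext adInputContext =
    TimeBasedAnomalyMerger.fetchDataByDimension(windowStartTime, windowEndTime, dimensions,
        anomalyFunction, mergedResultDAO, overrideConfigDAO, false);
// The returned context bundles everything the callers above need: the time
// series keyed by dimension, known merged anomalies, and the scaling factors.
MetricTimeSeries metricTimeSeries =
    adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();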

Example 3 with ScalingFactor

Use of com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor in project pinot by linkedin.

In the class TimeBasedAnomalyMerger, the method computeMergedAnomalyInfo:

/**
   * Uses a function-specific method to re-compute the weight of the merged anomaly.
   *
   * @param mergedAnomalies the merged anomaly to be updated
   * @param mergeConfig the merge configuration that was applied when merging the anomaly
   * @throws Exception if an error occurs when retrieving the time series for calculating the weight
   */
private void computeMergedAnomalyInfo(MergedAnomalyResultDTO mergedAnomalies, AnomalyMergeConfig mergeConfig) throws Exception {
    AnomalyFunctionDTO anomalyFunctionSpec = mergedAnomalies.getFunction();
    BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
    long windowStartMillis = mergedAnomalies.getStartTime();
    long windowEndMillis = mergedAnomalies.getEndTime();
    DimensionMap dimensions = mergedAnomalies.getDimensions();
    AnomalyDetectionInputContext adInputContext = fetchDataByDimension(windowStartMillis, windowEndMillis, dimensions, anomalyFunction, mergedResultDAO, overrideConfigDAO, false);
    MetricTimeSeries metricTimeSeries = adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
    if (metricTimeSeries != null) {
        List<MergedAnomalyResultDTO> knownAnomalies = adInputContext.getKnownMergedAnomalies().get(dimensions);
        // Transform time series with scaling factor
        List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();
        if (CollectionUtils.isNotEmpty(scalingFactors)) {
            Properties properties = anomalyFunction.getProperties();
            MetricTransfer.rescaleMetric(metricTimeSeries, windowStartMillis, scalingFactors, anomalyFunctionSpec.getTopicMetric(), properties);
        }
        DateTime windowStart = new DateTime(windowStartMillis);
        DateTime windowEnd = new DateTime(windowEndMillis);
        anomalyFunction.updateMergedAnomalyInfo(mergedAnomalies, metricTimeSeries, windowStart, windowEnd, knownAnomalies);
    }
}
Also used: BaseAnomalyFunction(com.linkedin.thirdeye.detector.function.BaseAnomalyFunction) AnomalyDetectionInputContext(com.linkedin.thirdeye.anomaly.detection.AnomalyDetectionInputContext) MergedAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO) MetricTimeSeries(com.linkedin.thirdeye.api.MetricTimeSeries) ScalingFactor(com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor) DimensionMap(com.linkedin.thirdeye.api.DimensionMap) AnomalyFunctionDTO(com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO) Properties(java.util.Properties) DateTime(org.joda.time.DateTime)

Example 4 with ScalingFactor

Use of com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor in project pinot by linkedin.

In the class DetectionTaskRunner, the method runAnalyze:

private List<RawAnomalyResultDTO> runAnalyze(DateTime windowStart, DateTime windowEnd, AnomalyDetectionInputContext anomalyDetectionInputContext, DimensionMap dimensionMap) {
    String metricName = anomalyFunction.getSpec().getTopicMetric();
    MetricTimeSeries metricTimeSeries = anomalyDetectionInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensionMap);
    // Get current entry's knownMergedAnomalies, which should have the same explored dimensions
    List<MergedAnomalyResultDTO> knownMergedAnomaliesOfAnEntry = anomalyDetectionInputContext.getKnownMergedAnomalies().get(dimensionMap);
    List<MergedAnomalyResultDTO> historyMergedAnomalies;
    if (anomalyFunction.useHistoryAnomaly()) {
        historyMergedAnomalies = retainHistoryMergedAnomalies(windowStart.getMillis(), knownMergedAnomaliesOfAnEntry);
    } else {
        historyMergedAnomalies = Collections.emptyList();
    }
    LOG.info("Analyzing anomaly function with explored dimensions: {}, windowStart: {}, windowEnd: {}", dimensionMap, windowStart, windowEnd);
    AnomalyUtils.logAnomaliesOverlapWithWindow(windowStart, windowEnd, historyMergedAnomalies);
    List<RawAnomalyResultDTO> resultsOfAnEntry = Collections.emptyList();
    try {
        // Run algorithm
        // Scaling time series according to the scaling factor
        List<ScalingFactor> scalingFactors = anomalyDetectionInputContext.getScalingFactors();
        if (CollectionUtils.isNotEmpty(scalingFactors)) {
            Properties properties = anomalyFunction.getProperties();
            MetricTransfer.rescaleMetric(metricTimeSeries, windowStart.getMillis(), scalingFactors, metricName, properties);
        }
        resultsOfAnEntry = anomalyFunction.analyze(dimensionMap, metricTimeSeries, windowStart, windowEnd, historyMergedAnomalies);
    } catch (Exception e) {
        LOG.error("Could not compute for {}", dimensionMap, e);
    }
    // Remove detected anomalies that have existed in database
    if (CollectionUtils.isNotEmpty(resultsOfAnEntry)) {
        List<RawAnomalyResultDTO> existingRawAnomaliesOfAnEntry = anomalyDetectionInputContext.getExistingRawAnomalies().get(dimensionMap);
        resultsOfAnEntry = removeFromExistingRawAnomalies(resultsOfAnEntry, existingRawAnomaliesOfAnEntry);
    }
    if (CollectionUtils.isNotEmpty(resultsOfAnEntry)) {
        List<MergedAnomalyResultDTO> existingMergedAnomalies = retainExistingMergedAnomalies(windowStart.getMillis(), windowEnd.getMillis(), knownMergedAnomaliesOfAnEntry);
        resultsOfAnEntry = removeFromExistingMergedAnomalies(resultsOfAnEntry, existingMergedAnomalies);
    }
    return resultsOfAnEntry;
}
Also used: RawAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO) MergedAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO) MetricTimeSeries(com.linkedin.thirdeye.api.MetricTimeSeries) ScalingFactor(com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor) Properties(java.util.Properties) NullArgumentException(org.apache.commons.lang.NullArgumentException) ExecutionException(java.util.concurrent.ExecutionException) JobExecutionException(org.quartz.JobExecutionException)

Example 5 with ScalingFactor

Use of com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor in project pinot by linkedin.

In the class OverrideConfigHelper, the method convertToScalingFactors:

/**
   * Converts a list of OverrideConfigDTO to a list of scaling factors, where each override
   * config is filtered against the given target level.
   *
   * @param overrideConfigDTOs the list of OverrideConfigDTO
   * @param timeSereisTargetLevel the filtering rule that decides which OverrideConfigDTOs apply
   * @return a list of scaling factors
   */
public static List<ScalingFactor> convertToScalingFactors(List<OverrideConfigDTO> overrideConfigDTOs, Map<String, String> timeSereisTargetLevel) {
    List<ScalingFactor> results = new ArrayList<>();
    for (OverrideConfigDTO overrideConfigDTO : overrideConfigDTOs) {
        if (OverrideConfigHelper.isEnabled(timeSereisTargetLevel, overrideConfigDTO)) {
            long startTime = overrideConfigDTO.getStartTime();
            long endTime = overrideConfigDTO.getEndTime();
            if (MapUtils.isNotEmpty(overrideConfigDTO.getOverrideProperties())) {
                try {
                    double scalingFactor = Double.parseDouble(overrideConfigDTO.getOverrideProperties().get(ScalingFactor.SCALING_FACTOR));
                    ScalingFactor sf = new ScalingFactor(startTime, endTime, scalingFactor);
                    results.add(sf);
                } catch (Exception e) {
                    LOG.warn("Failed to parse scaling factor from override config:{}, Exception: {}", overrideConfigDTO, e);
                }
            } else {
                LOG.warn("Unable to parse scaling factor due to empty override properties. Config:{}", overrideConfigDTO);
            }
        }
    }
    return results;
}
Also used: OverrideConfigDTO(com.linkedin.thirdeye.datalayer.dto.OverrideConfigDTO) ArrayList(java.util.ArrayList) ScalingFactor(com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor)
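
To make the expected input concrete, here is a hedged sketch of an override config that this conversion would accept. The OverrideConfigDTO setters are assumed to mirror the getters used above (standard DTO convention), the times and the 1.2 factor are hypothetical, and passing an empty target-level map assumes isEnabled treats it as match-all.

// Sketch only: setters are assumed from the getters used above; all values
// are hypothetical.
OverrideConfigDTO override = new OverrideConfigDTO();
override.setStartTime(1483228800000L);
override.setEndTime(1483228800000L + TimeUnit.DAYS.toMillis(1));
// convertToScalingFactors reads the factor from the override properties
// under the key ScalingFactor.SCALING_FACTOR, as shown above.
Map<String, String> overrideProperties = new HashMap<>();
overrideProperties.put(ScalingFactor.SCALING_FACTOR, "1.2");
override.setOverrideProperties(overrideProperties);
// Assuming the empty target-level map passes the isEnabled filter, this
// yields a single ScalingFactor(startTime, endTime, 1.2).
List<ScalingFactor> factors = OverrideConfigHelper.convertToScalingFactors(
    Collections.singletonList(override), Collections.emptyMap());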

Aggregations

ScalingFactor (com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor): 8 uses
MetricTimeSeries (com.linkedin.thirdeye.api.MetricTimeSeries): 7 uses
MergedAnomalyResultDTO (com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO): 7 uses
DimensionMap (com.linkedin.thirdeye.api.DimensionMap): 5 uses
AnomalyFunctionDTO (com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO): 5 uses
Properties (java.util.Properties): 5 uses
AnomalyDetectionInputContext (com.linkedin.thirdeye.anomaly.detection.AnomalyDetectionInputContext): 4 uses
TimeGranularity (com.linkedin.thirdeye.api.TimeGranularity): 4 uses
BaseAnomalyFunction (com.linkedin.thirdeye.detector.function.BaseAnomalyFunction): 4 uses
Pair (com.linkedin.pinot.pql.parsers.utils.Pair): 3 uses
AnomalyTimelinesView (com.linkedin.thirdeye.anomaly.views.AnomalyTimelinesView): 2 uses
RawAnomalyResultDTO (com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO): 2 uses
ArrayList (java.util.ArrayList): 2 uses
HashMap (java.util.HashMap): 2 uses
ExecutionException (java.util.concurrent.ExecutionException): 2 uses
DateTime (org.joda.time.DateTime): 2 uses
DimensionKey (com.linkedin.thirdeye.api.DimensionKey): 1 use
TimeRange (com.linkedin.thirdeye.api.TimeRange): 1 use
AnomalyDetails (com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomalyDetails): 1 use
TimeBucket (com.linkedin.thirdeye.dashboard.views.TimeBucket): 1 use