Use of com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext in project pinot by linkedin.
The class BackwardAnomalyFunctionUtils, method buildAnomalyDetectionContext:
/**
 * Returns an anomaly detection context that is constructed from the given information.
 *
 * @param anomalyFunction the anomaly function for anomaly detection.
 * @param timeSeries the given time series.
 * @param metric the metric name of the given time series.
 * @param exploredDimensions the dimension map of the given time series.
 * @param bucketSize the size of a bucket, expressed in {@code bucketUnit} units.
 * @param bucketUnit the time unit of {@code bucketSize}.
 * @param windowStart the start of the interval of the time series.
 * @param windowEnd the end of the interval of the time series.
 *
 * @return an anomaly detection context from the given information.
 */
public static AnomalyDetectionContext buildAnomalyDetectionContext(AnomalyDetectionFunction anomalyFunction, MetricTimeSeries timeSeries, String metric, DimensionMap exploredDimensions, int bucketSize, TimeUnit bucketUnit, DateTime windowStart, DateTime windowEnd) {
// Create the anomaly detection context for the new modularized anomaly function
AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
anomalyDetectionContext.setBucketSizeInMS(AnomalyDetectionUtils.getBucketInMillis(bucketSize, bucketUnit));
anomalyDetectionContext.setAnomalyDetectionFunction(anomalyFunction);
// Construct TimeSeriesKey from the metric name and its dimension map
TimeSeriesKey timeSeriesKey = new TimeSeriesKey();
timeSeriesKey.setDimensionMap(exploredDimensions);
timeSeriesKey.setMetricName(metric);
anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
// Split time series to observed time series and baselines for each metric
for (String metricName : anomalyFunction.getSpec().getMetrics()) {
List<Interval> intervals = anomalyFunction.getTimeSeriesIntervals(windowStart.getMillis(), windowEnd.getMillis());
List<TimeSeries> timeSeriesList = BackwardAnomalyFunctionUtils.splitSetsOfTimeSeries(timeSeries, metricName, intervals);
// The first entry is the observed (current) time series; the rest are baselines.
// List.remove(int) returns the removed element, so get(0) followed by remove(0)
// collapses into a single call.
anomalyDetectionContext.setCurrent(metricName, timeSeriesList.remove(0));
anomalyDetectionContext.setBaselines(metricName, timeSeriesList);
}
return anomalyDetectionContext;
}
Use of com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext in project pinot by linkedin.
The class AbstractModularizedAnomalyFunction, method getTimeSeriesView:
// TODO: Generate time series view using ViewModel
@Override
public AnomalyTimelinesView getTimeSeriesView(MetricTimeSeries timeSeries, long bucketMillis, String metric, long viewWindowStartTime, long viewWindowEndTime, List<MergedAnomalyResultDTO> knownAnomalies) {
// Build a detection context over the view window; dimensions are not needed here (null).
AnomalyDetectionContext context = BackwardAnomalyFunctionUtils.buildAnomalyDetectionContext(this, timeSeries, spec.getTopicMetric(), null, spec.getBucketSize(), spec.getBucketUnit(), new DateTime(viewWindowStartTime), new DateTime(viewWindowEndTime));
String topicMetric = context.getAnomalyDetectionFunction().getSpec().getTopicMetric();
this.transformAndPredictTimeSeries(topicMetric, context);
// Observed series after transformation, and the expected series produced by the trained model
TimeSeries observed = context.getTransformedCurrent(topicMetric);
TimeSeries expected = ((ExpectedTimeSeriesPredictionModel) context.getTrainedPredictionModel(topicMetric)).getExpectedTimeSeries();
long expectedStart = expected.getTimeSeriesInterval().getStartMillis();
// Assemble the AnomalyTimelinesView bucket by bucket; missing timestamps default to 0.
AnomalyTimelinesView view = new AnomalyTimelinesView();
int bucketCount = (int) ((viewWindowEndTime - viewWindowStartTime) / bucketMillis);
for (int bucket = 0; bucket < bucketCount; ++bucket) {
long currentStart = viewWindowStartTime + bucket * bucketMillis;
long baselineStart = expectedStart + bucket * bucketMillis;
double currentValue = observed.hasTimestamp(currentStart) ? observed.get(currentStart) : 0d;
double baselineValue = expected.hasTimestamp(baselineStart) ? expected.get(baselineStart) : 0d;
view.addTimeBuckets(new TimeBucket(currentStart, currentStart + bucketMillis, baselineStart, baselineStart + bucketMillis));
view.addCurrentValues(currentValue);
view.addBaselineValues(baselineValue);
}
return view;
}
Use of com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext in project pinot by linkedin.
The class TestWeekOverWeekRuleFunction, method analyzeWoW:
@Test(dataProvider = "timeSeriesDataProvider")
public void analyzeWoW(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
// Configure the function for a plain week-over-week baseline with a -0.2 change threshold
properties.put(WeekOverWeekRuleFunction.BASELINE, "w/w");
properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "-0.2");
AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
functionSpec.setMetric(mainMetric);
functionSpec.setProperties(toString(properties));
WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
function.init(functionSpec);
// Populate the detection context: observed series plus a single one-week-back baseline
AnomalyDetectionContext context = new AnomalyDetectionContext();
context.setBucketSizeInMS(bucketSizeInMs);
context.setAnomalyDetectionFunction(function);
context.setCurrent(mainMetric, observedTimeSeries);
List<TimeSeries> singleBaseline = new ArrayList<>();
singleBaseline.add(baselines.get(0));
context.setBaselines(mainMetric, singleBaseline);
context.setTimeSeriesKey(timeSeriesKey);
// Run detection and compare the raw anomalies against the expected WoW results
compareWoWRawAnomalies(function.analyze(context));
// The data model must request the observed window plus the same window shifted back one week
List<Interval> expectedDataRanges = new ArrayList<>();
expectedDataRanges.add(new Interval(observedStartTime, observedStartTime + bucketMillis * 5));
expectedDataRanges.add(new Interval(observedStartTime - oneWeekInMillis, observedStartTime + bucketMillis * 5 - oneWeekInMillis));
List<Interval> actualDataRanges = function.getDataModel().getAllDataIntervals(observedStartTime, observedStartTime + bucketMillis * 5);
Assert.assertEquals(actualDataRanges, expectedDataRanges);
}
Use of com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext in project pinot by linkedin.
The class TestWeekOverWeekRuleFunction, method recomputeMergedAnomalyWeight:
/**
 * Verifies that {@code updateMergedAnomalyInfo} recomputes a merged anomaly's weight,
 * average current/baseline values, and score from the transformed observed and baseline
 * time series supplied by the data provider.
 */
@Test(dataProvider = "timeSeriesDataProvider")
public void recomputeMergedAnomalyWeight(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
// Expected RawAnomalies without smoothing
List<RawAnomalyResultDTO> expectedRawAnomalies = new ArrayList<>();
RawAnomalyResultDTO rawAnomaly1 = new RawAnomalyResultDTO();
rawAnomaly1.setStartTime(observedStartTime + bucketMillis * 2);
rawAnomaly1.setEndTime(observedStartTime + bucketMillis * 3);
rawAnomaly1.setWeight(0.3d);
rawAnomaly1.setScore(15d);
expectedRawAnomalies.add(rawAnomaly1);
RawAnomalyResultDTO rawAnomaly2 = new RawAnomalyResultDTO();
rawAnomaly2.setStartTime(observedStartTime + bucketMillis * 3);
rawAnomaly2.setEndTime(observedStartTime + bucketMillis * 4);
rawAnomaly2.setWeight(0.22727272727272727);
rawAnomaly2.setScore(15d);
expectedRawAnomalies.add(rawAnomaly2);
AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
// Append properties for anomaly function specific setting
properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
// Create anomaly function spec
AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
functionSpec.setMetric(mainMetric);
functionSpec.setProperties(toString(properties));
WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
function.init(functionSpec);
anomalyDetectionContext.setAnomalyDetectionFunction(function);
anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
anomalyDetectionContext.setBaselines(mainMetric, baselines);
anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
// Build a merged anomaly spanning both expected raw anomalies, then let the function
// recompute its weight/score from the detection context
MergedAnomalyResultDTO mergedAnomaly = new MergedAnomalyResultDTO();
mergedAnomaly.setStartTime(expectedRawAnomalies.get(0).getStartTime());
mergedAnomaly.setEndTime(expectedRawAnomalies.get(1).getEndTime());
mergedAnomaly.setAnomalyResults(expectedRawAnomalies);
function.updateMergedAnomalyInfo(anomalyDetectionContext, mergedAnomaly);
// Test weight; weight is the percentage change between the sums of observed values and
// expected values, respectively. Note that expected values are generated by the trained model,
// which takes as input one or many baseline time series.
final long oneWeekInMillis = TimeUnit.DAYS.toMillis(7);
double observedTotal = 0d;
double baselineTotal = 0d;
int bucketCount = 0;
Interval interval = new Interval(mergedAnomaly.getStartTime(), mergedAnomaly.getEndTime());
TimeSeries observedTS = anomalyDetectionContext.getTransformedCurrent(mainMetric);
List<TimeSeries> baselineTSs = anomalyDetectionContext.getTransformedBaselines(mainMetric);
// Sum observed values inside the anomaly window; each baseline contributes the value at the
// timestamp shifted back i+1 weeks (w/2wAvg looks one and two weeks back)
for (long timestamp : observedTS.timestampSet()) {
if (interval.contains(timestamp)) {
++bucketCount;
observedTotal += observedTS.get(timestamp);
for (int i = 0; i < baselineTSs.size(); ++i) {
TimeSeries baselineTS = baselineTSs.get(i);
long baseTimeStamp = timestamp - oneWeekInMillis * (i + 1);
baselineTotal += baselineTS.get(baseTimeStamp);
}
}
}
// Average across baselines to get the w/2wAvg expected total
baselineTotal /= baselineTSs.size();
// Compare anomaly weight, avg. current, avg. baseline, score, etc
double expectedWeight = (observedTotal - baselineTotal) / baselineTotal;
Assert.assertEquals(mergedAnomaly.getWeight(), expectedWeight, EPSILON);
double avgCurrent = observedTotal / bucketCount;
Assert.assertEquals(mergedAnomaly.getAvgCurrentVal(), avgCurrent, EPSILON);
double avgBaseline = baselineTotal / bucketCount;
Assert.assertEquals(mergedAnomaly.getAvgBaselineVal(), avgBaseline, EPSILON);
// Test Score; score is the average of all raw anomalies' score
double expectedScore = 0d;
for (RawAnomalyResultDTO rawAnomaly : expectedRawAnomalies) {
expectedScore += rawAnomaly.getScore();
}
expectedScore /= expectedRawAnomalies.size();
Assert.assertEquals(mergedAnomaly.getScore(), expectedScore, EPSILON);
}
Use of com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext in project pinot by linkedin.
The class TestWeekOverWeekRuleFunction, method analyzeWo2WAvg:
/**
 * Verifies that {@code analyze} with a "w/2wAvg" baseline produces the expected raw anomalies
 * and that the data model requests the observed window plus the same window shifted back one
 * and two weeks.
 */
@Test(dataProvider = "timeSeriesDataProvider")
public void analyzeWo2WAvg(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
// NOTE(review): the original method built a local expectedRawAnomalies list that was never
// used — the actual comparison happens inside compareWo2WAvgRawAnomalies — so that dead code
// has been removed.
AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
// Append properties for anomaly function specific setting: average of the one- and
// two-week-back baselines, with a 0.2 change threshold
properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
// Create anomaly function spec
AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
functionSpec.setMetric(mainMetric);
functionSpec.setProperties(toString(properties));
WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
function.init(functionSpec);
anomalyDetectionContext.setAnomalyDetectionFunction(function);
anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
anomalyDetectionContext.setBaselines(mainMetric, baselines);
anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
// Run detection and compare against the expected w/2wAvg raw anomalies
List<RawAnomalyResultDTO> rawAnomalyResults = function.analyze(anomalyDetectionContext);
compareWo2WAvgRawAnomalies(rawAnomalyResults);
// Test data model: observed window plus the same window shifted back one and two weeks
List<Interval> expectedDataRanges = new ArrayList<>();
expectedDataRanges.add(new Interval(observedStartTime, observedStartTime + bucketMillis * 5));
expectedDataRanges.add(new Interval(observedStartTime - oneWeekInMillis, observedStartTime + bucketMillis * 5 - oneWeekInMillis));
expectedDataRanges.add(new Interval(observedStartTime - oneWeekInMillis * 2, observedStartTime + bucketMillis * 5 - oneWeekInMillis * 2));
List<Interval> actualDataRanges = function.getDataModel().getAllDataIntervals(observedStartTime, observedStartTime + bucketMillis * 5);
Assert.assertEquals(actualDataRanges, expectedDataRanges);
}
Aggregations