Use of com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext in the pinot project by LinkedIn.
From the class TestWeekOverWeekRuleFunction, method testTotalCountThresholdFunction:
// Verifies TotalCountThresholdRemovalFunction behavior in two passes:
//  1) when the total count of the auxiliary "totalCount" metric (50) is below
//     TOTAL_COUNT_THRESHOLD (51), anomaly detection is suppressed entirely;
//  2) when the threshold is lowered to 0 (effectively disabled), the regular
//     w/2wAvg detection results are produced.
@Test(dataProvider = "timeSeriesDataProvider")
public void testTotalCountThresholdFunction(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
// Append properties for anomaly function specific setting
String totalCountTimeSeriesName = "totalCount";
TimeSeries totalCountTimeSeries = new TimeSeries();
{
// Build the auxiliary count series: 5 buckets of 10 each => total count = 50.
totalCountTimeSeries.set(observedStartTime, 10d);
totalCountTimeSeries.set(observedStartTime + bucketMillis, 10d);
totalCountTimeSeries.set(observedStartTime + bucketMillis * 2, 10d);
totalCountTimeSeries.set(observedStartTime + bucketMillis * 3, 10d);
totalCountTimeSeries.set(observedStartTime + bucketMillis * 4, 10d);
Interval totalCountTimeSeriesInterval = new Interval(observedStartTime, observedStartTime + bucketMillis * 5);
totalCountTimeSeries.setTimeSeriesInterval(totalCountTimeSeriesInterval);
}
// Threshold 51 is just above the total count of 50, so detection should be skipped.
properties.put(TotalCountThresholdRemovalFunction.TOTAL_COUNT_METRIC_NAME, totalCountTimeSeriesName);
properties.put(TotalCountThresholdRemovalFunction.TOTAL_COUNT_THRESHOLD, "51");
properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
// Create anomaly function spec
AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
functionSpec.setMetric(mainMetric);
functionSpec.setProperties(toString(properties));
// Create anomalyDetectionContext using anomaly function spec
WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
function.init(functionSpec);
anomalyDetectionContext.setAnomalyDetectionFunction(function);
anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
anomalyDetectionContext.setBaselines(mainMetric, baselines);
anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
anomalyDetectionContext.setCurrent(totalCountTimeSeriesName, totalCountTimeSeries);
List<RawAnomalyResultDTO> rawAnomalyResults = function.analyze(anomalyDetectionContext);
// No anomalies expected: total count (50) is below the threshold (51), so the
// removal function suppresses detection. (Note: no smoothing is involved here.)
Assert.assertEquals(rawAnomalyResults.size(), 0);
// Second pass: threshold 0 effectively disables the total-count removal, so the
// normal w/2wAvg detection results should appear.
anomalyDetectionContext = new AnomalyDetectionContext();
anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
properties.put(TotalCountThresholdRemovalFunction.TOTAL_COUNT_THRESHOLD, "0");
// Create anomaly function spec
functionSpec = new AnomalyFunctionDTO();
functionSpec.setMetric(mainMetric);
functionSpec.setProperties(toString(properties));
function = new WeekOverWeekRuleFunction();
function.init(functionSpec);
anomalyDetectionContext.setAnomalyDetectionFunction(function);
anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
anomalyDetectionContext.setBaselines(mainMetric, baselines);
anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
anomalyDetectionContext.setCurrent(totalCountTimeSeriesName, totalCountTimeSeries);
rawAnomalyResults = function.analyze(anomalyDetectionContext);
compareWo2WAvgRawAnomalies(rawAnomalyResults);
}
Use of com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext in the pinot project by LinkedIn.
From the class AbstractModularizedAnomalyFunction, method updateMergedAnomalyInfo:
/**
 * Refreshes the given merged anomaly's information by delegating to the
 * context-based overload. The detection context is rebuilt from the current
 * time series and window unless the merge model requires no prediction, in
 * which case a null context is passed through.
 */
@Override
public void updateMergedAnomalyInfo(MergedAnomalyResultDTO anomalyToUpdated, MetricTimeSeries timeSeries, DateTime windowStart, DateTime windowEnd, List<MergedAnomalyResultDTO> knownAnomalies) throws Exception {
  final boolean needsPredictionContext = !(getMergeModel() instanceof NoPredictionMergeModel);
  AnomalyDetectionContext context = null;
  if (needsPredictionContext) {
    // Reconstruct the detection context for this anomaly's dimensions and window.
    context = BackwardAnomalyFunctionUtils.buildAnomalyDetectionContext(this, timeSeries,
        spec.getTopicMetric(), anomalyToUpdated.getDimensions(), spec.getBucketSize(),
        spec.getBucketUnit(), windowStart, windowEnd);
  }
  updateMergedAnomalyInfo(context, anomalyToUpdated);
}
Use of com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext in the pinot project by LinkedIn.
From the class TestMinMaxThresholdFunction, method analyze:
// Verifies that MinMaxThresholdFunction flags every bucket whose value falls outside
// [MIN_VAL=12, MAX_VAL=20], and that each flagged anomaly's weight and score match the
// expected values. Also checks getTimeSeriesIntervals returns the single monitoring window.
@Test(dataProvider = "timeSeriesDataProvider")
public void analyze(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries) throws Exception {
  AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
  anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
  properties.put(MinMaxThresholdDetectionModel.MAX_VAL, "20");
  properties.put(MinMaxThresholdDetectionModel.MIN_VAL, "12");
  // Create anomaly function spec
  AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
  functionSpec.setMetric(mainMetric);
  functionSpec.setProperties(TestWeekOverWeekRuleFunction.toString(properties));
  AnomalyDetectionFunction function = new MinMaxThresholdFunction();
  function.init(functionSpec);
  anomalyDetectionContext.setAnomalyDetectionFunction(function);
  anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
  anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
  List<RawAnomalyResultDTO> actualAnomalyResults = function.analyze(anomalyDetectionContext);
  // Expected raw anomalies for min-max threshold detection (buckets 0, 3, and 4
  // fall outside the [12, 20] range).
  List<RawAnomalyResultDTO> expectedRawAnomalies = new ArrayList<>();
  RawAnomalyResultDTO rawAnomaly1 = new RawAnomalyResultDTO();
  rawAnomaly1.setStartTime(observedStartTime);
  rawAnomaly1.setEndTime(observedStartTime + bucketMillis);
  rawAnomaly1.setWeight(-0.166666d);
  rawAnomaly1.setScore(13.6d);
  expectedRawAnomalies.add(rawAnomaly1);
  RawAnomalyResultDTO rawAnomaly2 = new RawAnomalyResultDTO();
  rawAnomaly2.setStartTime(observedStartTime + bucketMillis * 3);
  rawAnomaly2.setEndTime(observedStartTime + bucketMillis * 4);
  rawAnomaly2.setWeight(0.1d);
  rawAnomaly2.setScore(13.6d);
  expectedRawAnomalies.add(rawAnomaly2);
  RawAnomalyResultDTO rawAnomaly3 = new RawAnomalyResultDTO();
  rawAnomaly3.setStartTime(observedStartTime + bucketMillis * 4);
  rawAnomaly3.setEndTime(observedStartTime + bucketMillis * 5);
  rawAnomaly3.setWeight(-0.33333d);
  rawAnomaly3.setScore(13.6d);
  expectedRawAnomalies.add(rawAnomaly3);
  Assert.assertEquals(actualAnomalyResults.size(), expectedRawAnomalies.size());
  for (int i = 0; i < actualAnomalyResults.size(); ++i) {
    RawAnomalyResultDTO actualAnomaly = actualAnomalyResults.get(i);
    // BUG FIX: this previously read from actualAnomalyResults, comparing each
    // result against itself and making the assertions below vacuously true.
    RawAnomalyResultDTO expectedAnomaly = expectedRawAnomalies.get(i);
    Assert.assertEquals(actualAnomaly.getWeight(), expectedAnomaly.getWeight(), EPSILON);
    Assert.assertEquals(actualAnomaly.getScore(), expectedAnomaly.getScore(), EPSILON);
  }
  // Test getTimeSeriesIntervals: min-max detection needs only the monitoring window itself.
  List<Interval> expectedDataRanges = new ArrayList<>();
  expectedDataRanges.add(new Interval(observedStartTime, observedStartTime + bucketMillis * 5));
  List<Interval> actualDataRanges = function.getTimeSeriesIntervals(observedStartTime, observedStartTime + bucketMillis * 5);
  Assert.assertEquals(actualDataRanges, expectedDataRanges);
}
Use of com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext in the pinot project by LinkedIn.
From the class TestMinMaxThresholdFunction, method recomputeMergedAnomalyWeight:
/**
 * Verifies that updateMergedAnomalyInfo recomputes a merged anomaly's score,
 * average current value, and weight from the transformed time series restricted
 * to the merged anomaly's window.
 */
@Test(dataProvider = "timeSeriesDataProvider")
public void recomputeMergedAnomalyWeight(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries) throws Exception {
  // Min-max threshold detection with bounds [12, 20].
  properties.put(MinMaxThresholdDetectionModel.MAX_VAL, "20");
  properties.put(MinMaxThresholdDetectionModel.MIN_VAL, "12");

  AnomalyFunctionDTO anomalyFunctionSpec = new AnomalyFunctionDTO();
  anomalyFunctionSpec.setMetric(mainMetric);
  anomalyFunctionSpec.setProperties(TestWeekOverWeekRuleFunction.toString(properties));

  AnomalyDetectionFunction detectionFunction = new MinMaxThresholdFunction();
  detectionFunction.init(anomalyFunctionSpec);

  AnomalyDetectionContext detectionContext = new AnomalyDetectionContext();
  detectionContext.setBucketSizeInMS(bucketSizeInMs);
  detectionContext.setAnomalyDetectionFunction(detectionFunction);
  detectionContext.setCurrent(mainMetric, observedTimeSeries);
  detectionContext.setTimeSeriesKey(timeSeriesKey);

  // The two raw anomalies (buckets 3 and 4) that make up the merged anomaly.
  RawAnomalyResultDTO firstRaw = new RawAnomalyResultDTO();
  firstRaw.setStartTime(observedStartTime + bucketMillis * 3);
  firstRaw.setEndTime(observedStartTime + bucketMillis * 4);
  firstRaw.setWeight(0.1d);
  firstRaw.setScore(13.6d);
  RawAnomalyResultDTO secondRaw = new RawAnomalyResultDTO();
  secondRaw.setStartTime(observedStartTime + bucketMillis * 4);
  secondRaw.setEndTime(observedStartTime + bucketMillis * 5);
  secondRaw.setWeight(-0.33333d);
  secondRaw.setScore(13.6d);
  List<RawAnomalyResultDTO> rawAnomalies = new ArrayList<>();
  rawAnomalies.add(firstRaw);
  rawAnomalies.add(secondRaw);

  MergedAnomalyResultDTO mergedAnomaly = new MergedAnomalyResultDTO();
  mergedAnomaly.setStartTime(firstRaw.getStartTime());
  mergedAnomaly.setEndTime(secondRaw.getEndTime());
  mergedAnomaly.setAnomalyResults(rawAnomalies);

  detectionFunction.updateMergedAnomalyInfo(detectionContext, mergedAnomaly);

  // Recompute the expected score (average value) and weight (average deviation
  // from the min-max bounds) over the two buckets inside the merged window.
  Interval mergedWindow = new Interval(mergedAnomaly.getStartTime(), mergedAnomaly.getEndTime());
  TimeSeries transformedSeries = detectionContext.getTransformedCurrent(mainMetric);
  double valueSum = 0d;
  double deviationSum = 0d;
  for (long timestamp : transformedSeries.timestampSet()) {
    if (mergedWindow.contains(timestamp)) {
      double observedValue = transformedSeries.get(timestamp);
      valueSum += observedValue;
      deviationSum += computeDeviationFromMinMax(observedValue, 12d, 20d);
    }
  }
  double expectedScore = valueSum / 2d;
  double expectedWeight = deviationSum / 2d;
  Assert.assertEquals(mergedAnomaly.getScore(), expectedScore, EPSILON);
  Assert.assertEquals(mergedAnomaly.getAvgCurrentVal(), expectedScore, EPSILON);
  Assert.assertEquals(mergedAnomaly.getWeight(), expectedWeight, EPSILON);
}
Use of com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext in the pinot project by LinkedIn.
From the class TestWeekOverWeekRuleFunction, method analyzeWo2WAvgSmoothedTimeSeries:
/**
 * Verifies that enabling moving-average smoothing (window size 3) on a w/2wAvg
 * baseline shrinks the transformed observed series from 5 buckets to 3 and that
 * the smoothed series produces no anomalies.
 */
@Test(dataProvider = "timeSeriesDataProvider")
public void analyzeWo2WAvgSmoothedTimeSeries(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
  // Anomaly-function-specific settings: w/2wAvg baseline plus moving-average smoothing.
  properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
  properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
  properties.put(WeekOverWeekRuleFunction.ENABLE_SMOOTHING, "true");
  properties.put(MovingAverageSmoothingFunction.MOVING_AVERAGE_SMOOTHING_WINDOW_SIZE, "3");

  // Build the anomaly function from the spec.
  AnomalyFunctionDTO anomalyFunctionSpec = new AnomalyFunctionDTO();
  anomalyFunctionSpec.setMetric(mainMetric);
  anomalyFunctionSpec.setProperties(toString(properties));
  WeekOverWeekRuleFunction ruleFunction = new WeekOverWeekRuleFunction();
  ruleFunction.init(anomalyFunctionSpec);

  // Assemble the detection context with the observed series and its baselines.
  AnomalyDetectionContext detectionContext = new AnomalyDetectionContext();
  detectionContext.setBucketSizeInMS(bucketSizeInMs);
  detectionContext.setAnomalyDetectionFunction(ruleFunction);
  detectionContext.setCurrent(mainMetric, observedTimeSeries);
  detectionContext.setBaselines(mainMetric, baselines);
  detectionContext.setTimeSeriesKey(timeSeriesKey);

  List<RawAnomalyResultDTO> anomalies = ruleFunction.analyze(detectionContext);
  // The moving average with window size 3 resizes the observed series from 5 buckets to 3.
  Assert.assertEquals(detectionContext.getTransformedCurrent(mainMetric).size(), 3);
  // Smoothing removes all anomalies.
  Assert.assertEquals(anomalies.size(), 0);
}
Aggregations