use of com.linkedin.thirdeye.anomalydetection.context.TimeSeries in project pinot by linkedin.
In the class TestWeekOverWeekRuleFunction, the method analyzeWoW.
@Test(dataProvider = "timeSeriesDataProvider")
public void analyzeWoW(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs,
    TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
  AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
  anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
  // Append properties for anomaly function specific setting
  properties.put(WeekOverWeekRuleFunction.BASELINE, "w/w");
  properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "-0.2");
  // Create anomaly function spec
  AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
  functionSpec.setMetric(mainMetric);
  functionSpec.setProperties(toString(properties));
  WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
  function.init(functionSpec);
  anomalyDetectionContext.setAnomalyDetectionFunction(function);
  anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
  // w/w compares against exactly one baseline: the series from one week earlier
  List<TimeSeries> singleBaseline = new ArrayList<>();
  singleBaseline.add(baselines.get(0));
  anomalyDetectionContext.setBaselines(mainMetric, singleBaseline);
  anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
  List<RawAnomalyResultDTO> rawAnomalyResults = function.analyze(anomalyDetectionContext);
  compareWoWRawAnomalies(rawAnomalyResults);
  // Test data model: the function should request the observed window plus the
  // same window shifted back one week
  List<Interval> expectedDataRanges = new ArrayList<>();
  expectedDataRanges.add(new Interval(observedStartTime, observedStartTime + bucketMillis * 5));
  expectedDataRanges.add(new Interval(observedStartTime - oneWeekInMillis,
      observedStartTime + bucketMillis * 5 - oneWeekInMillis));
  List<Interval> actualDataRanges =
      function.getDataModel().getAllDataIntervals(observedStartTime, observedStartTime + bucketMillis * 5);
  Assert.assertEquals(actualDataRanges, expectedDataRanges);
}
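For intuition, here is a minimal standalone sketch (not part of the test) of the comparison the w/w rule performs, assuming SimpleThresholdDetectionModel flags a bucket when its relative change against the one-week-earlier baseline crosses the configured threshold (-0.2 here, i.e. drops of more than 20%). The values come from timeSeriesDataProvider below.

public class WoWRuleSketch {
  public static void main(String[] args) {
    double[] observed = {10d, 15d, 13d, 27d, 10d}; // observedTimeSeries values
    double[] baseline = {10d, 20d, 15d, 24d, 14d}; // baseline1TimeSeries (one week earlier)
    double changeThreshold = -0.2d;                // flag drops of more than 20%
    for (int i = 0; i < observed.length; i++) {
      double change = (observed[i] - baseline[i]) / baseline[i];
      if (change <= changeThreshold) {
        System.out.printf("bucket %d is anomalous: change = %.4f%n", i, change);
      }
    }
  }
}

With these inputs, buckets 1 (change -0.25) and 4 (change about -0.2857) cross the -0.2 threshold, which is presumably what the helper compareWoWRawAnomalies verifies.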
use of com.linkedin.thirdeye.anomalydetection.context.TimeSeries in project pinot by linkedin.
In the class TestWeekOverWeekRuleFunction, the method recomputeMergedAnomalyWeight.
@Test(dataProvider = "timeSeriesDataProvider")
public void recomputeMergedAnomalyWeight(Properties properties, TimeSeriesKey timeSeriesKey,
    long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
  // Expected RawAnomalies without smoothing
  List<RawAnomalyResultDTO> expectedRawAnomalies = new ArrayList<>();
  RawAnomalyResultDTO rawAnomaly1 = new RawAnomalyResultDTO();
  rawAnomaly1.setStartTime(observedStartTime + bucketMillis * 2);
  rawAnomaly1.setEndTime(observedStartTime + bucketMillis * 3);
  rawAnomaly1.setWeight(0.3d);
  rawAnomaly1.setScore(15d);
  expectedRawAnomalies.add(rawAnomaly1);
  RawAnomalyResultDTO rawAnomaly2 = new RawAnomalyResultDTO();
  rawAnomaly2.setStartTime(observedStartTime + bucketMillis * 3);
  rawAnomaly2.setEndTime(observedStartTime + bucketMillis * 4);
  rawAnomaly2.setWeight(0.22727272727272727);
  rawAnomaly2.setScore(15d);
  expectedRawAnomalies.add(rawAnomaly2);
  AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
  anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
  // Append properties for anomaly function specific setting
  properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
  properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
  // Create anomaly function spec
  AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
  functionSpec.setMetric(mainMetric);
  functionSpec.setProperties(toString(properties));
  WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
  function.init(functionSpec);
  anomalyDetectionContext.setAnomalyDetectionFunction(function);
  anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
  anomalyDetectionContext.setBaselines(mainMetric, baselines);
  anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
  MergedAnomalyResultDTO mergedAnomaly = new MergedAnomalyResultDTO();
  mergedAnomaly.setStartTime(expectedRawAnomalies.get(0).getStartTime());
  mergedAnomaly.setEndTime(expectedRawAnomalies.get(1).getEndTime());
  mergedAnomaly.setAnomalyResults(expectedRawAnomalies);
  function.updateMergedAnomalyInfo(anomalyDetectionContext, mergedAnomaly);
  // Test weight; the weight is the percentage change between the sums of observed values and
  // expected values, respectively. Note that the expected values are generated by the trained
  // model, which takes one or more baseline time series as input.
  final long oneWeekInMillis = TimeUnit.DAYS.toMillis(7);
  double observedTotal = 0d;
  double baselineTotal = 0d;
  int bucketCount = 0;
  Interval interval = new Interval(mergedAnomaly.getStartTime(), mergedAnomaly.getEndTime());
  TimeSeries observedTS = anomalyDetectionContext.getTransformedCurrent(mainMetric);
  List<TimeSeries> baselineTSs = anomalyDetectionContext.getTransformedBaselines(mainMetric);
  for (long timestamp : observedTS.timestampSet()) {
    if (interval.contains(timestamp)) {
      ++bucketCount;
      observedTotal += observedTS.get(timestamp);
      for (int i = 0; i < baselineTSs.size(); ++i) {
        TimeSeries baselineTS = baselineTSs.get(i);
        // Baseline i lags the observed series by (i + 1) weeks
        long baseTimeStamp = timestamp - oneWeekInMillis * (i + 1);
        baselineTotal += baselineTS.get(baseTimeStamp);
      }
    }
  }
  baselineTotal /= baselineTSs.size();
  // Compare anomaly weight, avg. current, avg. baseline, score, etc.
  double expectedWeight = (observedTotal - baselineTotal) / baselineTotal;
  Assert.assertEquals(mergedAnomaly.getWeight(), expectedWeight, EPSILON);
  double avgCurrent = observedTotal / bucketCount;
  Assert.assertEquals(mergedAnomaly.getAvgCurrentVal(), avgCurrent, EPSILON);
  double avgBaseline = baselineTotal / bucketCount;
  Assert.assertEquals(mergedAnomaly.getAvgBaselineVal(), avgBaseline, EPSILON);
  // Test score; the score is the average of all raw anomalies' scores
  double expectedScore = 0d;
  for (RawAnomalyResultDTO rawAnomaly : expectedRawAnomalies) {
    expectedScore += rawAnomaly.getScore();
  }
  expectedScore /= expectedRawAnomalies.size();
  Assert.assertEquals(mergedAnomaly.getScore(), expectedScore, EPSILON);
}
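To make the assertions concrete, the numbers can be derived by hand from timeSeriesDataProvider below. The merged anomaly spans buckets 2 and 3, and the w/2wAvg baseline averages the two baseline series (one and two weeks back), so:

observedTotal  = 13 + 27 = 40
baselineTotal  = ((15 + 24) + (5 + 20)) / 2 = 32
expectedWeight = (40 - 32) / 32 = 0.25
avgCurrent     = 40 / 2 = 20
avgBaseline    = 32 / 2 = 16
expectedScore  = (15 + 15) / 2 = 15

The same arithmetic explains the per-bucket weights above: bucket 2 changes by (13 - 10) / 10 = 0.3 and bucket 3 by (27 - 22) / 22 = 0.2272..., both exceeding the 0.2 change threshold.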
use of com.linkedin.thirdeye.anomalydetection.context.TimeSeries in project pinot by linkedin.
In the class TestWeekOverWeekRuleFunction, the method timeSeriesDataProvider.
@DataProvider(name = "timeSeriesDataProvider")
public Object[][] timeSeriesDataProvider() {
  // The properties for the testing time series
  Properties properties = new Properties();
  long bucketSizeInMS = TimeUnit.SECONDS.toMillis(1);
  // Set up the time series key for the testing time series
  TimeSeriesKey timeSeriesKey = new TimeSeriesKey();
  String metric = mainMetric;
  timeSeriesKey.setMetricName(metric);
  DimensionMap dimensionMap = new DimensionMap();
  dimensionMap.put("dimensionName1", "dimensionValue1");
  dimensionMap.put("dimensionName2", "dimensionValue2");
  timeSeriesKey.setDimensionMap(dimensionMap);
  TimeSeries observedTimeSeries = new TimeSeries();
  {
    observedTimeSeries.set(observedStartTime, 10d);
    observedTimeSeries.set(observedStartTime + bucketMillis, 15d);
    observedTimeSeries.set(observedStartTime + bucketMillis * 2, 13d);
    observedTimeSeries.set(observedStartTime + bucketMillis * 3, 27d);
    observedTimeSeries.set(observedStartTime + bucketMillis * 4, 10d);
    Interval observedTimeSeriesInterval =
        new Interval(observedStartTime, observedStartTime + bucketMillis * 5);
    observedTimeSeries.setTimeSeriesInterval(observedTimeSeriesInterval);
  }
  List<TimeSeries> baselines = new ArrayList<>();
  TimeSeries baseline1TimeSeries = new TimeSeries();
  {
    baseline1TimeSeries.set(baseline1StartTime, 10d);
    baseline1TimeSeries.set(baseline1StartTime + bucketMillis, 20d);
    baseline1TimeSeries.set(baseline1StartTime + bucketMillis * 2, 15d);
    baseline1TimeSeries.set(baseline1StartTime + bucketMillis * 3, 24d);
    baseline1TimeSeries.set(baseline1StartTime + bucketMillis * 4, 14d);
    Interval baseline1Interval =
        new Interval(baseline1StartTime, baseline1StartTime + bucketMillis * 5);
    baseline1TimeSeries.setTimeSeriesInterval(baseline1Interval);
  }
  baselines.add(baseline1TimeSeries);
  TimeSeries baseline2TimeSeries = new TimeSeries();
  {
    baseline2TimeSeries.set(baseline2StartTime, 10d);
    baseline2TimeSeries.set(baseline2StartTime + bucketMillis, 10d);
    baseline2TimeSeries.set(baseline2StartTime + bucketMillis * 2, 5d);
    baseline2TimeSeries.set(baseline2StartTime + bucketMillis * 3, 20d);
    baseline2TimeSeries.set(baseline2StartTime + bucketMillis * 4, 10d);
    Interval baseline2Interval =
        new Interval(baseline2StartTime, baseline2StartTime + bucketMillis * 5);
    baseline2TimeSeries.setTimeSeriesInterval(baseline2Interval);
  }
  baselines.add(baseline2TimeSeries);
  return new Object[][] { { properties, timeSeriesKey, bucketSizeInMS, observedTimeSeries, baselines } };
}
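baseline1StartTime and baseline2StartTime are class fields not shown in this snippet. Judging from the lookup timestamp - oneWeekInMillis * (i + 1) in recomputeMergedAnomalyWeight above, they presumably sit one and two weeks before observedStartTime:

// Assumed field initialization (not shown in the snippet above)
long oneWeekInMillis = TimeUnit.DAYS.toMillis(7);
long baseline1StartTime = observedStartTime - oneWeekInMillis;     // one week back
long baseline2StartTime = observedStartTime - 2 * oneWeekInMillis; // two weeks back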
use of com.linkedin.thirdeye.anomalydetection.context.TimeSeries in project pinot by linkedin.
In the class TestWeekOverWeekRuleFunction, the method testTotalCountThresholdFunction.
@Test(dataProvider = "timeSeriesDataProvider")
public void testTotalCountThresholdFunction(Properties properties, TimeSeriesKey timeSeriesKey,
    long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
  AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
  anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
  // Append properties for anomaly function specific setting
  String totalCountTimeSeriesName = "totalCount";
  TimeSeries totalCountTimeSeries = new TimeSeries();
  {
    totalCountTimeSeries.set(observedStartTime, 10d);
    totalCountTimeSeries.set(observedStartTime + bucketMillis, 10d);
    totalCountTimeSeries.set(observedStartTime + bucketMillis * 2, 10d);
    totalCountTimeSeries.set(observedStartTime + bucketMillis * 3, 10d);
    totalCountTimeSeries.set(observedStartTime + bucketMillis * 4, 10d);
    Interval totalCountTimeSeriesInterval =
        new Interval(observedStartTime, observedStartTime + bucketMillis * 5);
    totalCountTimeSeries.setTimeSeriesInterval(totalCountTimeSeriesInterval);
  }
  properties.put(TotalCountThresholdRemovalFunction.TOTAL_COUNT_METRIC_NAME, totalCountTimeSeriesName);
  properties.put(TotalCountThresholdRemovalFunction.TOTAL_COUNT_THRESHOLD, "51");
  properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
  properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
  // Create anomaly function spec
  AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
  functionSpec.setMetric(mainMetric);
  functionSpec.setProperties(toString(properties));
  // Create anomalyDetectionContext using the anomaly function spec
  WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
  function.init(functionSpec);
  anomalyDetectionContext.setAnomalyDetectionFunction(function);
  anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
  anomalyDetectionContext.setBaselines(mainMetric, baselines);
  anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
  anomalyDetectionContext.setCurrent(totalCountTimeSeriesName, totalCountTimeSeries);
  List<RawAnomalyResultDTO> rawAnomalyResults = function.analyze(anomalyDetectionContext);
  // No anomalies after smoothing the time series
  Assert.assertEquals(rawAnomalyResults.size(), 0);
  // Effectively disable the total count filter by lowering the threshold to 0
  anomalyDetectionContext = new AnomalyDetectionContext();
  anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
  properties.put(TotalCountThresholdRemovalFunction.TOTAL_COUNT_THRESHOLD, "0");
  // Create anomaly function spec
  functionSpec = new AnomalyFunctionDTO();
  functionSpec.setMetric(mainMetric);
  functionSpec.setProperties(toString(properties));
  function = new WeekOverWeekRuleFunction();
  function.init(functionSpec);
  anomalyDetectionContext.setAnomalyDetectionFunction(function);
  anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
  anomalyDetectionContext.setBaselines(mainMetric, baselines);
  anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
  anomalyDetectionContext.setCurrent(totalCountTimeSeriesName, totalCountTimeSeries);
  rawAnomalyResults = function.analyze(anomalyDetectionContext);
  compareWo2WAvgRawAnomalies(rawAnomalyResults);
}
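The filtering behavior follows directly from the numbers. The totalCount series sums to 10 * 5 = 50, below the threshold of 51, so TotalCountThresholdRemovalFunction presumably suppresses every raw anomaly in the first run:

totalCount sum = 10 + 10 + 10 + 10 + 10 = 50 < 51  =>  0 anomalies

With the threshold lowered to 0, the condition passes (50 >= 0), the filter removes nothing, and the w/2wAvg anomalies in buckets 2 and 3 (changes 0.3 and about 0.227 against the 0.2 threshold, as computed earlier) reappear, which the helper compareWo2WAvgRawAnomalies checks.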
use of com.linkedin.thirdeye.anomalydetection.context.TimeSeries in project pinot by linkedin.
In the class MinMaxThresholdMergeModel, the method update.
@Override
public void update(AnomalyDetectionContext anomalyDetectionContext, MergedAnomalyResultDTO anomalyToUpdated) {
  // Get min / max properties
  Properties props = getProperties();
  Double min = null;
  if (props.containsKey(MIN_VAL)) {
    min = Double.valueOf(props.getProperty(MIN_VAL));
  }
  Double max = null;
  if (props.containsKey(MAX_VAL)) {
    max = Double.valueOf(props.getProperty(MAX_VAL));
  }
  String metricName = anomalyDetectionContext.getAnomalyDetectionFunction().getSpec().getTopicMetric();
  TimeSeries timeSeries = anomalyDetectionContext.getTransformedCurrent(metricName);
  Interval timeSeriesInterval = timeSeries.getTimeSeriesInterval();
  long windowStartInMillis = timeSeriesInterval.getStartMillis();
  long windowEndInMillis = timeSeriesInterval.getEndMillis();
  double currentAverageValue = 0d;
  int currentBucketCount = 0;
  double deviationFromThreshold = 0d;
  long anomalyStartTime = anomalyToUpdated.getStartTime();
  long anomalyEndTime = anomalyToUpdated.getEndTime();
  Interval anomalyInterval = new Interval(anomalyStartTime, anomalyEndTime);
  for (long time : timeSeries.timestampSet()) {
    if (anomalyInterval.contains(time)) {
      double value = timeSeries.get(time);
      if (value != 0d) {
        if (windowStartInMillis <= time && time <= windowEndInMillis) {
          currentAverageValue += value;
          ++currentBucketCount;
          deviationFromThreshold += MinMaxThresholdDetectionModel.getDeviationFromThreshold(value, min, max);
        }
        // else ignore unknown time key
      }
    }
  }
  if (currentBucketCount != 0) {
    currentAverageValue /= currentBucketCount;
    deviationFromThreshold /= currentBucketCount;
  }
  anomalyToUpdated.setScore(currentAverageValue);
  anomalyToUpdated.setWeight(deviationFromThreshold);
  anomalyToUpdated.setAvgCurrentVal(currentAverageValue);
  String message = String.format(DEFAULT_MESSAGE_TEMPLATE, deviationFromThreshold, currentAverageValue, min, max);
  anomalyToUpdated.setMessage(message);
}
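update() relies on MinMaxThresholdDetectionModel.getDeviationFromThreshold(value, min, max), which is not shown here. A plausible reading, consistent with how update() averages the result into the anomaly weight (negative below the floor, positive above the ceiling, zero inside the band), is sketched below; the actual implementation lives in MinMaxThresholdDetectionModel and may differ in detail.

// Illustrative sketch only; not the actual ThirdEye implementation
static double getDeviationFromThreshold(double value, Double min, Double max) {
  if (min != null && value < min) {
    return (value - min) / min; // negative: relative distance below the floor
  }
  if (max != null && value > max) {
    return (value - max) / max; // positive: relative distance above the ceiling
  }
  return 0d;                    // within [min, max]: no deviation
}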