Use of com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO in project pinot by LinkedIn: class TestMinMaxThresholdFunction, method recomputeMergedAnomalyWeight.
/**
 * Verifies that {@code updateMergedAnomalyInfo} recomputes the score, average current value,
 * and weight of a merged anomaly from the transformed current time series.
 *
 * <p>The expected values are recomputed here from the buckets that fall inside the merged
 * interval. The bucket count is tallied in the loop rather than hard-coded (the original used
 * a literal {@code 2d}), so the expectation stays correct if the data provider changes.
 */
@Test(dataProvider = "timeSeriesDataProvider")
public void recomputeMergedAnomalyWeight(Properties properties, TimeSeriesKey timeSeriesKey,
    long bucketSizeInMs, TimeSeries observedTimeSeries) throws Exception {
  AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
  anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);

  // Configure a [12, 20] min/max threshold band for the detection model.
  properties.put(MinMaxThresholdDetectionModel.MAX_VAL, "20");
  properties.put(MinMaxThresholdDetectionModel.MIN_VAL, "12");

  // Create anomaly function spec
  AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
  functionSpec.setMetric(mainMetric);
  functionSpec.setProperties(TestWeekOverWeekRuleFunction.toString(properties));

  AnomalyDetectionFunction function = new MinMaxThresholdFunction();
  function.init(functionSpec);
  anomalyDetectionContext.setAnomalyDetectionFunction(function);
  anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
  anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);

  // Two consecutive raw anomalies covering buckets 3 and 4 of the observed series.
  List<RawAnomalyResultDTO> expectedRawAnomalies = new ArrayList<>();
  RawAnomalyResultDTO rawAnomaly1 = new RawAnomalyResultDTO();
  rawAnomaly1.setStartTime(observedStartTime + bucketMillis * 3);
  rawAnomaly1.setEndTime(observedStartTime + bucketMillis * 4);
  rawAnomaly1.setWeight(0.1d);
  rawAnomaly1.setScore(13.6d);
  expectedRawAnomalies.add(rawAnomaly1);

  RawAnomalyResultDTO rawAnomaly2 = new RawAnomalyResultDTO();
  rawAnomaly2.setStartTime(observedStartTime + bucketMillis * 4);
  rawAnomaly2.setEndTime(observedStartTime + bucketMillis * 5);
  rawAnomaly2.setWeight(-0.33333d);
  rawAnomaly2.setScore(13.6d);
  expectedRawAnomalies.add(rawAnomaly2);

  // Merged anomaly spanning both raw anomalies.
  MergedAnomalyResultDTO mergedAnomaly = new MergedAnomalyResultDTO();
  mergedAnomaly.setStartTime(expectedRawAnomalies.get(0).getStartTime());
  mergedAnomaly.setEndTime(expectedRawAnomalies.get(1).getEndTime());
  mergedAnomaly.setAnomalyResults(expectedRawAnomalies);

  function.updateMergedAnomalyInfo(anomalyDetectionContext, mergedAnomaly);

  // Recompute the expected score/weight from the buckets inside the merged interval,
  // counting buckets as we go instead of assuming exactly two.
  double currentTotal = 0d;
  double deviationFromThreshold = 0d;
  int bucketCount = 0;
  Interval interval = new Interval(mergedAnomaly.getStartTime(), mergedAnomaly.getEndTime());
  TimeSeries currentTS = anomalyDetectionContext.getTransformedCurrent(mainMetric);
  for (long timestamp : currentTS.timestampSet()) {
    if (interval.contains(timestamp)) {
      double value = currentTS.get(timestamp);
      currentTotal += value;
      deviationFromThreshold += computeDeviationFromMinMax(value, 12d, 20d);
      ++bucketCount;
    }
  }
  double score = currentTotal / bucketCount;
  double weight = deviationFromThreshold / bucketCount;

  Assert.assertEquals(mergedAnomaly.getScore(), score, EPSILON);
  Assert.assertEquals(mergedAnomaly.getAvgCurrentVal(), score, EPSILON);
  Assert.assertEquals(mergedAnomaly.getWeight(), weight, EPSILON);
}
Use of com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO in project pinot by LinkedIn: class TestWeekOverWeekRuleFunction, method analyzeWo2WAvgSmoothedTimeSeries.
/**
 * Runs the week-over-2-weeks-average rule with moving-average smoothing enabled and asserts
 * that smoothing shrinks the transformed series (5 -> 3 buckets) and removes all anomalies.
 */
@Test(dataProvider = "timeSeriesDataProvider")
public void analyzeWo2WAvgSmoothedTimeSeries(Properties properties, TimeSeriesKey timeSeriesKey,
    long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines)
    throws Exception {
  // Function-specific settings: w/2wAvg baseline, 20% change threshold, 3-bucket smoothing window.
  properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
  properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
  properties.put(WeekOverWeekRuleFunction.ENABLE_SMOOTHING, "true");
  properties.put(MovingAverageSmoothingFunction.MOVING_AVERAGE_SMOOTHING_WINDOW_SIZE, "3");

  // Build the anomaly function from its spec.
  AnomalyFunctionDTO spec = new AnomalyFunctionDTO();
  spec.setMetric(mainMetric);
  spec.setProperties(toString(properties));
  WeekOverWeekRuleFunction ruleFunction = new WeekOverWeekRuleFunction();
  ruleFunction.init(spec);

  // Assemble the detection context with the observed series and its baselines.
  AnomalyDetectionContext context = new AnomalyDetectionContext();
  context.setBucketSizeInMS(bucketSizeInMs);
  context.setAnomalyDetectionFunction(ruleFunction);
  context.setCurrent(mainMetric, observedTimeSeries);
  context.setBaselines(mainMetric, baselines);
  context.setTimeSeriesKey(timeSeriesKey);

  List<RawAnomalyResultDTO> rawAnomalyResults = ruleFunction.analyze(context);

  // Moving average over a 3-bucket window trims the transformed series from 5 to 3 points.
  Assert.assertEquals(context.getTransformedCurrent(mainMetric).size(), 3);
  // Smoothing eliminates every anomaly in this data set.
  Assert.assertEquals(rawAnomalyResults.size(), 0);
}
Use of com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO in project pinot by LinkedIn: class TestWeekOverWeekRuleFunction, method compareWo2WAvgRawAnomalies.
/**
 * Asserts that the given anomalies match the two anomalies expected from the w/2wAvg analysis:
 * buckets 2-3 (weight 0.3) and 3-4 (weight ~0.227), both with score 15.
 *
 * @param actualAnomalyResults anomalies produced by the function under test
 */
private void compareWo2WAvgRawAnomalies(List<RawAnomalyResultDTO> actualAnomalyResults) {
  // Expected anomaly #1: bucket 2 -> 3.
  RawAnomalyResultDTO firstExpected = new RawAnomalyResultDTO();
  firstExpected.setStartTime(observedStartTime + bucketMillis * 2);
  firstExpected.setEndTime(observedStartTime + bucketMillis * 3);
  firstExpected.setWeight(0.3d);
  firstExpected.setScore(15d);

  // Expected anomaly #2: bucket 3 -> 4.
  RawAnomalyResultDTO secondExpected = new RawAnomalyResultDTO();
  secondExpected.setStartTime(observedStartTime + bucketMillis * 3);
  secondExpected.setEndTime(observedStartTime + bucketMillis * 4);
  secondExpected.setWeight(0.22727272727272727);
  secondExpected.setScore(15d);

  List<RawAnomalyResultDTO> expectedRawAnomalies = new ArrayList<>();
  expectedRawAnomalies.add(firstExpected);
  expectedRawAnomalies.add(secondExpected);

  compareActualAndExpectedRawAnomalies(actualAnomalyResults, expectedRawAnomalies);
}
Use of com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO in project pinot by LinkedIn: class AnomalyGraphGenerator, method getAnomalyIntervals.
/**
 * Merges overlapping anomalies and creates JFreeChart Markers for each merged point or interval.
 *
 * <p>Anomalies are first sorted chronologically by start time, with ties broken by the DTO's own
 * {@code compareTo} so that distinct anomalies sharing a start time are all retained in the
 * TreeMap. Consecutive anomalies whose time ranges touch or overlap are collapsed into a single
 * marker spanning the union of their ranges.
 *
 * @param anomaliesWithLabels anomalies mapped to their display labels; the labels are currently
 *        not rendered on the markers (a null label is passed to {@code createGraphMarker})
 * @return one marker per merged anomaly interval, in chronological order
 */
private List<Marker> getAnomalyIntervals(Map<RawAnomalyResultDTO, String> anomaliesWithLabels) {
  TreeMap<RawAnomalyResultDTO, String> chronologicalAnomaliesWithLabels =
      new TreeMap<RawAnomalyResultDTO, String>(new Comparator<RawAnomalyResultDTO>() {
        @Override
        public int compare(RawAnomalyResultDTO o1, RawAnomalyResultDTO o2) {
          int diff = Long.compare(o1.getStartTime(), o2.getStartTime());
          if (diff == 0) {
            diff = o1.compareTo(o2);
          }
          return diff;
        }
      });
  chronologicalAnomaliesWithLabels.putAll(anomaliesWithLabels);

  Long intervalStart = null;
  Long intervalEnd = null;
  List<Marker> anomalyMarkers = new ArrayList<>();
  for (Entry<RawAnomalyResultDTO, String> entry : chronologicalAnomaliesWithLabels.entrySet()) {
    RawAnomalyResultDTO anomalyResult = entry.getKey();
    Long anomalyStart = anomalyResult.getStartTime();
    Long anomalyEnd = anomalyResult.getEndTime();
    // A point-in-time anomaly has no end time; treat it as a zero-length interval.
    anomalyEnd = anomalyEnd == null ? anomalyStart : anomalyEnd;
    if (intervalStart == null || anomalyStart > intervalEnd) {
      if (intervalStart != null) {
        // The running interval is complete; emit its marker before starting a new one.
        Marker anomalyMarker = createGraphMarker(intervalStart, intervalEnd, null);
        anomalyMarkers.add(anomalyMarker);
      }
      intervalStart = anomalyStart;
      intervalEnd = anomalyEnd;
    } else {
      // Overlapping or adjacent anomaly: extend the running interval.
      intervalEnd = Math.max(intervalEnd, anomalyEnd);
    }
  }
  // Emit the marker for the final open interval, if any anomalies were present.
  if (intervalStart != null) {
    Marker anomalyMarker = createGraphMarker(intervalStart, intervalEnd, null);
    anomalyMarkers.add(anomalyMarker);
  }
  return anomalyMarkers;
}
Use of com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO in project pinot by LinkedIn: class RatioOutlierFunction, method analyze.
/**
 * Detects outliers in the per-bucket ratio of this function's two configured metrics.
 *
 * <p>For every time bucket, the ratio metrics[0] / metrics[1] is compared against the
 * configured {@code min}/{@code max} thresholds; a bucket whose ratio deviates from the allowed
 * range produces a raw anomaly whose score is the ratio and whose weight is the absolute
 * deviation. Buckets with a zero denominator are skipped (the ratio is undefined there).
 *
 * @param exploredDimensions dimension combination being analyzed; copied onto each anomaly
 * @param timeSeries per-bucket values for both metrics
 * @param windowStart start of the detection window (inclusive)
 * @param windowEnd end of the detection window (exclusive)
 * @param knownAnomalies previously detected anomalies (unused by this function)
 * @return raw anomalies for every bucket whose ratio violates the thresholds
 */
@Override
public List<RawAnomalyResultDTO> analyze(DimensionMap exploredDimensions,
    MetricTimeSeries timeSeries, DateTime windowStart, DateTime windowEnd,
    List<MergedAnomalyResultDTO> knownAnomalies) throws Exception {
  List<RawAnomalyResultDTO> anomalyResults = new ArrayList<>();

  // Parse min/max thresholds from the function properties; either may be absent (null = unbounded).
  Properties props = getProperties();
  Double min = null;
  if (props.containsKey(MIN_VAL)) {
    min = Double.valueOf(props.getProperty(MIN_VAL));
  }
  Double max = null;
  if (props.containsKey(MAX_VAL)) {
    max = Double.valueOf(props.getProperty(MAX_VAL));
  }

  // This function tests the ratio of exactly two metrics: metrics[0] / metrics[1].
  // (The previous comment claiming a single metric was incorrect.)
  assert (getSpec().getMetrics().size() == 2);
  LOG.info("Testing ratios {} for outliers", String.join(", ", getSpec().getMetrics()));

  // Bucket size in milliseconds; used to compute each anomaly's end time.
  long bucketMillis =
      TimeUnit.MILLISECONDS.convert(getSpec().getBucketSize(), getSpec().getBucketUnit());

  // NOTE(review): the original version also computed per-metric averages over the window, but
  // the result was never read; that dead computation (and the unused topicMetric local) has
  // been removed.
  String m_a = getSpec().getMetrics().get(0);
  String m_b = getSpec().getMetrics().get(1);
  for (Long timeBucket : timeSeries.getTimeWindowSet()) {
    double value_a = timeSeries.get(timeBucket, m_a).doubleValue();
    double value_b = timeSeries.get(timeBucket, m_b).doubleValue();
    // Skip buckets with a zero denominator: the ratio is undefined.
    if (value_b == 0.0d) {
      continue;
    }
    double ratio = value_a / value_b;
    double deviationFromThreshold = getDeviationFromThreshold(ratio, min, max);
    LOG.info("{}={}, {}={}, ratio={}, min={}, max={}, deviation={}", m_a, value_a, m_b, value_b,
        ratio, min, max, deviationFromThreshold);
    if (deviationFromThreshold != 0.0) {
      RawAnomalyResultDTO anomalyResult = new RawAnomalyResultDTO();
      anomalyResult.setProperties(getSpec().getProperties());
      anomalyResult.setStartTime(timeBucket);
      // point-in-time
      anomalyResult.setEndTime(timeBucket + bucketMillis);
      anomalyResult.setDimensions(exploredDimensions);
      anomalyResult.setScore(ratio);
      // higher change, higher the severity
      anomalyResult.setWeight(Math.abs(deviationFromThreshold));
      String message =
          String.format(DEFAULT_MESSAGE_TEMPLATE, deviationFromThreshold, ratio, min, max);
      anomalyResult.setMessage(message);
      anomalyResults.add(anomalyResult);
    }
  }
  return anomalyResults;
}
Aggregations