Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin: class AnomaliesResource, method getAnomaliesForMetricIdInRange.
// ----------- HELPER FUNCTIONS
/**
 * Fetches merged anomalies for the given metric id within a time range and applies the
 * configured alert filtration rules.
 *
 * @param metricId id of the metric whose anomalies are fetched
 * @param startTime start of the search window, epoch millis
 * @param endTime end of the search window, epoch millis
 * @return the filtered merged anomalies; if filtration fails, the unfiltered list is returned
 */
private List<MergedAnomalyResultDTO> getAnomaliesForMetricIdInRange(Long metricId, Long startTime, Long endTime) {
  MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
  String dataset = metricConfig.getDataset();
  String metric = metricConfig.getName();
  List<MergedAnomalyResultDTO> mergedAnomalies =
      mergedAnomalyResultDAO.findByCollectionMetricTime(dataset, metric, startTime, endTime, false);
  try {
    mergedAnomalies = AlertFilterHelper.applyFiltrationRule(mergedAnomalies, alertFilterFactory);
  } catch (Exception e) {
    // Best-effort: fall back to the unfiltered anomalies. Pass the exception as the trailing
    // argument WITHOUT a placeholder so SLF4J logs the full stack trace; with a matching
    // "{}" the throwable is formatted as a parameter and the trace is lost.
    LOG.warn("Failed to apply alert filters on anomalies for metricid:{}, start:{}, end:{}",
        metricId, new DateTime(startTime), new DateTime(endTime), e);
  }
  return mergedAnomalies;
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin: class AnomaliesResource, method getAnomalyCountForMetricInRange.
/**
 * Returns a summary of anomaly counts (total, resolved, unresolved) for a metric in the
 * given time range.
 *
 * <p>An anomaly is considered unresolved when it has no feedback, its feedback type is null,
 * or the feedback confirms it as a true ANOMALY; any other feedback (e.g. NOT_ANOMALY,
 * ANOMALY_NO_ACTION) counts it as resolved.
 *
 * @param metricId id of the metric
 * @param startTime start of the search window, epoch millis
 * @param endTime end of the search window, epoch millis
 * @return summary with total, resolved and unresolved anomaly counts
 */
@GET
@Path("getAnomalyCount/{metricId}/{startTime}/{endTime}")
public AnomaliesSummary getAnomalyCountForMetricInRange(@PathParam("metricId") Long metricId, @PathParam("startTime") Long startTime, @PathParam("endTime") Long endTime) {
  AnomaliesSummary anomaliesSummary = new AnomaliesSummary();
  List<MergedAnomalyResultDTO> mergedAnomalies = getAnomaliesForMetricIdInRange(metricId, startTime, endTime);
  int resolvedAnomalies = 0;
  int unresolvedAnomalies = 0;
  for (MergedAnomalyResultDTO mergedAnomaly : mergedAnomalies) {
    AnomalyFeedbackDTO anomalyFeedback = mergedAnomaly.getFeedback();
    // Unresolved: no feedback yet, feedback without a type, or feedback confirming a true anomaly.
    // (The original code re-checked non-nullness in the second branch, which was redundant.)
    if (anomalyFeedback == null || anomalyFeedback.getFeedbackType() == null
        || AnomalyFeedbackType.ANOMALY.equals(anomalyFeedback.getFeedbackType())) {
      unresolvedAnomalies++;
    } else {
      resolvedAnomalies++;
    }
  }
  anomaliesSummary.setMetricId(metricId);
  anomaliesSummary.setStartTime(startTime);
  anomaliesSummary.setEndTime(endTime);
  anomaliesSummary.setNumAnomalies(mergedAnomalies.size());
  anomaliesSummary.setNumAnomaliesResolved(resolvedAnomalies);
  anomaliesSummary.setNumAnomaliesUnresolved(unresolvedAnomalies);
  return anomaliesSummary;
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin: class TestWeekOverWeekRuleFunction, method recomputeMergedAnomalyWeight.
// Verifies that updateMergedAnomalyInfo recomputes a merged anomaly's weight, averages and
// score from the transformed observed/baseline time series.
// NOTE(review): relies on class-level fixtures (observedStartTime, bucketMillis, mainMetric,
// EPSILON) defined elsewhere in this test class — not visible in this chunk.
@Test(dataProvider = "timeSeriesDataProvider")
public void recomputeMergedAnomalyWeight(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
// Expected RawAnomalies without smoothing
List<RawAnomalyResultDTO> expectedRawAnomalies = new ArrayList<>();
// Two adjacent single-bucket raw anomalies; the merged anomaly spans both.
RawAnomalyResultDTO rawAnomaly1 = new RawAnomalyResultDTO();
rawAnomaly1.setStartTime(observedStartTime + bucketMillis * 2);
rawAnomaly1.setEndTime(observedStartTime + bucketMillis * 3);
rawAnomaly1.setWeight(0.3d);
rawAnomaly1.setScore(15d);
expectedRawAnomalies.add(rawAnomaly1);
RawAnomalyResultDTO rawAnomaly2 = new RawAnomalyResultDTO();
rawAnomaly2.setStartTime(observedStartTime + bucketMillis * 3);
rawAnomaly2.setEndTime(observedStartTime + bucketMillis * 4);
rawAnomaly2.setWeight(0.22727272727272727);
rawAnomaly2.setScore(15d);
expectedRawAnomalies.add(rawAnomaly2);
AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
// Append properties for anomaly function specific setting
properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
// Create anomaly function spec
AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
functionSpec.setMetric(mainMetric);
functionSpec.setProperties(toString(properties));
WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
function.init(functionSpec);
anomalyDetectionContext.setAnomalyDetectionFunction(function);
anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
anomalyDetectionContext.setBaselines(mainMetric, baselines);
anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
// Merged anomaly covering the union of the two expected raw anomalies.
MergedAnomalyResultDTO mergedAnomaly = new MergedAnomalyResultDTO();
mergedAnomaly.setStartTime(expectedRawAnomalies.get(0).getStartTime());
mergedAnomaly.setEndTime(expectedRawAnomalies.get(1).getEndTime());
mergedAnomaly.setAnomalyResults(expectedRawAnomalies);
// Method under test: recomputes weight, avg. current/baseline and score on mergedAnomaly.
function.updateMergedAnomalyInfo(anomalyDetectionContext, mergedAnomaly);
// Test weight; weight is the percentage change between the sums of observed values and
// expected values, respectively. Note that expected values are generated by the trained model,
// which takes as input one or many baseline time series.
final long oneWeekInMillis = TimeUnit.DAYS.toMillis(7);
double observedTotal = 0d;
double baselineTotal = 0d;
int bucketCount = 0;
Interval interval = new Interval(mergedAnomaly.getStartTime(), mergedAnomaly.getEndTime());
TimeSeries observedTS = anomalyDetectionContext.getTransformedCurrent(mainMetric);
List<TimeSeries> baselineTSs = anomalyDetectionContext.getTransformedBaselines(mainMetric);
// Sum observed values inside the anomaly window; each baseline i is the same window shifted
// back by (i+1) weeks, matching the "w/2wAvg" baseline configured above.
for (long timestamp : observedTS.timestampSet()) {
if (interval.contains(timestamp)) {
++bucketCount;
observedTotal += observedTS.get(timestamp);
for (int i = 0; i < baselineTSs.size(); ++i) {
TimeSeries baselineTS = baselineTSs.get(i);
long baseTimeStamp = timestamp - oneWeekInMillis * (i + 1);
baselineTotal += baselineTS.get(baseTimeStamp);
}
}
}
// Average the accumulated baseline sum over the number of baseline series.
baselineTotal /= baselineTSs.size();
// Compare anomaly weight, avg. current, avg. baseline, score, etc
double expectedWeight = (observedTotal - baselineTotal) / baselineTotal;
Assert.assertEquals(mergedAnomaly.getWeight(), expectedWeight, EPSILON);
double avgCurrent = observedTotal / bucketCount;
Assert.assertEquals(mergedAnomaly.getAvgCurrentVal(), avgCurrent, EPSILON);
double avgBaseline = baselineTotal / bucketCount;
Assert.assertEquals(mergedAnomaly.getAvgBaselineVal(), avgBaseline, EPSILON);
// Test Score; score is the average of all raw anomalies' score
double expectedScore = 0d;
for (RawAnomalyResultDTO rawAnomaly : expectedRawAnomalies) {
expectedScore += rawAnomaly.getScore();
}
expectedScore /= expectedRawAnomalies.size();
Assert.assertEquals(mergedAnomaly.getScore(), expectedScore, EPSILON);
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin: class AlertFilterEvaluationUtil, method updatePrecisionAndRecall.
/**
 * Evaluates the alert filter against the given merged anomalies and updates this object's
 * precision and recall fields.
 *
 * <p>An anomaly's ground-truth label is positive only when it has feedback and that feedback
 * is not NOT_ANOMALY; anomalies with no feedback are treated as negatives.
 *
 * @param mergedAnomalyResultDTOS anomalies to evaluate the filter on
 * @throws Exception if the data contains no positive labels, or the filter predicts no
 *         positives (precision/recall would be undefined)
 */
public void updatePrecisionAndRecall(List<MergedAnomalyResultDTO> mergedAnomalyResultDTOS) throws Exception {
  int truePositives = 0;
  int falsePositives = 0;
  int falseNegatives = 0;
  for (MergedAnomalyResultDTO anomaly : mergedAnomalyResultDTOS) {
    boolean predicted = alertFilter.isQualified(anomaly);
    AnomalyFeedbackDTO feedback = anomaly.getFeedback();
    // Positive label requires explicit feedback that is not NOT_ANOMALY.
    boolean actual = !(feedback == null || feedback.getFeedbackType() == AnomalyFeedbackType.NOT_ANOMALY);
    if (predicted) {
      if (actual) {
        truePositives++;
      } else {
        falsePositives++;
      }
    } else if (actual) {
      // Predicted false but labeled true.
      falseNegatives++;
    }
  }
  // Throw a standard unchecked exception for invalid input instead of a raw Exception;
  // it is still compatible with the declared "throws Exception".
  if (truePositives + falseNegatives == 0) {
    throw new IllegalArgumentException("No true labels in dataset. Check data");
  }
  if (truePositives + falsePositives == 0) {
    throw new IllegalArgumentException("No predicted true labels. Check model input");
  }
  this.precision = 1.000 * truePositives / (truePositives + falsePositives);
  this.recall = 1.000 * truePositives / (truePositives + falseNegatives);
}
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin: class AlertFilterEvaluationUtil, method updateFeedbackSummary.
/**
 * Tallies feedback over the given merged anomalies and stores the resulting counts on this
 * object: total anomalies, total responses, true anomalies, false alarms and non-actionable
 * anomalies.
 *
 * @param anomalies merged anomalies whose feedback is summarized
 */
public void updateFeedbackSummary(List<MergedAnomalyResultDTO> anomalies) {
  int anomalyCount = 0;
  int responseCount = 0;
  int confirmedCount = 0;
  int falseAlarmCount = 0;
  int noActionCount = 0;
  for (MergedAnomalyResultDTO anomaly : anomalies) {
    anomalyCount++;
    AnomalyFeedbackDTO feedback = anomaly.getFeedback();
    if (feedback == null) {
      // No human response recorded for this anomaly.
      continue;
    }
    responseCount++;
    AnomalyFeedbackType feedbackType = feedback.getFeedbackType();
    switch (feedbackType) {
      case ANOMALY:
        confirmedCount++;
        break;
      case ANOMALY_NO_ACTION:
        noActionCount++;
        break;
      case NOT_ANOMALY:
        falseAlarmCount++;
        break;
    }
  }
  this.totalAnomalies = anomalyCount;
  this.totalResponses = responseCount;
  this.trueAnomalies = confirmedCount;
  this.falseAlarm = falseAlarmCount;
  this.nonActionable = noActionCount;
}
Aggregations