use of com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO in project pinot by linkedin.
the class SimplePercentageMergeModel method update.
/**
* The weight of the merged anomaly is calculated by this equation:
* weight = (avg. observed value) / (avg. expected value) - 1;
*
* Note that the values of the holes in the time series are not included in the computation.
* Considering the observed and expected time series:
* observed: 1 2 x 4 x 6
* expected: 1 x x 4 5 6
* The values that are included in the computation are those at slots 1, 4, and 6.
*
* @param anomalyDetectionContext the context that provided a trained
* ExpectedTimeSeriesPredictionModel for computing the weight.
* Moreover, the data range of the time series should equal the
* time range of the anomaly to be updated.
*
* @param anomalyToUpdated the anomaly whose information is to be updated.
*/
@Override
public void update(AnomalyDetectionContext anomalyDetectionContext, MergedAnomalyResultDTO anomalyToUpdated) {
  String mainMetric = anomalyDetectionContext.getAnomalyDetectionFunction().getSpec().getTopicMetric();
  PredictionModel predictionModel = anomalyDetectionContext.getTrainedPredictionModel(mainMetric);
  if (!(predictionModel instanceof ExpectedTimeSeriesPredictionModel)) {
    LOGGER.error("SimplePercentageMergeModel expects an ExpectedTimeSeriesPredictionModel but the trained model is not one.");
    return;
  }
  ExpectedTimeSeriesPredictionModel expectedTimeSeriesPredictionModel = (ExpectedTimeSeriesPredictionModel) predictionModel;
  TimeSeries expectedTimeSeries = expectedTimeSeriesPredictionModel.getExpectedTimeSeries();
  long expectedStartTime = expectedTimeSeries.getTimeSeriesInterval().getStartMillis();
  TimeSeries observedTimeSeries = anomalyDetectionContext.getTransformedCurrent(mainMetric);
  long observedStartTime = observedTimeSeries.getTimeSeriesInterval().getStartMillis();

  double avgCurrent = 0d;
  double avgBaseline = 0d;
  int count = 0;
  Interval anomalyInterval = new Interval(anomalyToUpdated.getStartTime(), anomalyToUpdated.getEndTime());
  for (long observedTimestamp : observedTimeSeries.timestampSet()) {
    if (anomalyInterval.contains(observedTimestamp)) {
      // Align the observed timestamp with the expected series by its offset from the
      // series' start times; slots missing from either series (holes) are skipped.
      long offset = observedTimestamp - observedStartTime;
      long expectedTimestamp = expectedStartTime + offset;
      if (expectedTimeSeries.hasTimestamp(expectedTimestamp)) {
        avgCurrent += observedTimeSeries.get(observedTimestamp);
        avgBaseline += expectedTimeSeries.get(expectedTimestamp);
        ++count;
      }
    }
  }

  // weight = (avg. observed) / (avg. expected) - 1; it remains 0 when there is no
  // overlapping data or the baseline sums to zero.
  double weight = 0d;
  if (count != 0 && avgBaseline != 0d) {
    weight = (avgCurrent - avgBaseline) / avgBaseline;
    avgCurrent /= count;
    avgBaseline /= count;
  }

  // The score of the merged anomaly is the average score of its raw anomalies
  List<RawAnomalyResultDTO> rawAnomalyResultDTOs = anomalyToUpdated.getAnomalyResults();
  double score = 0d;
  if (CollectionUtils.isNotEmpty(rawAnomalyResultDTOs)) {
    for (RawAnomalyResultDTO rawAnomaly : rawAnomalyResultDTOs) {
      score += rawAnomaly.getScore();
    }
    score /= rawAnomalyResultDTOs.size();
  } else {
    score = anomalyToUpdated.getScore();
  }

  anomalyToUpdated.setWeight(weight);
  anomalyToUpdated.setScore(score);
  anomalyToUpdated.setAvgCurrentVal(avgCurrent);
  anomalyToUpdated.setAvgBaselineVal(avgBaseline);
  anomalyToUpdated.setMessage(String.format(DEFAULT_MESSAGE_TEMPLATE, weight * 100, avgCurrent, avgBaseline, score));
}
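The hole-skipping average above is easy to check by hand. The following standalone sketch reproduces the computation for the observed/expected example from the Javadoc; the Maps are stand-ins for the TimeSeries objects and are not part of the original class:

import java.util.LinkedHashMap;
import java.util.Map;

public class WeightSketch {
  public static void main(String[] args) {
    // Slots 2, 3, and 5 are holes in one series or the other, so only slots 1, 4, and 6 count.
    Map<Long, Double> observed = new LinkedHashMap<>();
    observed.put(1L, 1d); observed.put(2L, 2d); observed.put(4L, 4d); observed.put(6L, 6d);
    Map<Long, Double> expected = new LinkedHashMap<>();
    expected.put(1L, 1d); expected.put(4L, 4d); expected.put(5L, 5d); expected.put(6L, 6d);

    double sumCurrent = 0d, sumBaseline = 0d;
    int count = 0;
    for (Map.Entry<Long, Double> e : observed.entrySet()) {
      Double baseline = expected.get(e.getKey());
      if (baseline != null) { // skip holes
        sumCurrent += e.getValue();
        sumBaseline += baseline;
        ++count;
      }
    }
    double weight = (count != 0 && sumBaseline != 0d) ? (sumCurrent - sumBaseline) / sumBaseline : 0d;
    System.out.println(weight); // 0.0 here, since the shared slots are identical
  }
}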
use of com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO in project pinot by linkedin.
the class OnboardResource method deleteExistingAnomalies.
/**
* Delete raw or merged anomalies whose start time is located in the given time ranges, except
* the following two cases:
*
* 1. If a raw anomaly belongs to a merged anomaly whose start time is not located in the given
* time ranges, then the raw anomaly will not be deleted.
*
* 2. If a raw anomaly belongs to a merged anomaly whose start time is located in the given
* time ranges, then it is deleted regardless of its own start time.
*
* If monitoringWindowStartTime is not given, then start time is set to 0.
* If monitoringWindowEndTime is not given, then end time is set to Long.MAX_VALUE.
* @param monitoringWindowStartTime The start time of the monitoring window (in milliseconds)
* @param monitoringWindowEndTime The end time of the monitoring window (in milliseconds)
*/
@POST
@Path("function/{id}/deleteExistingAnomalies")
public Map<String, Integer> deleteExistingAnomalies(@PathParam("id") String id,
    @QueryParam("start") long monitoringWindowStartTime,
    @QueryParam("end") long monitoringWindowEndTime) {
  long functionId = Long.valueOf(id);
  AnomalyFunctionDTO anomalyFunction = anomalyFunctionDAO.findById(functionId);
  if (anomalyFunction == null) {
    LOG.info("Anomaly functionId {} is not found", functionId);
    return null;
  }
  HashMap<String, Integer> returnInfo = new HashMap<>();

  // Find merged anomaly results and delete them first
  LOG.info("Deleting merged anomaly results in the time range: {} -- {}", new DateTime(monitoringWindowStartTime), new DateTime(monitoringWindowEndTime));
  LOG.info("Beginning cleanup of merged anomaly results of functionId {} collection {} metric {}", functionId, anomalyFunction.getCollection(), anomalyFunction.getMetric());
  int mergedAnomaliesDeleted = 0;
  List<MergedAnomalyResultDTO> mergedResults = mergedAnomalyResultDAO.findByStartTimeInRangeAndFunctionId(monitoringWindowStartTime, monitoringWindowEndTime, functionId);
  if (CollectionUtils.isNotEmpty(mergedResults)) {
    mergedAnomaliesDeleted = deleteMergedResults(mergedResults);
  }
  returnInfo.put("mergedAnomaliesDeleted", mergedAnomaliesDeleted);
  LOG.info("{} merged anomaly results have been deleted", mergedAnomaliesDeleted);

  // Find raw anomaly results and delete them
  LOG.info("Deleting raw anomaly results in the time range: {} -- {}", new DateTime(monitoringWindowStartTime), new DateTime(monitoringWindowEndTime));
  LOG.info("Beginning cleanup of raw anomaly results of functionId {} collection {} metric {}", functionId, anomalyFunction.getCollection(), anomalyFunction.getMetric());
  int rawAnomaliesDeleted = 0;
  List<RawAnomalyResultDTO> rawResults = rawAnomalyResultDAO.findAllByTimeAndFunctionId(monitoringWindowStartTime, monitoringWindowEndTime, functionId);
  if (CollectionUtils.isNotEmpty(rawResults)) {
    rawAnomaliesDeleted = deleteRawResults(rawResults);
  }
  returnInfo.put("rawAnomaliesDeleted", rawAnomaliesDeleted);
  LOG.info("{} raw anomaly results have been deleted", rawAnomaliesDeleted);
  return returnInfo;
}
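Invoking this endpoint from a JAX-RS client might look like the following sketch. The base URL, the /onboard resource prefix, the function id 42, and the timestamps are illustrative assumptions, not values taken from the excerpt:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;

public class DeleteAnomaliesClient {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    // Base URL and /onboard prefix are assumptions about the deployment.
    String deleted = client.target("http://localhost:8080/onboard")
        .path("function/42/deleteExistingAnomalies") // 42 is a hypothetical function id
        .queryParam("start", 1483228800000L)         // 2017-01-01T00:00:00Z in millis
        .queryParam("end", 1485907200000L)           // 2017-02-01T00:00:00Z in millis
        .request(MediaType.APPLICATION_JSON)
        .post(Entity.text(""), String.class);        // read the JSON response as a raw string
    System.out.println(deleted); // e.g. {"mergedAnomaliesDeleted":3,"rawAnomaliesDeleted":7}
  }
}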
use of com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO in project pinot by linkedin.
the class OnboardResource method deleteMergedResults.
// Delete merged anomaly results from mergedAnomalyResultDAO
private int deleteMergedResults(List<MergedAnomalyResultDTO> mergedResults) {
  LOG.info("Deleting merged results");
  int mergedAnomaliesDeleted = 0;
  for (MergedAnomalyResultDTO mergedResult : mergedResults) {
    // The raw anomalies of the merged anomaly are fetched here, but the delete call
    // below is commented out in the source, so they are not removed by this method.
    List<RawAnomalyResultDTO> rawAnomalyResultDTOs = mergedResult.getAnomalyResults();
    //deleteRawResults(rawAnomalyResultDTOs);
    LOG.info(".....Deleting merged result id {} for functionId {}", mergedResult.getId(), mergedResult.getFunctionId());
    mergedAnomalyResultDAO.delete(mergedResult);
    mergedAnomaliesDeleted++;
  }
  return mergedAnomaliesDeleted;
}
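The deleteRawResults helper invoked by deleteExistingAnomalies is not shown in this excerpt; a minimal sketch following the same DAO pattern (an assumption, not the project's actual body) could look like this:

// Hypothetical sketch; the real deleteRawResults body is not part of this excerpt.
private int deleteRawResults(List<RawAnomalyResultDTO> rawResults) {
  LOG.info("Deleting raw results");
  int rawAnomaliesDeleted = 0;
  for (RawAnomalyResultDTO rawResult : rawResults) {
    LOG.info(".....Deleting raw result id {}", rawResult.getId());
    rawAnomalyResultDAO.delete(rawResult);
    rawAnomaliesDeleted++;
  }
  return rawAnomaliesDeleted;
}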
use of com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO in project pinot by linkedin.
the class AnomalyFunctionResource method analyze.
@POST
@Path("/analyze")
@Consumes(MediaType.APPLICATION_JSON)
public Response analyze(AnomalyFunctionDTO anomalyFunctionSpec,
    @QueryParam("startTime") Long startTime,
    @QueryParam("endTime") Long endTime) throws Exception {
  // TODO: replace this with Job/Task framework and job tracker page
  BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
  List<Pair<Long, Long>> startEndTimeRanges = anomalyFunction.getDataRangeIntervals(startTime, endTime);
  Map<DimensionKey, MetricTimeSeries> dimensionKeyMetricTimeSeriesMap =
      TimeSeriesUtil.getTimeSeriesForAnomalyDetection(anomalyFunctionSpec, startEndTimeRanges);
  List<RawAnomalyResultDTO> anomalyResults = new ArrayList<>();
  List<RawAnomalyResultDTO> results = new ArrayList<>();
  List<String> collectionDimensions =
      DAO_REGISTRY.getDatasetConfigDAO().findByDataset(anomalyFunctionSpec.getCollection()).getDimensions();

  for (Map.Entry<DimensionKey, MetricTimeSeries> entry : dimensionKeyMetricTimeSeriesMap.entrySet()) {
    DimensionKey dimensionKey = entry.getKey();
    DimensionMap dimensionMap = DimensionMap.fromDimensionKey(dimensionKey, collectionDimensions);
    // At least two data points are needed to run the detection function
    if (entry.getValue().getTimeWindowSet().size() < 2) {
      LOG.warn("Insufficient data for {} to run anomaly detection function", dimensionMap);
      continue;
    }
    try {
      // Run the detection algorithm on this dimension's time series
      MetricTimeSeries metricTimeSeries = entry.getValue();
      LOG.info("Analyzing anomaly function with dimensionKey: {}, windowStart: {}, windowEnd: {}", dimensionMap, startTime, endTime);
      List<RawAnomalyResultDTO> resultsOfAnEntry = anomalyFunction.analyze(dimensionMap, metricTimeSeries,
          new DateTime(startTime), new DateTime(endTime), new ArrayList<>());
      if (resultsOfAnEntry.size() != 0) {
        results.addAll(resultsOfAnEntry);
      }
      LOG.info("{} has {} anomalies in window {} to {}", dimensionMap, resultsOfAnEntry.size(), new DateTime(startTime), new DateTime(endTime));
    } catch (Exception e) {
      LOG.error("Could not compute for {}", dimensionMap, e);
    }
  }

  // Keep only anomalies that are not caused by missing data
  if (results.size() > 0) {
    List<RawAnomalyResultDTO> validResults = new ArrayList<>();
    for (RawAnomalyResultDTO anomaly : results) {
      if (!anomaly.isDataMissing()) {
        LOG.info("Found anomaly, sev [{}] start [{}] end [{}]", anomaly.getWeight(), new DateTime(anomaly.getStartTime()), new DateTime(anomaly.getEndTime()));
        validResults.add(anomaly);
      }
    }
    anomalyResults.addAll(validResults);
  }
  return Response.ok(anomalyResults).build();
}
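As with the onboarding endpoint, this resource can be exercised with a JAX-RS client. In the sketch below, the base URL, the /anomaly-function prefix, and the minimal spec fields are assumptions chosen for illustration:

import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class AnalyzeClient {
  public static void main(String[] args) {
    // The base URL, resource prefix, and spec fields are hypothetical.
    String spec = "{\"collection\": \"myDataset\", \"metric\": \"pageViews\"}";
    Response response = ClientBuilder.newClient()
        .target("http://localhost:8080/anomaly-function")
        .path("analyze")
        .queryParam("startTime", 1483228800000L)
        .queryParam("endTime", 1485907200000L)
        .request(MediaType.APPLICATION_JSON)
        .post(Entity.entity(spec, MediaType.APPLICATION_JSON));
    System.out.println(response.readEntity(String.class)); // JSON array of raw anomalies
  }
}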
use of com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO in project pinot by linkedin.
the class AnomalyResource method updateAnomalyResultFeedback.
@POST
@Path(value = "anomaly-result/feedback/{anomaly_result_id}")
public void updateAnomalyResultFeedback(@PathParam("anomaly_result_id") long anomalyResultId, String payload) {
  try {
    RawAnomalyResultDTO result = rawAnomalyResultDAO.findById(anomalyResultId);
    if (result == null) {
      throw new IllegalArgumentException("AnomalyResult not found with id " + anomalyResultId);
    }
    AnomalyFeedbackDTO feedbackRequest = OBJECT_MAPPER.readValue(payload, AnomalyFeedbackDTO.class);
    AnomalyFeedbackDTO feedback = result.getFeedback();
    if (feedback == null) {
      feedback = new AnomalyFeedbackDTO();
      result.setFeedback(feedback);
    }
    if (feedbackRequest.getStatus() == null) {
      feedback.setStatus(FeedbackStatus.NEW);
    } else {
      feedback.setStatus(feedbackRequest.getStatus());
    }
    feedback.setComment(feedbackRequest.getComment());
    feedback.setFeedbackType(feedbackRequest.getFeedbackType());
    rawAnomalyResultDAO.update(result);
  } catch (IOException e) {
    throw new IllegalArgumentException("Invalid payload " + payload, e);
  }
}
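The payload is deserialized into an AnomalyFeedbackDTO, so its shape follows that DTO's fields (status, feedbackType, comment). A hedged sketch of a client-side payload is below; the enum constants are assumed examples, not values confirmed by this excerpt:

import com.fasterxml.jackson.databind.ObjectMapper;

public class FeedbackPayloadSketch {
  public static void main(String[] args) throws Exception {
    // Field names mirror the setters used above; the enum values are hypothetical examples.
    String payload = "{"
        + "\"status\": \"RESOLVED\","
        + "\"feedbackType\": \"ANOMALY\","
        + "\"comment\": \"Confirmed regression after deployment\""
        + "}";
    // The server side does the equivalent of:
    // AnomalyFeedbackDTO feedbackRequest = OBJECT_MAPPER.readValue(payload, AnomalyFeedbackDTO.class);
    new ObjectMapper().readTree(payload); // checks that the payload is well-formed JSON
    System.out.println(payload);
  }
}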