Example usage of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn:
the DetectionJobResource class, method tuneAlertFilter.
/**
 * Auto-tunes the alert filter of the given anomaly function, using the merged anomalies
 * that fall in the given time range as the evaluation set. If the tuned filter beats the
 * current one, the function spec is updated in the database.
 *
 * @param id anomaly function id
 * @param startTime start time of anomalies to tune alert filter
 * @param endTime end time of anomalies to tune alert filter
 * @param autoTuneType the type of auto tune to invoke (default is "AUTOTUNE")
 * @return HTTP response of request: string of alert filter
 */
@POST
@Path("/autotune/filter/{functionId}")
public Response tuneAlertFilter(@PathParam("functionId") long id, @QueryParam("startTime") long startTime,
    @QueryParam("endTime") long endTime, @QueryParam("autoTuneType") String autoTuneType) {
  // Get anomalies by function id, start time and end time. Reuse one DAO handle instead of
  // fetching the anomaly-function DAO from the registry twice.
  AnomalyFunctionManager anomalyFunctionDAO = DAO_REGISTRY.getAnomalyFunctionDAO();
  AnomalyFunctionDTO anomalyFunctionSpec = anomalyFunctionDAO.findById(id);
  if (anomalyFunctionSpec == null) {
    // Guard against an unknown function id; the original dereferenced the spec unchecked and
    // would have thrown an NPE here.
    return Response.status(Response.Status.BAD_REQUEST)
        .entity("Anomaly function not found: " + id).build();
  }
  MergedAnomalyResultManager anomalyMergedResultDAO = DAO_REGISTRY.getMergedAnomalyResultDAO();
  List<MergedAnomalyResultDTO> anomalyResultDTOS =
      anomalyMergedResultDAO.findByStartTimeInRangeAndFunctionId(startTime, endTime, id);

  // Create alert filter and evaluator from the function's current filter configuration.
  AlertFilter alertFilter = alertFilterFactory.fromSpec(anomalyFunctionSpec.getAlertFilter());
  AlertFilterEvaluationUtil evaluator = new AlertFilterEvaluationUtil(alertFilter);

  // Create alert filter auto tune.
  AlertFilterAutoTune alertFilterAutotune = alertFilterAutotuneFactory.fromSpec(autoTuneType);
  LOG.info("initiated alertFilterAutoTune of Type {}", alertFilterAutotune.getClass().toString());
  try {
    // Evaluate current alert filter (calculate current precision and recall).
    evaluator.updatePrecisionAndRecall(anomalyResultDTOS);
    LOG.info("AlertFilter of Type {}, has been evaluated with precision: {}, recall: {}",
        alertFilter.getClass().toString(), evaluator.getPrecision(), evaluator.getRecall());
    // Get tuned alert filter.
    Map<String, String> tunedAlertFilter =
        alertFilterAutotune.tuneAlertFilter(anomalyResultDTOS, evaluator.getPrecision(), evaluator.getRecall());
    LOG.info("tuned AlertFilter");
    // Persist the tuned filter only if it improves on the current model; otherwise keep the old one.
    if (alertFilterAutotune.isUpdated()) {
      anomalyFunctionSpec.setAlertFilter(tunedAlertFilter);
      anomalyFunctionDAO.update(anomalyFunctionSpec);
      LOG.info("Model has been updated");
    } else {
      LOG.info("Model hasn't been updated because tuned model cannot beat original model");
    }
  } catch (Exception e) {
    // Pass the exception as the last SLF4J argument so the full stack trace is logged,
    // not just the (possibly null) message.
    LOG.warn("AutoTune throws exception due to: {}", e.getMessage(), e);
  }
  return Response.ok(alertFilterAutotune.isUpdated()).build();
}
Example usage of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn:
the DetectionTaskRunner class, method runTask.
/**
 * Runs one anomaly detection pass over [windowStart, windowEnd): fetches input data,
 * analyzes it per dimension, merges the resulting raw anomalies on the time axis, and
 * stores both raw and merged anomalies.
 */
private void runTask(DateTime windowStart, DateTime windowEnd) throws JobExecutionException, ExecutionException {
  LOG.info("Running anomaly detection for time range {} to {}", windowStart, windowEnd);

  // TODO: Change to DataFetchers/DataSources
  AnomalyDetectionInputContext inputContext = fetchData(windowStart, windowEnd);
  ListMultimap<DimensionMap, RawAnomalyResultDTO> rawAnomaliesByDimension =
      dimensionalShuffleAndUnifyAnalyze(windowStart, windowEnd, inputContext);
  detectionTaskSuccessCounter.inc();

  // A backfill (adhoc) job is recognized by its name prefix. Backfill merged anomalies are
  // flagged as notified so they do not induce alerts and emails.
  String jobName = DAO_REGISTRY.getJobDAO().getJobNameByJobId(jobExecutionId);
  boolean isBackfill = jobName != null && jobName.toLowerCase().startsWith(BACKFILL_PREFIX);

  // Merge raw anomalies into time-based merged anomalies.
  TimeBasedAnomalyMerger merger = new TimeBasedAnomalyMerger(anomalyFunctionFactory);
  ListMultimap<DimensionMap, MergedAnomalyResultDTO> mergedAnomaliesByDimension =
      merger.mergeAnomalies(anomalyFunctionSpec, rawAnomaliesByDimension, isBackfill);
  // NOTE(review): the success counter is incremented once per stage (analyze + merge),
  // so a single task bumps it twice — confirm this is the intended metric semantics.
  detectionTaskSuccessCounter.inc();

  // TODO: Change to DataSink
  AnomalyDetectionOutputContext outputContext = new AnomalyDetectionOutputContext();
  outputContext.setRawAnomalies(rawAnomaliesByDimension);
  outputContext.setMergedAnomalies(mergedAnomaliesByDimension);
  storeData(outputContext);
}
Example usage of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn:
the DetectionTaskRunner class, method storeData.
/**
 * Persists the detection results: raw anomalies are saved as new records, while merged
 * anomalies go through update (they may already exist from earlier merge passes —
 * presumably an upsert; verify against MergedAnomalyResultManager).
 */
private void storeData(AnomalyDetectionOutputContext anomalyDetectionOutputContext) {
  RawAnomalyResultManager rawAnomalyDAO = DAO_REGISTRY.getRawAnomalyResultDAO();
  for (RawAnomalyResultDTO rawAnomaly : anomalyDetectionOutputContext.getRawAnomalies().values()) {
    rawAnomalyDAO.save(rawAnomaly);
  }

  MergedAnomalyResultManager mergedAnomalyDAO = DAO_REGISTRY.getMergedAnomalyResultDAO();
  for (MergedAnomalyResultDTO mergedAnomaly : anomalyDetectionOutputContext.getMergedAnomalies().values()) {
    mergedAnomalyDAO.update(mergedAnomaly);
  }
}
Example usage of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn:
the DetectionTaskRunner class, method runAnalyze.
/**
 * Analyzes one dimension entry of the detection window and returns its new raw anomalies,
 * with anomalies already present in the database (raw or merged) filtered out.
 */
private List<RawAnomalyResultDTO> runAnalyze(DateTime windowStart, DateTime windowEnd,
    AnomalyDetectionInputContext anomalyDetectionInputContext, DimensionMap dimensionMap) {
  String metricName = anomalyFunction.getSpec().getTopicMetric();
  MetricTimeSeries timeSeries =
      anomalyDetectionInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensionMap);

  // Known merged anomalies for this entry share the same explored dimensions.
  List<MergedAnomalyResultDTO> knownMergedAnomalies =
      anomalyDetectionInputContext.getKnownMergedAnomalies().get(dimensionMap);
  // History anomalies are only fed to the function when it opts in to using them.
  List<MergedAnomalyResultDTO> historyMergedAnomalies = anomalyFunction.useHistoryAnomaly()
      ? retainHistoryMergedAnomalies(windowStart.getMillis(), knownMergedAnomalies)
      : Collections.<MergedAnomalyResultDTO>emptyList();

  LOG.info("Analyzing anomaly function with explored dimensions: {}, windowStart: {}, windowEnd: {}",
      dimensionMap, windowStart, windowEnd);
  AnomalyUtils.logAnomaliesOverlapWithWindow(windowStart, windowEnd, historyMergedAnomalies);

  List<RawAnomalyResultDTO> results = Collections.emptyList();
  try {
    // Rescale the time series first when scaling factors are configured.
    List<ScalingFactor> scalingFactors = anomalyDetectionInputContext.getScalingFactors();
    if (CollectionUtils.isNotEmpty(scalingFactors)) {
      MetricTransfer.rescaleMetric(timeSeries, windowStart.getMillis(), scalingFactors, metricName,
          anomalyFunction.getProperties());
    }
    results = anomalyFunction.analyze(dimensionMap, timeSeries, windowStart, windowEnd, historyMergedAnomalies);
  } catch (Exception e) {
    LOG.error("Could not compute for {}", dimensionMap, e);
  }

  // Drop detected anomalies that already exist in the database as raw anomalies...
  if (CollectionUtils.isNotEmpty(results)) {
    results = removeFromExistingRawAnomalies(results,
        anomalyDetectionInputContext.getExistingRawAnomalies().get(dimensionMap));
  }
  // ...and those already covered by existing merged anomalies inside the window.
  if (CollectionUtils.isNotEmpty(results)) {
    results = removeFromExistingMergedAnomalies(results,
        retainExistingMergedAnomalies(windowStart.getMillis(), windowEnd.getMillis(), knownMergedAnomalies));
  }
  return results;
}
Example usage of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by LinkedIn:
the AnomalyUtils class, method logAnomaliesOverlapWithWindow.
/**
 * Logs the known anomalies whose window overlaps with the given window, whose range is defined by
 * windowStart and windowEnd.
 *
 * Reason to log the overlapped anomalies: During anomaly detection, the known anomalies are
 * supposedly used to remove abnormal baseline values but not current values. This method provides
 * a check before sending the known anomalies to anomaly detection functions.
 *
 * @param windowStart the inclusive start time of the window
 * @param windowEnd the exclusive end time of the window
 * @param knownAnomalies the known anomalies
 */
public static void logAnomaliesOverlapWithWindow(DateTime windowStart, DateTime windowEnd,
    List<MergedAnomalyResultDTO> knownAnomalies) {
  // Nothing to check for an empty anomaly list or an empty/inverted window.
  if (CollectionUtils.isEmpty(knownAnomalies) || windowEnd.compareTo(windowStart) <= 0) {
    return;
  }

  List<MergedAnomalyResultDTO> overlappedAnomalies = new ArrayList<>();
  for (MergedAnomalyResultDTO knownAnomaly : knownAnomalies) {
    // windowEnd is exclusive (see javadoc), so an anomaly starting exactly at windowEnd does not
    // overlap; the previous <= comparison wrongly counted that boundary case as an overlap.
    if (knownAnomaly.getStartTime() < windowEnd.getMillis()
        && knownAnomaly.getEndTime() >= windowStart.getMillis()) {
      overlappedAnomalies.add(knownAnomaly);
    }
  }

  if (!overlappedAnomalies.isEmpty()) {
    // StringBuilder instead of StringBuffer: the buffer is method-local, so the synchronization
    // StringBuffer provides is pure overhead.
    StringBuilder sb = new StringBuilder();
    String separator = "";
    for (MergedAnomalyResultDTO overlappedAnomaly : overlappedAnomalies) {
      sb.append(separator).append(overlappedAnomaly.getStartTime())
          .append("--").append(overlappedAnomaly.getEndTime());
      separator = ", ";
    }
    LOG.warn("{} merged anomalies overlap with this window {} -- {}. Anomalies: {}",
        overlappedAnomalies.size(), windowStart, windowEnd, sb.toString());
  }
}
Aggregations