use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
the class TimeBasedAnomalyMerger method computeMergedAnomalyInfo.
/**
* Uses the function-specific method to re-compute the weight of the merged anomaly.
*
* @param mergedAnomalies the merged anomaly to be updated
* @param mergeConfig the merge configuration that was applied when merging the merged anomaly
* @throws Exception if error occurs when retrieving the time series for calculating the weight
*/
private void computeMergedAnomalyInfo(MergedAnomalyResultDTO mergedAnomalies, AnomalyMergeConfig mergeConfig) throws Exception {
  AnomalyFunctionDTO anomalyFunctionSpec = mergedAnomalies.getFunction();
  BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
  long windowStartMillis = mergedAnomalies.getStartTime();
  long windowEndMillis = mergedAnomalies.getEndTime();
  DimensionMap dimensions = mergedAnomalies.getDimensions();
  AnomalyDetectionInputContext adInputContext =
      fetchDataByDimension(windowStartMillis, windowEndMillis, dimensions, anomalyFunction, mergedResultDAO, overrideConfigDAO, false);
  MetricTimeSeries metricTimeSeries = adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
  if (metricTimeSeries != null) {
    List<MergedAnomalyResultDTO> knownAnomalies = adInputContext.getKnownMergedAnomalies().get(dimensions);
    // Transform time series with scaling factor
    List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();
    if (CollectionUtils.isNotEmpty(scalingFactors)) {
      Properties properties = anomalyFunction.getProperties();
      MetricTransfer.rescaleMetric(metricTimeSeries, windowStartMillis, scalingFactors, anomalyFunctionSpec.getTopicMetric(), properties);
    }
    DateTime windowStart = new DateTime(windowStartMillis);
    DateTime windowEnd = new DateTime(windowEndMillis);
    anomalyFunction.updateMergedAnomalyInfo(mergedAnomalies, metricTimeSeries, windowStart, windowEnd, knownAnomalies);
  }
}
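For intuition, here is a minimal, self-contained sketch of how a function-specific updateMergedAnomalyInfo implementation might recompute the weight (severity) of a merged anomaly from the fetched time series: the weight is assumed to be the relative change of the observed values against a baseline over the anomaly window. The map-based series and the recomputeWeight helper are illustrative assumptions, not the actual BaseAnomalyFunction API.

import java.util.Map;
import java.util.TreeMap;

// Minimal sketch (assumption): recompute a merged anomaly's weight as the
// relative change between the observed and baseline averages over the
// anomaly window. Names and the weight formula are illustrative only.
public class WeightSketch {
  /** timestamp (millis) -> { observed value, baseline value } */
  static double recomputeWeight(Map<Long, double[]> series, long windowStart, long windowEnd) {
    double observedSum = 0;
    double baselineSum = 0;
    int count = 0;
    for (Map.Entry<Long, double[]> e : series.entrySet()) {
      long ts = e.getKey();
      if (ts >= windowStart && ts < windowEnd) {
        observedSum += e.getValue()[0];
        baselineSum += e.getValue()[1];
        count++;
      }
    }
    if (count == 0 || baselineSum == 0) {
      return 0.0; // no data in the window; leave the weight neutral
    }
    // weight = relative change of observed vs. baseline, e.g. 0.25 = 25% lift
    return (observedSum - baselineSum) / baselineSum;
  }

  public static void main(String[] args) {
    Map<Long, double[]> series = new TreeMap<>();
    series.put(0L, new double[] {120, 100});
    series.put(1L, new double[] {130, 100});
    System.out.println(recomputeWeight(series, 0L, 2L)); // prints 0.25
  }
}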
use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
the class TimeBasedAnomalyMerger method mergeAnomalies.
/**
* Performs a time-based merge, which merges anomalies that have the same function id, dimensions, etc.
* This method is supposed to be performed by anomaly detectors right after their anomaly detection.
*
* Time-based merge logic works as follows:
*
* Step 1: for the given function, find all groups of raw (unprocessed) anomalies based on
* merge strategy (FunctionId and/or dimensions)
*
* Step 2: For each such group, find the base mergedAnomaly
*
* Step 3: perform time based merge
*
* Step 4: Recompute anomaly score / weight
*
* Step 5: persist merged anomalies
*
* @param functionSpec the spec of the function that detects anomalies
* @param isBackfill set to true to disable the alert of the merged anomalies
*
* @return the merged anomalies after merging, keyed by dimensions
*/
public ListMultimap<DimensionMap, MergedAnomalyResultDTO> mergeAnomalies(AnomalyFunctionDTO functionSpec, ListMultimap<DimensionMap, RawAnomalyResultDTO> unmergedAnomalies, boolean isBackfill) {
  int rawAnomaliesCount = 0;
  for (DimensionMap dimensionMap : unmergedAnomalies.keySet()) {
    rawAnomaliesCount += unmergedAnomalies.get(dimensionMap).size();
  }
  LOG.info("Running merge for function id : [{}], found [{}] raw anomalies", functionSpec.getId(), rawAnomaliesCount);
  AnomalyMergeConfig mergeConfig = functionSpec.getAnomalyMergeConfig();
  if (mergeConfig == null) {
    mergeConfig = DEFAULT_TIME_BASED_MERGE_CONFIG;
  }
  if (unmergedAnomalies.size() == 0) {
    return ArrayListMultimap.create();
  } else {
    ListMultimap<DimensionMap, MergedAnomalyResultDTO> mergedAnomalies =
        dimensionalShuffleAndUnifyMerge(functionSpec, mergeConfig, unmergedAnomalies);
    // Update information of merged anomalies
    for (MergedAnomalyResultDTO mergedAnomalyResultDTO : mergedAnomalies.values()) {
      if (isBackfill) {
        mergedAnomalyResultDTO.setNotified(isBackfill);
      }
      // else notified flag is left as is
      updateMergedAnomalyInfo(mergedAnomalyResultDTO, mergeConfig);
    }
    return mergedAnomalies;
  }
}
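One detail worth noting in mergeAnomalies is the mix of Guava Multimap views: the counting loop iterates keySet() and sums the per-key lists, while the empty check calls size(), which for a Multimap counts key-value pairs rather than keys. A tiny standalone illustration of those semantics, with plain strings standing in for the DTOs:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

// Illustration only: Multimap.size() counts key-value pairs (raw anomalies),
// while keySet().size() counts distinct keys (dimension combinations).
public class MultimapCountSketch {
  public static void main(String[] args) {
    ListMultimap<String, String> anomaliesByDimension = ArrayListMultimap.create();
    anomaliesByDimension.put("country=US", "anomaly-1");
    anomaliesByDimension.put("country=US", "anomaly-2");
    anomaliesByDimension.put("country=IN", "anomaly-3");

    System.out.println(anomaliesByDimension.size());          // 3 raw anomalies
    System.out.println(anomaliesByDimension.keySet().size()); // 2 dimension combinations
    System.out.println(anomaliesByDimension.isEmpty());       // false
  }
}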
use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
the class TimeBasedAnomalyMerger method dimensionalShuffleAndUnifyMerge.
private ListMultimap<DimensionMap, MergedAnomalyResultDTO> dimensionalShuffleAndUnifyMerge(AnomalyFunctionDTO function, AnomalyMergeConfig mergeConfig, ListMultimap<DimensionMap, RawAnomalyResultDTO> dimensionsResultMap) {
  ListMultimap<DimensionMap, MergedAnomalyResultDTO> mergedAnomalies = ArrayListMultimap.create();
  for (DimensionMap dimensionMap : dimensionsResultMap.keySet()) {
    List<RawAnomalyResultDTO> unmergedResultsByDimensions = dimensionsResultMap.get(dimensionMap);
    long anomalyWindowStart = Long.MAX_VALUE;
    long anomalyWindowEnd = Long.MIN_VALUE;
    for (RawAnomalyResultDTO unmergedResultsByDimension : unmergedResultsByDimensions) {
      anomalyWindowStart = Math.min(anomalyWindowStart, unmergedResultsByDimension.getStartTime());
      anomalyWindowEnd = Math.max(anomalyWindowEnd, unmergedResultsByDimension.getEndTime());
    }
    // NOTE: We get the "latest overlapped (conflict)" merged anomaly instead of the "recent" merged anomaly in order
    // to prevent the merge results of the current (online) detection from interfering with the merge results of the
    // back-fill (offline) detection.
    // Moreover, the window start is moved back by mergeConfig.getSequentialAllowedGap() in order to allow a gap
    // between anomalies to be merged.
    MergedAnomalyResultDTO latestOverlappedMergedResult =
        mergedResultDAO.findLatestConflictByFunctionIdDimensions(function.getId(), dimensionMap.toString(),
            anomalyWindowStart - mergeConfig.getSequentialAllowedGap(), anomalyWindowEnd);
    List<MergedAnomalyResultDTO> mergedResults =
        AnomalyTimeBasedSummarizer.mergeAnomalies(latestOverlappedMergedResult, unmergedResultsByDimensions,
            mergeConfig.getMaxMergeDurationLength(), mergeConfig.getSequentialAllowedGap());
    for (MergedAnomalyResultDTO mergedResult : mergedResults) {
      mergedResult.setFunction(function);
      mergedResult.setDimensions(dimensionMap);
    }
    LOG.info("Merging [{}] raw anomalies into [{}] merged anomalies for function id : [{}] and dimensions : [{}]",
        unmergedResultsByDimensions.size(), mergedResults.size(), function.getId(), dimensionMap);
    mergedAnomalies.putAll(dimensionMap, mergedResults);
  }
  return mergedAnomalies;
}
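The heavy lifting above is delegated to AnomalyTimeBasedSummarizer.mergeAnomalies, which this snippet does not show. As a rough mental model only, the following self-contained sketch merges sorted [start, end) intervals while the gap to the previous interval stays within an allowed gap and the merged duration stays under a maximum length; the actual summarizer also carries the latest overlapped merged anomaly and the DTO fields, which are omitted here.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Minimal sketch (assumption): the core of a time-based merge over sorted
// [start, end) intervals. Intervals are merged while the gap to the previous
// merged anomaly is at most allowedGapMillis and the merged duration stays
// under maxDurationMillis. This mirrors the idea behind
// AnomalyTimeBasedSummarizer.mergeAnomalies, not its exact implementation.
public class TimeBasedMergeSketch {
  static List<long[]> merge(List<long[]> sorted, long allowedGapMillis, long maxDurationMillis) {
    List<long[]> merged = new ArrayList<>();
    for (long[] interval : sorted) {
      if (merged.isEmpty()) {
        merged.add(interval.clone());
        continue;
      }
      long[] last = merged.get(merged.size() - 1);
      long gap = interval[0] - last[1];
      long mergedDuration = Math.max(last[1], interval[1]) - last[0];
      if (gap <= allowedGapMillis && mergedDuration <= maxDurationMillis) {
        last[1] = Math.max(last[1], interval[1]);   // extend the current merged anomaly
      } else {
        merged.add(interval.clone());               // start a new merged anomaly
      }
    }
    return merged;
  }

  public static void main(String[] args) {
    List<long[]> raw = Arrays.asList(new long[] {0, 10}, new long[] {12, 20}, new long[] {60, 70});
    // With an allowed gap of 5 and a max duration of 100, the first two intervals merge: [0, 20] and [60, 70]
    for (long[] m : merge(raw, 5, 100)) {
      System.out.println(m[0] + " - " + m[1]);
    }
  }
}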
use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
the class AlertTaskRunner method runTask.
private void runTask() throws Exception {
  LOG.info("Starting email report {}", alertConfig.getId());
  final String collection = alertConfig.getCollection();
  // Get the anomalies in that range
  final List<MergedAnomalyResultDTO> allResults =
      anomalyMergedResultDAO.getAllByTimeEmailIdAndNotifiedFalse(windowStart.getMillis(), windowEnd.getMillis(), alertConfig.getId());
  // apply filtration rule
  List<MergedAnomalyResultDTO> results = AlertFilterHelper.applyFiltrationRule(allResults, alertFilterFactory);
  if (results.isEmpty() && !alertConfig.isSendZeroAnomalyEmail()) {
    LOG.info("Zero anomalies found, skipping sending email");
    return;
  }
  // Group by dimension key, then sort according to anomaly result compareTo method.
  Map<DimensionMap, List<MergedAnomalyResultDTO>> groupedResults = new TreeMap<>();
  for (MergedAnomalyResultDTO result : results) {
    DimensionMap dimensions = result.getDimensions();
    if (!groupedResults.containsKey(dimensions)) {
      groupedResults.put(dimensions, new ArrayList<>());
    }
    groupedResults.get(dimensions).add(result);
  }
  // sort each list of anomaly results afterwards
  for (List<MergedAnomalyResultDTO> resultsByExploredDimensions : groupedResults.values()) {
    Collections.sort(resultsByExploredDimensions);
  }
  sendAlertForAnomalies(collection, results, groupedResults);
  updateNotifiedStatus(results);
}
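The group-then-sort pattern in runTask (a TreeMap keyed by dimensions, one list of anomalies per key, each list sorted by the DTO's compareTo) can be summarized with a small stand-in example; the integer values and even/odd keys below are placeholders for MergedAnomalyResultDTO and DimensionMap, and computeIfAbsent is simply a compact alternative to the containsKey/put sequence above.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

// Stand-in example (assumption): integers play the role of anomaly results,
// and their natural ordering plays the role of the DTO's compareTo method.
public class GroupAndSortSketch {
  public static void main(String[] args) {
    List<Integer> results = Arrays.asList(7, 4, 9, 2);
    Map<String, List<Integer>> grouped = new TreeMap<>();
    for (Integer result : results) {
      String dimensionKey = (result % 2 == 0) ? "even" : "odd"; // stand-in for result.getDimensions()
      grouped.computeIfAbsent(dimensionKey, k -> new ArrayList<>()).add(result);
    }
    for (List<Integer> perDimension : grouped.values()) {
      Collections.sort(perDimension); // mirrors sorting each dimension's anomalies
    }
    System.out.println(grouped); // prints {even=[2, 4], odd=[7, 9]}
  }
}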
use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
the class AlertTaskRunner method updateNotifiedStatus.
// TODO : deprecate this, move last notified alert id in the alertConfig
private void updateNotifiedStatus(List<MergedAnomalyResultDTO> mergedResults) {
  for (MergedAnomalyResultDTO mergedResult : mergedResults) {
    mergedResult.setNotified(true);
    anomalyMergedResultDAO.update(mergedResult);
  }
}