Use of com.linkedin.thirdeye.api.MetricTimeSeries in project pinot by linkedin.
The class AnomalyMergeExecutor, method updateMergedAnomalyWeight.
/**
 * Uses a function-specific method to re-compute the weight of the merged anomaly.
 *
 * @param anomalyMergedResult the merged anomaly to be updated
 * @param mergeConfig the merge configuration that was applied when merging the anomaly
 * @throws Exception if an error occurs when retrieving the time series for calculating the weight
 */
private void updateMergedAnomalyWeight(MergedAnomalyResultDTO anomalyMergedResult, AnomalyMergeConfig mergeConfig)
    throws Exception {
  AnomalyFunctionDTO anomalyFunctionSpec = anomalyMergedResult.getFunction();
  BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
  List<Pair<Long, Long>> startEndTimeRanges =
      anomalyFunction.getDataRangeIntervals(anomalyMergedResult.getStartTime(), anomalyMergedResult.getEndTime());
  TimeGranularity timeGranularity =
      new TimeGranularity(anomalyFunctionSpec.getBucketSize(), anomalyFunctionSpec.getBucketUnit());
  MetricTimeSeries metricTimeSeries = TimeSeriesUtil.getTimeSeriesByDimension(anomalyFunctionSpec,
      startEndTimeRanges, anomalyMergedResult.getDimensions(), timeGranularity, false);
  if (metricTimeSeries != null) {
    DateTime windowStart = new DateTime(anomalyMergedResult.getStartTime());
    DateTime windowEnd = new DateTime(anomalyMergedResult.getEndTime());
    List<MergedAnomalyResultDTO> knownAnomalies = Collections.emptyList();
    // Retrieve historical merged anomalies
    if (anomalyFunction.useHistoryAnomaly()) {
      switch (mergeConfig.getMergeStrategy()) {
        case FUNCTION:
          knownAnomalies = getHistoryMergedAnomalies(anomalyFunction, windowStart.getMillis(), windowEnd.getMillis());
          break;
        case FUNCTION_DIMENSIONS:
          knownAnomalies = getHistoryMergedAnomalies(anomalyFunction, windowStart.getMillis(), windowEnd.getMillis(),
              anomalyMergedResult.getDimensions());
          break;
        default:
          throw new IllegalArgumentException("Merge strategy " + mergeConfig.getMergeStrategy() + " not supported");
      }
      if (knownAnomalies.size() > 0) {
        LOG.info("Found {} history anomalies for computing the weight of current merged anomaly.", knownAnomalies.size());
        LOG.info("Checking if any known anomalies overlap with the monitoring window of anomaly detection, which could result in unwanted holes in current values.");
        AnomalyUtils.logAnomaliesOverlapWithWindow(windowStart, windowEnd, knownAnomalies);
      }
    }
    // Transform the time series
    List<ScalingFactor> scalingFactors = OverrideConfigHelper.getTimeSeriesScalingFactors(overrideConfigDAO,
        anomalyFunctionSpec.getCollection(), anomalyFunctionSpec.getTopicMetric(), anomalyFunctionSpec.getId(),
        anomalyFunction.getDataRangeIntervals(windowStart.getMillis(), windowEnd.getMillis()));
    if (CollectionUtils.isNotEmpty(scalingFactors)) {
      Properties properties = anomalyFunction.getProperties();
      MetricTransfer.rescaleMetric(metricTimeSeries, windowStart.getMillis(), scalingFactors,
          anomalyFunctionSpec.getTopicMetric(), properties);
    }
    anomalyFunction.updateMergedAnomalyInfo(anomalyMergedResult, metricTimeSeries, windowStart, windowEnd, knownAnomalies);
  }
}
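For orientation, the sketch below shows how this private helper might be driven from elsewhere in AnomalyMergeExecutor. The surrounding loop, the mergedAnomalies list, and the mergedResultDAO.update call are assumptions for illustration; only updateMergedAnomalyWeight itself comes from the source above.

// Hypothetical driver inside AnomalyMergeExecutor (mergedAnomalies, mergeConfig,
// and mergedResultDAO are assumed to be in scope; names are illustrative).
for (MergedAnomalyResultDTO mergedAnomaly : mergedAnomalies) {
  try {
    // Re-compute the weight with the function-specific logic shown above.
    updateMergedAnomalyWeight(mergedAnomaly, mergeConfig);
    // Persist the updated weight (assumes the DAO exposes an update method).
    mergedResultDAO.update(mergedAnomaly);
  } catch (Exception e) {
    LOG.warn("Could not update weight of merged anomaly {}", mergedAnomaly.getId(), e);
  }
}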
Use of com.linkedin.thirdeye.api.MetricTimeSeries in project pinot by linkedin.
The class TimeBasedAnomalyMerger, method fetchDataByDimension.
/**
 * Fetches the time series, known merged anomalies, and scaling factors for the specified dimension. Note that
 * scaling factors carry no dimension information, so all scaling factors in the specified time range are retrieved.
 *
 * @param windowStartTime the start time for retrieving the data
 * @param windowEndTime the end time for retrieving the data
 * @param dimensions the dimensions of the data
 * @param anomalyFunction the anomaly function that produces the anomaly
 * @param mergedResultDAO DAO for merged anomalies
 * @param overrideConfigDAO DAO for override configurations
 * @param endTimeInclusive set to true if the end time should be inclusive; mainly used by queries from the UI
 * @return an anomaly detection input context that contains all the retrieved data
 * @throws Exception if it fails to retrieve the time series from the DB
 */
public static AnomalyDetectionInputContext fetchDataByDimension(long windowStartTime, long windowEndTime,
    DimensionMap dimensions, BaseAnomalyFunction anomalyFunction, MergedAnomalyResultManager mergedResultDAO,
    OverrideConfigManager overrideConfigDAO, boolean endTimeInclusive) throws Exception {
  AnomalyFunctionDTO functionSpec = anomalyFunction.getSpec();
  List<Pair<Long, Long>> startEndTimeRanges = anomalyFunction.getDataRangeIntervals(windowStartTime, windowEndTime);
  TimeGranularity timeGranularity = new TimeGranularity(functionSpec.getBucketSize(), functionSpec.getBucketUnit());
  AnomalyDetectionInputContext adInputContext = new AnomalyDetectionInputContext();
  // Retrieve the time series
  MetricTimeSeries metricTimeSeries = TimeSeriesUtil.getTimeSeriesByDimension(functionSpec, startEndTimeRanges,
      dimensions, timeGranularity, endTimeInclusive);
  Map<DimensionMap, MetricTimeSeries> metricTimeSeriesMap = new HashMap<>();
  metricTimeSeriesMap.put(dimensions, metricTimeSeries);
  adInputContext.setDimensionKeyMetricTimeSeriesMap(metricTimeSeriesMap);
  // Retrieve historical anomalies
  if (anomalyFunction.useHistoryAnomaly()) {
    List<MergedAnomalyResultDTO> knownAnomalies =
        getBaselineKnownAnomaliesByDimension(anomalyFunction, windowStartTime, windowEndTime, dimensions, mergedResultDAO);
    ListMultimap<DimensionMap, MergedAnomalyResultDTO> mergedAnomalyMap = ArrayListMultimap.create();
    mergedAnomalyMap.putAll(dimensions, knownAnomalies);
    adInputContext.setKnownMergedAnomalies(mergedAnomalyMap);
    if (knownAnomalies.size() > 0) {
      LOG.info("Found {} history anomalies for computing the weight of current merged anomaly.", knownAnomalies.size());
    }
  }
  // Retrieve the scaling factors
  List<ScalingFactor> scalingFactors = OverrideConfigHelper.getTimeSeriesScalingFactors(overrideConfigDAO,
      functionSpec.getCollection(), functionSpec.getTopicMetric(), functionSpec.getId(),
      anomalyFunction.getDataRangeIntervals(windowStartTime, windowEndTime));
  adInputContext.setScalingFactors(scalingFactors);
  return adInputContext;
}
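Because fetchDataByDimension is public and static, it can be called directly to assemble a detection context. A minimal caller sketch, assuming anomalyFunction, mergedResultDAO, and overrideConfigDAO are already initialized elsewhere; the window and dimension values are placeholders.

// Hypothetical caller (uses java.util.concurrent.TimeUnit); DAOs and
// anomalyFunction are assumed to be wired up elsewhere.
long windowEnd = System.currentTimeMillis();
long windowStart = windowEnd - TimeUnit.DAYS.toMillis(1); // illustrative one-day window
DimensionMap dimensions = new DimensionMap();
dimensions.put("country", "US"); // hypothetical dimension name/value
AnomalyDetectionInputContext context = TimeBasedAnomalyMerger.fetchDataByDimension(windowStart, windowEnd,
    dimensions, anomalyFunction, mergedResultDAO, overrideConfigDAO, /* endTimeInclusive */ false);
MetricTimeSeries series = context.getDimensionKeyMetricTimeSeriesMap().get(dimensions);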
Use of com.linkedin.thirdeye.api.MetricTimeSeries in project pinot by linkedin.
The class TimeBasedAnomalyMerger, method computeMergedAnomalyInfo.
/**
 * Uses a function-specific method to re-compute the weight of the merged anomaly.
 *
 * @param mergedAnomalies the merged anomaly to be updated
 * @param mergeConfig the merge configuration that was applied when merging the anomaly
 * @throws Exception if an error occurs when retrieving the time series for calculating the weight
 */
private void computeMergedAnomalyInfo(MergedAnomalyResultDTO mergedAnomalies, AnomalyMergeConfig mergeConfig)
    throws Exception {
  AnomalyFunctionDTO anomalyFunctionSpec = mergedAnomalies.getFunction();
  BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
  long windowStartMillis = mergedAnomalies.getStartTime();
  long windowEndMillis = mergedAnomalies.getEndTime();
  DimensionMap dimensions = mergedAnomalies.getDimensions();
  AnomalyDetectionInputContext adInputContext = fetchDataByDimension(windowStartMillis, windowEndMillis, dimensions,
      anomalyFunction, mergedResultDAO, overrideConfigDAO, false);
  MetricTimeSeries metricTimeSeries = adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
  if (metricTimeSeries != null) {
    List<MergedAnomalyResultDTO> knownAnomalies = adInputContext.getKnownMergedAnomalies().get(dimensions);
    // Transform the time series with the scaling factors, if any
    List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();
    if (CollectionUtils.isNotEmpty(scalingFactors)) {
      Properties properties = anomalyFunction.getProperties();
      MetricTransfer.rescaleMetric(metricTimeSeries, windowStartMillis, scalingFactors,
          anomalyFunctionSpec.getTopicMetric(), properties);
    }
    DateTime windowStart = new DateTime(windowStartMillis);
    DateTime windowEnd = new DateTime(windowEndMillis);
    anomalyFunction.updateMergedAnomalyInfo(mergedAnomalies, metricTimeSeries, windowStart, windowEnd, knownAnomalies);
  }
}
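One detail worth noting: getKnownMergedAnomalies() returns a ListMultimap, so the get(dimensions) call above yields an empty list rather than null when no anomalies were recorded for that dimension, which is why knownAnomalies can be passed along without a null check. A small standalone Guava example of that behavior:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

public class MultimapGetDemo {
  public static void main(String[] args) {
    ListMultimap<String, String> anomaliesByDimension = ArrayListMultimap.create();
    anomaliesByDimension.put("country=US", "anomaly-1");
    // Unlike Map#get, Multimap#get never returns null: a missing key yields an empty collection.
    System.out.println(anomaliesByDimension.get("country=CA").isEmpty()); // prints: true
  }
}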
Use of com.linkedin.thirdeye.api.MetricTimeSeries in project pinot by linkedin.
The class DetectionTaskRunner, method runAnalyze.
private List<RawAnomalyResultDTO> runAnalyze(DateTime windowStart, DateTime windowEnd,
    AnomalyDetectionInputContext anomalyDetectionInputContext, DimensionMap dimensionMap) {
  String metricName = anomalyFunction.getSpec().getTopicMetric();
  MetricTimeSeries metricTimeSeries =
      anomalyDetectionInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensionMap);
  // Get the current entry's knownMergedAnomalies, which should have the same explored dimensions
  List<MergedAnomalyResultDTO> knownMergedAnomaliesOfAnEntry =
      anomalyDetectionInputContext.getKnownMergedAnomalies().get(dimensionMap);
  List<MergedAnomalyResultDTO> historyMergedAnomalies;
  if (anomalyFunction.useHistoryAnomaly()) {
    historyMergedAnomalies = retainHistoryMergedAnomalies(windowStart.getMillis(), knownMergedAnomaliesOfAnEntry);
  } else {
    historyMergedAnomalies = Collections.emptyList();
  }
  LOG.info("Analyzing anomaly function with explored dimensions: {}, windowStart: {}, windowEnd: {}",
      dimensionMap, windowStart, windowEnd);
  AnomalyUtils.logAnomaliesOverlapWithWindow(windowStart, windowEnd, historyMergedAnomalies);
  List<RawAnomalyResultDTO> resultsOfAnEntry = Collections.emptyList();
  try {
    // Run the algorithm, first scaling the time series according to the scaling factors
    List<ScalingFactor> scalingFactors = anomalyDetectionInputContext.getScalingFactors();
    if (CollectionUtils.isNotEmpty(scalingFactors)) {
      Properties properties = anomalyFunction.getProperties();
      MetricTransfer.rescaleMetric(metricTimeSeries, windowStart.getMillis(), scalingFactors, metricName, properties);
    }
    resultsOfAnEntry = anomalyFunction.analyze(dimensionMap, metricTimeSeries, windowStart, windowEnd, historyMergedAnomalies);
  } catch (Exception e) {
    LOG.error("Could not compute for {}", dimensionMap, e);
  }
  // Remove detected anomalies that already exist in the database
  if (CollectionUtils.isNotEmpty(resultsOfAnEntry)) {
    List<RawAnomalyResultDTO> existingRawAnomaliesOfAnEntry =
        anomalyDetectionInputContext.getExistingRawAnomalies().get(dimensionMap);
    resultsOfAnEntry = removeFromExistingRawAnomalies(resultsOfAnEntry, existingRawAnomaliesOfAnEntry);
  }
  // Likewise, remove detected anomalies already covered by existing merged anomalies
  if (CollectionUtils.isNotEmpty(resultsOfAnEntry)) {
    List<MergedAnomalyResultDTO> existingMergedAnomalies =
        retainExistingMergedAnomalies(windowStart.getMillis(), windowEnd.getMillis(), knownMergedAnomaliesOfAnEntry);
    resultsOfAnEntry = removeFromExistingMergedAnomalies(resultsOfAnEntry, existingMergedAnomalies);
  }
  return resultsOfAnEntry;
}
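The retainHistoryMergedAnomalies helper is not shown on this page. One plausible reading, sketched below purely for illustration, is that "history" means anomalies that ended before the monitoring window began, so the detector can use them as context without contaminating the current window; the real implementation may differ.

// Hypothetical sketch of retaining "history" anomalies: keep only anomalies
// that ended before the monitoring window starts. Illustrative only.
private static List<MergedAnomalyResultDTO> retainHistoryMergedAnomaliesSketch(long windowStartMillis,
    List<MergedAnomalyResultDTO> knownAnomalies) {
  List<MergedAnomalyResultDTO> history = new ArrayList<>();
  for (MergedAnomalyResultDTO anomaly : knownAnomalies) {
    if (anomaly.getEndTime() <= windowStartMillis) {
      history.add(anomaly);
    }
  }
  return history;
}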
Use of com.linkedin.thirdeye.api.MetricTimeSeries in project pinot by linkedin.
The class TimeSeriesUtil, method getTimeSeriesByDimension.
/**
 * Returns the metric time series that were given to the anomaly function for anomaly detection. If the dimension to
 * retrieve is OTHER, this method retrieves all combinations of dimensions and calculates the metric time series for
 * the OTHER dimension on the fly.
 *
 * @param anomalyFunctionSpec spec of the anomaly function
 * @param startEndTimeRanges the time ranges for which to retrieve the data for constructing the time series
 * @param dimensionMap a dimension map that is used to construct the filter for retrieving the corresponding data
 *                     that was used to detect the anomaly
 * @param timeGranularity time granularity of the time series
 * @param endTimeInclusive set to true if the end time should be inclusive; mainly used by queries from the UI
 * @return the time series in the same format as those used by the given anomaly function for anomaly detection
 *
 * @throws JobExecutionException
 * @throws ExecutionException
 */
public static MetricTimeSeries getTimeSeriesByDimension(AnomalyFunctionDTO anomalyFunctionSpec,
    List<Pair<Long, Long>> startEndTimeRanges, DimensionMap dimensionMap, TimeGranularity timeGranularity,
    boolean endTimeInclusive) throws JobExecutionException, ExecutionException {
  // Get the original filter
  Multimap<String, String> filters;
  String filterString = anomalyFunctionSpec.getFilters();
  if (StringUtils.isNotBlank(filterString)) {
    filters = ThirdEyeUtils.getFilterSet(filterString);
  } else {
    filters = HashMultimap.create();
  }
  // Decorate the filters according to dimensionMap
  filters = ThirdEyeUtils.getFilterSetFromDimensionMap(dimensionMap, filters);
  boolean hasOTHERDimensionName = false;
  for (String dimensionValue : dimensionMap.values()) {
    if (dimensionValue.equalsIgnoreCase(ResponseParserUtils.OTHER)) {
      hasOTHERDimensionName = true;
      break;
    }
  }
  // groupByDimensions (i.e., exploreDimensions) is empty by default because the query for getting the time series
  // will have the decorated filters according to the anomalies' explored dimensions.
  // However, if there exists any dimension with value "OTHER", then we need to honor the original groupBy in order
  // to construct the data for OTHER.
  List<String> groupByDimensions = Collections.emptyList();
  if (hasOTHERDimensionName && StringUtils.isNotBlank(anomalyFunctionSpec.getExploreDimensions().trim())) {
    groupByDimensions = Arrays.asList(anomalyFunctionSpec.getExploreDimensions().trim().split(","));
  }
  TimeSeriesResponse response = getTimeSeriesResponseImpl(anomalyFunctionSpec, startEndTimeRanges, timeGranularity,
      filters, groupByDimensions, endTimeInclusive);
  try {
    Map<DimensionKey, MetricTimeSeries> metricTimeSeriesMap =
        TimeSeriesResponseConverter.toMap(response, Utils.getSchemaDimensionNames(anomalyFunctionSpec.getCollection()));
    return extractMetricTimeSeriesByDimension(metricTimeSeriesMap);
  } catch (Exception e) {
    LOG.warn("Unable to get schema dimension name for retrieving metric time series: {}", e.toString());
    return null;
  }
}
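A minimal caller sketch for the OTHER path described above, assuming anomalyFunctionSpec and anomalyFunction are already loaded; the dimension name and value are illustrative. Note the null return on schema-resolution failure, which callers such as updateMergedAnomalyWeight above guard against.

// Hypothetical caller; anomalyFunctionSpec, anomalyFunction, windowStart, and
// windowEnd are assumed to be in scope. Names are illustrative.
List<Pair<Long, Long>> ranges = anomalyFunction.getDataRangeIntervals(windowStart, windowEnd);
TimeGranularity granularity =
    new TimeGranularity(anomalyFunctionSpec.getBucketSize(), anomalyFunctionSpec.getBucketUnit());
DimensionMap dimensionMap = new DimensionMap();
dimensionMap.put("browser", "OTHER"); // illustrative; triggers the on-the-fly OTHER aggregation path
MetricTimeSeries series =
    TimeSeriesUtil.getTimeSeriesByDimension(anomalyFunctionSpec, ranges, dimensionMap, granularity, true);
if (series == null) {
  // null signals that schema dimension names could not be resolved (see the catch block above)
}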