Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
Class TestMinMaxThresholdFunction, method recomputeMergedAnomalyWeight:
@Test(dataProvider = "timeSeriesDataProvider")
public void recomputeMergedAnomalyWeight(Properties properties, TimeSeriesKey timeSeriesKey,
    long bucketSizeInMs, TimeSeries observedTimeSeries) throws Exception {
  AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
  anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
  properties.put(MinMaxThresholdDetectionModel.MAX_VAL, "20");
  properties.put(MinMaxThresholdDetectionModel.MIN_VAL, "12");

  // Create the anomaly function spec and initialize the min-max threshold function
  AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
  functionSpec.setMetric(mainMetric);
  functionSpec.setProperties(TestWeekOverWeekRuleFunction.toString(properties));
  AnomalyDetectionFunction function = new MinMaxThresholdFunction();
  function.init(functionSpec);
  anomalyDetectionContext.setAnomalyDetectionFunction(function);
  anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
  anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);

  // Two consecutive raw anomalies; observedStartTime and bucketMillis are fields of the test class
  List<RawAnomalyResultDTO> expectedRawAnomalies = new ArrayList<>();
  RawAnomalyResultDTO rawAnomaly1 = new RawAnomalyResultDTO();
  rawAnomaly1.setStartTime(observedStartTime + bucketMillis * 3);
  rawAnomaly1.setEndTime(observedStartTime + bucketMillis * 4);
  rawAnomaly1.setWeight(0.1d);
  rawAnomaly1.setScore(13.6d);
  expectedRawAnomalies.add(rawAnomaly1);

  RawAnomalyResultDTO rawAnomaly2 = new RawAnomalyResultDTO();
  rawAnomaly2.setStartTime(observedStartTime + bucketMillis * 4);
  rawAnomaly2.setEndTime(observedStartTime + bucketMillis * 5);
  rawAnomaly2.setWeight(-0.33333d);
  rawAnomaly2.setScore(13.6d);
  expectedRawAnomalies.add(rawAnomaly2);

  // Merge the two raw anomalies and let the function recompute the merged anomaly's info
  MergedAnomalyResultDTO mergedAnomaly = new MergedAnomalyResultDTO();
  mergedAnomaly.setStartTime(expectedRawAnomalies.get(0).getStartTime());
  mergedAnomaly.setEndTime(expectedRawAnomalies.get(1).getEndTime());
  mergedAnomaly.setAnomalyResults(expectedRawAnomalies);
  function.updateMergedAnomalyInfo(anomalyDetectionContext, mergedAnomaly);

  // Recompute the expected score and weight over the buckets covered by the merged anomaly
  double currentTotal = 0d;
  double deviationFromThreshold = 0d;
  Interval interval = new Interval(mergedAnomaly.getStartTime(), mergedAnomaly.getEndTime());
  TimeSeries currentTS = anomalyDetectionContext.getTransformedCurrent(mainMetric);
  for (long timestamp : currentTS.timestampSet()) {
    if (interval.contains(timestamp)) {
      double value = currentTS.get(timestamp);
      currentTotal += value;
      deviationFromThreshold += computeDeviationFromMinMax(value, 12d, 20d);
    }
  }
  // The merged anomaly spans two buckets, hence the averaging over 2
  double score = currentTotal / 2d;
  double weight = deviationFromThreshold / 2d;
  Assert.assertEquals(mergedAnomaly.getScore(), score, EPSILON);
  Assert.assertEquals(mergedAnomaly.getAvgCurrentVal(), score, EPSILON);
  Assert.assertEquals(mergedAnomaly.getWeight(), weight, EPSILON);
}
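The helper computeDeviationFromMinMax is not shown on this page. A minimal sketch, assuming the deviation is relative to the violated bound, which is consistent with the expected weights above (for example, a value of 22 against the max of 20 gives (22 - 20) / 20 = 0.1, and a value of 8 against the min of 12 gives (8 - 12) / 12 = -0.33333):

// Hypothetical helper: relative deviation of a value from the violated bound,
// 0 when the value lies within [min, max]. The guards assume nonzero bounds.
private double computeDeviationFromMinMax(double currentValue, double min, double max) {
  if (currentValue < min && min != 0d) {
    // negative deviation: value is below the lower bound
    return (currentValue - min) / min;
  } else if (currentValue > max && max != 0d) {
    // positive deviation: value is above the upper bound
    return (currentValue - max) / max;
  }
  return 0d;
}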
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
Class AnomaliesResource, method getAnomalyDetails:
/**
 * Generates anomaly details for a merged anomaly.
 * @param mergedAnomaly the merged anomaly to describe
 * @param datasetConfig config of the dataset the anomaly belongs to
 * @param timeSeriesDateFormatter formatter for time-series timestamps
 * @param startEndDateFormatterHours formatter for hourly start/end times
 * @param startEndDateFormatterDays formatter for daily start/end times
 * @param externalUrl external URL to attach to the details
 * @return the anomaly details, or null if construction failed
 */
private AnomalyDetails getAnomalyDetails(MergedAnomalyResultDTO mergedAnomaly, DatasetConfigDTO datasetConfig,
    DateTimeFormatter timeSeriesDateFormatter, DateTimeFormatter startEndDateFormatterHours,
    DateTimeFormatter startEndDateFormatterDays, String externalUrl) throws Exception {
  String dataset = datasetConfig.getDataset();
  String metricName = mergedAnomaly.getMetric();
  AnomalyFunctionDTO anomalyFunctionSpec = anomalyFunctionDAO.findById(mergedAnomaly.getFunctionId());
  BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
  String aggGranularity = constructAggGranularity(datasetConfig);
  long anomalyStartTime = mergedAnomaly.getStartTime();
  long anomalyEndTime = mergedAnomaly.getEndTime();
  TimeRange range = getTimeseriesOffsetedTimes(anomalyStartTime, anomalyEndTime, datasetConfig);
  long currentStartTime = range.getStart();
  long currentEndTime = range.getEnd();
  DimensionMap dimensions = mergedAnomaly.getDimensions();
  TimeGranularity timeGranularity =
      Utils.getAggregationTimeGranularity(aggGranularity, anomalyFunctionSpec.getCollection());
  long bucketMillis = timeGranularity.toMillis();
  AnomalyDetails anomalyDetails = null;
  try {
    AnomalyDetectionInputContext adInputContext =
        TimeBasedAnomalyMerger.fetchDataByDimension(currentStartTime, currentEndTime, dimensions, anomalyFunction,
            mergedAnomalyResultDAO, overrideConfigDAO, true);
    MetricTimeSeries metricTimeSeries = adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
    // Transform the time series with the scaling factors, if any
    List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();
    if (CollectionUtils.isNotEmpty(scalingFactors)) {
      Properties properties = anomalyFunction.getProperties();
      MetricTransfer.rescaleMetric(metricTimeSeries, currentStartTime, scalingFactors,
          anomalyFunctionSpec.getTopicMetric(), properties);
    }
    List<MergedAnomalyResultDTO> knownAnomalies = adInputContext.getKnownMergedAnomalies().get(dimensions);
    // Known anomalies could be ignored (by passing null instead of knownAnomalies) to reduce users' waiting
    // time, since presentation data does not need to be as accurate as the data used to detect anomalies
    AnomalyTimelinesView anomalyTimelinesView = anomalyFunction.getTimeSeriesView(metricTimeSeries, bucketMillis,
        anomalyFunctionSpec.getTopicMetric(), currentStartTime, currentEndTime, knownAnomalies);
    anomalyDetails = constructAnomalyDetails(metricName, dataset, datasetConfig, mergedAnomaly, anomalyFunctionSpec,
        currentStartTime, currentEndTime, anomalyTimelinesView, timeSeriesDateFormatter,
        startEndDateFormatterHours, startEndDateFormatterDays, externalUrl);
  } catch (Exception e) {
    LOG.error("Exception in constructing anomaly wrapper for anomaly {}", mergedAnomaly.getId(), e);
  }
  return anomalyDetails;
}
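getTimeseriesOffsetedTimes is also not shown here. A hypothetical sketch of such a window-padding helper, assuming it widens the anomaly window by a fixed number of buckets so the chart shows context around the anomaly (the bucket accessor and padding factor are assumptions, not the real implementation):

// Hypothetical sketch only; the real helper may use a different padding policy.
private TimeRange getTimeseriesOffsetedTimes(long anomalyStartTime, long anomalyEndTime,
    DatasetConfigDTO datasetConfig) {
  long bucketMillis = getBucketMillis(datasetConfig); // assumed helper for the dataset's bucket size
  long padding = 10 * bucketMillis; // assumed: pad by ten buckets on each side
  return new TimeRange(anomalyStartTime - padding, anomalyEndTime + padding);
}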
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
Class AnomaliesResource, method constructAnomaliesWrapperFromMergedAnomalies:
/**
 * Constructs an AnomaliesWrapper object from a list of merged anomalies.
 * @param mergedAnomalies the merged anomalies to page through
 * @param pageNumber the 1-based page to render
 * @return the wrapper holding the requested page of anomaly details
 * @throws ExecutionException
 */
private AnomaliesWrapper constructAnomaliesWrapperFromMergedAnomalies(List<MergedAnomalyResultDTO> mergedAnomalies,
    int pageNumber) throws ExecutionException {
  AnomaliesWrapper anomaliesWrapper = new AnomaliesWrapper();
  anomaliesWrapper.setTotalAnomalies(mergedAnomalies.size());
  LOG.info("Total anomalies: {}", mergedAnomalies.size());

  // TODO: get page number and page size from client
  int pageSize = DEFAULT_PAGE_SIZE;
  int maxPageNumber = (mergedAnomalies.size() - 1) / pageSize + 1;
  if (pageNumber > maxPageNumber) {
    pageNumber = maxPageNumber;
  }
  if (pageNumber < 1) {
    pageNumber = 1;
  }
  int fromIndex = (pageNumber - 1) * pageSize;
  int toIndex = pageNumber * pageSize;
  if (toIndex > mergedAnomalies.size()) {
    toIndex = mergedAnomalies.size();
  }

  // Show the most recent anomalies first: the most recent end time (then the largest id) appears at the top
  Collections.sort(mergedAnomalies, new MergedAnomalyEndTimeComparator().reversed());

  List<MergedAnomalyResultDTO> displayedAnomalies = mergedAnomalies.subList(fromIndex, toIndex);
  anomaliesWrapper.setNumAnomaliesOnPage(displayedAnomalies.size());
  LOG.info("Page number: {} Page size: {} Num anomalies on page: {}", pageNumber, pageSize, displayedAnomalies.size());

  // For each displayed anomaly, create its details on the thread pool
  List<Future<AnomalyDetails>> anomalyDetailsListFutures = new ArrayList<>();
  for (MergedAnomalyResultDTO mergedAnomaly : displayedAnomalies) {
    Callable<AnomalyDetails> callable = new Callable<AnomalyDetails>() {
      @Override
      public AnomalyDetails call() throws Exception {
        String dataset = mergedAnomaly.getCollection();
        DatasetConfigDTO datasetConfig = CACHE_REGISTRY.getDatasetConfigCache().get(dataset);
        DateTimeFormatter timeSeriesDateFormatter =
            DateTimeFormat.forPattern(TIME_SERIES_DATE_FORMAT).withZone(Utils.getDataTimeZone(dataset));
        DateTimeFormatter startEndDateFormatterDays =
            DateTimeFormat.forPattern(START_END_DATE_FORMAT_DAYS).withZone(Utils.getDataTimeZone(dataset));
        DateTimeFormatter startEndDateFormatterHours =
            DateTimeFormat.forPattern(START_END_DATE_FORMAT_HOURS).withZone(Utils.getDataTimeZone(dataset));
        return getAnomalyDetails(mergedAnomaly, datasetConfig, timeSeriesDateFormatter,
            startEndDateFormatterHours, startEndDateFormatterDays, getExternalURL(mergedAnomaly));
      }
    };
    anomalyDetailsListFutures.add(threadPool.submit(callable));
  }

  // Collect the details, dropping any that fail or time out
  List<AnomalyDetails> anomalyDetailsList = new ArrayList<>();
  for (Future<AnomalyDetails> anomalyDetailsFuture : anomalyDetailsListFutures) {
    try {
      AnomalyDetails anomalyDetails = anomalyDetailsFuture.get(120, TimeUnit.SECONDS);
      if (anomalyDetails != null) {
        anomalyDetailsList.add(anomalyDetails);
      }
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
      LOG.error("Exception in getting AnomalyDetails", e);
    }
  }
  anomaliesWrapper.setAnomalyDetailsList(anomalyDetailsList);
  return anomaliesWrapper;
}
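The page-window arithmetic above clamps the requested page into [1, maxPageNumber] and then slices the sorted list. A standalone illustration of the same computation (the method and variable names are illustrative):

// Illustration of the pagination window used above.
static int[] pageWindow(int totalAnomalies, int pageNumber, int pageSize) {
  int maxPageNumber = (totalAnomalies - 1) / pageSize + 1;
  pageNumber = Math.max(1, Math.min(pageNumber, maxPageNumber)); // same clamping as the two ifs above
  int fromIndex = (pageNumber - 1) * pageSize;
  int toIndex = Math.min(pageNumber * pageSize, totalAnomalies);
  return new int[] { fromIndex, toIndex };
}
// e.g. totalAnomalies = 25, pageSize = 10: page 3 yields [20, 25); a request for page 7 is clamped to page 3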
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
Class MergedAnomalyResultManagerImpl, method findById:
public MergedAnomalyResultDTO findById(Long id, boolean loadRawAnomalies) {
  MergedAnomalyResultBean mergedAnomalyResultBean = genericPojoDao.get(id, MergedAnomalyResultBean.class);
  if (mergedAnomalyResultBean != null) {
    return convertMergedAnomalyBean2DTO(mergedAnomalyResultBean, loadRawAnomalies);
  } else {
    return null;
  }
}
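A usage sketch (the manager variable and the id are illustrative placeholders): passing false skips loading the child raw anomalies when only the merged-level fields are needed.

// Illustrative call; mergedAnomalyResultManager and the id are placeholders
MergedAnomalyResultDTO anomaly = mergedAnomalyResultManager.findById(4419L, false);
if (anomaly != null) {
  LOG.info("Anomaly {} spans [{}, {}]", anomaly.getId(), anomaly.getStartTime(), anomaly.getEndTime());
}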
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
Class MergedAnomalyResultManagerImpl, method batchConvertMergedAnomalyBean2DTO:
protected List<MergedAnomalyResultDTO> batchConvertMergedAnomalyBean2DTO(
    List<MergedAnomalyResultBean> mergedAnomalyResultBeanList, boolean loadRawAnomalies) {
  // Submit one conversion task per bean
  List<Future<MergedAnomalyResultDTO>> mergedAnomalyResultDTOFutureList =
      new ArrayList<>(mergedAnomalyResultBeanList.size());
  for (MergedAnomalyResultBean mergedAnomalyResultBean : mergedAnomalyResultBeanList) {
    Future<MergedAnomalyResultDTO> future =
        executorService.submit(() -> convertMergedAnomalyBean2DTO(mergedAnomalyResultBean, loadRawAnomalies));
    mergedAnomalyResultDTOFutureList.add(future);
  }
  // Collect the converted DTOs; the typed Future makes the unchecked cast unnecessary
  List<MergedAnomalyResultDTO> mergedAnomalyResultDTOList = new ArrayList<>(mergedAnomalyResultBeanList.size());
  for (Future<MergedAnomalyResultDTO> future : mergedAnomalyResultDTOFutureList) {
    try {
      mergedAnomalyResultDTOList.add(future.get(60, TimeUnit.SECONDS));
    } catch (InterruptedException | TimeoutException | ExecutionException e) {
      LOG.warn("Failed to convert MergedAnomalyResultDTO from bean: {}", e.toString());
    }
  }
  return mergedAnomalyResultDTOList;
}
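A usage sketch (the fetch call is an assumption; genericPojoDao is the DAO already used by findById above):

// Illustrative: convert a batch of beans in parallel, eagerly loading raw anomalies
List<MergedAnomalyResultBean> beans = genericPojoDao.getAll(MergedAnomalyResultBean.class); // assumed fetch method
List<MergedAnomalyResultDTO> dtos = batchConvertMergedAnomalyBean2DTO(beans, true);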