Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
The class TestAlertFilterUtil, method getMockMergedAnomalies.
private List<MergedAnomalyResultDTO> getMockMergedAnomalies(int posIdx, int negIdx) {
  List<MergedAnomalyResultDTO> anomalyResultDTOS = new ArrayList<>();
  // Window lengths in hours and the corresponding severity (weight) of each mock anomaly.
  int[] ws = { 1, 1, 2, 3, 4, 4, 5, 6, 7 };
  double[] severity = { 2.0, 4.0, 2.0, 3.0, 1.0, 3.0, 2.0, 1.0, 3.0 };
  AnomalyFeedbackDTO positiveFeedback = new AnomalyFeedbackDTO();
  AnomalyFeedbackDTO negativeFeedback = new AnomalyFeedbackDTO();
  positiveFeedback.setFeedbackType(ANOMALY);
  negativeFeedback.setFeedbackType(NOT_ANOMALY);
  for (int i = 0; i < ws.length; i++) {
    MergedAnomalyResultDTO anomaly = new MergedAnomalyResultDTO();
    anomaly.setStartTime(0L);
    anomaly.setEndTime(ws[i] * 3600 * 1000L);
    anomaly.setWeight(severity[i]);
    // Attach positive feedback at posIdx, negative feedback at negIdx, and none elsewhere.
    if (i == posIdx) {
      anomaly.setFeedback(positiveFeedback);
    } else if (i == negIdx) {
      anomaly.setFeedback(negativeFeedback);
    } else {
      anomaly.setFeedback(null);
    }
    anomalyResultDTOS.add(anomaly);
  }
  return anomalyResultDTOS;
}
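For context, posIdx and negIdx mark which of the nine mock anomalies carry user feedback. A minimal sketch of how a test might consume this helper; the index choices and assertions below are illustrative, not from the source, and AnomalyFeedbackType is assumed to be the enum behind the ANOMALY/NOT_ANOMALY constants:

// Illustrative usage only; indices 3 and 6 are arbitrary.
List<MergedAnomalyResultDTO> anomalies = getMockMergedAnomalies(3, 6);
Assert.assertEquals(anomalies.size(), 9);
Assert.assertEquals(anomalies.get(3).getFeedback().getFeedbackType(), AnomalyFeedbackType.ANOMALY);
Assert.assertEquals(anomalies.get(6).getFeedback().getFeedbackType(), AnomalyFeedbackType.NOT_ANOMALY);
Assert.assertNull(anomalies.get(0).getFeedback());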
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
The class RunAdhocDatabaseQueriesTool, method updateNotified.
private void updateNotified() {
  // Flag every merged anomaly as notified and persist the change.
  List<MergedAnomalyResultDTO> mergedResults = mergedResultDAO.findAll();
  for (MergedAnomalyResultDTO mergedResult : mergedResults) {
    mergedResult.setNotified(true);
    mergedResultDAO.update(mergedResult);
  }
}
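Since the loop unconditionally rewrites every row, a variant that skips rows already flagged can cut the number of UPDATE statements. This is a sketch under the assumption that isNotified() is the matching getter on MergedAnomalyResultDTO:

private void updateNotifiedIfNeeded() {
  List<MergedAnomalyResultDTO> mergedResults = mergedResultDAO.findAll();
  for (MergedAnomalyResultDTO mergedResult : mergedResults) {
    // Only touch rows whose flag actually changes.
    if (!mergedResult.isNotified()) {
      mergedResult.setNotified(true);
      mergedResultDAO.update(mergedResult);
    }
  }
}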
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
The class TestMinMaxThresholdFunction, method recomputeMergedAnomalyWeight.
@Test(dataProvider = "timeSeriesDataProvider")
public void recomputeMergedAnomalyWeight(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs,
    TimeSeries observedTimeSeries) throws Exception {
  AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
  anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
  properties.put(MinMaxThresholdDetectionModel.MAX_VAL, "20");
  properties.put(MinMaxThresholdDetectionModel.MIN_VAL, "12");
  // Create anomaly function spec
  AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
  functionSpec.setMetric(mainMetric);
  functionSpec.setProperties(TestWeekOverWeekRuleFunction.toString(properties));
  AnomalyDetectionFunction function = new MinMaxThresholdFunction();
  function.init(functionSpec);
  anomalyDetectionContext.setAnomalyDetectionFunction(function);
  anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
  anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
  // Two raw anomalies expected from the observed series: one above the max, one below the min.
  List<RawAnomalyResultDTO> expectedRawAnomalies = new ArrayList<>();
  RawAnomalyResultDTO rawAnomaly1 = new RawAnomalyResultDTO();
  rawAnomaly1.setStartTime(observedStartTime + bucketMillis * 3);
  rawAnomaly1.setEndTime(observedStartTime + bucketMillis * 4);
  rawAnomaly1.setWeight(0.1d);
  rawAnomaly1.setScore(13.6d);
  expectedRawAnomalies.add(rawAnomaly1);
  RawAnomalyResultDTO rawAnomaly2 = new RawAnomalyResultDTO();
  rawAnomaly2.setStartTime(observedStartTime + bucketMillis * 4);
  rawAnomaly2.setEndTime(observedStartTime + bucketMillis * 5);
  rawAnomaly2.setWeight(-0.33333d);
  rawAnomaly2.setScore(13.6d);
  expectedRawAnomalies.add(rawAnomaly2);
  // Merge the two raw anomalies and let the function recompute score and weight.
  MergedAnomalyResultDTO mergedAnomaly = new MergedAnomalyResultDTO();
  mergedAnomaly.setStartTime(expectedRawAnomalies.get(0).getStartTime());
  mergedAnomaly.setEndTime(expectedRawAnomalies.get(1).getEndTime());
  mergedAnomaly.setAnomalyResults(expectedRawAnomalies);
  function.updateMergedAnomalyInfo(anomalyDetectionContext, mergedAnomaly);
  // Independently recompute the expected score (average value) and weight (average
  // threshold deviation) over the two buckets covered by the merged anomaly.
  double currentTotal = 0d;
  double deviationFromThreshold = 0d;
  Interval interval = new Interval(mergedAnomaly.getStartTime(), mergedAnomaly.getEndTime());
  TimeSeries currentTS = anomalyDetectionContext.getTransformedCurrent(mainMetric);
  for (long timestamp : currentTS.timestampSet()) {
    if (interval.contains(timestamp)) {
      double value = currentTS.get(timestamp);
      currentTotal += value;
      deviationFromThreshold += computeDeviationFromMinMax(value, 12d, 20d);
    }
  }
  double score = currentTotal / 2d;
  double weight = deviationFromThreshold / 2d;
  Assert.assertEquals(mergedAnomaly.getScore(), score, EPSILON);
  Assert.assertEquals(mergedAnomaly.getAvgCurrentVal(), score, EPSILON);
  Assert.assertEquals(mergedAnomaly.getWeight(), weight, EPSILON);
}
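The helper computeDeviationFromMinMax is referenced but not shown. Given the expected weights in this test (0.1 corresponds to a value 10% above the max of 20; roughly -0.333 to a value one third below the min of 12), a consistent sketch is the relative deviation from whichever threshold is violated. This is a reconstruction from the test's expectations, not the verbatim helper:

// Sketch: relative deviation from the violated bound; zero inside [min, max].
private static double computeDeviationFromMinMax(double currentValue, double min, double max) {
  if (currentValue < min && min != 0d) {
    return (currentValue - min) / min;  // negative weight for values below min
  } else if (currentValue > max && max != 0d) {
    return (currentValue - max) / max;  // positive weight for values above max
  }
  return 0d;
}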
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
The class AnomalyResource, method deleteAnomalyFunctions.
// Delete anomaly function
@DELETE
@Path("/anomaly-function/delete")
public Response deleteAnomalyFunctions(@NotNull @QueryParam("id") Long id,
    @QueryParam("functionName") String functionName) throws IOException {
  if (id == null) {
    throw new IllegalArgumentException("id is a required query param");
  }
  // call endpoint to stop if active
  AnomalyFunctionDTO anomalyFunctionSpec = anomalyFunctionDAO.findById(id);
  if (anomalyFunctionSpec == null) {
    throw new IllegalStateException("No anomalyFunctionSpec with id " + id);
  }
  // delete dependent entities
  // email config mapping
  List<EmailConfigurationDTO> emailConfigurations = emailConfigurationDAO.findByFunctionId(id);
  for (EmailConfigurationDTO emailConfiguration : emailConfigurations) {
    emailConfiguration.getFunctions().remove(anomalyFunctionSpec);
    emailConfigurationDAO.update(emailConfiguration);
  }
  // raw result mapping
  List<RawAnomalyResultDTO> rawResults = rawAnomalyResultDAO.findAllByTimeAndFunctionId(0, System.currentTimeMillis(), id);
  for (RawAnomalyResultDTO result : rawResults) {
    rawAnomalyResultDAO.delete(result);
  }
  // merged anomaly mapping
  List<MergedAnomalyResultDTO> mergedResults = anomalyMergedResultDAO.findByFunctionId(id);
  for (MergedAnomalyResultDTO result : mergedResults) {
    anomalyMergedResultDAO.delete(result);
  }
  // delete from db
  anomalyFunctionDAO.deleteById(id);
  return Response.noContent().build();
}
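A hedged example of invoking this endpoint from a standard JAX-RS client; the host, port, and any resource-level path prefix are placeholders, since only the method-level @Path is shown above:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.Response;

Client client = ClientBuilder.newClient();
Response response = client.target("http://localhost:8080")  // placeholder host and port
    .path("anomaly-function/delete")  // prepend the resource's base path if it has one
    .queryParam("id", 42L)            // id of the anomaly function to delete
    .request()
    .delete();
// A 204 No Content status indicates the function and its dependent entities were removed.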
Use of com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO in project pinot by linkedin.
The class AnomalyResource, method viewMergedAnomaliesInRange.
// View merged anomalies for collection
@GET
@Path("/anomalies/view")
public List<MergedAnomalyResultDTO> viewMergedAnomaliesInRange(@NotNull @QueryParam("dataset") String dataset,
    @QueryParam("startTimeIso") String startTimeIso, @QueryParam("endTimeIso") String endTimeIso,
    @QueryParam("metric") String metric, @QueryParam("dimensions") String exploredDimensions,
    @DefaultValue("true") @QueryParam("applyAlertFilter") boolean applyAlertFilter) {
  if (StringUtils.isBlank(dataset)) {
    throw new IllegalArgumentException("dataset is a required query param");
  }
  // Default to the last seven days when no explicit range is given.
  DateTime endTime = DateTime.now();
  if (StringUtils.isNotEmpty(endTimeIso)) {
    endTime = ISODateTimeFormat.dateTimeParser().parseDateTime(endTimeIso);
  }
  DateTime startTime = endTime.minusDays(7);
  if (StringUtils.isNotEmpty(startTimeIso)) {
    startTime = ISODateTimeFormat.dateTimeParser().parseDateTime(startTimeIso);
  }
  List<MergedAnomalyResultDTO> anomalyResults = new ArrayList<>();
  try {
    if (StringUtils.isNotBlank(exploredDimensions)) {
      // Decode the dimensions map from the request, which may contain encoded symbols such as "%20".
      exploredDimensions = URLDecoder.decode(exploredDimensions, UTF8);
      try {
        // Ensure the dimension names are sorted in order to match the string in the backend database
        DimensionMap sortedDimensions = OBJECT_MAPPER.readValue(exploredDimensions, DimensionMap.class);
        exploredDimensions = OBJECT_MAPPER.writeValueAsString(sortedDimensions);
      } catch (IOException e) {
        LOG.warn("exploredDimensions may not be sorted because it could not be parsed as a JSON string: {}", e.toString());
      }
    }
    boolean loadRawAnomalies = false;
    if (StringUtils.isNotBlank(metric)) {
      if (StringUtils.isNotBlank(exploredDimensions)) {
        anomalyResults = anomalyMergedResultDAO.findByCollectionMetricDimensionsTime(dataset, metric, exploredDimensions, startTime.getMillis(), endTime.getMillis(), loadRawAnomalies);
      } else {
        anomalyResults = anomalyMergedResultDAO.findByCollectionMetricTime(dataset, metric, startTime.getMillis(), endTime.getMillis(), loadRawAnomalies);
      }
    } else {
      anomalyResults = anomalyMergedResultDAO.findByCollectionTime(dataset, startTime.getMillis(), endTime.getMillis(), loadRawAnomalies);
    }
  } catch (Exception e) {
    LOG.error("Exception in fetching anomalies", e);
  }
  if (applyAlertFilter) {
    // TODO: why need try catch?
    try {
      anomalyResults = AlertFilterHelper.applyFiltrationRule(anomalyResults, alertFilterFactory);
    } catch (Exception e) {
      LOG.warn("Failed to apply alert filters on anomalies for dataset:{}, metric:{}, start:{}, end:{}, exception:{}", dataset, metric, startTimeIso, endTimeIso, e);
    }
  }
  return anomalyResults;
}
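The Jackson round-trip through DimensionMap canonicalizes key order so the query string matches what is stored in the backend. The same idea can be illustrated with a plain TreeMap, which also serializes its keys in sorted order; the input JSON here is made up for the example:

import java.util.TreeMap;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

ObjectMapper mapper = new ObjectMapper();
String raw = "{\"country\":\"US\",\"browser\":\"chrome\"}";
// Reading into a TreeMap sorts the keys, so re-serialization yields a canonical string.
TreeMap<String, String> sorted = mapper.readValue(raw, new TypeReference<TreeMap<String, String>>() { });
String canonical = mapper.writeValueAsString(sorted);
// canonical is {"browser":"chrome","country":"US"}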