
Example 16 with AnomalyFunctionDTO

Use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by linkedin.

From the class EntityManagerResource, method updateEntity.

@POST
public Response updateEntity(@QueryParam("entityType") String entityTypeStr, String jsonPayload) {
    if (Strings.isNullOrEmpty(entityTypeStr)) {
        throw new WebApplicationException("entityType cannot be null");
    }
    EntityType entityType = EntityType.valueOf(entityTypeStr);
    try {
        switch(entityType) {
            case ANOMALY_FUNCTION:
                AnomalyFunctionDTO anomalyFunctionDTO = OBJECT_MAPPER.readValue(jsonPayload, AnomalyFunctionDTO.class);
                if (anomalyFunctionDTO.getId() == null) {
                    anomalyFunctionManager.save(anomalyFunctionDTO);
                } else {
                    anomalyFunctionManager.update(anomalyFunctionDTO);
                }
                break;
            case EMAIL_CONFIGURATION:
                EmailConfigurationDTO emailConfigurationDTO = OBJECT_MAPPER.readValue(jsonPayload, EmailConfigurationDTO.class);
                emailConfigurationManager.update(emailConfigurationDTO);
                break;
            case DASHBOARD_CONFIG:
                DashboardConfigDTO dashboardConfigDTO = OBJECT_MAPPER.readValue(jsonPayload, DashboardConfigDTO.class);
                dashboardConfigManager.update(dashboardConfigDTO);
                break;
            case DATASET_CONFIG:
                DatasetConfigDTO datasetConfigDTO = OBJECT_MAPPER.readValue(jsonPayload, DatasetConfigDTO.class);
                datasetConfigManager.update(datasetConfigDTO);
                break;
            case METRIC_CONFIG:
                MetricConfigDTO metricConfigDTO = OBJECT_MAPPER.readValue(jsonPayload, MetricConfigDTO.class);
                metricConfigManager.update(metricConfigDTO);
                break;
            case OVERRIDE_CONFIG:
                OverrideConfigDTO overrideConfigDTO = OBJECT_MAPPER.readValue(jsonPayload, OverrideConfigDTO.class);
                if (overrideConfigDTO.getId() == null) {
                    overrideConfigManager.save(overrideConfigDTO);
                } else {
                    overrideConfigManager.update(overrideConfigDTO);
                }
                break;
            case ALERT_CONFIG:
                AlertConfigDTO alertConfigDTO = OBJECT_MAPPER.readValue(jsonPayload, AlertConfigDTO.class);
                if (alertConfigDTO.getId() == null) {
                    alertConfigManager.save(alertConfigDTO);
                } else {
                    alertConfigManager.update(alertConfigDTO);
                }
                break;
        }
    } catch (IOException e) {
        LOG.error("Error saving the entity with payload : " + jsonPayload, e);
        throw new WebApplicationException(e);
    }
    return Response.ok().build();
}
Also used: DatasetConfigDTO(com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO), MetricConfigDTO(com.linkedin.thirdeye.datalayer.dto.MetricConfigDTO), OverrideConfigDTO(com.linkedin.thirdeye.datalayer.dto.OverrideConfigDTO), WebApplicationException(javax.ws.rs.WebApplicationException), AnomalyFunctionDTO(com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO), EmailConfigurationDTO(com.linkedin.thirdeye.datalayer.dto.EmailConfigurationDTO), AlertConfigDTO(com.linkedin.thirdeye.datalayer.dto.AlertConfigDTO), IOException(java.io.IOException), DashboardConfigDTO(com.linkedin.thirdeye.datalayer.dto.DashboardConfigDTO), POST(javax.ws.rs.POST)
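
For context, updateEntity reads the target type from the entityType query parameter and the entity itself from the raw JSON request body, saving when the payload has no id and updating otherwise. A minimal client-side sketch using the standard JAX-RS 2.0 client is shown below; the base URL, resource path, and payload field names are assumptions made for illustration, since the class-level @Path annotation and the DTO schema are not part of the snippet.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class EntityUpdateClientSketch {

    public static void main(String[] args) {
        // Payload without an "id" field, so the resource takes the save() branch above.
        // Field names are illustrative; the real AnomalyFunctionDTO schema defines what is accepted.
        String jsonPayload = "{\"functionName\":\"exampleFunction\",\"metric\":\"exampleMetric\"}";
        Client client = ClientBuilder.newClient();
        Response response = client
            // Hypothetical base URL and path; the class-level @Path is not shown in the snippet.
            .target("http://localhost:8080/thirdeye/entity")
            .queryParam("entityType", "ANOMALY_FUNCTION")
            .request(MediaType.APPLICATION_JSON)
            .post(Entity.json(jsonPayload));
        System.out.println("HTTP status: " + response.getStatus());
        client.close();
    }
}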

Example 17 with AnomalyFunctionDTO

Use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by linkedin.

From the class TestWeekOverWeekRuleFunction, method analyzeWoW.

@Test(dataProvider = "timeSeriesDataProvider")
public void analyzeWoW(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
    AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
    anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
    // Append properties for anomaly function specific setting
    properties.put(WeekOverWeekRuleFunction.BASELINE, "w/w");
    properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "-0.2");
    // Create anomaly function spec
    AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
    functionSpec.setMetric(mainMetric);
    functionSpec.setProperties(toString(properties));
    WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
    function.init(functionSpec);
    anomalyDetectionContext.setAnomalyDetectionFunction(function);
    anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
    List<TimeSeries> singleBaseline = new ArrayList<>();
    singleBaseline.add(baselines.get(0));
    anomalyDetectionContext.setBaselines(mainMetric, singleBaseline);
    anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
    List<RawAnomalyResultDTO> rawAnomalyResults = function.analyze(anomalyDetectionContext);
    compareWoWRawAnomalies(rawAnomalyResults);
    // Test data model
    List<Interval> expectedDataRanges = new ArrayList<>();
    expectedDataRanges.add(new Interval(observedStartTime, observedStartTime + bucketMillis * 5));
    expectedDataRanges.add(new Interval(observedStartTime - oneWeekInMillis, observedStartTime + bucketMillis * 5 - oneWeekInMillis));
    List<Interval> actualDataRanges = function.getDataModel().getAllDataIntervals(observedStartTime, observedStartTime + bucketMillis * 5);
    Assert.assertEquals(actualDataRanges, expectedDataRanges);
}
Also used: AnomalyDetectionContext(com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext), TimeSeries(com.linkedin.thirdeye.anomalydetection.context.TimeSeries), RawAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO), ArrayList(java.util.ArrayList), AnomalyFunctionDTO(com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO), Interval(org.joda.time.Interval), Test(org.testng.annotations.Test)
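
The test above configures the "w/w" baseline with a change threshold of -0.2, meaning a bucket is flagged when its value, compared with the same bucket one week earlier, changes by -20% or worse. A standalone sketch of that comparison is below; the helper and its data representation are illustrative and not part of the ThirdEye codebase, which encapsulates this logic in WeekOverWeekRuleFunction and its detection model.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

public class WoWRuleSketch {

    private static final long ONE_WEEK_MS = TimeUnit.DAYS.toMillis(7);

    // Flags timestamps whose week-over-week relative change is at or below the (negative) threshold.
    static List<Long> flagWoWDrops(Map<Long, Double> current, Map<Long, Double> previousWeek,
            double changeThreshold) {
        List<Long> anomalousTimestamps = new ArrayList<>();
        for (Map.Entry<Long, Double> bucket : current.entrySet()) {
            Double baseline = previousWeek.get(bucket.getKey() - ONE_WEEK_MS);
            if (baseline == null || baseline == 0d) {
                continue; // nothing to compare against
            }
            double relativeChange = (bucket.getValue() - baseline) / baseline;
            if (relativeChange <= changeThreshold) { // e.g. -0.2 flags drops of 20% or more
                anomalousTimestamps.add(bucket.getKey());
            }
        }
        return anomalousTimestamps;
    }
}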

Example 18 with AnomalyFunctionDTO

Use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by linkedin.

From the class TestWeekOverWeekRuleFunction, method recomputeMergedAnomalyWeight.

@Test(dataProvider = "timeSeriesDataProvider")
public void recomputeMergedAnomalyWeight(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
    // Expected RawAnomalies without smoothing
    List<RawAnomalyResultDTO> expectedRawAnomalies = new ArrayList<>();
    RawAnomalyResultDTO rawAnomaly1 = new RawAnomalyResultDTO();
    rawAnomaly1.setStartTime(observedStartTime + bucketMillis * 2);
    rawAnomaly1.setEndTime(observedStartTime + bucketMillis * 3);
    rawAnomaly1.setWeight(0.3d);
    rawAnomaly1.setScore(15d);
    expectedRawAnomalies.add(rawAnomaly1);
    RawAnomalyResultDTO rawAnomaly2 = new RawAnomalyResultDTO();
    rawAnomaly2.setStartTime(observedStartTime + bucketMillis * 3);
    rawAnomaly2.setEndTime(observedStartTime + bucketMillis * 4);
    rawAnomaly2.setWeight(0.22727272727272727);
    rawAnomaly2.setScore(15d);
    expectedRawAnomalies.add(rawAnomaly2);
    AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
    anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
    // Append properties for anomaly function specific setting
    properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
    properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
    // Create anomaly function spec
    AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
    functionSpec.setMetric(mainMetric);
    functionSpec.setProperties(toString(properties));
    WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
    function.init(functionSpec);
    anomalyDetectionContext.setAnomalyDetectionFunction(function);
    anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
    anomalyDetectionContext.setBaselines(mainMetric, baselines);
    anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
    MergedAnomalyResultDTO mergedAnomaly = new MergedAnomalyResultDTO();
    mergedAnomaly.setStartTime(expectedRawAnomalies.get(0).getStartTime());
    mergedAnomaly.setEndTime(expectedRawAnomalies.get(1).getEndTime());
    mergedAnomaly.setAnomalyResults(expectedRawAnomalies);
    function.updateMergedAnomalyInfo(anomalyDetectionContext, mergedAnomaly);
    // Test weight; weight is the percentage change between the sums of observed values and
    // expected values, respectively. Note that expected values are generated by the trained model,
    // which takes as input one or many baseline time series.
    final long oneWeekInMillis = TimeUnit.DAYS.toMillis(7);
    double observedTotal = 0d;
    double baselineTotal = 0d;
    int bucketCount = 0;
    Interval interval = new Interval(mergedAnomaly.getStartTime(), mergedAnomaly.getEndTime());
    TimeSeries observedTS = anomalyDetectionContext.getTransformedCurrent(mainMetric);
    List<TimeSeries> baselineTSs = anomalyDetectionContext.getTransformedBaselines(mainMetric);
    for (long timestamp : observedTS.timestampSet()) {
        if (interval.contains(timestamp)) {
            ++bucketCount;
            observedTotal += observedTS.get(timestamp);
            for (int i = 0; i < baselineTSs.size(); ++i) {
                TimeSeries baselineTS = baselineTSs.get(i);
                long baseTimeStamp = timestamp - oneWeekInMillis * (i + 1);
                baselineTotal += baselineTS.get(baseTimeStamp);
            }
        }
    }
    baselineTotal /= baselineTSs.size();
    // Compare anomaly weight, avg. current, avg. baseline, score, etc
    double expectedWeight = (observedTotal - baselineTotal) / baselineTotal;
    Assert.assertEquals(mergedAnomaly.getWeight(), expectedWeight, EPSILON);
    double avgCurrent = observedTotal / bucketCount;
    Assert.assertEquals(mergedAnomaly.getAvgCurrentVal(), avgCurrent, EPSILON);
    double avgBaseline = baselineTotal / bucketCount;
    Assert.assertEquals(mergedAnomaly.getAvgBaselineVal(), avgBaseline, EPSILON);
    // Test Score; score is the average of all raw anomalies' score
    double expectedScore = 0d;
    for (RawAnomalyResultDTO rawAnomaly : expectedRawAnomalies) {
        expectedScore += rawAnomaly.getScore();
    }
    expectedScore /= expectedRawAnomalies.size();
    Assert.assertEquals(mergedAnomaly.getScore(), expectedScore, EPSILON);
}
Also used: TimeSeries(com.linkedin.thirdeye.anomalydetection.context.TimeSeries), ArrayList(java.util.ArrayList), RawAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO), AnomalyDetectionContext(com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext), MergedAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO), AnomalyFunctionDTO(com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO), Interval(org.joda.time.Interval), Test(org.testng.annotations.Test)
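
The assertions above spell out how updateMergedAnomalyInfo is expected to fill in the merged anomaly: the weight is the relative change between the observed sum and the baseline sum (the latter averaged over all baseline series), the averages divide those sums by the bucket count, and the score is the mean of the raw anomalies' scores. Condensed into a standalone value class (illustrative, not project code):

// Illustrative restatement of the quantities asserted above, not part of the project.
final class MergedAnomalyStats {

    final double weight;      // relative change between observed and averaged baseline totals
    final double avgCurrent;  // average observed value per bucket
    final double avgBaseline; // average baseline value per bucket
    final double score;       // mean of the raw anomalies' scores

    MergedAnomalyStats(double observedTotal, double averagedBaselineTotal, int bucketCount,
            double[] rawAnomalyScores) {
        this.weight = (observedTotal - averagedBaselineTotal) / averagedBaselineTotal;
        this.avgCurrent = observedTotal / bucketCount;
        this.avgBaseline = averagedBaselineTotal / bucketCount;
        double scoreSum = 0d;
        for (double s : rawAnomalyScores) {
            scoreSum += s;
        }
        this.score = scoreSum / rawAnomalyScores.length;
    }
}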

Example 19 with AnomalyFunctionDTO

Use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by linkedin.

From the class TestWeekOverWeekRuleFunction, method analyzeWo2WAvg.

@Test(dataProvider = "timeSeriesDataProvider")
public void analyzeWo2WAvg(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
    // Expected RawAnomalies without smoothing
    List<RawAnomalyResultDTO> expectedRawAnomalies = new ArrayList<>();
    RawAnomalyResultDTO rawAnomaly1 = new RawAnomalyResultDTO();
    rawAnomaly1.setStartTime(observedStartTime + bucketMillis * 2);
    rawAnomaly1.setEndTime(observedStartTime + bucketMillis * 3);
    rawAnomaly1.setWeight(0.3d);
    rawAnomaly1.setScore(15d);
    expectedRawAnomalies.add(rawAnomaly1);
    RawAnomalyResultDTO rawAnomaly2 = new RawAnomalyResultDTO();
    rawAnomaly2.setStartTime(observedStartTime + bucketMillis * 3);
    rawAnomaly2.setEndTime(observedStartTime + bucketMillis * 4);
    rawAnomaly2.setWeight(0.22727272727272727);
    rawAnomaly2.setScore(15d);
    expectedRawAnomalies.add(rawAnomaly2);
    AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
    anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
    // Append properties for anomaly function specific setting
    properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
    properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
    // Create anomaly function spec
    AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
    functionSpec.setMetric(mainMetric);
    functionSpec.setProperties(toString(properties));
    WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
    function.init(functionSpec);
    anomalyDetectionContext.setAnomalyDetectionFunction(function);
    anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
    anomalyDetectionContext.setBaselines(mainMetric, baselines);
    anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
    List<RawAnomalyResultDTO> rawAnomalyResults = function.analyze(anomalyDetectionContext);
    compareWo2WAvgRawAnomalies(rawAnomalyResults);
    // Test data model
    List<Interval> expectedDataRanges = new ArrayList<>();
    expectedDataRanges.add(new Interval(observedStartTime, observedStartTime + bucketMillis * 5));
    expectedDataRanges.add(new Interval(observedStartTime - oneWeekInMillis, observedStartTime + bucketMillis * 5 - oneWeekInMillis));
    expectedDataRanges.add(new Interval(observedStartTime - oneWeekInMillis * 2, observedStartTime + bucketMillis * 5 - oneWeekInMillis * 2));
    List<Interval> actualDataRanges = function.getDataModel().getAllDataIntervals(observedStartTime, observedStartTime + bucketMillis * 5);
    Assert.assertEquals(actualDataRanges, expectedDataRanges);
}
Also used: RawAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO), AnomalyDetectionContext(com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext), ArrayList(java.util.ArrayList), AnomalyFunctionDTO(com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO), Interval(org.joda.time.Interval), Test(org.testng.annotations.Test)
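
The expected intervals encode how the "w/2wAvg" data model fetches data: the current window plus the same window shifted back by one and by two weeks (the earlier "w/w" test expects the same construction with a single shifted copy). A small sketch of that construction follows, using the same Joda-Time Interval type as the test; the helper itself is illustrative and not the project's data model implementation.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.joda.time.Interval;

public class BaselineIntervalSketch {

    private static final long ONE_WEEK_MS = TimeUnit.DAYS.toMillis(7);

    // Current window plus one copy shifted back per baseline week: 1 for "w/w", 2 for "w/2wAvg".
    static List<Interval> dataIntervals(long windowStart, long windowEnd, int baselineWeeks) {
        List<Interval> intervals = new ArrayList<>();
        for (int weeksBack = 0; weeksBack <= baselineWeeks; weeksBack++) {
            long shift = ONE_WEEK_MS * weeksBack;
            intervals.add(new Interval(windowStart - shift, windowEnd - shift));
        }
        return intervals;
    }
}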

Example 20 with AnomalyFunctionDTO

Use of com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO in project pinot by linkedin.

From the class TestWeekOverWeekRuleFunction, method testTotalCountThresholdFunction.

@Test(dataProvider = "timeSeriesDataProvider")
public void testTotalCountThresholdFunction(Properties properties, TimeSeriesKey timeSeriesKey, long bucketSizeInMs, TimeSeries observedTimeSeries, List<TimeSeries> baselines) throws Exception {
    AnomalyDetectionContext anomalyDetectionContext = new AnomalyDetectionContext();
    anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
    // Append properties for anomaly function specific setting
    String totalCountTimeSeriesName = "totalCount";
    TimeSeries totalCountTimeSeries = new TimeSeries();
    {
        totalCountTimeSeries.set(observedStartTime, 10d);
        totalCountTimeSeries.set(observedStartTime + bucketMillis, 10d);
        totalCountTimeSeries.set(observedStartTime + bucketMillis * 2, 10d);
        totalCountTimeSeries.set(observedStartTime + bucketMillis * 3, 10d);
        totalCountTimeSeries.set(observedStartTime + bucketMillis * 4, 10d);
        Interval totalCountTimeSeriesInterval = new Interval(observedStartTime, observedStartTime + bucketMillis * 5);
        totalCountTimeSeries.setTimeSeriesInterval(totalCountTimeSeriesInterval);
    }
    properties.put(TotalCountThresholdRemovalFunction.TOTAL_COUNT_METRIC_NAME, totalCountTimeSeriesName);
    properties.put(TotalCountThresholdRemovalFunction.TOTAL_COUNT_THRESHOLD, "51");
    properties.put(WeekOverWeekRuleFunction.BASELINE, "w/2wAvg");
    properties.put(SimpleThresholdDetectionModel.CHANGE_THRESHOLD, "0.2");
    // Create anomaly function spec
    AnomalyFunctionDTO functionSpec = new AnomalyFunctionDTO();
    functionSpec.setMetric(mainMetric);
    functionSpec.setProperties(toString(properties));
    // Create anomalyDetectionContext using anomaly function spec
    WeekOverWeekRuleFunction function = new WeekOverWeekRuleFunction();
    function.init(functionSpec);
    anomalyDetectionContext.setAnomalyDetectionFunction(function);
    anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
    anomalyDetectionContext.setBaselines(mainMetric, baselines);
    anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
    anomalyDetectionContext.setCurrent(totalCountTimeSeriesName, totalCountTimeSeries);
    List<RawAnomalyResultDTO> rawAnomalyResults = function.analyze(anomalyDetectionContext);
    // No anomalies after smoothing the time series
    Assert.assertEquals(rawAnomalyResults.size(), 0);
    // Test disabled total count by lowering the threshold
    anomalyDetectionContext = new AnomalyDetectionContext();
    anomalyDetectionContext.setBucketSizeInMS(bucketSizeInMs);
    properties.put(TotalCountThresholdRemovalFunction.TOTAL_COUNT_THRESHOLD, "0");
    // Create anomaly function spec
    functionSpec = new AnomalyFunctionDTO();
    functionSpec.setMetric(mainMetric);
    functionSpec.setProperties(toString(properties));
    function = new WeekOverWeekRuleFunction();
    function.init(functionSpec);
    anomalyDetectionContext.setAnomalyDetectionFunction(function);
    anomalyDetectionContext.setCurrent(mainMetric, observedTimeSeries);
    anomalyDetectionContext.setBaselines(mainMetric, baselines);
    anomalyDetectionContext.setTimeSeriesKey(timeSeriesKey);
    anomalyDetectionContext.setCurrent(totalCountTimeSeriesName, totalCountTimeSeries);
    rawAnomalyResults = function.analyze(anomalyDetectionContext);
    compareWo2WAvgRawAnomalies(rawAnomalyResults);
}
Also used: AnomalyDetectionContext(com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext), TimeSeries(com.linkedin.thirdeye.anomalydetection.context.TimeSeries), RawAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO), AnomalyFunctionDTO(com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO), Interval(org.joda.time.Interval), Test(org.testng.annotations.Test)
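
The arithmetic behind the two assertions: the synthetic totalCount series contributes five buckets of 10 for a total of 50, which is below the threshold of 51, so the removal function suppresses every raw anomaly; lowering the threshold to 0 effectively disables the filter and the usual w/2wAvg anomalies reappear. A tiny sketch of that decision is below; the exact comparison lives in TotalCountThresholdRemovalFunction, and the helper only restates the arithmetic the test relies on.

// Illustrative restatement of the total-count filter decision exercised above.
public class TotalCountFilterSketch {

    // True when the summed total count stays below the threshold, i.e. anomalies get suppressed.
    static boolean suppressAnomalies(double[] totalCounts, double threshold) {
        double sum = 0d;
        for (double count : totalCounts) {
            sum += count;
        }
        return sum < threshold;
    }

    public static void main(String[] args) {
        double[] totalCounts = {10d, 10d, 10d, 10d, 10d}; // the synthetic series from the test
        System.out.println(suppressAnomalies(totalCounts, 51d)); // true: 50 < 51, anomalies removed
        System.out.println(suppressAnomalies(totalCounts, 0d));  // false: filter effectively disabled
    }
}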

Aggregations

AnomalyFunctionDTO (com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO): 74 usages
ArrayList (java.util.ArrayList): 23 usages
DateTime (org.joda.time.DateTime): 20 usages
RawAnomalyResultDTO (com.linkedin.thirdeye.datalayer.dto.RawAnomalyResultDTO): 19 usages
MergedAnomalyResultDTO (com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO): 17 usages
Test (org.testng.annotations.Test): 16 usages
Path (javax.ws.rs.Path): 11 usages
DatasetConfigDTO (com.linkedin.thirdeye.datalayer.dto.DatasetConfigDTO): 9 usages
POST (javax.ws.rs.POST): 8 usages
AnomalyDetectionContext (com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext): 7 usages
TimeGranularity (com.linkedin.thirdeye.api.TimeGranularity): 7 usages
EmailConfigurationDTO (com.linkedin.thirdeye.datalayer.dto.EmailConfigurationDTO): 7 usages
AnomalyFunctionBean (com.linkedin.thirdeye.datalayer.pojo.AnomalyFunctionBean): 6 usages
Interval (org.joda.time.Interval): 6 usages
MetricTimeSeries (com.linkedin.thirdeye.api.MetricTimeSeries): 5 usages
AnomalyFeedbackDTO (com.linkedin.thirdeye.datalayer.dto.AnomalyFeedbackDTO): 5 usages
DetectionStatusDTO (com.linkedin.thirdeye.datalayer.dto.DetectionStatusDTO): 5 usages
ScalingFactor (com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor): 5 usages
HashMap (java.util.HashMap): 5 usages
NullArgumentException (org.apache.commons.lang.NullArgumentException): 5 usages