
Example 1 with TimeBucket

Use of com.linkedin.thirdeye.dashboard.views.TimeBucket in project pinot by linkedin.

From the class AbstractModularizedAnomalyFunction, method getTimeSeriesView:

// TODO: Generate time series view using ViewModel
@Override
public AnomalyTimelinesView getTimeSeriesView(MetricTimeSeries timeSeries, long bucketMillis, String metric, long viewWindowStartTime, long viewWindowEndTime, List<MergedAnomalyResultDTO> knownAnomalies) {
    AnomalyDetectionContext anomalyDetectionContext = BackwardAnomalyFunctionUtils.buildAnomalyDetectionContext(this, timeSeries, spec.getTopicMetric(), null, spec.getBucketSize(), spec.getBucketUnit(), new DateTime(viewWindowStartTime), new DateTime(viewWindowEndTime));
    String mainMetric = anomalyDetectionContext.getAnomalyDetectionFunction().getSpec().getTopicMetric();
    this.transformAndPredictTimeSeries(mainMetric, anomalyDetectionContext);
    TimeSeries observedTS = anomalyDetectionContext.getTransformedCurrent(mainMetric);
    TimeSeries expectedTS = ((ExpectedTimeSeriesPredictionModel) anomalyDetectionContext.getTrainedPredictionModel(mainMetric)).getExpectedTimeSeries();
    long expectedTSStartTime = expectedTS.getTimeSeriesInterval().getStartMillis();
    // Construct AnomalyTimelinesView
    AnomalyTimelinesView anomalyTimelinesView = new AnomalyTimelinesView();
    int bucketCount = (int) ((viewWindowEndTime - viewWindowStartTime) / bucketMillis);
    for (int i = 0; i < bucketCount; ++i) {
        long currentBucketMillis = viewWindowStartTime + i * bucketMillis;
        long baselineBucketMillis = expectedTSStartTime + i * bucketMillis;
        double observedValue = 0d;
        if (observedTS.hasTimestamp(currentBucketMillis)) {
            observedValue = observedTS.get(currentBucketMillis);
        }
        double expectedValue = 0d;
        if (expectedTS.hasTimestamp(baselineBucketMillis)) {
            expectedValue = expectedTS.get(baselineBucketMillis);
        }
        TimeBucket timebucket = new TimeBucket(currentBucketMillis, currentBucketMillis + bucketMillis, baselineBucketMillis, baselineBucketMillis + bucketMillis);
        anomalyTimelinesView.addTimeBuckets(timebucket);
        anomalyTimelinesView.addCurrentValues(observedValue);
        anomalyTimelinesView.addBaselineValues(expectedValue);
    }
    return anomalyTimelinesView;
}
Also used : AnomalyDetectionContext(com.linkedin.thirdeye.anomalydetection.context.AnomalyDetectionContext) TimeSeries(com.linkedin.thirdeye.anomalydetection.context.TimeSeries) MetricTimeSeries(com.linkedin.thirdeye.api.MetricTimeSeries) ExpectedTimeSeriesPredictionModel(com.linkedin.thirdeye.anomalydetection.model.prediction.ExpectedTimeSeriesPredictionModel) TimeBucket(com.linkedin.thirdeye.dashboard.views.TimeBucket) AnomalyTimelinesView(com.linkedin.thirdeye.anomaly.views.AnomalyTimelinesView) DateTime(org.joda.time.DateTime)
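
The loop in getTimeSeriesView pairs bucket i of the view window with bucket i of the expected (baseline) series, so the two series only need to agree on bucket size, not on start time. Below is a minimal standalone sketch of just that alignment arithmetic; the class name, window values, and the week-over-week baseline offset are hypothetical and serve only to illustrate the index math above.

// Sketch of the current/baseline bucket pairing used in getTimeSeriesView.
// All timestamps below are made up; only the arithmetic mirrors the example.
public class BucketAlignmentSketch {
    public static void main(String[] args) {
        long bucketMillis = 3_600_000L;                  // 1-hour buckets
        long viewWindowStartTime = 1_500_000_000_000L;   // hypothetical view window start
        long viewWindowEndTime = viewWindowStartTime + 4 * bucketMillis;
        // hypothetical baseline start, e.g. a week-over-week expected series
        long expectedTSStartTime = viewWindowStartTime - 7 * 24 * bucketMillis;

        int bucketCount = (int) ((viewWindowEndTime - viewWindowStartTime) / bucketMillis);
        for (int i = 0; i < bucketCount; ++i) {
            long currentBucketMillis = viewWindowStartTime + i * bucketMillis;
            long baselineBucketMillis = expectedTSStartTime + i * bucketMillis;
            System.out.printf("bucket %d: current [%d, %d) <-> baseline [%d, %d)%n",
                i, currentBucketMillis, currentBucketMillis + bucketMillis,
                baselineBucketMillis, baselineBucketMillis + bucketMillis);
        }
    }
}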

Example 2 with TimeBucket

Use of com.linkedin.thirdeye.dashboard.views.TimeBucket in project pinot by linkedin.

From the class AnomalyResource, method getAnomalyMergedResultTimeSeries:

/**
   * Returns the time series for the given anomaly.
   *
   * If viewWindowStartTime and/or viewWindowEndTime is not given, then a window is padded automatically. The padded
   * window is half of the anomaly window size. For instance, if the anomaly lasts for 4 hours, then the padding window
   * size is 2 hours. The maximum padding size is 1 day.
   *
   * @param anomalyResultId the id of the given anomaly
   * @param viewWindowStartTime start time of the time series, inclusive
   * @param viewWindowEndTime end time of the time series, inclusive
   * @return the time series of the given anomaly
   * @throws Exception when it fails to retrieve collection, i.e., dataset, information
   */
@GET
@Path("/anomaly-merged-result/timeseries/{anomaly_merged_result_id}")
public AnomalyTimelinesView getAnomalyMergedResultTimeSeries(@NotNull @PathParam("anomaly_merged_result_id") long anomalyResultId, @NotNull @QueryParam("aggTimeGranularity") String aggTimeGranularity, @QueryParam("start") long viewWindowStartTime, @QueryParam("end") long viewWindowEndTime) throws Exception {
    boolean loadRawAnomalies = false;
    MergedAnomalyResultDTO anomalyResult = anomalyMergedResultDAO.findById(anomalyResultId, loadRawAnomalies);
    DimensionMap dimensions = anomalyResult.getDimensions();
    AnomalyFunctionDTO anomalyFunctionSpec = anomalyResult.getFunction();
    BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
    // By default, the padding window size is half of the anomaly window.
    if (viewWindowStartTime == 0 || viewWindowEndTime == 0) {
        long anomalyWindowStartTime = anomalyResult.getStartTime();
        long anomalyWindowEndTime = anomalyResult.getEndTime();
        long bucketMillis = TimeUnit.MILLISECONDS.convert(anomalyFunctionSpec.getBucketSize(), anomalyFunctionSpec.getBucketUnit());
        long bucketCount = (anomalyWindowEndTime - anomalyWindowStartTime) / bucketMillis;
        long paddingMillis = Math.max(1, (bucketCount / 2)) * bucketMillis;
        if (paddingMillis > TimeUnit.DAYS.toMillis(1)) {
            paddingMillis = TimeUnit.DAYS.toMillis(1);
        }
        if (viewWindowStartTime == 0) {
            viewWindowStartTime = anomalyWindowStartTime - paddingMillis;
        }
        if (viewWindowEndTime == 0) {
            viewWindowEndTime = anomalyWindowEndTime + paddingMillis;
        }
    }
    TimeGranularity timeGranularity = Utils.getAggregationTimeGranularity(aggTimeGranularity, anomalyFunctionSpec.getCollection());
    long bucketMillis = timeGranularity.toMillis();
    // ThirdEye backend is end time exclusive, so one more bucket is appended to make end time inclusive for frontend.
    viewWindowEndTime += bucketMillis;
    long maxDataTime = collectionMaxDataTimeCache.get(anomalyResult.getCollection());
    if (viewWindowEndTime > maxDataTime) {
        viewWindowEndTime = (anomalyResult.getEndTime() > maxDataTime) ? anomalyResult.getEndTime() : maxDataTime;
    }
    AnomalyDetectionInputContext adInputContext = TimeBasedAnomalyMerger.fetchDataByDimension(viewWindowStartTime, viewWindowEndTime, dimensions, anomalyFunction, anomalyMergedResultDAO, overrideConfigDAO, false);
    MetricTimeSeries metricTimeSeries = adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
    if (metricTimeSeries == null) {
        // no time series was fetched for the given anomaly's dimensions; return an empty view
        return new AnomalyTimelinesView();
    }
    // Transform time series with scaling factor
    List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();
    if (CollectionUtils.isNotEmpty(scalingFactors)) {
        Properties properties = anomalyFunction.getProperties();
        MetricTransfer.rescaleMetric(metricTimeSeries, viewWindowStartTime, scalingFactors, anomalyFunctionSpec.getTopicMetric(), properties);
    }
    List<MergedAnomalyResultDTO> knownAnomalies = adInputContext.getKnownMergedAnomalies().get(dimensions);
    // Known anomalies are ignored (the null parameter) because 1. we can reduce users' waiting time and 2. presentation
    // data does not need to be as accurate as the one used for detecting anomalies
    AnomalyTimelinesView anomalyTimelinesView = anomalyFunction.getTimeSeriesView(metricTimeSeries, bucketMillis, anomalyFunctionSpec.getTopicMetric(), viewWindowStartTime, viewWindowEndTime, knownAnomalies);
    // Generate summary for frontend
    List<TimeBucket> timeBuckets = anomalyTimelinesView.getTimeBuckets();
    if (timeBuckets.size() > 0) {
        TimeBucket firstBucket = timeBuckets.get(0);
        anomalyTimelinesView.addSummary("currentStart", Long.toString(firstBucket.getCurrentStart()));
        anomalyTimelinesView.addSummary("baselineStart", Long.toString(firstBucket.getBaselineStart()));
        TimeBucket lastBucket = timeBuckets.get(timeBuckets.size() - 1);
        anomalyTimelinesView.addSummary("currentEnd", Long.toString(lastBucket.getCurrentStart()));
        anomalyTimelinesView.addSummary("baselineEnd", Long.toString(lastBucket.getBaselineEnd()));
    }
    return anomalyTimelinesView;
}
Also used : BaseAnomalyFunction(com.linkedin.thirdeye.detector.function.BaseAnomalyFunction) TimeBucket(com.linkedin.thirdeye.dashboard.views.TimeBucket) MetricTimeSeries(com.linkedin.thirdeye.api.MetricTimeSeries) ScalingFactor(com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor) AnomalyTimelinesView(com.linkedin.thirdeye.anomaly.views.AnomalyTimelinesView) Properties(java.util.Properties) AnomalyDetectionInputContext(com.linkedin.thirdeye.anomaly.detection.AnomalyDetectionInputContext) MergedAnomalyResultDTO(com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO) TimeGranularity(com.linkedin.thirdeye.api.TimeGranularity) DimensionMap(com.linkedin.thirdeye.api.DimensionMap) AnomalyFunctionDTO(com.linkedin.thirdeye.datalayer.dto.AnomalyFunctionDTO) Path(javax.ws.rs.Path) GET(javax.ws.rs.GET)
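
The padding rule described in the Javadoc is plain arithmetic: half the anomaly window (at least one bucket), capped at one day. Here is a standalone sketch of just that computation; the anomaly bounds and bucket size are made-up values, not taken from any ThirdEye dataset.

// Sketch of the view-window padding rule: half the anomaly window, capped at one day.
import java.util.concurrent.TimeUnit;

public class PaddingSketch {
    public static void main(String[] args) {
        long bucketMillis = TimeUnit.HOURS.toMillis(1);
        long anomalyWindowStartTime = 1_500_000_000_000L;                                // hypothetical
        long anomalyWindowEndTime = anomalyWindowStartTime + TimeUnit.HOURS.toMillis(4); // 4-hour anomaly

        long bucketCount = (anomalyWindowEndTime - anomalyWindowStartTime) / bucketMillis;
        long paddingMillis = Math.max(1, bucketCount / 2) * bucketMillis;   // half the window, >= 1 bucket
        paddingMillis = Math.min(paddingMillis, TimeUnit.DAYS.toMillis(1)); // capped at one day

        long viewWindowStartTime = anomalyWindowStartTime - paddingMillis;  // 2 hours of padding here
        long viewWindowEndTime = anomalyWindowEndTime + paddingMillis;
        System.out.println("padded window: [" + viewWindowStartTime + ", " + viewWindowEndTime + "]");
    }
}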

Example 3 with TimeBucket

Use of com.linkedin.thirdeye.dashboard.views.TimeBucket in project pinot by linkedin.

From the class AnomaliesResource, method constructAnomalyDetails:

/** Constructs anomaly details from the data fetched by the preceding calls.
   *
   * @param metricName
   * @param dataset
   * @param datasetConfig
   * @param mergedAnomaly
   * @param anomalyFunction
   * @param currentStartTime inclusive
   * @param currentEndTime inclusive
   * @param anomalyTimelinesView
   * @param timeSeriesDateFormatter
   * @param startEndDateFormatterHours
   * @param startEndDateFormatterDays
   * @return
   * @throws JSONException
   */
private AnomalyDetails constructAnomalyDetails(String metricName, String dataset, DatasetConfigDTO datasetConfig, MergedAnomalyResultDTO mergedAnomaly, AnomalyFunctionDTO anomalyFunction, long currentStartTime, long currentEndTime, AnomalyTimelinesView anomalyTimelinesView, DateTimeFormatter timeSeriesDateFormatter, DateTimeFormatter startEndDateFormatterHours, DateTimeFormatter startEndDateFormatterDays, String externalUrl) throws JSONException {
    MetricConfigDTO metricConfigDTO = metricConfigDAO.findByMetricAndDataset(metricName, dataset);
    AnomalyDetails anomalyDetails = new AnomalyDetails();
    anomalyDetails.setMetric(metricName);
    anomalyDetails.setDataset(dataset);
    if (metricConfigDTO != null) {
        anomalyDetails.setMetricId(metricConfigDTO.getId());
    }
    // The filter ensures that the returned time series from anomalies function only includes the values that are
    // located inside the request windows (i.e., between currentStartTime and currentEndTime, inclusive).
    List<TimeBucket> timeBuckets = anomalyTimelinesView.getTimeBuckets();
    int timeStartIndex = -1;
    int timeEndIndex = -1;
    for (int i = 0; i < timeBuckets.size(); ++i) {
        long currentTimeStamp = timeBuckets.get(i).getCurrentStart();
        if (timeStartIndex < 0 && currentTimeStamp >= currentStartTime) {
            timeStartIndex = i;
            timeEndIndex = i + 1;
        } else if (currentTimeStamp <= currentEndTime) {
            timeEndIndex = i + 1;
        } else if (currentTimeStamp > currentEndTime) {
            break;
        }
    }
    if (timeStartIndex < 0 || timeEndIndex < 0) {
        timeStartIndex = 0;
        timeEndIndex = 0;
    }
    // get this from timeseries calls
    List<String> dateValues = getDateFromTimeSeriesObject(timeBuckets.subList(timeStartIndex, timeEndIndex), timeSeriesDateFormatter);
    anomalyDetails.setDates(dateValues);
    anomalyDetails.setCurrentEnd(getFormattedDateTime(currentEndTime, datasetConfig, startEndDateFormatterHours, startEndDateFormatterDays));
    anomalyDetails.setCurrentStart(getFormattedDateTime(currentStartTime, datasetConfig, startEndDateFormatterHours, startEndDateFormatterDays));
    List<String> baselineValues = getDataFromTimeSeriesObject(anomalyTimelinesView.getBaselineValues().subList(timeStartIndex, timeEndIndex));
    anomalyDetails.setBaselineValues(baselineValues);
    List<String> currentValues = getDataFromTimeSeriesObject(anomalyTimelinesView.getCurrentValues().subList(timeStartIndex, timeEndIndex));
    anomalyDetails.setCurrentValues(currentValues);
    // from function and anomaly
    anomalyDetails.setAnomalyId(mergedAnomaly.getId());
    anomalyDetails.setAnomalyRegionStart(timeSeriesDateFormatter.print(mergedAnomaly.getStartTime()));
    anomalyDetails.setAnomalyRegionEnd(timeSeriesDateFormatter.print(mergedAnomaly.getEndTime()));
    Map<String, String> messageDataMap = getAnomalyMessageDataMap(mergedAnomaly.getMessage());
    anomalyDetails.setCurrent(messageDataMap.get(ANOMALY_CURRENT_VAL_KEY));
    anomalyDetails.setBaseline(messageDataMap.get(ANOMALY_BASELINE_VAL_KEY));
    anomalyDetails.setAnomalyFunctionId(anomalyFunction.getId());
    anomalyDetails.setAnomalyFunctionName(anomalyFunction.getFunctionName());
    anomalyDetails.setAnomalyFunctionType(anomalyFunction.getType());
    anomalyDetails.setAnomalyFunctionProps(anomalyFunction.getProperties());
    anomalyDetails.setAnomalyFunctionDimension(mergedAnomaly.getDimensions().toString());
    if (mergedAnomaly.getFeedback() != null) {
        anomalyDetails.setAnomalyFeedback(AnomalyDetails.getFeedbackStringFromFeedbackType(mergedAnomaly.getFeedback().getFeedbackType()));
    }
    anomalyDetails.setExternalUrl(externalUrl);
    return anomalyDetails;
}
Also used : MetricConfigDTO(com.linkedin.thirdeye.datalayer.dto.MetricConfigDTO) TimeBucket(com.linkedin.thirdeye.dashboard.views.TimeBucket) AnomalyDetails(com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomalyDetails)
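
The index scan in constructAnomalyDetails trims the returned buckets to the requested window by finding the subList range of buckets whose currentStart falls between currentStartTime and currentEndTime. A small self-contained sketch of that scan follows, using plain longs in place of TimeBucket objects; the timestamps are arbitrary.

// Sketch of the index-window filter: find the sub-range of bucket start times that
// fall inside [currentStartTime, currentEndTime]. Timestamps are hypothetical.
import java.util.Arrays;
import java.util.List;

public class BucketFilterSketch {
    public static void main(String[] args) {
        List<Long> bucketStarts = Arrays.asList(100L, 200L, 300L, 400L, 500L); // stand-ins for getCurrentStart()
        long currentStartTime = 200L;
        long currentEndTime = 400L;

        int timeStartIndex = -1;
        int timeEndIndex = -1;
        for (int i = 0; i < bucketStarts.size(); ++i) {
            long currentTimeStamp = bucketStarts.get(i);
            if (timeStartIndex < 0 && currentTimeStamp >= currentStartTime) {
                timeStartIndex = i;       // first bucket inside the window
                timeEndIndex = i + 1;
            } else if (currentTimeStamp <= currentEndTime) {
                timeEndIndex = i + 1;     // extend the window while buckets still fit
            } else {
                break;                    // past the end of the requested window
            }
        }
        if (timeStartIndex < 0 || timeEndIndex < 0) {
            timeStartIndex = 0;
            timeEndIndex = 0;             // nothing inside the window: empty sub-range
        }
        System.out.println(bucketStarts.subList(timeStartIndex, timeEndIndex)); // [200, 300, 400]
    }
}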

Example 4 with TimeBucket

Use of com.linkedin.thirdeye.dashboard.views.TimeBucket in project pinot by linkedin.

From the class TimeSeriesResource, method getContributorDataForDimension:

private TimeSeriesCompareMetricView getContributorDataForDimension(long metricId, long currentStart, long currentEnd, long baselineStart, long baselineEnd, String dimension, String filters, String granularity) {
    MetricConfigDTO metricConfigDTO = metricConfigDAO.findById(metricId);
    TimeSeriesCompareMetricView timeSeriesCompareMetricView = new TimeSeriesCompareMetricView(metricConfigDTO.getName(), metricId, currentStart, currentEnd);
    try {
        String dataset = metricConfigDTO.getDataset();
        ContributorViewRequest request = new ContributorViewRequest();
        request.setCollection(dataset);
        MetricExpression metricExpression = ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfigDTO);
        request.setMetricExpressions(Arrays.asList(metricExpression));
        DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(dataset);
        request.setBaselineStart(new DateTime(baselineStart, timeZoneForCollection));
        request.setBaselineEnd(new DateTime(baselineEnd, timeZoneForCollection));
        request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
        request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
        request.setTimeGranularity(Utils.getAggregationTimeGranularity(granularity, dataset));
        if (filters != null && !filters.isEmpty()) {
            filters = URLDecoder.decode(filters, "UTF-8");
            request.setFilters(ThirdEyeUtils.convertToMultiMap(filters));
        }
        request.setGroupByDimensions(Arrays.asList(dimension));
        ContributorViewHandler handler = new ContributorViewHandler(queryCache);
        ContributorViewResponse response = handler.process(request);
        // Assign the time buckets
        List<Long> timeBucketsCurrent = new ArrayList<>();
        List<Long> timeBucketsBaseline = new ArrayList<>();
        timeSeriesCompareMetricView.setTimeBucketsCurrent(timeBucketsCurrent);
        timeSeriesCompareMetricView.setTimeBucketsBaseline(timeBucketsBaseline);
        Map<String, ValuesContainer> subDimensionValuesMap = new LinkedHashMap<>();
        timeSeriesCompareMetricView.setSubDimensionContributionMap(subDimensionValuesMap);
        int timeBuckets = response.getTimeBuckets().size();
        // this container holds the overall (ALL) values, aggregated across sub-dimensions
        ValuesContainer vw = new ValuesContainer();
        subDimensionValuesMap.put(ALL, vw);
        vw.setCurrentValues(new double[timeBuckets]);
        vw.setBaselineValues(new double[timeBuckets]);
        vw.setPercentageChange(new String[timeBuckets]);
        vw.setCumulativeCurrentValues(new double[timeBuckets]);
        vw.setCumulativeBaselineValues(new double[timeBuckets]);
        vw.setCumulativePercentageChange(new String[timeBuckets]);
        // find the column indices in the response schema
        int subDimensionIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("dimensionValue");
        int currentValueIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("currentValue");
        int baselineValueIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("baselineValue");
        int percentageChangeIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("percentageChange");
        int cumCurrentValueIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("cumulativeCurrentValue");
        int cumBaselineValueIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("cumulativeBaselineValue");
        int cumPercentageChangeIndex = response.getResponseData().getSchema().getColumnsToIndexMapping().get("cumulativePercentageChange");
        // populate current and baseline time buckets
        for (int i = 0; i < timeBuckets; i++) {
            TimeBucket tb = response.getTimeBuckets().get(i);
            timeBucketsCurrent.add(tb.getCurrentStart());
            timeBucketsBaseline.add(tb.getBaselineStart());
        }
        // set current and baseline values for sub dimensions
        for (int i = 0; i < response.getResponseData().getResponseData().size(); i++) {
            String[] data = response.getResponseData().getResponseData().get(i);
            String subDimension = data[subDimensionIndex];
            Double currentVal = Double.valueOf(data[currentValueIndex]);
            Double baselineVal = Double.valueOf(data[baselineValueIndex]);
            Double percentageChangeVal = Double.valueOf(data[percentageChangeIndex]);
            Double cumCurrentVal = Double.valueOf(data[cumCurrentValueIndex]);
            Double cumBaselineVal = Double.valueOf(data[cumBaselineValueIndex]);
            Double cumPercentageChangeVal = Double.valueOf(data[cumPercentageChangeIndex]);
            int index = i % timeBuckets;
            // accumulate into the overall (ALL) values
            vw.getCurrentValues()[index] += currentVal;
            vw.getBaselineValues()[index] += baselineVal;
            vw.getCumulativeCurrentValues()[index] += cumCurrentVal;
            vw.getCumulativeBaselineValues()[index] += cumBaselineVal;
            // set individual sub-dimension values
            if (!subDimensionValuesMap.containsKey(subDimension)) {
                ValuesContainer subDimVals = new ValuesContainer();
                subDimVals.setCurrentValues(new double[timeBuckets]);
                subDimVals.setBaselineValues(new double[timeBuckets]);
                subDimVals.setPercentageChange(new String[timeBuckets]);
                subDimVals.setCumulativeCurrentValues(new double[timeBuckets]);
                subDimVals.setCumulativeBaselineValues(new double[timeBuckets]);
                subDimVals.setCumulativePercentageChange(new String[timeBuckets]);
                subDimensionValuesMap.put(subDimension, subDimVals);
            }
            subDimensionValuesMap.get(subDimension).getCurrentValues()[index] = currentVal;
            subDimensionValuesMap.get(subDimension).getBaselineValues()[index] = baselineVal;
            subDimensionValuesMap.get(subDimension).getPercentageChange()[index] = String.format(DECIMAL_FORMAT, percentageChangeVal);
            subDimensionValuesMap.get(subDimension).getCumulativeCurrentValues()[index] = cumCurrentVal;
            subDimensionValuesMap.get(subDimension).getCumulativeBaselineValues()[index] = cumBaselineVal;
            subDimensionValuesMap.get(subDimension).getCumulativePercentageChange()[index] = String.format(DECIMAL_FORMAT, cumPercentageChangeVal);
        }
        // TODO : compute cumulative values for all
        for (int i = 0; i < vw.getCurrentValues().length; i++) {
            vw.getPercentageChange()[i] = String.format(DECIMAL_FORMAT, getPercentageChange(vw.getCurrentValues()[i], vw.getBaselineValues()[i]));
            vw.getCumulativePercentageChange()[i] = String.format(DECIMAL_FORMAT, getPercentageChange(vw.getCumulativeCurrentValues()[i], vw.getCumulativeBaselineValues()[i]));
        }
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
        throw new WebApplicationException(e);
    }
    return timeSeriesCompareMetricView;
}
Also used : MetricConfigDTO(com.linkedin.thirdeye.datalayer.dto.MetricConfigDTO) WebApplicationException(javax.ws.rs.WebApplicationException) TimeSeriesCompareMetricView(com.linkedin.thirdeye.dashboard.resources.v2.pojo.TimeSeriesCompareMetricView) ArrayList(java.util.ArrayList) TimeBucket(com.linkedin.thirdeye.dashboard.views.TimeBucket) ContributorViewRequest(com.linkedin.thirdeye.dashboard.views.contributor.ContributorViewRequest) MetricExpression(com.linkedin.thirdeye.client.MetricExpression) DateTimeZone(org.joda.time.DateTimeZone) DateTime(org.joda.time.DateTime) WebApplicationException(javax.ws.rs.WebApplicationException) LinkedHashMap(java.util.LinkedHashMap) ContributorViewResponse(com.linkedin.thirdeye.dashboard.views.contributor.ContributorViewResponse) ContributorViewHandler(com.linkedin.thirdeye.dashboard.views.contributor.ContributorViewHandler) ValuesContainer(com.linkedin.thirdeye.dashboard.resources.v2.pojo.ValuesContainer)
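
The response rows in getContributorDataForDimension arrive flattened, one row per (sub-dimension, time bucket) pair, so index = i % timeBuckets recovers the bucket a row belongs to while the ALL container sums across sub-dimensions. A toy sketch of that mapping, with made-up sub-dimension names and values:

// Sketch of the row-to-bucket mapping: rows are flattened (sub-dimension, bucket) pairs,
// and i % timeBuckets recovers the bucket index. All values are made up.
public class RowToBucketSketch {
    public static void main(String[] args) {
        int timeBuckets = 3;
        String[] subDimensions = {"us", "us", "us", "eu", "eu", "eu"}; // 2 sub-dimensions x 3 buckets
        double[] currentValues = {1, 2, 3, 10, 20, 30};

        double[] overall = new double[timeBuckets];      // plays the role of the "ALL" container
        for (int i = 0; i < currentValues.length; i++) {
            int index = i % timeBuckets;                 // bucket this row belongs to
            overall[index] += currentValues[i];          // accumulate across sub-dimensions
        }
        System.out.println(java.util.Arrays.toString(overall)); // [11.0, 22.0, 33.0]
    }
}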

Example 5 with TimeBucket

Use of com.linkedin.thirdeye.dashboard.views.TimeBucket in project pinot by linkedin.

From the class ContributionViewTableBuilder, method build:

public ContributionViewTable build() {
    finished = true;
    for (String dimensionValue : dimensionValueSet) {
        ContributionCell prevCell = null;
        for (TimeBucket timeBucket : timeBuckets) {
            double baselineTotal = baselineStatsMap.get(timeBucket).getSum();
            double currentTotal = currentStatsMap.get(timeBucket).getSum();
            double cumulativeBaselineTotal = cumulativeBaselineStatsMap.get(timeBucket).getSum();
            double cumulativeCurrentTotal = cumulativeCurrentStatsMap.get(timeBucket).getSum();
            ContributionCell cell = timeBucketToDimensionValuesMap.get(timeBucket).get(dimensionValue);
            if (cell == null) {
                double cumulativeBaselineValue = 0;
                double cumulativeCurrentValue = 0;
                if (prevCell != null) {
                    cumulativeBaselineValue = prevCell.getCumulativeBaselineValue();
                    cumulativeCurrentValue = prevCell.getCumulativeCurrentValue();
                }
                cell = new ContributionCell(dimensionValue, timeBucket, 0, 0, cumulativeBaselineValue, cumulativeCurrentValue);
                cells.add(cell);
            }
            cell.updateContributionStats(baselineTotal, currentTotal, cumulativeBaselineTotal, cumulativeCurrentTotal);
        }
    }
    return new ContributionViewTable(metricName, dimensionName, cells);
}
Also used : TimeBucket(com.linkedin.thirdeye.dashboard.views.TimeBucket)
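
The build() loop carries cumulative totals forward when a dimension value has no cell for a given bucket, copying them from the previous bucket's cell so the running sums stay monotone. A simplified sketch of that carry-forward rule; it uses a plain map in place of timeBucketToDimensionValuesMap and ContributionCell, so the names and values here are hypothetical.

// Sketch of the carry-forward rule: when a dimension value has no entry for a bucket,
// its cumulative total is carried over from the previous bucket. Values are made up.
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class CarryForwardSketch {
    public static void main(String[] args) {
        List<String> buckets = Arrays.asList("t0", "t1", "t2");
        // per-bucket values for one dimension value; a missing key means "no cell for this bucket"
        Map<String, Double> valueByBucket = new HashMap<>();
        valueByBucket.put("t0", 5.0);
        valueByBucket.put("t2", 7.0); // t1 is missing

        double cumulative = 0;
        for (String bucket : buckets) {
            Double value = valueByBucket.get(bucket);
            if (value != null) {
                cumulative += value;   // real cell: advance the running sum
            }                          // missing cell: keep the previous cumulative value
            System.out.println(bucket + " cumulative=" + cumulative);
        }
        // prints: t0 cumulative=5.0, t1 cumulative=5.0, t2 cumulative=12.0
    }
}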

Aggregations

TimeBucket (com.linkedin.thirdeye.dashboard.views.TimeBucket): 12 usages
ArrayList (java.util.ArrayList): 6 usages
AnomalyTimelinesView (com.linkedin.thirdeye.anomaly.views.AnomalyTimelinesView): 4 usages
MetricExpression (com.linkedin.thirdeye.client.MetricExpression): 4 usages
Row (com.linkedin.thirdeye.client.comparison.Row): 4 usages
MetricConfigDTO (com.linkedin.thirdeye.datalayer.dto.MetricConfigDTO): 3 usages
LinkedHashMap (java.util.LinkedHashMap): 3 usages
DateTime (org.joda.time.DateTime): 3 usages
MetricTimeSeries (com.linkedin.thirdeye.api.MetricTimeSeries): 2 usages
Metric (com.linkedin.thirdeye.client.comparison.Row.Metric): 2 usages
TimeOnTimeComparisonHandler (com.linkedin.thirdeye.client.comparison.TimeOnTimeComparisonHandler): 2 usages
TimeOnTimeComparisonRequest (com.linkedin.thirdeye.client.comparison.TimeOnTimeComparisonRequest): 2 usages
TimeOnTimeComparisonResponse (com.linkedin.thirdeye.client.comparison.TimeOnTimeComparisonResponse): 2 usages
TimeSeriesCompareMetricView (com.linkedin.thirdeye.dashboard.resources.v2.pojo.TimeSeriesCompareMetricView): 2 usages
ValuesContainer (com.linkedin.thirdeye.dashboard.resources.v2.pojo.ValuesContainer): 2 usages
GenericResponse (com.linkedin.thirdeye.dashboard.views.GenericResponse): 2 usages
ResponseSchema (com.linkedin.thirdeye.dashboard.views.GenericResponse.ResponseSchema): 2 usages
HashMap (java.util.HashMap): 2 usages
Properties (java.util.Properties): 2 usages
AnomalyDetectionInputContext (com.linkedin.thirdeye.anomaly.detection.AnomalyDetectionInputContext): 1 usage