Use of com.linkedin.thirdeye.api.MetricTimeSeries in project pinot by linkedin.
The class TimeSeriesResponseConverter, method toMap.
/**
 * Converts the response to a Map<DimensionKey, MetricTimeSeries>. DimensionKey instances are
 * generated from schemaDimensions, while the MetricTimeSeries objects are built from the rows
 * in the response. The metrics in the returned MetricTimeSeries instances use the plain metric
 * names rather than the full metric function (e.g. __COUNT instead of SUM(__COUNT))
 */
public static Map<DimensionKey, MetricTimeSeries> toMap(TimeSeriesResponse response,
    List<String> schemaDimensions) {
  DimensionKeyGenerator dimensionKeyGenerator = new DimensionKeyGenerator(schemaDimensions);
  List<String> metrics = new ArrayList<>(response.getMetrics());
  Set<String> metricSet = new HashSet<>(metrics);
  // all metrics are represented as doubles in the resulting schema
  List<MetricType> types = Collections.nCopies(metrics.size(), MetricType.DOUBLE);
  MetricSchema metricSchema = new MetricSchema(metrics, types);
  // group the rows by their dimension key
  SetMultimap<DimensionKey, TimeSeriesRow> dimensionKeyToRows = HashMultimap.create();
  for (int i = 0; i < response.getNumRows(); i++) {
    TimeSeriesRow row = response.getRow(i);
    DimensionKey dimensionKey =
        dimensionKeyGenerator.get(row.getDimensionNames(), row.getDimensionValues());
    dimensionKeyToRows.put(dimensionKey, row);
  }
  // fold each group of rows into a single MetricTimeSeries per dimension key
  Map<DimensionKey, MetricTimeSeries> result = new HashMap<>();
  for (Entry<DimensionKey, Collection<TimeSeriesRow>> entry : dimensionKeyToRows.asMap().entrySet()) {
    DimensionKey key = entry.getKey();
    MetricTimeSeries metricTimeSeries = new MetricTimeSeries(metricSchema);
    result.put(key, metricTimeSeries);
    for (TimeSeriesRow timeSeriesRow : entry.getValue()) {
      long timestamp = timeSeriesRow.getStart();
      for (TimeSeriesMetric metric : timeSeriesRow.getMetrics()) {
        String metricName = metric.getMetricName();
        // skip metrics that are not in the requested set; the response may
        // contain additional info, e.g. the raw metrics required for calculating derived ones.
        if (metricSet.contains(metricName)) {
          Double value = metric.getValue();
          metricTimeSeries.increment(timestamp, metricName, value);
        }
      }
    }
  }
  return result;
}
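For orientation, a minimal usage sketch. The response and dimension list below are assumptions for illustration (the snippet above only defines the conversion itself), and the MetricTimeSeries accessors shown are assumptions based on the ThirdEye API.

// Hedged sketch: `response` is assumed to come from ThirdEye's time-series client layer and
// `schemaDimensions` from the dataset's dimension list; neither is defined in the snippet above.
Map<DimensionKey, MetricTimeSeries> seriesByKey =
    TimeSeriesResponseConverter.toMap(response, schemaDimensions);
for (Map.Entry<DimensionKey, MetricTimeSeries> entry : seriesByKey.entrySet()) {
  MetricTimeSeries series = entry.getValue();
  // each series holds one value per (timestamp, metric name) pair that was incremented above
  for (Long timestamp : series.getTimeWindowSet()) {
    // note the plain metric name (e.g. __COUNT), not the full function (e.g. SUM(__COUNT))
    Number count = series.get(timestamp, "__COUNT");
  }
}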
Use of com.linkedin.thirdeye.api.MetricTimeSeries in project pinot by linkedin.
The class AnomaliesResource, method getAnomalyDetails.
/**
 * Generates anomaly details for one merged anomaly
 * @param mergedAnomaly the merged anomaly to describe
 * @param datasetConfig the config of the dataset the anomaly belongs to
 * @param timeSeriesDateFormatter formatter for time series timestamps
 * @param startEndDateFormatterHours formatter for start/end dates at hourly granularity
 * @param startEndDateFormatterDays formatter for start/end dates at daily granularity
 * @param externalUrl external URL attached to the anomaly details
 * @return the populated AnomalyDetails, or null if construction failed (the failure is logged)
 */
private AnomalyDetails getAnomalyDetails(MergedAnomalyResultDTO mergedAnomaly, DatasetConfigDTO datasetConfig,
    DateTimeFormatter timeSeriesDateFormatter, DateTimeFormatter startEndDateFormatterHours,
    DateTimeFormatter startEndDateFormatterDays, String externalUrl) throws Exception {
  String dataset = datasetConfig.getDataset();
  String metricName = mergedAnomaly.getMetric();
  AnomalyFunctionDTO anomalyFunctionSpec = anomalyFunctionDAO.findById(mergedAnomaly.getFunctionId());
  BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
  String aggGranularity = constructAggGranularity(datasetConfig);
  long anomalyStartTime = mergedAnomaly.getStartTime();
  long anomalyEndTime = mergedAnomaly.getEndTime();
  // pad the anomaly window so the chart shows context around the anomaly itself
  TimeRange range = getTimeseriesOffsetedTimes(anomalyStartTime, anomalyEndTime, datasetConfig);
  long currentStartTime = range.getStart();
  long currentEndTime = range.getEnd();
  DimensionMap dimensions = mergedAnomaly.getDimensions();
  TimeGranularity timeGranularity =
      Utils.getAggregationTimeGranularity(aggGranularity, anomalyFunctionSpec.getCollection());
  long bucketMillis = timeGranularity.toMillis();
  AnomalyDetails anomalyDetails = null;
  try {
    AnomalyDetectionInputContext adInputContext =
        TimeBasedAnomalyMerger.fetchDataByDimension(currentStartTime, currentEndTime, dimensions,
            anomalyFunction, mergedAnomalyResultDAO, overrideConfigDAO, true);
    MetricTimeSeries metricTimeSeries = adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
    // transform the time series with the scaling factors, if any apply to this window
    List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();
    if (CollectionUtils.isNotEmpty(scalingFactors)) {
      Properties properties = anomalyFunction.getProperties();
      MetricTransfer.rescaleMetric(metricTimeSeries, currentStartTime, scalingFactors,
          anomalyFunctionSpec.getTopicMetric(), properties);
    }
    List<MergedAnomalyResultDTO> knownAnomalies = adInputContext.getKnownMergedAnomalies().get(dimensions);
    // The known anomalies for this dimension are passed to the view; they could be ignored
    // (by passing null) to reduce users' waiting time, since presentation data does not need
    // to be as accurate as the data used for detecting anomalies.
    AnomalyTimelinesView anomalyTimelinesView = anomalyFunction.getTimeSeriesView(metricTimeSeries, bucketMillis,
        anomalyFunctionSpec.getTopicMetric(), currentStartTime, currentEndTime, knownAnomalies);
    anomalyDetails = constructAnomalyDetails(metricName, dataset, datasetConfig, mergedAnomaly, anomalyFunctionSpec,
        currentStartTime, currentEndTime, anomalyTimelinesView, timeSeriesDateFormatter,
        startEndDateFormatterHours, startEndDateFormatterDays, externalUrl);
  } catch (Exception e) {
    LOG.error("Exception in constructing anomaly wrapper for anomaly {}", mergedAnomaly.getId(), e);
  }
  return anomalyDetails;
}
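A hedged sketch of how a caller inside AnomaliesResource might drive this method. Everything below is a hypothetical stand-in: the pre-fetched mergedAnomalies list, datasetConfig, externalUrl, and the Joda-Time formatter patterns are illustrations, not values taken from the snippet.

// Hedged sketch; `mergedAnomalies`, `datasetConfig`, and `externalUrl` are hypothetical
// stand-ins for values the resource would resolve through its DAOs and configuration.
DateTimeZone zone = DateTimeZone.UTC; // zone chosen for illustration only
DateTimeFormatter timeSeriesDateFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm").withZone(zone);
DateTimeFormatter startEndDateFormatterHours = DateTimeFormat.forPattern("MMM dd HH:mm").withZone(zone);
DateTimeFormatter startEndDateFormatterDays = DateTimeFormat.forPattern("MMM dd yyyy").withZone(zone);
List<AnomalyDetails> detailsList = new ArrayList<>();
for (MergedAnomalyResultDTO mergedAnomaly : mergedAnomalies) {
  // getAnomalyDetails declares `throws Exception`; assume the enclosing method propagates it
  AnomalyDetails details = getAnomalyDetails(mergedAnomaly, datasetConfig, timeSeriesDateFormatter,
      startEndDateFormatterHours, startEndDateFormatterDays, externalUrl);
  if (details != null) { // null means construction failed and was already logged
    detailsList.add(details);
  }
}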