Use of com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomaliesSummary in project pinot by linkedin.
The class AnomaliesResource, method getAnomalyCountForMetricInRange.
/**
 * Get the count of anomalies for a metric in the given time range.
 * @param metricId id of the metric
 * @param startTime start of the time range, in milliseconds since epoch
 * @param endTime end of the time range, in milliseconds since epoch
 * @return an AnomaliesSummary with the total, resolved, and unresolved anomaly counts
 */
@GET
@Path("getAnomalyCount/{metricId}/{startTime}/{endTime}")
public AnomaliesSummary getAnomalyCountForMetricInRange(@PathParam("metricId") Long metricId,
    @PathParam("startTime") Long startTime, @PathParam("endTime") Long endTime) {
  AnomaliesSummary anomaliesSummary = new AnomaliesSummary();
  List<MergedAnomalyResultDTO> mergedAnomalies = getAnomaliesForMetricIdInRange(metricId, startTime, endTime);
  int resolvedAnomalies = 0;
  int unresolvedAnomalies = 0;
  for (MergedAnomalyResultDTO mergedAnomaly : mergedAnomalies) {
    AnomalyFeedbackDTO anomalyFeedback = mergedAnomaly.getFeedback();
    // An anomaly counts as unresolved if it has no feedback yet, or if the feedback
    // confirms it as a true anomaly; any other feedback type counts as resolved.
    if (anomalyFeedback == null || anomalyFeedback.getFeedbackType() == null
        || anomalyFeedback.getFeedbackType().equals(AnomalyFeedbackType.ANOMALY)) {
      unresolvedAnomalies++;
    } else {
      resolvedAnomalies++;
    }
  }
  anomaliesSummary.setMetricId(metricId);
  anomaliesSummary.setStartTime(startTime);
  anomaliesSummary.setEndTime(endTime);
  anomaliesSummary.setNumAnomalies(mergedAnomalies.size());
  anomaliesSummary.setNumAnomaliesResolved(resolvedAnomalies);
  anomaliesSummary.setNumAnomaliesUnresolved(unresolvedAnomalies);
  return anomaliesSummary;
}
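A minimal sketch of invoking this method directly, assuming an initialized AnomaliesResource instance (here called resource) and the matching getters on the AnomaliesSummary POJO; the metric id is illustrative, not from the original:

// Hypothetical invocation: count anomalies for metric 123 over the last 7 days.
long endTime = System.currentTimeMillis();
long startTime = endTime - TimeUnit.MILLISECONDS.convert(7, TimeUnit.DAYS);
AnomaliesSummary summary = resource.getAnomalyCountForMetricInRange(123L, startTime, endTime);
// Resolved and unresolved counts always add up to the total.
System.out.println(summary.getNumAnomalies() + " total, "
    + summary.getNumAnomaliesUnresolved() + " unresolved");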
Use of com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomaliesSummary in project pinot by linkedin.
The class DataResource, method getAnomalySummary.
@GET
@Path("dashboard/anomalysummary")
public Map<String, List<AnomaliesSummary>> getAnomalySummary(@QueryParam("dashboard") String dashboard,
    @QueryParam("timeRanges") String timeRanges) {
  List<Long> metricIds = getMetricIdsByDashboard(dashboard);
  // Each time range is encoded as "<count>_<unit>"; precompute its duration in milliseconds.
  List<String> timeRangesList = Lists.newArrayList(timeRanges.split(","));
  Map<String, Long> timeRangeToDurationMap = new HashMap<>();
  for (String timeRange : timeRangesList) {
    String[] tokens = timeRange.split("_");
    long duration = TimeUnit.MILLISECONDS.convert(Long.valueOf(tokens[0]), TimeUnit.valueOf(tokens[1]));
    timeRangeToDurationMap.put(timeRange, duration);
  }
  Map<String, List<AnomaliesSummary>> metricAliasToAnomaliesSummariesMap = new HashMap<>();
  for (Long metricId : metricIds) {
    List<AnomaliesSummary> summaries = new ArrayList<>();
    MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
    String metricAlias = metricConfig.getAlias();
    String dataset = metricConfig.getDataset();
    // Anchor every window at the latest data point available for the dataset.
    long endTime = Utils.getMaxDataTimeForDataset(dataset);
    for (String timeRange : timeRangesList) {
      long startTime = endTime - timeRangeToDurationMap.get(timeRange);
      AnomaliesSummary summary = anomaliesResoure.getAnomalyCountForMetricInRange(metricId, startTime, endTime);
      summaries.add(summary);
    }
    metricAliasToAnomaliesSummariesMap.put(metricAlias, summaries);
  }
  return metricAliasToAnomaliesSummariesMap;
}
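The timeRanges query parameter is a comma-separated list of "<count>_<unit>" tokens, where the unit must match a java.util.concurrent.TimeUnit constant name. A small sketch of decoding one token, mirroring the parsing loop above; the sample token is illustrative:

// "2_DAYS" splits into count "2" and unit "DAYS"; TimeUnit.valueOf requires the exact enum name.
String timeRange = "2_DAYS";
String[] tokens = timeRange.split("_");
long durationMillis = TimeUnit.MILLISECONDS.convert(Long.valueOf(tokens[0]), TimeUnit.valueOf(tokens[1]));
// durationMillis == 172800000 (2 days in milliseconds)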
Use of com.linkedin.thirdeye.dashboard.resources.v2.pojo.AnomaliesSummary in project pinot by linkedin.
The class DataResource, method getMetricSummary.
/**
 * Returns the percentage change between current values and baseline values. The values are
 * aggregated according to the number of buckets; if the number of buckets is 1, all values
 * within the given time range fall into a single bucket and are aggregated together.
 *
 * Note: the current implementation assumes the number of buckets is always 1.
 */
@GET
@Path("dashboard/metricsummary")
public List<MetricSummary> getMetricSummary(@QueryParam("dashboard") String dashboard,
    @QueryParam("timeRange") String timeRange) {
  List<MetricSummary> metricsSummary = new ArrayList<>();
  if (StringUtils.isBlank(dashboard)) {
    return metricsSummary;
  }
  List<Long> metricIds = getMetricIdsByDashboard(dashboard);
  // Group metric ids and metric expressions by dataset (collection)
  Multimap<String, Long> datasetToMetrics = ArrayListMultimap.create();
  Multimap<String, MetricExpression> datasetToMetricExpressions = ArrayListMultimap.create();
  Map<Long, MetricConfigDTO> metricIdToMetricConfig = new HashMap<>();
  for (long metricId : metricIds) {
    MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
    metricIdToMetricConfig.put(metricId, metricConfig);
    datasetToMetrics.put(metricConfig.getDataset(), metricId);
    datasetToMetricExpressions.put(metricConfig.getDataset(),
        ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfig));
  }
  // Create one query request per dataset
  for (String dataset : datasetToMetrics.keySet()) {
    TabularViewRequest request = new TabularViewRequest();
    request.setCollection(dataset);
    request.setMetricExpressions(new ArrayList<>(datasetToMetricExpressions.get(dataset)));
    // The input start and end times (currentStart, currentEnd, baselineStart, and baselineEnd)
    // are given in milliseconds since epoch, which is timezone insensitive. On the other hand,
    // the start and end times of the request sent to the backend database (e.g., Pinot) may be
    // converted to SimpleDateFormat, which is timezone sensitive. Therefore, we store the user's
    // start and end times in DateTime objects with the data's timezone, so that the conversion
    // to SimpleDateFormat is always correct regardless of the user's and server's timezones,
    // including daylight saving time.
    String[] tokens = timeRange.split("_");
    TimeGranularity timeGranularity = new TimeGranularity(Integer.valueOf(tokens[0]), TimeUnit.valueOf(tokens[1]));
    long currentEnd = Utils.getMaxDataTimeForDataset(dataset);
    long currentStart = currentEnd - TimeUnit.MILLISECONDS.convert(Long.valueOf(tokens[0]), TimeUnit.valueOf(tokens[1]));
    DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(dataset);
    // The baseline is the same window shifted back one week (week-over-week comparison).
    request.setBaselineStart(new DateTime(currentStart, timeZoneForCollection).minusDays(7));
    request.setBaselineEnd(new DateTime(currentEnd, timeZoneForCollection).minusDays(7));
    request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
    request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
    request.setTimeGranularity(timeGranularity);
    TabularViewHandler handler = new TabularViewHandler(queryCache);
    try {
      TabularViewResponse tabularViewResponse = handler.process(request);
      for (String metric : tabularViewResponse.getMetrics()) {
        MetricDataset metricDataset = new MetricDataset(metric, dataset);
        MetricConfigDTO metricConfig = CACHE_REGISTRY_INSTANCE.getMetricConfigCache().get(metricDataset);
        Long metricId = metricConfig.getId();
        GenericResponse response = tabularViewResponse.getData().get(metric);
        MetricSummary metricSummary = new MetricSummary();
        metricSummary.setMetricId(metricId);
        metricSummary.setMetricName(metricConfig.getName());
        metricSummary.setMetricAlias(metricConfig.getAlias());
        String[] responseData = response.getResponseData().get(0);
        double baselineValue = Double.valueOf(responseData[0]);
        double currentValue = Double.valueOf(responseData[1]);
        double percentageChange = (currentValue - baselineValue) * 100 / baselineValue;
        metricSummary.setBaselineValue(baselineValue);
        metricSummary.setCurrentValue(currentValue);
        metricSummary.setWowPercentageChange(percentageChange);
        AnomaliesSummary anomaliesSummary = anomaliesResoure.getAnomalyCountForMetricInRange(metricId, currentStart, currentEnd);
        metricSummary.setAnomaliesSummary(anomaliesSummary);
        metricsSummary.add(metricSummary);
      }
    } catch (Exception e) {
      LOG.error("Exception while processing /data/tabular call", e);
    }
  }
  return metricsSummary;
}
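The inline comment above is the subtle part of this method: the same epoch instant maps to different wall-clock dates in different zones, so any conversion to a formatted date string must use the data's timezone rather than the server's default. A minimal Joda-Time sketch of the effect; the epoch value is illustrative:

// 1500000000000 ms since epoch is 2017-07-14T02:40:00Z.
long epochMillis = 1500000000000L;
DateTime inUtc = new DateTime(epochMillis, DateTimeZone.UTC);
DateTime inLosAngeles = new DateTime(epochMillis, DateTimeZone.forID("America/Los_Angeles"));
// inUtc.toString("yyyy-MM-dd HH:mm")        -> "2017-07-14 02:40"
// inLosAngeles.toString("yyyy-MM-dd HH:mm") -> "2017-07-13 19:40" (the previous day)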