Use of com.linkedin.thirdeye.api.TimeRange in project pinot by linkedin.
In the class AnomaliesResource, method getTimeseriesOffsetedTimes.
private TimeRange getTimeseriesOffsetedTimes(long anomalyStartTime, long anomalyEndTime, DatasetConfigDTO datasetConfig) {
  TimeUnit dataTimeunit = datasetConfig.getTimeUnit();
  Period offsetPeriod;
  switch (dataTimeunit) {
    case DAYS: // 3 days
      offsetPeriod = new Period(0, 0, 0, 3, 0, 0, 0, 0);
      break;
    case HOURS: // 10 hours
      offsetPeriod = new Period(0, 0, 0, 0, 10, 0, 0, 0);
      break;
    case MINUTES: // 60 minutes
      offsetPeriod = new Period(0, 0, 0, 0, 0, 60, 0, 0);
      break;
    default:
      offsetPeriod = new Period();
  }
  DateTimeZone dateTimeZone = DateTimeZone.forID(datasetConfig.getTimezone());
  DateTime anomalyStartDateTime = new DateTime(anomalyStartTime, dateTimeZone);
  DateTime anomalyEndDateTime = new DateTime(anomalyEndTime, dateTimeZone);
  anomalyStartDateTime = anomalyStartDateTime.minus(offsetPeriod);
  anomalyEndDateTime = anomalyEndDateTime.plus(offsetPeriod);
  anomalyStartTime = anomalyStartDateTime.getMillis();
  anomalyEndTime = anomalyEndDateTime.getMillis();
  try {
    Long maxDataTime = CACHE_REGISTRY.getCollectionMaxDataTimeCache().get(datasetConfig.getDataset());
    if (anomalyEndTime > maxDataTime) {
      anomalyEndTime = maxDataTime;
    }
  } catch (ExecutionException e) {
    LOG.error("Exception when reading max time for {}", datasetConfig.getDataset(), e);
  }
  TimeRange range = new TimeRange(anomalyStartTime, anomalyEndTime);
  return range;
}
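The padding above is plain Joda-Time arithmetic around the anomaly window. A minimal, self-contained sketch of the same idea, assuming a daily dataset in UTC and made-up timestamps (the class name and values are illustrative, not from the source):

import java.util.concurrent.TimeUnit;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Period;

public class OffsetPaddingSketch {
  public static void main(String[] args) {
    // Hypothetical anomaly window: the last 2 days, in UTC.
    DateTimeZone zone = DateTimeZone.UTC;
    long anomalyEndTime = System.currentTimeMillis();
    long anomalyStartTime = anomalyEndTime - TimeUnit.DAYS.toMillis(2);

    // For a DAYS dataset the method pads the window by 3 days on each side.
    Period offsetPeriod = new Period(0, 0, 0, 3, 0, 0, 0, 0);
    DateTime paddedStart = new DateTime(anomalyStartTime, zone).minus(offsetPeriod);
    DateTime paddedEnd = new DateTime(anomalyEndTime, zone).plus(offsetPeriod);

    System.out.println("padded range: " + paddedStart + " .. " + paddedEnd);
  }
}

In the method itself the padded end is additionally capped at the dataset's maximum data time, so the chart never requests data beyond what has been ingested.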
Use of com.linkedin.thirdeye.api.TimeRange in project pinot by linkedin.
In the class DataResource, method getTimeRangeFromLabel.
/**
 * Converts a label from WowSummaryModel to a TimeRange.
 * @param dataset dataset whose maximum data time caps the "Most Recent Hour" range
 * @param timeZoneForCollection time zone configured for the collection
 * @param label one of "Most Recent Hour", "Today", "Yesterday", "Last 7 Days"
 * @return the TimeRange for the label; start and end remain 0 for unrecognized labels
 */
private TimeRange getTimeRangeFromLabel(String dataset, DateTimeZone timeZoneForCollection, String label) {
  long start = 0;
  long end = 0;
  long datasetMaxTime = Utils.getMaxDataTimeForDataset(dataset);
  switch (label) {
    case "Most Recent Hour":
      end = datasetMaxTime;
      start = end - TimeUnit.MILLISECONDS.convert(1, TimeUnit.HOURS);
      break;
    case "Today":
      end = System.currentTimeMillis();
      start = new DateTime().withTimeAtStartOfDay().getMillis();
      break;
    case "Yesterday":
      end = new DateTime().withTimeAtStartOfDay().getMillis();
      start = end - TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
      break;
    case "Last 7 Days":
      end = System.currentTimeMillis();
      start = new DateTime(end).minusDays(6).withTimeAtStartOfDay().getMillis();
      break;
    default:
      // Unrecognized label: leave start and end at 0.
      break;
  }
  TimeRange timeRange = new TimeRange(start, end);
  return timeRange;
}
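The day boundaries come from Joda-Time's withTimeAtStartOfDay(). A short, self-contained sketch of how the "Yesterday" window is derived (the class name is illustrative):

import java.util.concurrent.TimeUnit;
import org.joda.time.DateTime;

public class LabelRangeSketch {
  public static void main(String[] args) {
    // "Yesterday" ends at today's midnight and starts exactly one day earlier.
    long end = new DateTime().withTimeAtStartOfDay().getMillis();
    long start = end - TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
    System.out.println("Yesterday: " + new DateTime(start) + " .. " + new DateTime(end));
  }
}

Note that the day-based labels use new DateTime() without an explicit zone, so midnight is resolved in the JVM's default time zone rather than in timeZoneForCollection.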
Use of com.linkedin.thirdeye.api.TimeRange in project pinot by linkedin.
In the class AnomaliesResource, method getAnomalyDetails.
/**
 * Generates the anomaly details for a merged anomaly.
 * @param mergedAnomaly the merged anomaly to describe
 * @param datasetConfig configuration of the dataset the anomaly belongs to
 * @param timeSeriesDateFormatter formatter for time series timestamps
 * @param startEndDateFormatterHours formatter for hour-granularity start/end dates
 * @param startEndDateFormatterDays formatter for day-granularity start/end dates
 * @param externalUrl external URL attached to the anomaly details
 * @return the populated AnomalyDetails, or null if an exception occurred while constructing it
 */
private AnomalyDetails getAnomalyDetails(MergedAnomalyResultDTO mergedAnomaly, DatasetConfigDTO datasetConfig,
    DateTimeFormatter timeSeriesDateFormatter, DateTimeFormatter startEndDateFormatterHours,
    DateTimeFormatter startEndDateFormatterDays, String externalUrl) throws Exception {
  String dataset = datasetConfig.getDataset();
  String metricName = mergedAnomaly.getMetric();
  AnomalyFunctionDTO anomalyFunctionSpec = anomalyFunctionDAO.findById(mergedAnomaly.getFunctionId());
  BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
  String aggGranularity = constructAggGranularity(datasetConfig);
  long anomalyStartTime = mergedAnomaly.getStartTime();
  long anomalyEndTime = mergedAnomaly.getEndTime();
  TimeRange range = getTimeseriesOffsetedTimes(anomalyStartTime, anomalyEndTime, datasetConfig);
  long currentStartTime = range.getStart();
  long currentEndTime = range.getEnd();
  DimensionMap dimensions = mergedAnomaly.getDimensions();
  TimeGranularity timeGranularity = Utils.getAggregationTimeGranularity(aggGranularity, anomalyFunctionSpec.getCollection());
  long bucketMillis = timeGranularity.toMillis();
  AnomalyDetails anomalyDetails = null;
  try {
    AnomalyDetectionInputContext adInputContext = TimeBasedAnomalyMerger.fetchDataByDimension(currentStartTime,
        currentEndTime, dimensions, anomalyFunction, mergedAnomalyResultDAO, overrideConfigDAO, true);
    MetricTimeSeries metricTimeSeries = adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
    // Transform the time series with the scaling factors, if any
    List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();
    if (CollectionUtils.isNotEmpty(scalingFactors)) {
      Properties properties = anomalyFunction.getProperties();
      MetricTransfer.rescaleMetric(metricTimeSeries, currentStartTime, scalingFactors, anomalyFunctionSpec.getTopicMetric(), properties);
    }
    List<MergedAnomalyResultDTO> knownAnomalies = adInputContext.getKnownMergedAnomalies().get(dimensions);
    // The known anomalies are passed as-is to the time series view; presentation data does not need
    // to be as accurate as the data used for detecting anomalies.
    AnomalyTimelinesView anomalyTimelinesView = anomalyFunction.getTimeSeriesView(metricTimeSeries, bucketMillis,
        anomalyFunctionSpec.getTopicMetric(), currentStartTime, currentEndTime, knownAnomalies);
    anomalyDetails = constructAnomalyDetails(metricName, dataset, datasetConfig, mergedAnomaly, anomalyFunctionSpec,
        currentStartTime, currentEndTime, anomalyTimelinesView, timeSeriesDateFormatter, startEndDateFormatterHours,
        startEndDateFormatterDays, externalUrl);
  } catch (Exception e) {
    LOG.error("Exception in constructing anomaly wrapper for anomaly {}", mergedAnomaly.getId(), e);
  }
  return anomalyDetails;
}
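The three DateTimeFormatter parameters are plain Joda-Time formatters supplied by the caller; their actual patterns are not shown in this section. A self-contained sketch with assumed, illustrative patterns and time zone (everything here is an assumption, not the resource's real configuration):

import java.util.concurrent.TimeUnit;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class AnomalyDateFormatSketch {
  public static void main(String[] args) {
    // Assumed patterns and zone; the real ones come from the caller of getAnomalyDetails.
    DateTimeZone zone = DateTimeZone.forID("America/Los_Angeles");
    DateTimeFormatter timeSeriesDateFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm").withZone(zone);
    DateTimeFormatter startEndDateFormatterHours = DateTimeFormat.forPattern("MMM dd HH:mm").withZone(zone);
    DateTimeFormatter startEndDateFormatterDays = DateTimeFormat.forPattern("MMM dd yyyy").withZone(zone);

    // Hypothetical 10-hour anomaly window ending now.
    long anomalyEndTime = System.currentTimeMillis();
    long anomalyStartTime = anomalyEndTime - TimeUnit.HOURS.toMillis(10);

    System.out.println(timeSeriesDateFormatter.print(anomalyStartTime));
    System.out.println(startEndDateFormatterHours.print(anomalyStartTime) + " to " + startEndDateFormatterHours.print(anomalyEndTime));
    System.out.println(startEndDateFormatterDays.print(anomalyEndTime));
  }
}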
Use of com.linkedin.thirdeye.api.TimeRange in project pinot by linkedin.
In the class DataResource, method getWowSummary.
@GET
@Path("dashboard/wowsummary")
public WowSummary getWowSummary(@QueryParam("dashboard") String dashboard, @QueryParam("timeRanges") String timeRanges) {
  WowSummary wowSummary = new WowSummary();
  if (StringUtils.isBlank(dashboard)) {
    return wowSummary;
  }
  List<Long> metricIds = getMetricIdsByDashboard(dashboard);
  List<String> timeRangeLabels = Lists.newArrayList(timeRanges.split(","));
  // Group the metric ids and metric expressions by dataset
  Multimap<String, Long> datasetToMetrics = ArrayListMultimap.create();
  Multimap<String, MetricExpression> datasetToMetricExpressions = ArrayListMultimap.create();
  Map<Long, MetricConfigDTO> metricIdToMetricConfig = new HashMap<>();
  for (long metricId : metricIds) {
    MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
    metricIdToMetricConfig.put(metricId, metricConfig);
    datasetToMetrics.put(metricConfig.getDataset(), metricId);
    datasetToMetricExpressions.put(metricConfig.getDataset(), ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfig));
  }
  Multimap<String, MetricSummary> metricAliasToMetricSummariesMap = ArrayListMultimap.create();
  // Create a query request for each dataset
  for (String dataset : datasetToMetrics.keySet()) {
    TabularViewRequest request = new TabularViewRequest();
    request.setCollection(dataset);
    request.setMetricExpressions(new ArrayList<>(datasetToMetricExpressions.get(dataset)));
    for (String timeRangeLabel : timeRangeLabels) {
      // Resolve the label in the collection's time zone, which may differ from the user's and the
      // server's time zone (including daylight saving time).
      DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(dataset);
      TimeRange timeRange = getTimeRangeFromLabel(dataset, timeZoneForCollection, timeRangeLabel);
      long currentEnd = timeRange.getEnd();
      long currentStart = timeRange.getStart();
      System.out.println(timeRangeLabel + " current start/end " + new DateTime(currentStart) + " " + new DateTime(currentEnd));
      TimeGranularity timeGranularity = new TimeGranularity(1, TimeUnit.HOURS);
      // Baseline is the same window shifted back one week (week-over-week comparison)
      request.setBaselineStart(new DateTime(currentStart, timeZoneForCollection).minusDays(7));
      request.setBaselineEnd(new DateTime(currentEnd, timeZoneForCollection).minusDays(7));
      request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
      request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
      request.setTimeGranularity(timeGranularity);
      TabularViewHandler handler = new TabularViewHandler(queryCache);
      try {
        TabularViewResponse tabularViewResponse = handler.process(request);
        for (String metric : tabularViewResponse.getMetrics()) {
          MetricDataset metricDataset = new MetricDataset(metric, dataset);
          MetricConfigDTO metricConfig = CACHE_REGISTRY_INSTANCE.getMetricConfigCache().get(metricDataset);
          Long metricId = metricConfig.getId();
          String metricAlias = metricConfig.getAlias();
          GenericResponse response = tabularViewResponse.getData().get(metric);
          MetricSummary metricSummary = new MetricSummary();
          metricSummary.setMetricId(metricId);
          metricSummary.setMetricName(metricConfig.getName());
          metricSummary.setMetricAlias(metricAlias);
          List<String[]> data = response.getResponseData();
          double baselineValue = 0;
          double currentValue = 0;
          for (String[] responseData : data) {
            baselineValue = baselineValue + Double.valueOf(responseData[0]);
            currentValue = currentValue + Double.valueOf(responseData[1]);
          }
          double percentageChange = (currentValue - baselineValue) * 100 / baselineValue;
          metricSummary.setBaselineValue(baselineValue);
          metricSummary.setCurrentValue(currentValue);
          metricSummary.setWowPercentageChange(percentageChange);
          metricAliasToMetricSummariesMap.put(metricAlias, metricSummary);
        }
      } catch (Exception e) {
        LOG.error("Exception while processing /data/tabular call", e);
      }
    }
  }
  wowSummary.setMetricAliasToMetricSummariesMap(metricAliasToMetricSummariesMap);
  return wowSummary;
}
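The week-over-week comparison itself is simple arithmetic: the baseline window is the current window shifted back seven days, and the change is reported as a percentage of the baseline value. A minimal, self-contained sketch (the class name, dates, and metric values are made up for illustration):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class WowChangeSketch {
  public static void main(String[] args) {
    DateTimeZone zone = DateTimeZone.forID("UTC");
    DateTime currentStart = new DateTime(2017, 3, 8, 0, 0, zone);
    DateTime currentEnd = new DateTime(2017, 3, 9, 0, 0, zone);

    // Baseline window: the same interval one week earlier, as in getWowSummary
    DateTime baselineStart = currentStart.minusDays(7);
    DateTime baselineEnd = currentEnd.minusDays(7);

    // Made-up aggregates standing in for the summed tabular response rows
    double baselineValue = 1200.0;
    double currentValue = 1500.0;
    double percentageChange = (currentValue - baselineValue) * 100 / baselineValue; // 25.0

    System.out.println("baseline " + baselineStart + " .. " + baselineEnd);
    System.out.println("WoW change: " + percentageChange + "%");
  }
}

As in the method above, a baseline of 0 would make the percentage change undefined (division by zero), so callers typically only rely on it when the baseline window actually has data.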