Use of com.linkedin.thirdeye.api.TimeGranularity in project pinot by linkedin.
The class DataCompletenessTaskUtilsTest, method testGetDateTimeFormatterForDataset.
@Test
public void testGetDateTimeFormatterForDataset() {
DateTimeZone zone = DateTimeZone.UTC;
long dateTimeInMS = new DateTime(2017, 01, 12, 15, 30, zone).getMillis();
String columnName = "Date";
// DAYS bucket
TimeGranularity timeGranularity = new TimeGranularity(1, TimeUnit.DAYS);
String timeFormat = TimeSpec.SINCE_EPOCH_FORMAT;
TimeSpec timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
DateTimeFormatter dateTimeFormatter = DataCompletenessTaskUtils.getDateTimeFormatterForDataset(timeSpec, zone);
Assert.assertEquals(dateTimeFormatter.print(dateTimeInMS), "20170112");
zone = DateTimeZone.forID("America/Los_Angeles");
long dateTimeInMS1 = new DateTime(2017, 01, 12, 05, 30, zone).getMillis();
// DAYS bucket
timeGranularity = new TimeGranularity(1, TimeUnit.DAYS);
timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
dateTimeFormatter = DataCompletenessTaskUtils.getDateTimeFormatterForDataset(timeSpec, zone);
Assert.assertEquals(dateTimeFormatter.print(dateTimeInMS1), "20170112");
// HOURS bucket
zone = DateTimeZone.UTC;
dateTimeInMS = new DateTime(2017, 01, 12, 15, 30, zone).getMillis();
timeGranularity = new TimeGranularity(1, TimeUnit.HOURS);
timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
dateTimeFormatter = DataCompletenessTaskUtils.getDateTimeFormatterForDataset(timeSpec, zone);
Assert.assertEquals(dateTimeFormatter.print(dateTimeInMS), "2017011215");
// MINUTES bucket
timeGranularity = new TimeGranularity(1, TimeUnit.MINUTES);
timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
dateTimeFormatter = DataCompletenessTaskUtils.getDateTimeFormatterForDataset(timeSpec, zone);
Assert.assertEquals(dateTimeFormatter.print(dateTimeInMS), "201701121530");
// DEFAULT bucket
timeGranularity = new TimeGranularity(1, TimeUnit.MILLISECONDS);
timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
dateTimeFormatter = DataCompletenessTaskUtils.getDateTimeFormatterForDataset(timeSpec, zone);
Assert.assertEquals(dateTimeFormatter.print(dateTimeInMS), "2017011215");
}
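The assertions above imply a simple mapping from the bucket's TimeUnit to a Joda-Time pattern: DAYS prints yyyyMMdd, MINUTES prints yyyyMMddHHmm, and HOURS (also the default case) prints yyyyMMddHH. A minimal sketch of that mapping, using org.joda.time.format.DateTimeFormat; the real DataCompletenessTaskUtils.getDateTimeFormatterForDataset may be implemented differently:
// Hypothetical re-creation of the mapping exercised by the test above;
// not the actual DataCompletenessTaskUtils implementation.
private static DateTimeFormatter formatterFor(TimeUnit unit, DateTimeZone zone) {
  String pattern;
  switch (unit) {
    case DAYS:
      pattern = "yyyyMMdd"; // e.g. 20170112
      break;
    case MINUTES:
      pattern = "yyyyMMddHHmm"; // e.g. 201701121530
      break;
    case HOURS:
    default:
      pattern = "yyyyMMddHH"; // e.g. 2017011215
      break;
  }
  return DateTimeFormat.forPattern(pattern).withZone(zone);
}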
Use of com.linkedin.thirdeye.api.TimeGranularity in project pinot by linkedin.
The class ContributorTest, method main.
public static void main(String[] args) throws Exception {
ContributorViewRequest request = new ContributorViewRequest();
String collection = "thirdeyeAbook";
DateTime baselineStart = new DateTime(2016, 3, 23, 00, 00);
List<MetricExpression> metricExpressions = new ArrayList<>();
metricExpressions.add(new MetricExpression("__COUNT", "__COUNT"));
request.setCollection(collection);
request.setBaselineStart(baselineStart);
request.setBaselineEnd(baselineStart.plusDays(1));
request.setCurrentStart(baselineStart.plusDays(7));
request.setCurrentEnd(baselineStart.plusDays(8));
request.setTimeGranularity(new TimeGranularity(1, TimeUnit.HOURS));
request.setMetricExpressions(metricExpressions);
// TODO make this configurable
PinotThirdEyeClient pinotThirdEyeClient = PinotThirdEyeClient.getDefaultTestClient();
QueryCache queryCache = new QueryCache(pinotThirdEyeClient, Executors.newFixedThreadPool(10));
ContributorViewHandler handler = new ContributorViewHandler(queryCache);
ContributorViewResponse response = handler.process(request);
ObjectMapper mapper = new ObjectMapper();
String jsonResponse = mapper.writeValueAsString(response);
System.out.println(jsonResponse);
}
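The request compares a one-day baseline window against the same window shifted forward by one week, bucketed at one-hour granularity. A small sanity check of the bucket arithmetic, assuming TimeGranularity.toMillis() returns the bucket size in milliseconds (as it is used in getAnomalyDetails further below):
// Sanity check (illustrative, not part of the test): how many 1-hour buckets
// the one-day window above spans.
DateTime baselineStart = new DateTime(2016, 3, 23, 0, 0);
TimeGranularity hourlyBucket = new TimeGranularity(1, TimeUnit.HOURS);
long windowMillis = baselineStart.plusDays(1).getMillis() - baselineStart.getMillis();
long bucketsPerWindow = windowMillis / hourlyBucket.toMillis(); // 24 on a day without a DST transition
System.out.println("hourly buckets per one-day window: " + bucketsPerWindow);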
Use of com.linkedin.thirdeye.api.TimeGranularity in project pinot by linkedin.
The class HeatMapTest, method main.
public static void main(String[] args) throws Exception {
HeatMapViewRequest request = new HeatMapViewRequest();
String collection = "thirdeyeAbook";
DateTime baselineStart = new DateTime(2016, 3, 23, 00, 00);
List<MetricExpression> metricExpressions = new ArrayList<>();
metricExpressions.add(new MetricExpression("__COUNT", "__COUNT"));
request.setCollection(collection);
request.setBaselineStart(baselineStart);
request.setBaselineEnd(baselineStart.plusHours(1));
request.setCurrentStart(baselineStart.plusDays(7));
request.setCurrentEnd(baselineStart.plusDays(7).plusHours(1));
request.setTimeGranularity(new TimeGranularity(1, TimeUnit.HOURS));
request.setMetricExpressions(metricExpressions);
// TODO make this configurable
PinotThirdEyeClient pinotThirdEyeClient = PinotThirdEyeClient.getDefaultTestClient();
QueryCache queryCache = new QueryCache(pinotThirdEyeClient, Executors.newFixedThreadPool(10));
HeatMapViewHandler handler = new HeatMapViewHandler(queryCache);
HeatMapViewResponse response = handler.process(request);
ObjectMapper mapper = new ObjectMapper();
String jsonResponse = mapper.writeValueAsString(response);
System.out.println(jsonResponse);
}
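HeatMapTest differs from ContributorTest only in the window length (one hour instead of one day); the client and query-cache wiring is identical. A hedged sketch of that shared wiring, factored into a hypothetical helper that is not part of the project:
// Illustrative refactor of the wiring shared by the two main() methods above.
// The helper name is hypothetical; getDefaultTestClient() and QueryCache come
// straight from the snippets.
static QueryCache newTestQueryCache() {
  // TODO make this configurable
  PinotThirdEyeClient client = PinotThirdEyeClient.getDefaultTestClient();
  return new QueryCache(client, Executors.newFixedThreadPool(10));
}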
Use of com.linkedin.thirdeye.api.TimeGranularity in project pinot by linkedin.
The class AnomaliesResource, method getAnomalyDetails.
/**
* Generates anomaly details for each merged anomaly.
* @param mergedAnomaly the merged anomaly to describe
* @param datasetConfig configuration of the dataset the anomaly belongs to
* @param timeSeriesDateFormatter formatter for the timestamps of the time series points
* @param startEndDateFormatterHours formatter for anomaly start/end times at hourly granularity
* @param startEndDateFormatterDays formatter for anomaly start/end times at daily granularity
* @param externalUrl external URL to include in the anomaly details
* @return the populated AnomalyDetails, or null if construction failed
*/
private AnomalyDetails getAnomalyDetails(MergedAnomalyResultDTO mergedAnomaly, DatasetConfigDTO datasetConfig, DateTimeFormatter timeSeriesDateFormatter, DateTimeFormatter startEndDateFormatterHours, DateTimeFormatter startEndDateFormatterDays, String externalUrl) throws Exception {
String dataset = datasetConfig.getDataset();
String metricName = mergedAnomaly.getMetric();
AnomalyFunctionDTO anomalyFunctionSpec = anomalyFunctionDAO.findById(mergedAnomaly.getFunctionId());
BaseAnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
String aggGranularity = constructAggGranularity(datasetConfig);
long anomalyStartTime = mergedAnomaly.getStartTime();
long anomalyEndTime = mergedAnomaly.getEndTime();
TimeRange range = getTimeseriesOffsetedTimes(anomalyStartTime, anomalyEndTime, datasetConfig);
long currentStartTime = range.getStart();
long currentEndTime = range.getEnd();
DimensionMap dimensions = mergedAnomaly.getDimensions();
TimeGranularity timeGranularity = Utils.getAggregationTimeGranularity(aggGranularity, anomalyFunctionSpec.getCollection());
long bucketMillis = timeGranularity.toMillis();
AnomalyDetails anomalyDetails = null;
try {
AnomalyDetectionInputContext adInputContext = TimeBasedAnomalyMerger.fetchDataByDimension(currentStartTime, currentEndTime, dimensions, anomalyFunction, mergedAnomalyResultDAO, overrideConfigDAO, true);
MetricTimeSeries metricTimeSeries = adInputContext.getDimensionKeyMetricTimeSeriesMap().get(dimensions);
// Transform time series with scaling factor
List<ScalingFactor> scalingFactors = adInputContext.getScalingFactors();
if (CollectionUtils.isNotEmpty(scalingFactors)) {
Properties properties = anomalyFunction.getProperties();
MetricTransfer.rescaleMetric(metricTimeSeries, currentStartTime, scalingFactors, anomalyFunctionSpec.getTopicMetric(), properties);
}
List<MergedAnomalyResultDTO> knownAnomalies = adInputContext.getKnownMergedAnomalies().get(dimensions);
// The known anomalies for this dimension are passed to the time series view; presentation data does not need to be
// as accurate as the data used for detection, so this lookup could also be skipped (null) to reduce users' waiting time.
AnomalyTimelinesView anomalyTimelinesView = anomalyFunction.getTimeSeriesView(metricTimeSeries, bucketMillis, anomalyFunctionSpec.getTopicMetric(), currentStartTime, currentEndTime, knownAnomalies);
anomalyDetails = constructAnomalyDetails(metricName, dataset, datasetConfig, mergedAnomaly, anomalyFunctionSpec, currentStartTime, currentEndTime, anomalyTimelinesView, timeSeriesDateFormatter, startEndDateFormatterHours, startEndDateFormatterDays, externalUrl);
} catch (Exception e) {
LOG.error("Exception in constructing anomaly wrapper for anomaly {}", mergedAnomaly.getId(), e);
}
return anomalyDetails;
}
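The bucketMillis passed to getTimeSeriesView comes from TimeGranularity.toMillis(). Assuming that method simply converts the size and unit to milliseconds (an assumption, not verified against the TimeGranularity source), the hourly case works out as follows:
// Illustrative: the bucket size for an hourly aggregation granularity, assuming
// TimeGranularity.toMillis() is equivalent to unit.toMillis(size) from the JDK.
TimeGranularity hourly = new TimeGranularity(1, TimeUnit.HOURS);
long bucketMillis = hourly.toMillis(); // expected: 3,600,000 ms
long jdkMillis = TimeUnit.HOURS.toMillis(1); // 3,600,000 ms from java.util.concurrent.TimeUnit
System.out.println(bucketMillis == jdkMillis);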
Use of com.linkedin.thirdeye.api.TimeGranularity in project pinot by linkedin.
The class DataResource, method getWowSummary.
@GET
@Path("dashboard/wowsummary")
public WowSummary getWowSummary(@QueryParam("dashboard") String dashboard, @QueryParam("timeRanges") String timeRanges) {
WowSummary wowSummary = new WowSummary();
if (StringUtils.isBlank(dashboard)) {
return wowSummary;
}
List<Long> metricIds = getMetricIdsByDashboard(dashboard);
List<String> timeRangeLabels = Lists.newArrayList(timeRanges.split(","));
// Sort metric's id and metric expression by collections
Multimap<String, Long> datasetToMetrics = ArrayListMultimap.create();
Multimap<String, MetricExpression> datasetToMetricExpressions = ArrayListMultimap.create();
Map<Long, MetricConfigDTO> metricIdToMetricConfig = new HashMap<>();
for (long metricId : metricIds) {
MetricConfigDTO metricConfig = metricConfigDAO.findById(metricId);
metricIdToMetricConfig.put(metricId, metricConfig);
datasetToMetrics.put(metricConfig.getDataset(), metricId);
datasetToMetricExpressions.put(metricConfig.getDataset(), ThirdEyeUtils.getMetricExpressionFromMetricConfig(metricConfig));
}
Multimap<String, MetricSummary> metricAliasToMetricSummariesMap = ArrayListMultimap.create();
// Create query request for each collection
for (String dataset : datasetToMetrics.keySet()) {
TabularViewRequest request = new TabularViewRequest();
request.setCollection(dataset);
request.setMetricExpressions(new ArrayList<>(datasetToMetricExpressions.get(dataset)));
// Interpret the time range in the dataset's time zone, which may differ between the user's and the server's time zone, including daylight saving time.
for (String timeRangeLabel : timeRangeLabels) {
DateTimeZone timeZoneForCollection = Utils.getDataTimeZone(dataset);
TimeRange timeRange = getTimeRangeFromLabel(dataset, timeZoneForCollection, timeRangeLabel);
long currentEnd = timeRange.getEnd();
long currentStart = timeRange.getStart();
System.out.println(timeRangeLabel + ": current start/end " + new DateTime(currentStart) + " " + new DateTime(currentEnd));
TimeGranularity timeGranularity = new TimeGranularity(1, TimeUnit.HOURS);
request.setBaselineStart(new DateTime(currentStart, timeZoneForCollection).minusDays(7));
request.setBaselineEnd(new DateTime(currentEnd, timeZoneForCollection).minusDays(7));
request.setCurrentStart(new DateTime(currentStart, timeZoneForCollection));
request.setCurrentEnd(new DateTime(currentEnd, timeZoneForCollection));
request.setTimeGranularity(timeGranularity);
TabularViewHandler handler = new TabularViewHandler(queryCache);
try {
TabularViewResponse tabularViewResponse = handler.process(request);
for (String metric : tabularViewResponse.getMetrics()) {
MetricDataset metricDataset = new MetricDataset(metric, dataset);
MetricConfigDTO metricConfig = CACHE_REGISTRY_INSTANCE.getMetricConfigCache().get(metricDataset);
Long metricId = metricConfig.getId();
String metricAlias = metricConfig.getAlias();
GenericResponse response = tabularViewResponse.getData().get(metric);
MetricSummary metricSummary = new MetricSummary();
metricSummary.setMetricId(metricId);
metricSummary.setMetricName(metricConfig.getName());
metricSummary.setMetricAlias(metricAlias);
List<String[]> data = response.getResponseData();
double baselineValue = 0;
double currentValue = 0;
for (String[] responseData : data) {
baselineValue = baselineValue + Double.valueOf(responseData[0]);
currentValue = currentValue + Double.valueOf(responseData[1]);
}
double percentageChange = (currentValue - baselineValue) * 100 / baselineValue;
metricSummary.setBaselineValue(baselineValue);
metricSummary.setCurrentValue(currentValue);
metricSummary.setWowPercentageChange(percentageChange);
metricAliasToMetricSummariesMap.put(metricAlias, metricSummary);
}
} catch (Exception e) {
LOG.error("Exception while processing /data/tabular call", e);
}
}
}
wowSummary.setMetricAliasToMetricSummariesMap(metricAliasToMetricSummariesMap);
return wowSummary;
}
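Note that the week-over-week change above divides by baselineValue without guarding against a zero baseline, which yields Infinity or NaN. A minimal sketch of a guarded variant that keeps the same formula (this is not how DataResource currently computes it):
// Illustrative variant of the percentage-change computation above with a zero-baseline guard.
static double wowPercentageChange(double baselineValue, double currentValue) {
  if (baselineValue == 0d) {
    // No meaningful baseline: return 0 here; callers could instead use Double.NaN or skip the metric.
    return 0d;
  }
  return (currentValue - baselineValue) * 100 / baselineValue;
}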