Use of com.linkedin.thirdeye.api.TimeGranularity in project pinot by linkedin, from the class TimeOnTimeTest, method generateGroupByTimeRequest.
// TABULAR
private static TimeOnTimeComparisonRequest generateGroupByTimeRequest() {
  TimeOnTimeComparisonRequest comparisonRequest = new TimeOnTimeComparisonRequest();
  String collection = "thirdeyeAbook";
  comparisonRequest.setCollectionName(collection);
  // Baseline: April 1 (one day); current: April 8 (one week later), i.e. a week-over-week comparison.
  comparisonRequest.setBaselineStart(new DateTime(2016, 4, 1, 0, 0));
  comparisonRequest.setBaselineEnd(new DateTime(2016, 4, 2, 0, 0));
  comparisonRequest.setCurrentStart(new DateTime(2016, 4, 8, 0, 0));
  comparisonRequest.setCurrentEnd(new DateTime(2016, 4, 9, 0, 0));
  List<MetricFunction> metricFunctions = new ArrayList<>();
  metricFunctions.add(new MetricFunction(MetricAggFunction.SUM, "__COUNT"));
  List<MetricExpression> metricExpressions = Utils.convertToMetricExpressions(metricFunctions);
  metricExpressions.add(new MetricExpression("submit_rate", "submits/impressions"));
  comparisonRequest.setMetricExpressions(metricExpressions);
  comparisonRequest.setAggregationTimeGranularity(new TimeGranularity(1, TimeUnit.HOURS));
  return comparisonRequest;
}
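A request built this way is then executed against a comparison handler. The following is a minimal sketch of that call; TimeOnTimeComparisonHandler, its handle() method, and the queryCache dependency are assumptions about the surrounding ThirdEye API, not shown in the snippet above:

// Hedged sketch: the handler type, its handle() signature, and queryCache are assumptions.
TimeOnTimeComparisonRequest request = generateGroupByTimeRequest();
TimeOnTimeComparisonHandler handler = new TimeOnTimeComparisonHandler(queryCache);
TimeOnTimeComparisonResponse response = handler.handle(request);
// With a 1-day window at 1 HOUR aggregation granularity, one comparison row per hour is expected.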
Use of com.linkedin.thirdeye.api.TimeGranularity in project pinot by linkedin, from the class TimeSeriesTest, method generateGroupByTimeAndDimension.
private static TimeSeriesRequest generateGroupByTimeAndDimension() {
  TimeSeriesRequest timeSeriesRequest = new TimeSeriesRequest();
  timeSeriesRequest.setCollectionName(THIRDEYE_ABOOK);
  // Three-hour window, grouped by both time and dimensions.
  timeSeriesRequest.setStart(START);
  timeSeriesRequest.setEnd(START.plusHours(3));
  timeSeriesRequest.setGroupByDimensions(ABOOK_DIMENSIONS);
  List<MetricFunction> metricFunctions = new ArrayList<>();
  metricFunctions.add(DEFAULT_METRIC_FUNCTION);
  List<MetricExpression> metricExpressions = Utils.convertToMetricExpressions(metricFunctions);
  timeSeriesRequest.setMetricExpressions(metricExpressions);
  timeSeriesRequest.setAggregationTimeGranularity(new TimeGranularity(1, TimeUnit.HOURS));
  return timeSeriesRequest;
}
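This snippet references constants (THIRDEYE_ABOOK, START, ABOOK_DIMENSIONS, DEFAULT_METRIC_FUNCTION, and SUBMIT_RATE_EXPRESSION below) defined elsewhere in TimeSeriesTest. A plausible sketch of those definitions, inferred from the sibling TimeOnTimeTest snippet above; START and ABOOK_DIMENSIONS in particular are illustrative assumptions:

// Plausible definitions (assumptions; collection name, metric function, and
// submit_rate expression mirror the TimeOnTimeTest snippet, the rest is illustrative):
private static final String THIRDEYE_ABOOK = "thirdeyeAbook";
private static final DateTime START = new DateTime(2016, 4, 1, 0, 0);
private static final List<String> ABOOK_DIMENSIONS = Arrays.asList("browserName", "countryCode"); // hypothetical dimension names
private static final MetricFunction DEFAULT_METRIC_FUNCTION =
    new MetricFunction(MetricAggFunction.SUM, "__COUNT");
private static final MetricExpression SUBMIT_RATE_EXPRESSION =
    new MetricExpression("submit_rate", "submits/impressions");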
Use of com.linkedin.thirdeye.api.TimeGranularity in project pinot by linkedin, from the class TimeSeriesTest, method generateGroupByTimeRequest.
private static TimeSeriesRequest generateGroupByTimeRequest() {
  TimeSeriesRequest timeSeriesRequest = new TimeSeriesRequest();
  timeSeriesRequest.setCollectionName(THIRDEYE_ABOOK);
  // One-day window, grouped by time only (no dimension group-by).
  timeSeriesRequest.setStart(START);
  timeSeriesRequest.setEnd(START.plusDays(1));
  List<MetricFunction> metricFunctions = new ArrayList<>();
  metricFunctions.add(DEFAULT_METRIC_FUNCTION);
  List<MetricExpression> metricExpressions = Utils.convertToMetricExpressions(metricFunctions);
  metricExpressions.add(SUBMIT_RATE_EXPRESSION);
  timeSeriesRequest.setMetricExpressions(metricExpressions);
  timeSeriesRequest.setAggregationTimeGranularity(new TimeGranularity(1, TimeUnit.HOURS));
  return timeSeriesRequest;
}
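Given the one-day window and the 1 HOUR aggregation granularity, the expected bucket count follows from simple arithmetic (a sketch; assumes no DST transition inside the window):

// 86,400,000 ms window / 3,600,000 ms per bucket = 24 hourly buckets
long windowMs = START.plusDays(1).getMillis() - START.getMillis();
long bucketMs = TimeUnit.HOURS.toMillis(1);
System.out.println(windowMs / bucketMs); // 24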
Use of com.linkedin.thirdeye.api.TimeGranularity in project pinot by linkedin, from the class DataCompletenessTaskUtilsTest, method testGetBucketSizeForDataset.
@Test
public void testGetBucketSizeForDataset() throws Exception {
  String columnName = "Date";
  // DAYS granularity -> 1-day bucket
  TimeGranularity timeGranularity = new TimeGranularity(1, TimeUnit.DAYS);
  String timeFormat = TimeSpec.SINCE_EPOCH_FORMAT;
  TimeSpec timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
  long bucketSize = DataCompletenessTaskUtils.getBucketSizeInMSForDataset(timeSpec);
  Assert.assertEquals(bucketSize, 24 * 60 * 60_000); // 86,400,000 ms
  // HOURS granularity -> 1-hour bucket
  timeGranularity = new TimeGranularity(1, TimeUnit.HOURS);
  timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
  bucketSize = DataCompletenessTaskUtils.getBucketSizeInMSForDataset(timeSpec);
  Assert.assertEquals(bucketSize, 60 * 60_000); // 3,600,000 ms
  // MINUTES granularity is coarsened to a 30-minute bucket
  timeGranularity = new TimeGranularity(1, TimeUnit.MINUTES);
  timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
  bucketSize = DataCompletenessTaskUtils.getBucketSizeInMSForDataset(timeSpec);
  Assert.assertEquals(bucketSize, 30 * 60_000); // 1,800,000 ms
  // any other unit falls back to the default 1-hour bucket
  timeGranularity = new TimeGranularity(1, TimeUnit.MILLISECONDS);
  timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
  bucketSize = DataCompletenessTaskUtils.getBucketSizeInMSForDataset(timeSpec);
  Assert.assertEquals(bucketSize, 60 * 60_000); // 3,600,000 ms
}
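The assertions pin down the granularity-to-bucket mapping completely, so the utility's behavior can be read off directly. A hedged reconstruction of that mapping (not the actual Pinot implementation; the getDataGranularity() and getUnit() accessors are assumptions):

// Hedged reconstruction of the mapping the test asserts; accessor names are assumptions.
static long bucketSizeInMsFor(TimeSpec timeSpec) {
  switch (timeSpec.getDataGranularity().getUnit()) {
    case DAYS:
      return TimeUnit.DAYS.toMillis(1);     // 86,400,000 ms
    case HOURS:
      return TimeUnit.HOURS.toMillis(1);    // 3,600,000 ms
    case MINUTES:
      return TimeUnit.MINUTES.toMillis(30); // minute-level data is checked in 30-minute buckets
    default:
      return TimeUnit.HOURS.toMillis(1);    // everything else falls back to 1 hour
  }
}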
Use of com.linkedin.thirdeye.api.TimeGranularity in project pinot by linkedin, from the class DataCompletenessTaskUtilsTest, method testGetBucketNameToTimeValuesMap.
@Test
public void testGetBucketNameToTimeValuesMap() {
  DateTimeZone zone = DateTimeZone.forID("America/Los_Angeles");
  // SIMPLE_DATE_FORMAT time column
  String columnName = "Date";
  TimeGranularity timeGranularity = new TimeGranularity(1, TimeUnit.DAYS);
  String timeFormat = "SIMPLE_DATE_FORMAT:yyyyMMdd";
  TimeSpec timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
  // DAYS: for SDF columns the time value is the bucket name parsed as a long
  Map<String, Long> bucketNameToBucketValue = new HashMap<>();
  bucketNameToBucketValue.put("20170112", new DateTime(2017, 1, 12, 0, 0, zone).getMillis());
  bucketNameToBucketValue.put("20170113", new DateTime(2017, 1, 13, 0, 0, zone).getMillis());
  bucketNameToBucketValue.put("20170114", new DateTime(2017, 1, 14, 0, 0, zone).getMillis());
  Map<String, Long> expectedValues = new HashMap<>();
  expectedValues.put("20170112", 20170112L);
  expectedValues.put("20170113", 20170113L);
  expectedValues.put("20170114", 20170114L);
  ListMultimap<String, Long> bucketNameToTimeValuesMap =
      DataCompletenessTaskUtils.getBucketNameToTimeValuesMap(timeSpec, bucketNameToBucketValue);
  for (Entry<String, Long> entry : bucketNameToTimeValuesMap.entries()) {
    String bucketName = entry.getKey();
    Assert.assertEquals(entry.getValue(), expectedValues.get(bucketName));
  }
  // EPOCH time column
  zone = DateTimeZone.UTC;
  timeFormat = TimeSpec.SINCE_EPOCH_FORMAT;
  timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat); // still DAYS; superseded immediately below
  // HOURS
  timeGranularity = new TimeGranularity(1, TimeUnit.HOURS);
  timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
  bucketNameToBucketValue = new HashMap<>();
  bucketNameToBucketValue.put("2017011200", new DateTime(2017, 1, 12, 0, 0, zone).getMillis());
  bucketNameToBucketValue.put("2017011201", new DateTime(2017, 1, 12, 1, 0, zone).getMillis());
  bucketNameToBucketValue.put("2017011202", new DateTime(2017, 1, 12, 2, 0, zone).getMillis());
  expectedValues = new HashMap<>();
  // hours since epoch values
  expectedValues.put("2017011200", 412272L);
  expectedValues.put("2017011201", 412273L);
  expectedValues.put("2017011202", 412274L);
  bucketNameToTimeValuesMap = DataCompletenessTaskUtils.getBucketNameToTimeValuesMap(timeSpec, bucketNameToBucketValue);
  for (Entry<String, Long> entry : bucketNameToTimeValuesMap.entries()) {
    String bucketName = entry.getKey();
    Assert.assertEquals(entry.getValue(), expectedValues.get(bucketName));
  }
  // MINUTES (10-minute granularity, 30-minute bucket names)
  timeGranularity = new TimeGranularity(10, TimeUnit.MINUTES);
  timeSpec = new TimeSpec(columnName, timeGranularity, timeFormat);
  bucketNameToBucketValue = new HashMap<>();
  bucketNameToBucketValue.put("201701120000", new DateTime(2017, 1, 12, 0, 0, zone).getMillis());
  bucketNameToBucketValue.put("201701120030", new DateTime(2017, 1, 12, 0, 30, zone).getMillis());
  bucketNameToBucketValue.put("201701120100", new DateTime(2017, 1, 12, 1, 0, zone).getMillis());
  bucketNameToBucketValue.put("201701120130", new DateTime(2017, 1, 12, 1, 30, zone).getMillis());
  Map<String, List<Long>> expectedValuesList = new HashMap<>();
  // 10 minutes since epoch values
  expectedValuesList.put("201701120000", Lists.newArrayList(2473632L, 2473633L, 2473634L));
  expectedValuesList.put("201701120030", Lists.newArrayList(2473635L, 2473636L, 2473637L));
  expectedValuesList.put("201701120100", Lists.newArrayList(2473638L, 2473639L, 2473640L));
  expectedValuesList.put("201701120130", Lists.newArrayList(2473641L, 2473642L, 2473643L));
  bucketNameToTimeValuesMap = DataCompletenessTaskUtils.getBucketNameToTimeValuesMap(timeSpec, bucketNameToBucketValue);
  for (String bucketName : bucketNameToTimeValuesMap.keySet()) {
    List<Long> timeValues = bucketNameToTimeValuesMap.get(bucketName);
    Collections.sort(timeValues);
    Assert.assertEquals(timeValues, expectedValuesList.get(bucketName));
  }
}