Example usage of org.apache.druid.java.util.common.granularity.PeriodGranularity from the druid-io/druid project: the testAvgDailyCountDistinctHllSketch method of the HllSketchSqlAggregatorTest class.
@Test
public void testAvgDailyCountDistinctHllSketch() throws Exception {
  // Vectorization is not possible here: the outer query runs on an inline datasource.
  cannotVectorize();

  // SQL under test: daily HLL distinct counts in a subquery, averaged by the outer query.
  final String sql = "SELECT\n"
      + " AVG(u)\n"
      + "FROM ("
      + " SELECT FLOOR(__time TO DAY), APPROX_COUNT_DISTINCT_DS_HLL(cnt) AS u\n"
      + " FROM druid.foo\n"
      + " GROUP BY 1\n"
      + ")";

  // Expected native plan: an ALL-granularity group-by whose datasource is the
  // daily HLL timeseries subquery; AVG is planned as a sum/count quotient.
  final GroupByQuery expectedQuery = GroupByQuery.builder()
      .setDataSource(
          new QueryDataSource(
              Druids.newTimeseriesQueryBuilder()
                  .dataSource(CalciteTests.DATASOURCE1)
                  .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
                  .granularity(new PeriodGranularity(Period.days(1), null, DateTimeZone.UTC))
                  .aggregators(
                      Collections.singletonList(
                          new HllSketchBuildAggregatorFactory("a0:a", "cnt", null, null, ROUND)
                      )
                  )
                  .postAggregators(ImmutableList.of(new FinalizingFieldAccessPostAggregator("a0", "a0:a")))
                  .context(QUERY_CONTEXT_DEFAULT)
                  .build()
                  .withOverriddenContext(
                      BaseCalciteQueryTest.getTimeseriesContextWithFloorTime(
                          ImmutableMap.of(TimeseriesQuery.SKIP_EMPTY_BUCKETS, true, BaseQuery.SQL_QUERY_ID, "dummy"),
                          "d0"
                      )
                  )
          )
      )
      .setInterval(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity())))
      .setGranularity(Granularities.ALL)
      .setAggregatorSpecs(
          // NOTE(review): the COUNT side is filtered on non-null "a0" only in
          // SQL-compatible null-handling mode, matching the two planning modes.
          NullHandling.replaceWithDefault()
              ? Arrays.asList(
                  new LongSumAggregatorFactory("_a0:sum", "a0"),
                  new CountAggregatorFactory("_a0:count")
              )
              : Arrays.asList(
                  new LongSumAggregatorFactory("_a0:sum", "a0"),
                  new FilteredAggregatorFactory(
                      new CountAggregatorFactory("_a0:count"),
                      BaseCalciteQueryTest.not(BaseCalciteQueryTest.selector("a0", null, null))
                  )
              )
      )
      .setPostAggregatorSpecs(
          ImmutableList.of(
              new ArithmeticPostAggregator(
                  "_a0",
                  "quotient",
                  ImmutableList.of(
                      new FieldAccessPostAggregator(null, "_a0:sum"),
                      new FieldAccessPostAggregator(null, "_a0:count")
                  )
              )
          )
      )
      .setContext(QUERY_CONTEXT_DEFAULT)
      .build();

  final List<Object[]> expectedResults = ImmutableList.of(new Object[]{1L});
  testQuery(sql, ImmutableList.of(expectedQuery), expectedResults);
}
Example usage of org.apache.druid.java.util.common.granularity.PeriodGranularity from the druid-io/druid project: the testIterableWeek method of the QueryGranularityTest class.
@Test
public void testIterableWeek() {
  final DateTimeZone tz = DateTimes.inferTzFromString("America/Los_Angeles");
  final DateTime baseTime = new DateTime("2012-11-03T10:00:00", tz);
  // Both assertions iterate the same three-week interval; it straddles the
  // Nov 4 DST transition, visible in the -07:00 → -08:00 offsets below.
  final Interval threeWeeks = new Interval(baseTime, baseTime.plus(Weeks.weeks(3)));

  // Without an origin, weekly buckets snap to calendar-week boundaries, so the
  // interval covers four bucket starts.
  assertSameInterval(
      Lists.newArrayList(
          new DateTime("2012-10-29T00:00:00.000-07:00", tz),
          new DateTime("2012-11-05T00:00:00.000-08:00", tz),
          new DateTime("2012-11-12T00:00:00.000-08:00", tz),
          new DateTime("2012-11-19T00:00:00.000-08:00", tz)
      ),
      new PeriodGranularity(new Period("P1W"), null, tz).getIterable(threeWeeks)
  );

  // With baseTime as the origin, buckets are anchored at the origin instant
  // and exactly three bucket starts fall inside the interval.
  assertSameInterval(
      Lists.newArrayList(
          new DateTime("2012-11-03T10:00:00.000-07:00", tz),
          new DateTime("2012-11-10T10:00:00.000-08:00", tz),
          new DateTime("2012-11-17T10:00:00.000-08:00", tz)
      ),
      new PeriodGranularity(new Period("P1W"), baseTime, tz).getIterable(threeWeeks)
  );
}
Example usage of org.apache.druid.java.util.common.granularity.PeriodGranularity from the druid-io/druid project: the testPeriodTruncateDays method of the QueryGranularityTest class.
@Test
public void testPeriodTruncateDays() {
  final DateTimeZone tz = DateTimes.inferTzFromString("America/Los_Angeles");
  final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00");

  // With an explicit origin, two-day buckets are anchored at the origin's
  // time of day (05:00), not at midnight.
  final PeriodGranularity periodOrigin = new PeriodGranularity(new Period("P2D"), origin, tz);
  assertSameDateTime(
      Lists.newArrayList(
          new DateTime("2011-12-31T05:00:00.000-08:00", tz),
          new DateTime("2012-01-02T05:00:00.000-08:00", tz),
          new DateTime("2012-01-04T05:00:00.000-08:00", tz)
      ),
      Lists.newArrayList(
          periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")),
          periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")),
          periodOrigin.bucketStart(DateTimes.of("2012-01-04T07:20:04.123-08:00"))
      )
  );

  // Without an origin, the same inputs truncate to local-midnight bucket
  // starts; the first two inputs land in the same bucket.
  final PeriodGranularity periodNoOrigin = new PeriodGranularity(new Period("P2D"), null, tz);
  assertSameDateTime(
      Lists.newArrayList(
          new DateTime("2012-01-01T00:00:00.000-08:00", tz),
          new DateTime("2012-01-01T00:00:00.000-08:00", tz),
          new DateTime("2012-01-03T00:00:00.000-08:00", tz)
      ),
      Lists.newArrayList(
          periodNoOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")),
          periodNoOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")),
          periodNoOrigin.bucketStart(DateTimes.of("2012-01-04T07:20:04.123-08:00"))
      )
  );
}
Example usage of org.apache.druid.java.util.common.granularity.PeriodGranularity from the druid-io/druid project: the testTruncateDhaka method of the QueryGranularityTest class.
@Test
public void testTruncateDhaka() {
  // Asia/Dhaka sits at a +06:00 offset, as the expected timestamps show.
  final DateTimeZone tz = DateTimeZone.forTimeZone(TimeZone.getTimeZone("Asia/Dhaka"));
  final DateTime date = new DateTime("2011-03-15T21:42:23.898+06:00", tz);

  // Year granularity truncates to the start of the local year.
  assertBucketStart(
      new PeriodGranularity(new Period("P1Y"), null, tz),
      date,
      new DateTime("2011-01-01T00:00:00.000+06:00", tz)
  );
  // Hour granularity truncates to the top of the local hour.
  assertBucketStart(
      new PeriodGranularity(new Period("PT1H"), null, tz),
      date,
      new DateTime("2011-03-15T21:00:00.000+06:00", tz)
  );
  // Two-hour granularity snaps to even local hours, so 21:42 falls in the
  // bucket starting at 20:00.
  assertBucketStart(
      new PeriodGranularity(new Period("PT2H"), null, tz),
      date,
      new DateTime("2011-03-15T20:00:00.000+06:00", tz)
  );
}
Example usage of org.apache.druid.java.util.common.granularity.PeriodGranularity from the druid-io/druid project: the testHavingSpec method of the GroupByQueryRunnerTest class.
@Test
public void testHavingSpec() {
  // Monthly group-by on "quality", keeping only groups where rows > 2 OR idx == 217.
  final GroupByQuery fullQuery = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setHavingSpec(
          new OrHavingSpec(
              ImmutableList.of(
                  new GreaterThanHavingSpec("rows", 2L),
                  new EqualToHavingSpec("idx", 217L)
              )
          )
      )
      .build();

  // "business" survives only via the idx == 217 branch of the OR.
  final List<ResultRow> expectedResults = Arrays.asList(
      makeRow(fullQuery, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
      makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
      makeRow(fullQuery, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L)
  );

  TestHelper.assertExpectedObjects(
      expectedResults,
      GroupByQueryRunnerTestHelper.runQuery(factory, runner, fullQuery),
      "havingspec"
  );
}
Aggregations