Use of org.apache.druid.query.topn.TopNQuery in project druid by druid-io.
From the class CalciteArraysQueryTest, method testArrayAggGroupByArrayAggFromSubquery:
@Test
public void testArrayAggGroupByArrayAggFromSubquery() throws Exception {
cannotVectorize();
testQuery("SELECT dim2, arr, COUNT(*) FROM (SELECT dim2, ARRAY_AGG(DISTINCT dim1) as arr FROM foo WHERE dim1 is not null GROUP BY 1 LIMIT 5) GROUP BY 1,2", QUERY_CONTEXT_NO_STRINGIFY_ARRAY, ImmutableList.of(GroupByQuery.builder().setDataSource(new TopNQuery(new TableDataSource(CalciteTests.DATASOURCE1), null, new DefaultDimensionSpec("dim2", "d0", ColumnType.STRING), new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC), 5, querySegmentSpec(Filtration.eternity()), new NotDimFilter(new SelectorDimFilter("dim1", null, null)), Granularities.ALL, aggregators(new ExpressionLambdaAggregatorFactory("a0", ImmutableSet.of("dim1"), "__acc", "ARRAY<STRING>[]", "ARRAY<STRING>[]", true, true, false, "array_set_add(\"__acc\", \"dim1\")", "array_set_add_all(\"__acc\", \"a0\")", null, null, new HumanReadableBytes(1024), ExprMacroTable.nil())), null, QUERY_CONTEXT_NO_STRINGIFY_ARRAY)).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setDimFilter(null).setGranularity(Granularities.ALL).setDimensions(dimensions(new DefaultDimensionSpec("d0", "_d0", ColumnType.STRING), new DefaultDimensionSpec("a0", "_d1", ColumnType.STRING_ARRAY))).setAggregatorSpecs(aggregators(new CountAggregatorFactory("_a0"))).setContext(QUERY_CONTEXT_NO_STRINGIFY_ARRAY).build()), useDefault ? ImmutableList.of(new Object[] { "", ImmutableList.of("10.1", "2", "abc"), 1L }, new Object[] { "a", ImmutableList.of("1"), 1L }, new Object[] { "abc", ImmutableList.of("def"), 1L }) : ImmutableList.of(new Object[] { null, ImmutableList.of("10.1", "abc"), 1L }, new Object[] { "", ImmutableList.of("2"), 1L }, new Object[] { "a", ImmutableList.of("", "1"), 1L }, new Object[] { "abc", ImmutableList.of("def"), 1L }));
}
Use of org.apache.druid.query.topn.TopNQuery in project druid by druid-io.
From the class CachingClusteredClientTest, method testTopNCachingTimeZone:
@Test
@SuppressWarnings("unchecked")
public void testTopNCachingTimeZone() {
final TopNQueryBuilder builder = new TopNQueryBuilder()
    .dataSource(DATA_SOURCE)
    .dimension(TOP_DIM)
    .metric("imps")
    .threshold(3)
    .intervals(SEG_SPEC)
    .filters(DIM_FILTER)
    .granularity(PT1H_TZ_GRANULARITY)
    .aggregators(AGGS)
    .postAggregators(POST_AGGS)
    .context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(
    getDefaultQueryRunner(),
    new TopNQueryQueryToolChest(new TopNQueryConfig())
);
testQueryCaching(
    runner,
    builder.randomQueryId().build(),
    Intervals.of("2011-11-04/2011-11-08"),
    makeTopNResultsWithoutRename(
        new DateTime("2011-11-04", TIMEZONE), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        new DateTime("2011-11-05", TIMEZONE), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-11-06", TIMEZONE), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-11-07", TIMEZONE), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986
    )
);
TopNQuery query = builder
    .intervals("2011-11-04/2011-11-08")
    .metric("imps")
    .aggregators(RENAMED_AGGS)
    .postAggregators(DIFF_ORDER_POST_AGGS)
    .randomQueryId()
    .build();
TestHelper.assertExpectedResults(
    makeRenamedTopNResults(
        new DateTime("2011-11-04", TIMEZONE), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        new DateTime("2011-11-05", TIMEZONE), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-11-06", TIMEZONE), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-11-07", TIMEZONE), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986
    ),
    runner.run(QueryPlus.wrap(query))
);
}
Use of org.apache.druid.query.topn.TopNQuery in project druid by druid-io.
From the class CachingClusteredClientTest, method testTopNOnPostAggMetricCaching:
@Test
public void testTopNOnPostAggMetricCaching() {
final TopNQueryBuilder builder = new TopNQueryBuilder()
    .dataSource(DATA_SOURCE)
    .dimension(TOP_DIM)
    .metric("avg_imps_per_row_double")
    .threshold(3)
    .intervals(SEG_SPEC)
    .filters(DIM_FILTER)
    .granularity(GRANULARITY)
    .aggregators(AGGS)
    .postAggregators(POST_AGGS)
    .context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(
    getDefaultQueryRunner(),
    new TopNQueryQueryToolChest(new TopNQueryConfig())
);
testQueryCaching(
    runner,
    builder.randomQueryId().build(),
    Intervals.of("2011-01-01/2011-01-02"), makeTopNResultsWithoutRename(),
    Intervals.of("2011-01-02/2011-01-03"), makeTopNResultsWithoutRename(),
    Intervals.of("2011-01-05/2011-01-10"),
    makeTopNResultsWithoutRename(
        DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983
    ),
    Intervals.of("2011-01-05/2011-01-10"),
    makeTopNResultsWithoutRename(
        DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983
    )
);
TopNQuery query = builder
    .intervals("2011-01-01/2011-01-10")
    .metric("avg_imps_per_row_double")
    .aggregators(AGGS)
    .postAggregators(DIFF_ORDER_POST_AGGS)
    .randomQueryId()
    .build();
TestHelper.assertExpectedResults(
    makeTopNResultsWithoutRename(
        DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983,
        DateTimes.of("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983
    ),
    runner.run(QueryPlus.wrap(query))
);
}
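The tests above all assemble the query through TopNQueryBuilder, so the core usage pattern is easier to see in isolation. The sketch below is a minimal, stripped-down illustration of that builder chain using only methods that appear in the examples above; the data source "wikipedia", dimension "page", and aggregator name "rows" are placeholder assumptions for this sketch, not values from the Druid test suite.

    // Minimal sketch (hypothetical names): building a TopNQuery with TopNQueryBuilder.
    import org.apache.druid.java.util.common.granularity.Granularities;
    import org.apache.druid.query.aggregation.CountAggregatorFactory;
    import org.apache.druid.query.topn.TopNQuery;
    import org.apache.druid.query.topn.TopNQueryBuilder;

    import java.util.Collections;

    public class TopNQuerySketch
    {
      public static TopNQuery buildExample()
      {
        return new TopNQueryBuilder()
            .dataSource("wikipedia")              // placeholder table name
            .dimension("page")                    // dimension whose values are ranked
            .metric("rows")                       // metric that orders the top N
            .threshold(3)                         // keep only the top 3 values
            .intervals("2011-11-04/2011-11-08")   // ISO-8601 interval, as in the tests above
            .granularity(Granularities.ALL)       // one result bucket over the whole interval
            .aggregators(Collections.singletonList(new CountAggregatorFactory("rows")))
            .build();
      }
    }

As in the caching tests above, the built query would then be wrapped with QueryPlus.wrap(query) and handed to a QueryRunner to produce results.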