Use of org.apache.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testTopNCachingEmptyResults:
@Test
@SuppressWarnings("unchecked")
public void testTopNCachingEmptyResults() {
final TopNQueryBuilder builder = new TopNQueryBuilder()
    .dataSource(DATA_SOURCE)
    .dimension(TOP_DIM)
    .metric("imps")
    .threshold(3)
    .intervals(SEG_SPEC)
    .filters(DIM_FILTER)
    .granularity(GRANULARITY)
    .aggregators(AGGS)
    .postAggregators(POST_AGGS)
    .context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(
    getDefaultQueryRunner(),
    new TopNQueryQueryToolChest(new TopNQueryConfig())
);
// Cache empty results for the first two intervals, then two populated
// segments (one at T00, one at T01) over 2011-01-05/2011-01-10.
testQueryCaching(
    runner,
    builder.randomQueryId().build(),
    Intervals.of("2011-01-01/2011-01-02"), makeTopNResultsWithoutRename(),
    Intervals.of("2011-01-02/2011-01-03"), makeTopNResultsWithoutRename(),
    Intervals.of("2011-01-05/2011-01-10"),
    makeTopNResultsWithoutRename(
        DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983
    ),
    Intervals.of("2011-01-05/2011-01-10"),
    makeTopNResultsWithoutRename(
        DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983
    )
);
TopNQuery query = builder
    .intervals("2011-01-01/2011-01-10")
    .metric("imps")
    .aggregators(RENAMED_AGGS)
    .postAggregators(DIFF_ORDER_POST_AGGS)
    .randomQueryId()
    .build();
// The days with empty cached results contribute nothing to the merged output.
TestHelper.assertExpectedResults(
    makeRenamedTopNResults(
        DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983,
        DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983
    ),
    runner.run(QueryPlus.wrap(query))
);
}
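All of these examples share one pattern: the caching client's base runner is wrapped in a FinalizeResultsQueryRunner so that merged (and possibly cached) per-segment results are finalized by the matching query toolchest before assertions run. A minimal sketch of that pattern, assuming the getDefaultQueryRunner() fixture and a prebuilt TopN query from the surrounding test class:

// Minimal sketch of the shared wrapping pattern; getDefaultQueryRunner()
// and `query` are assumed to come from the CachingClusteredClientTest fixture.
QueryRunner baseRunner = getDefaultQueryRunner();
QueryRunner finalizingRunner = new FinalizeResultsQueryRunner(
    baseRunner,
    new TopNQueryQueryToolChest(new TopNQueryConfig())  // toolchest must match the query type
);
// run() returns a lazy Sequence; toList() materializes the finalized results.
List<Result<TopNResultValue>> results =
    finalizingRunner.run(QueryPlus.wrap(query)).toList();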
Use of org.apache.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testHashBasedPruningQueryContextEnabledWithPartitionFunctionAndPartitionDimensionsDoSegmentPruning:
@Test
public void testHashBasedPruningQueryContextEnabledWithPartitionFunctionAndPartitionDimensionsDoSegmentPruning() {
DimFilter filter = new AndDimFilter(
    new SelectorDimFilter("dim1", "a", null),
    new BoundDimFilter("dim2", "e", "zzz", true, true, false, null, StringComparators.LEXICOGRAPHIC),
    // The dim3 conjunction below is equivalent to InDimFilter("dim3", Arrays.asList("c"), null)
    new AndDimFilter(
        new InDimFilter("dim3", Arrays.asList("a", "c", "e", "g"), null),
        new BoundDimFilter("dim3", "aaa", "ddd", false, false, false, null, StringComparators.LEXICOGRAPHIC)
    )
);
final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .filters(filter)
    .granularity(GRANULARITY)
    .intervals(SEG_SPEC)
    .context(CONTEXT)
    .intervals("2011-01-05/2011-01-10")
    .aggregators(RENAMED_AGGS)
    .postAggregators(RENAMED_POST_AGGS)
    .randomQueryId();
TimeseriesQuery query = builder.build();
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
final Interval interval1 = Intervals.of("2011-01-06/2011-01-07");
final Interval interval2 = Intervals.of("2011-01-07/2011-01-08");
final Interval interval3 = Intervals.of("2011-01-08/2011-01-09");
final DruidServer lastServer = servers[random.nextInt(servers.length)];
List<String> partitionDimensions1 = ImmutableList.of("dim1");
ServerSelector selector1 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 0, 6);
ServerSelector selector2 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 1, 6);
ServerSelector selector3 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 2, 6);
ServerSelector selector4 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 3, 6);
ServerSelector selector5 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 4, 6);
ServerSelector selector6 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 5, 6);
List<String> partitionDimensions2 = ImmutableList.of("dim2");
ServerSelector selector7 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 0, 3);
ServerSelector selector8 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 1, 3);
ServerSelector selector9 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 2, 3);
List<String> partitionDimensions3 = ImmutableList.of("dim1", "dim3");
ServerSelector selector10 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 0, 4);
ServerSelector selector11 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 1, 4);
ServerSelector selector12 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 2, 4);
ServerSelector selector13 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 3, 4);
timeline.add(interval1, "v", new NumberedPartitionChunk<>(0, 6, selector1));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(1, 6, selector2));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(2, 6, selector3));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(3, 6, selector4));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(4, 6, selector5));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(5, 6, selector6));
timeline.add(interval2, "v", new NumberedPartitionChunk<>(0, 3, selector7));
timeline.add(interval2, "v", new NumberedPartitionChunk<>(1, 3, selector8));
timeline.add(interval2, "v", new NumberedPartitionChunk<>(2, 3, selector9));
// interval3 has four hash buckets (0..3 of 4), so the chunk numbering must
// match; the original snippet's duplicate chunk (2, 3) was a typo.
timeline.add(interval3, "v", new NumberedPartitionChunk<>(0, 4, selector10));
timeline.add(interval3, "v", new NumberedPartitionChunk<>(1, 4, selector11));
timeline.add(interval3, "v", new NumberedPartitionChunk<>(2, 4, selector12));
timeline.add(interval3, "v", new NumberedPartitionChunk<>(3, 4, selector13));
final Capture<QueryPlus> capture = Capture.newInstance();
final Capture<ResponseContext> contextCap = Capture.newInstance();
QueryRunner mockRunner = EasyMock.createNiceMock(QueryRunner.class);
EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.capture(contextCap)))
        .andReturn(Sequences.empty())
        .anyTimes();
EasyMock.expect(serverView.getQueryRunner(lastServer)).andReturn(mockRunner).anyTimes();
EasyMock.replay(serverView);
EasyMock.replay(mockRunner);
List<SegmentDescriptor> expectedDescriptors = new ArrayList<>();
// interval1: the point filter dim1 = "a" narrows 6 hash buckets down to 1 chunk
expectedDescriptors.add(new SegmentDescriptor(interval1, "v", 3));
// interval2: the range filter on dim2 can't prune, so all 3 chunks are queried
expectedDescriptors.add(new SegmentDescriptor(interval2, "v", 0));
expectedDescriptors.add(new SegmentDescriptor(interval2, "v", 1));
expectedDescriptors.add(new SegmentDescriptor(interval2, "v", 2));
// interval3: point values on both (dim1, dim3) again narrow down to 1 chunk
expectedDescriptors.add(new SegmentDescriptor(interval3, "v", 2));
MultipleSpecificSegmentSpec expected = new MultipleSpecificSegmentSpec(expectedDescriptors);
runner.run(QueryPlus.wrap(query)).toList();
Assert.assertEquals(expected, ((TimeseriesQuery) capture.getValue().getQuery()).getQuerySegmentSpec());
}
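The pruning here relies on hash partitioning being deterministic: when a filter pins every partition dimension to a single value, the broker only needs the one bucket that value hashes to. A rough sketch of the bucket computation, assuming a simplified byte encoding of the grouped key (the real broker serializes the key through its ObjectMapper, so the encoding below is illustrative only):

// Hedged sketch: with 6 hash buckets on dim1, the filter dim1 = "a" can only
// match rows in the single bucket that "a" hashes to.
int numBuckets = 6;
byte[] serializedKey = "[\"a\"]".getBytes(java.nio.charset.StandardCharsets.UTF_8);  // assumed encoding
int bucket = HashPartitionFunction.MURMUR3_32_ABS.hash(serializedKey, numBuckets);
// Only the NumberedPartitionChunk whose bucket id equals `bucket` is queried;
// the other five chunks for interval1 are pruned.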
Use of org.apache.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testGroupByCachingRenamedAggs:
@Test
public void testGroupByCachingRenamedAggs() {
GroupByQuery.Builder builder = new GroupByQuery.Builder()
    .setDataSource(DATA_SOURCE)
    .setQuerySegmentSpec(SEG_SPEC)
    .setDimFilter(DIM_FILTER)
    .setGranularity(GRANULARITY)
    .setDimensions(new DefaultDimensionSpec("a", "output"))
    .setAggregatorSpecs(AGGS)
    .setContext(CONTEXT);
final GroupByQuery query1 = builder.randomQueryId().build();
testQueryCaching(
    getDefaultQueryRunner(),
    query1,
    Intervals.of("2011-01-01/2011-01-02"),
    makeGroupByResults(
        query1,
        DateTimes.of("2011-01-01"), ImmutableMap.of("output", "a", "rows", 1, "imps", 1, "impers", 1)
    ),
    Intervals.of("2011-01-02/2011-01-03"),
    makeGroupByResults(
        query1,
        DateTimes.of("2011-01-02"), ImmutableMap.of("output", "b", "rows", 2, "imps", 2, "impers", 2)
    ),
    Intervals.of("2011-01-05/2011-01-10"),
    makeGroupByResults(
        query1,
        DateTimes.of("2011-01-05"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        DateTimes.of("2011-01-06"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        DateTimes.of("2011-01-07"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        DateTimes.of("2011-01-08"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        DateTimes.of("2011-01-09"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7)
    ),
    Intervals.of("2011-01-05/2011-01-10"),
    makeGroupByResults(
        query1,
        DateTimes.of("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        DateTimes.of("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        DateTimes.of("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        DateTimes.of("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        DateTimes.of("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7)
    )
);
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), WAREHOUSE.getToolChest(query1));
final ResponseContext context = initializeResponseContext();
TestHelper.assertExpectedObjects(
    makeGroupByResults(
        query1,
        DateTimes.of("2011-01-05T"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        DateTimes.of("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        DateTimes.of("2011-01-06T"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        DateTimes.of("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        DateTimes.of("2011-01-07T"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        DateTimes.of("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        DateTimes.of("2011-01-08T"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        DateTimes.of("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        DateTimes.of("2011-01-09T"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7),
        DateTimes.of("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7)
    ),
    runner.run(QueryPlus.wrap(builder.randomQueryId().setInterval("2011-01-05/2011-01-10").build()), context),
    ""
);
final GroupByQuery query2 = builder
    .setInterval("2011-01-05/2011-01-10")
    .setDimensions(new DefaultDimensionSpec("a", "output2"))
    .setAggregatorSpecs(RENAMED_AGGS)
    .randomQueryId()
    .build();
TestHelper.assertExpectedObjects(
    makeGroupByResults(
        query2,
        DateTimes.of("2011-01-05T"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3),
        DateTimes.of("2011-01-05T01"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3),
        DateTimes.of("2011-01-06T"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4),
        DateTimes.of("2011-01-06T01"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4),
        DateTimes.of("2011-01-07T"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5),
        DateTimes.of("2011-01-07T01"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5),
        DateTimes.of("2011-01-08T"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6),
        DateTimes.of("2011-01-08T01"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6),
        DateTimes.of("2011-01-09T"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7),
        DateTimes.of("2011-01-09T01"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7)
    ),
    runner.run(QueryPlus.wrap(query2), context),
    "renamed aggregators test"
);
}
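The second assertion passes because per-segment cache entries store raw aggregated values positionally, not under output names; a later query whose aggregators differ only in name rehydrates the same cached bytes under the new names. A plausible shape for the AGGS / RENAMED_AGGS fixture pair, assuming simple count and long-sum aggregators (the real constants are defined elsewhere in CachingClusteredClientTest, so treat these definitions as illustrative):

// Illustrative assumption of the fixture pair: same input fields, one renamed output.
List<AggregatorFactory> aggs = Arrays.asList(
    new CountAggregatorFactory("rows"),
    new LongSumAggregatorFactory("imps", "imps"),
    new LongSumAggregatorFactory("impers", "imps")
);
List<AggregatorFactory> renamedAggs = Arrays.asList(
    new CountAggregatorFactory("rows"),
    new LongSumAggregatorFactory("imps", "imps"),
    new LongSumAggregatorFactory("impers2", "imps")  // "impers" renamed to "impers2"
);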
Use of org.apache.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testTopNCachingTimeZone:
@Test
@SuppressWarnings("unchecked")
public void testTopNCachingTimeZone() {
final TopNQueryBuilder builder = new TopNQueryBuilder()
    .dataSource(DATA_SOURCE)
    .dimension(TOP_DIM)
    .metric("imps")
    .threshold(3)
    .intervals(SEG_SPEC)
    .filters(DIM_FILTER)
    .granularity(PT1H_TZ_GRANULARITY)
    .aggregators(AGGS)
    .postAggregators(POST_AGGS)
    .context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(
    getDefaultQueryRunner(),
    new TopNQueryQueryToolChest(new TopNQueryConfig())
);
testQueryCaching(
    runner,
    builder.randomQueryId().build(),
    Intervals.of("2011-11-04/2011-11-08"),
    makeTopNResultsWithoutRename(
        new DateTime("2011-11-04", TIMEZONE), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        new DateTime("2011-11-05", TIMEZONE), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-11-06", TIMEZONE), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-11-07", TIMEZONE), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986
    )
);
TopNQuery query = builder
    .intervals("2011-11-04/2011-11-08")
    .metric("imps")
    .aggregators(RENAMED_AGGS)
    .postAggregators(DIFF_ORDER_POST_AGGS)
    .randomQueryId()
    .build();
TestHelper.assertExpectedResults(
    makeRenamedTopNResults(
        new DateTime("2011-11-04", TIMEZONE), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        new DateTime("2011-11-05", TIMEZONE), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-11-06", TIMEZONE), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-11-07", TIMEZONE), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986
    ),
    runner.run(QueryPlus.wrap(query))
);
}
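The timezone-aware granularity matters because cache keys and result timestamps follow the granularity's bucketing, so day boundaries here fall in the fixture's zone rather than UTC. A plausible definition of the PT1H_TZ_GRANULARITY and TIMEZONE fixtures, assuming the America/Los_Angeles zone common in Druid's tests (the actual constants live in the test class, so this is a sketch):

// Hedged sketch: hourly granularity anchored to a non-UTC zone. The zone and
// period are assumptions; only the API calls are known Druid signatures.
DateTimeZone TIMEZONE = DateTimes.inferTzFromString("America/Los_Angeles");
Granularity PT1H_TZ_GRANULARITY = new PeriodGranularity(new Period("PT1H"), null, TIMEZONE);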
Use of org.apache.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testDisableUseCache:
@Test
public void testDisableUseCache() {
final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .intervals(SEG_SPEC)
    .filters(DIM_FILTER)
    .granularity(GRANULARITY)
    .aggregators(AGGS)
    .postAggregators(POST_AGGS)
    .context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
// useCache=false, populateCache=true: no lookup (so no hits or misses),
// but the fresh results are written to the cache.
testQueryCaching(
    runner,
    1,
    true,
    builder.context(ImmutableMap.of("useCache", "false", "populateCache", "true")).randomQueryId().build(),
    Intervals.of("2011-01-01/2011-01-02"),
    makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000)
);
Assert.assertEquals(1, cache.getStats().getNumEntries());
Assert.assertEquals(0, cache.getStats().getNumHits());
Assert.assertEquals(0, cache.getStats().getNumMisses());
cache.close(SegmentId.dummy("0_0").toString());
// useCache=false, populateCache=false: the cache is bypassed entirely.
testQueryCaching(
    runner,
    1,
    false,
    builder.context(ImmutableMap.of("useCache", "false", "populateCache", "false")).randomQueryId().build(),
    Intervals.of("2011-01-01/2011-01-02"),
    makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000)
);
Assert.assertEquals(0, cache.getStats().getNumEntries());
Assert.assertEquals(0, cache.getStats().getNumHits());
Assert.assertEquals(0, cache.getStats().getNumMisses());
// useCache=true, populateCache=false: the lookup runs and misses, but the
// miss is not backfilled into the cache.
testQueryCaching(
    getDefaultQueryRunner(),
    1,
    false,
    builder.context(ImmutableMap.of("useCache", "true", "populateCache", "false")).randomQueryId().build(),
    Intervals.of("2011-01-01/2011-01-02"),
    makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000)
);
Assert.assertEquals(0, cache.getStats().getNumEntries());
Assert.assertEquals(0, cache.getStats().getNumHits());
Assert.assertEquals(1, cache.getStats().getNumMisses());
}
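The three blocks above exercise each meaningful combination of the two per-query cache flags. A minimal sketch of how a caller sets them on any Druid query, assuming only that the query builder accepts a context map as in the test:

// Sketch: cache behavior is controlled per query via context flags.
// Values are strings here to match the test above; boolean values also work.
Map<String, Object> cacheContext = ImmutableMap.of(
    "useCache", "true",       // consult the cache before querying historicals
    "populateCache", "false"  // do not write results back into the cache
);
TimeseriesQuery query = builder.context(cacheContext).randomQueryId().build();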