Use of org.apache.druid.query.QueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testTimeSeriesWithFilter.
@Test
public void testTimeSeriesWithFilter() {
DimFilter filter = new AndDimFilter(
    new OrDimFilter(
        new SelectorDimFilter("dim0", "1", null),
        new BoundDimFilter("dim0", "222", "333", false, false, false, null, StringComparators.LEXICOGRAPHIC)
    ),
    new AndDimFilter(
        new InDimFilter("dim1", Arrays.asList("0", "1", "2", "3", "4"), null),
        new BoundDimFilter("dim1", "0", "3", false, true, false, null, StringComparators.LEXICOGRAPHIC),
        new BoundDimFilter("dim1", "1", "9999", true, false, false, null, StringComparators.LEXICOGRAPHIC)
    )
);
final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .intervals(SEG_SPEC)
    .filters(filter)
    .granularity(GRANULARITY)
    .aggregators(AGGS)
    .postAggregators(POST_AGGS)
    .context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
/*
For dim0 (2011-01-01/2011-01-05), the combined filter range is {[1,1], [222,333]}, so segments [-inf,1], [1,2],
[2,3], and [3,4] are needed.
For dim1 (2011-01-06/2011-01-10), the combined range of the two bound filters is {(1,3)}; intersecting it with
the in filter yields {[2,2]}, so segments [1,2] and [2,3] are needed.
*/
List<Iterable<Result<TimeseriesResultValue>>> expectedResult = Arrays.asList(
    makeTimeResults(
        DateTimes.of("2011-01-01"), 50, 5000,
        DateTimes.of("2011-01-02"), 10, 1252,
        DateTimes.of("2011-01-03"), 20, 6213,
        DateTimes.of("2011-01-04"), 30, 743
    ),
    makeTimeResults(
        DateTimes.of("2011-01-07"), 60, 6020,
        DateTimes.of("2011-01-08"), 70, 250
    )
);
testQueryCachingWithFilter(
    runner,
    3,
    builder.randomQueryId().build(),
    expectedResult,
    Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000),
    Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-02"), 10, 1252),
    Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-03"), 20, 6213),
    Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-04"), 30, 743),
    Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-05"), 40, 6000),
    Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-06"), 50, 425),
    Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-07"), 60, 6020),
    Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-08"), 70, 250),
    Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-09"), 23, 85312),
    Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-10"), 100, 512)
);
}
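The pruning reasoning in the comment above is range arithmetic: each DimFilter reports the value ranges it can match (Druid exposes this via DimFilter.getDimensionRangeSet, which returns a Guava RangeSet<String>), and a segment can be skipped only when its value range for that dimension misses the filter's range set entirely. A minimal sketch of that intersection test, assuming lexicographic string comparison; the segment bounds below are illustrative, not taken from the test fixtures:

import com.google.common.collect.ImmutableRangeSet;
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;

public class RangePruningSketch {
  public static void main(String[] args) {
    // Combined dim0 filter from the test: {[1,1], [222,333]}, compared lexicographically.
    RangeSet<String> dim0Filter = ImmutableRangeSet.<String>builder()
        .add(Range.singleton("1"))
        .add(Range.closed("222", "333"))
        .build();

    // A segment holding dim0 values in ["1","2") overlaps the filter, so it must be queried.
    boolean needed = !dim0Filter.subRangeSet(Range.closedOpen("1", "2")).isEmpty();
    // A segment holding dim0 values in ["4","5") cannot match and is pruned.
    boolean pruned = dim0Filter.subRangeSet(Range.closedOpen("4", "5")).isEmpty();
    System.out.println(needed + " " + pruned); // true true
  }
}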
Use of org.apache.druid.query.QueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testTopNCachingEmptyResults.
@Test
@SuppressWarnings("unchecked")
public void testTopNCachingEmptyResults() {
final TopNQueryBuilder builder = new TopNQueryBuilder()
    .dataSource(DATA_SOURCE)
    .dimension(TOP_DIM)
    .metric("imps")
    .threshold(3)
    .intervals(SEG_SPEC)
    .filters(DIM_FILTER)
    .granularity(GRANULARITY)
    .aggregators(AGGS)
    .postAggregators(POST_AGGS)
    .context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TopNQueryQueryToolChest(new TopNQueryConfig()));
testQueryCaching(
    runner,
    builder.randomQueryId().build(),
    Intervals.of("2011-01-01/2011-01-02"), makeTopNResultsWithoutRename(),
    Intervals.of("2011-01-02/2011-01-03"), makeTopNResultsWithoutRename(),
    Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename(
        DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983
    ),
    Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename(
        DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983
    )
);
TopNQuery query = builder
    .intervals("2011-01-01/2011-01-10")
    .metric("imps")
    .aggregators(RENAMED_AGGS)
    .postAggregators(DIFF_ORDER_POST_AGGS)
    .randomQueryId()
    .build();
TestHelper.assertExpectedResults(
    makeRenamedTopNResults(
        DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983,
        DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983
    ),
    runner.run(QueryPlus.wrap(query))
);
}
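makeTopNResultsWithoutRename and makeRenamedTopNResults are fixture helpers that are not shown on this page. From the call sites, each consumes a flat varargs list in which a timestamp is followed by (dimension value, rows, imps) triples for that time bucket; calling it with no arguments, as the first two intervals above do, yields an empty result set, which is exactly what this test wants cached. A hypothetical reconstruction of that assumed shape (the helper name, TOP_DIM, and map keys are inferred from the calls, and the real fixture also derives post-aggregated values):

// Hypothetical reconstruction of the fixture helper, assuming the test class's imports.
// Each DateTime in the varargs is followed by (dim value, rows, imps) triples.
private Iterable<Result<TopNResultValue>> makeTopNResultsWithoutRename(Object... objects) {
  List<Result<TopNResultValue>> results = new ArrayList<>();
  int i = 0;
  while (i < objects.length) {
    DateTime timestamp = (DateTime) objects[i++];
    List<Map<String, Object>> rows = new ArrayList<>();
    // consume (dim, rows, imps) triples until the next DateTime or the end of the varargs
    while (i < objects.length && !(objects[i] instanceof DateTime)) {
      Map<String, Object> row = new HashMap<>();
      row.put(TOP_DIM, objects[i++]);
      row.put("rows", objects[i++]);
      row.put("imps", objects[i++]);
      rows.add(row);
    }
    results.add(new Result<>(timestamp, new TopNResultValue(rows)));
  }
  return results;
}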
Use of org.apache.druid.query.QueryRunner in project druid by druid-io.
From the class QueryResourceTest, method createScheduledQueryResource.
private void createScheduledQueryResource(
    QueryScheduler scheduler,
    Collection<CountDownLatch> beforeScheduler,
    Collection<CountDownLatch> inScheduler
) {
  QuerySegmentWalker texasRanger = new QuerySegmentWalker() {

    @Override
    public <T> QueryRunner<T> getQueryRunnerForIntervals(Query<T> query, Iterable<Interval> intervals) {
      return (queryPlus, responseContext) -> {
        beforeScheduler.forEach(CountDownLatch::countDown);
        return scheduler.run(
            scheduler.prioritizeAndLaneQuery(queryPlus, ImmutableSet.of()),
            new LazySequence<T>(() -> {
              inScheduler.forEach(CountDownLatch::countDown);
              try {
                // pretend to be a query that is waiting on results
                Thread.sleep(500);
              } catch (InterruptedException ignored) {
              }
              // all that waiting for nothing :(
              return Sequences.empty();
            })
        );
      };
    }

    @Override
    public <T> QueryRunner<T> getQueryRunnerForSegments(Query<T> query, Iterable<SegmentDescriptor> specs) {
      return getQueryRunnerForIntervals(null, null);
    }
  };
  queryResource = new QueryResource(
      new QueryLifecycleFactory(
          WAREHOUSE,
          texasRanger,
          new DefaultGenericQueryMetricsFactory(),
          new NoopServiceEmitter(),
          testRequestLogger,
          new AuthConfig(),
          AuthTestUtils.TEST_AUTHORIZER_MAPPER,
          Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of()))
      ),
      jsonMapper,
      smileMapper,
      scheduler,
      new AuthConfig(),
      null,
      ResponseContextConfig.newConfig(true),
      DRUID_NODE
  );
}
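The two latch collections let a test observe where a query is in its lifecycle: every latch in beforeScheduler is counted down when the walker's runner is invoked, just before hand-off to the QueryScheduler, and every latch in inScheduler is counted down once the scheduler starts materializing the lazy result sequence. A hedged sketch of the intended usage, with the actual HTTP call against queryResource elided since QueryResource's endpoint signature is not shown here; the scheduler field is assumed available from the test class:

@Test
public void exampleLatchUsage() throws Exception {
  CountDownLatch beforeScheduler = new CountDownLatch(1);
  CountDownLatch inScheduler = new CountDownLatch(1);
  createScheduledQueryResource(scheduler, ImmutableList.of(beforeScheduler), ImmutableList.of(inScheduler));

  Executors.newSingleThreadExecutor().submit(() -> {
    // ... issue a query against queryResource on this background thread ...
  });

  beforeScheduler.await(); // the walker has produced a runner; the query is about to be laned
  inScheduler.await();     // the scheduler admitted the query; it now "runs" (sleeps 500 ms)
  // With both latches released, assertions about lane occupancy can be made deterministically.
}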
Use of org.apache.druid.query.QueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testHashBasedPruningQueryContextEnabledWithPartitionFunctionAndPartitionDimensionsDoSegmentPruning.
@Test
public void testHashBasedPruningQueryContextEnabledWithPartitionFunctionAndPartitionDimensionsDoSegmentPruning() {
DimFilter filter = new AndDimFilter(
    new SelectorDimFilter("dim1", "a", null),
    new BoundDimFilter("dim2", "e", "zzz", true, true, false, null, StringComparators.LEXICOGRAPHIC),
    // The conjunction below is equivalent to InDimFilter("dim3", Arrays.asList("c"), null)
    new AndDimFilter(
        new InDimFilter("dim3", Arrays.asList("a", "c", "e", "g"), null),
        new BoundDimFilter("dim3", "aaa", "ddd", false, false, false, null, StringComparators.LEXICOGRAPHIC)
    )
);
final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .filters(filter)
    .granularity(GRANULARITY)
    .intervals(SEG_SPEC)
    .context(CONTEXT)
    .intervals("2011-01-05/2011-01-10")
    .aggregators(RENAMED_AGGS)
    .postAggregators(RENAMED_POST_AGGS)
    .randomQueryId();
TimeseriesQuery query = builder.build();
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
final Interval interval1 = Intervals.of("2011-01-06/2011-01-07");
final Interval interval2 = Intervals.of("2011-01-07/2011-01-08");
final Interval interval3 = Intervals.of("2011-01-08/2011-01-09");
final DruidServer lastServer = servers[random.nextInt(servers.length)];
List<String> partitionDimensions1 = ImmutableList.of("dim1");
ServerSelector selector1 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 0, 6);
ServerSelector selector2 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 1, 6);
ServerSelector selector3 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 2, 6);
ServerSelector selector4 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 3, 6);
ServerSelector selector5 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 4, 6);
ServerSelector selector6 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 5, 6);
List<String> partitionDimensions2 = ImmutableList.of("dim2");
ServerSelector selector7 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 0, 3);
ServerSelector selector8 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 1, 3);
ServerSelector selector9 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 2, 3);
List<String> partitionDimensions3 = ImmutableList.of("dim1", "dim3");
ServerSelector selector10 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 0, 4);
ServerSelector selector11 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 1, 4);
ServerSelector selector12 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 2, 4);
ServerSelector selector13 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 3, 4);
timeline.add(interval1, "v", new NumberedPartitionChunk<>(0, 6, selector1));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(1, 6, selector2));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(2, 6, selector3));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(3, 6, selector4));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(4, 6, selector5));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(5, 6, selector6));
timeline.add(interval2, "v", new NumberedPartitionChunk<>(0, 3, selector7));
timeline.add(interval2, "v", new NumberedPartitionChunk<>(1, 3, selector8));
timeline.add(interval2, "v", new NumberedPartitionChunk<>(2, 3, selector9));
timeline.add(interval3, "v", new NumberedPartitionChunk<>(0, 4, selector10));
timeline.add(interval3, "v", new NumberedPartitionChunk<>(1, 4, selector11));
timeline.add(interval3, "v", new NumberedPartitionChunk<>(2, 4, selector12));
timeline.add(interval3, "v", new NumberedPartitionChunk<>(3, 4, selector13));
final Capture<QueryPlus> capture = Capture.newInstance();
final Capture<ResponseContext> contextCap = Capture.newInstance();
QueryRunner mockRunner = EasyMock.createNiceMock(QueryRunner.class);
EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.capture(contextCap))).andReturn(Sequences.empty()).anyTimes();
EasyMock.expect(serverView.getQueryRunner(lastServer)).andReturn(mockRunner).anyTimes();
EasyMock.replay(serverView);
EasyMock.replay(mockRunner);
List<SegmentDescriptor> expectedDescriptors = new ArrayList<>();
// Narrow down to 1 chunk
expectedDescriptors.add(new SegmentDescriptor(interval1, "v", 3));
// Can't filter out any chunks
expectedDescriptors.add(new SegmentDescriptor(interval2, "v", 0));
expectedDescriptors.add(new SegmentDescriptor(interval2, "v", 1));
expectedDescriptors.add(new SegmentDescriptor(interval2, "v", 2));
// Narrow down to 1 chunk
expectedDescriptors.add(new SegmentDescriptor(interval3, "v", 2));
MultipleSpecificSegmentSpec expected = new MultipleSpecificSegmentSpec(expectedDescriptors);
runner.run(QueryPlus.wrap(query)).toList();
Assert.assertEquals(expected, ((TimeseriesQuery) capture.getValue().getQuery()).getQuerySegmentSpec());
}
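Hash-based pruning works because each segment records which hash bucket of its partition dimensions it holds, and the broker hashes the filter's pinned dimension values to keep only the matching bucket. That is why interval1 (partitioned on dim1, which the filter pins to "a") and interval3 (partitioned on dim1 and dim3, both effectively pinned) each narrow to one chunk, while interval2 (partitioned on dim2, constrained only by a range) keeps all three. A minimal sketch of the bucket computation, assuming MURMUR3_32_ABS means: serialize the partition-dimension values, Murmur3 32-bit hash the bytes, and take the absolute value modulo the bucket count. This matches the function's name and Druid's docs, but the serialization details below are illustrative:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.hash.Hashing;
import java.util.Arrays;
import java.util.List;

public class HashPruningSketch {
  // Illustrative MURMUR3_32_ABS-style bucket choice: abs(murmur3_32(serialized values) mod numBuckets).
  static int bucket(ObjectMapper mapper, List<?> partitionDimValues, int numBuckets) throws Exception {
    byte[] bytes = mapper.writeValueAsBytes(partitionDimValues);
    return Math.abs(Hashing.murmur3_32().hashBytes(bytes).asInt() % numBuckets);
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // interval1 is partitioned on dim1 into 6 buckets and the filter pins dim1 = "a",
    // so exactly one bucket can contain matching rows; the other 5 chunks are pruned.
    System.out.println("interval1 candidate chunk: " + bucket(mapper, Arrays.asList(Arrays.asList("a")), 6));
  }
}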
Use of org.apache.druid.query.QueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testGroupByCachingRenamedAggs.
@Test
public void testGroupByCachingRenamedAggs() {
GroupByQuery.Builder builder = new GroupByQuery.Builder()
    .setDataSource(DATA_SOURCE)
    .setQuerySegmentSpec(SEG_SPEC)
    .setDimFilter(DIM_FILTER)
    .setGranularity(GRANULARITY)
    .setDimensions(new DefaultDimensionSpec("a", "output"))
    .setAggregatorSpecs(AGGS)
    .setContext(CONTEXT);
final GroupByQuery query1 = builder.randomQueryId().build();
testQueryCaching(
    getDefaultQueryRunner(),
    query1,
    Intervals.of("2011-01-01/2011-01-02"),
    makeGroupByResults(
        query1,
        DateTimes.of("2011-01-01"), ImmutableMap.of("output", "a", "rows", 1, "imps", 1, "impers", 1)
    ),
    Intervals.of("2011-01-02/2011-01-03"),
    makeGroupByResults(
        query1,
        DateTimes.of("2011-01-02"), ImmutableMap.of("output", "b", "rows", 2, "imps", 2, "impers", 2)
    ),
    Intervals.of("2011-01-05/2011-01-10"),
    makeGroupByResults(
        query1,
        DateTimes.of("2011-01-05"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        DateTimes.of("2011-01-06"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        DateTimes.of("2011-01-07"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        DateTimes.of("2011-01-08"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        DateTimes.of("2011-01-09"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7)
    ),
    Intervals.of("2011-01-05/2011-01-10"),
    makeGroupByResults(
        query1,
        DateTimes.of("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        DateTimes.of("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        DateTimes.of("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        DateTimes.of("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        DateTimes.of("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7)
    )
);
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), WAREHOUSE.getToolChest(query1));
final ResponseContext context = initializeResponseContext();
TestHelper.assertExpectedObjects(
    makeGroupByResults(
        query1,
        DateTimes.of("2011-01-05T"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        DateTimes.of("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        DateTimes.of("2011-01-06T"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        DateTimes.of("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        DateTimes.of("2011-01-07T"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        DateTimes.of("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        DateTimes.of("2011-01-08T"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        DateTimes.of("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        DateTimes.of("2011-01-09T"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7),
        DateTimes.of("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7)
    ),
    runner.run(QueryPlus.wrap(builder.randomQueryId().setInterval("2011-01-05/2011-01-10").build()), context),
    ""
);
final GroupByQuery query2 = builder
    .setInterval("2011-01-05/2011-01-10")
    .setDimensions(new DefaultDimensionSpec("a", "output2"))
    .setAggregatorSpecs(RENAMED_AGGS)
    .randomQueryId()
    .build();
TestHelper.assertExpectedObjects(
    makeGroupByResults(
        query2,
        DateTimes.of("2011-01-05T"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3),
        DateTimes.of("2011-01-05T01"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3),
        DateTimes.of("2011-01-06T"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4),
        DateTimes.of("2011-01-06T01"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4),
        DateTimes.of("2011-01-07T"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5),
        DateTimes.of("2011-01-07T01"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5),
        DateTimes.of("2011-01-08T"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6),
        DateTimes.of("2011-01-08T01"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6),
        DateTimes.of("2011-01-09T"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7),
        DateTimes.of("2011-01-09T01"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7)
    ),
    runner.run(QueryPlus.wrap(query2), context),
    "renamed aggregators test"
);
}
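makeGroupByResults is another fixture helper; the call pattern shows it takes the query plus alternating (timestamp, column map) pairs. A plausible reconstruction under that assumption is below; the real helper may differ in details. The conversion via ResultRow.fromLegacyRow mirrors how Druid maps named columns onto positional result rows, which is also why results cached for query1's "impers" can be read back for query2's renamed "impers2": cached values are matched by their position in the aggregator list, not by output name.

// Hypothetical reconstruction of the fixture helper, assuming alternating
// (DateTime, Map) varargs and the test class's imports.
private Iterable<ResultRow> makeGroupByResults(GroupByQuery query, Object... objects) {
  List<ResultRow> results = new ArrayList<>();
  int i = 0;
  while (i < objects.length) {
    DateTime timestamp = (DateTime) objects[i++];
    @SuppressWarnings("unchecked")
    Map<String, Object> event = (Map<String, Object>) objects[i++];
    // fromLegacyRow lays the named columns out according to the query's result-row signature
    results.add(ResultRow.fromLegacyRow(new MapBasedRow(timestamp, event), query));
  }
  return results;
}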