Example 36 with QueryPlus

Use of org.apache.druid.query.QueryPlus in project druid by druid-io.

From the class TimeSeriesUnionQueryRunnerTest, method testUnionResultMerging:

@Test
public void testUnionResultMerging() {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                  .dataSource(new UnionDataSource(Lists.newArrayList(
                                      new TableDataSource("ds1"),
                                      new TableDataSource("ds2")
                                  )))
                                  .granularity(QueryRunnerTestHelper.DAY_GRAN)
                                  .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
                                  .aggregators(Arrays.asList(
                                      QueryRunnerTestHelper.ROWS_COUNT,
                                      new LongSumAggregatorFactory("idx", "index")
                                  ))
                                  .descending(descending)
                                  .build();
    QueryToolChest toolChest = new TimeseriesQueryQueryToolChest();
    final List<Result<TimeseriesResultValue>> ds1 = Lists.newArrayList(
        new Result<>(DateTimes.of("2011-04-02"), new TimeseriesResultValue(ImmutableMap.of("rows", 1L, "idx", 2L))),
        new Result<>(DateTimes.of("2011-04-03"), new TimeseriesResultValue(ImmutableMap.of("rows", 3L, "idx", 4L)))
    );
    final List<Result<TimeseriesResultValue>> ds2 = Lists.newArrayList(
        new Result<>(DateTimes.of("2011-04-01"), new TimeseriesResultValue(ImmutableMap.of("rows", 5L, "idx", 6L))),
        new Result<>(DateTimes.of("2011-04-02"), new TimeseriesResultValue(ImmutableMap.of("rows", 7L, "idx", 8L))),
        new Result<>(DateTimes.of("2011-04-04"), new TimeseriesResultValue(ImmutableMap.of("rows", 9L, "idx", 10L)))
    );
    // The UnionQueryRunner runs the rewritten per-table query once per table in
    // the union; the inner runner answers with that table's canned results.
    QueryRunner mergingRunner = toolChest.mergeResults(new UnionQueryRunner<>(
        new QueryRunner<Result<TimeseriesResultValue>>() {

            @Override
            public Sequence<Result<TimeseriesResultValue>> run(
                QueryPlus<Result<TimeseriesResultValue>> queryPlus,
                ResponseContext responseContext
            ) {
                if (queryPlus.getQuery().getDataSource().equals(new TableDataSource("ds1"))) {
                    return Sequences.simple(descending ? Lists.reverse(ds1) : ds1);
                } else {
                    return Sequences.simple(descending ? Lists.reverse(ds2) : ds2);
                }
            }
        }
    ));
    // 2011-04-02 appears in both tables, so mergeResults sums its values:
    // rows 1 + 7 = 8, idx 2 + 8 = 10.
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<>(DateTimes.of("2011-04-01"), new TimeseriesResultValue(ImmutableMap.of("rows", 5L, "idx", 6L))),
        new Result<>(DateTimes.of("2011-04-02"), new TimeseriesResultValue(ImmutableMap.of("rows", 8L, "idx", 10L))),
        new Result<>(DateTimes.of("2011-04-03"), new TimeseriesResultValue(ImmutableMap.of("rows", 3L, "idx", 4L))),
        new Result<>(DateTimes.of("2011-04-04"), new TimeseriesResultValue(ImmutableMap.of("rows", 9L, "idx", 10L)))
    );
    Iterable<Result<TimeseriesResultValue>> results = mergingRunner.run(QueryPlus.wrap(query)).toList();
    assertExpectedResults(expectedResults, results);
}
Also used: LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), QueryToolChest (org.apache.druid.query.QueryToolChest), UnionDataSource (org.apache.druid.query.UnionDataSource), UnionQueryRunner (org.apache.druid.query.UnionQueryRunner), QueryRunner (org.apache.druid.query.QueryRunner), Result (org.apache.druid.query.Result), TableDataSource (org.apache.druid.query.TableDataSource), ResponseContext (org.apache.druid.query.context.ResponseContext), QueryPlus (org.apache.druid.query.QueryPlus), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
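
The anonymous class above implements the two-argument QueryRunner contract that QueryPlus threads through the stack. As a standalone illustration, here is a minimal sketch of that contract; StubRunner and its canned rows are hypothetical names for this sketch, not part of the Druid test above:

import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.context.ResponseContext;

// A QueryRunner that ignores the query and returns fixed rows.
public class StubRunner<T> implements QueryRunner<T> {

    private final Iterable<T> rows;

    public StubRunner(Iterable<T> rows) {
        this.rows = rows;
    }

    @Override
    public Sequence<T> run(QueryPlus<T> queryPlus, ResponseContext responseContext) {
        // QueryPlus wraps the Query together with per-run state; the query itself
        // is reachable via queryPlus.getQuery(), exactly as the union runner above
        // uses it to pick a per-table result list.
        return Sequences.simple(rows);
    }
}

A caller drives it the same way the test drives mergingRunner: new StubRunner<>(rows).run(QueryPlus.wrap(query)).toList().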

Example 37 with QueryPlus

Use of org.apache.druid.query.QueryPlus in project druid by druid-io.

From the class CachingClusteredClientTest, method testHashBasedPruningQueryContextEnabledWithPartitionFunctionAndPartitionDimensionsDoSegmentPruning:

@Test
public void testHashBasedPruningQueryContextEnabledWithPartitionFunctionAndPartitionDimensionsDoSegmentPruning() {
    DimFilter filter = new AndDimFilter(
        new SelectorDimFilter("dim1", "a", null),
        new BoundDimFilter("dim2", "e", "zzz", true, true, false, null, StringComparators.LEXICOGRAPHIC),
        // The equivalent filter for dim3 below is InDimFilter("dim3", Arrays.asList("c"), null)
        new AndDimFilter(
            new InDimFilter("dim3", Arrays.asList("a", "c", "e", "g"), null),
            new BoundDimFilter("dim3", "aaa", "ddd", false, false, false, null, StringComparators.LEXICOGRAPHIC)
        )
    );
    // The second intervals() call replaces the SEG_SPEC set just before it.
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
                                                        .dataSource(DATA_SOURCE)
                                                        .filters(filter)
                                                        .granularity(GRANULARITY)
                                                        .intervals(SEG_SPEC)
                                                        .context(CONTEXT)
                                                        .intervals("2011-01-05/2011-01-10")
                                                        .aggregators(RENAMED_AGGS)
                                                        .postAggregators(RENAMED_POST_AGGS)
                                                        .randomQueryId();
    TimeseriesQuery query = builder.build();
    QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
    final Interval interval1 = Intervals.of("2011-01-06/2011-01-07");
    final Interval interval2 = Intervals.of("2011-01-07/2011-01-08");
    final Interval interval3 = Intervals.of("2011-01-08/2011-01-09");
    final DruidServer lastServer = servers[random.nextInt(servers.length)];
    List<String> partitionDimensions1 = ImmutableList.of("dim1");
    ServerSelector selector1 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 0, 6);
    ServerSelector selector2 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 1, 6);
    ServerSelector selector3 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 2, 6);
    ServerSelector selector4 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 3, 6);
    ServerSelector selector5 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 4, 6);
    ServerSelector selector6 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 5, 6);
    List<String> partitionDimensions2 = ImmutableList.of("dim2");
    ServerSelector selector7 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 0, 3);
    ServerSelector selector8 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 1, 3);
    ServerSelector selector9 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 2, 3);
    List<String> partitionDimensions3 = ImmutableList.of("dim1", "dim3");
    ServerSelector selector10 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 0, 4);
    ServerSelector selector11 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 1, 4);
    ServerSelector selector12 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 2, 4);
    ServerSelector selector13 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 3, 4);
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(0, 6, selector1));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(1, 6, selector2));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(2, 6, selector3));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(3, 6, selector4));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(4, 6, selector5));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(5, 6, selector6));
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(0, 3, selector7));
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(1, 3, selector8));
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(2, 3, selector9));
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(0, 4, selector10));
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(1, 4, selector11));
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(2, 4, selector12));
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(3, 4, selector13));
    final Capture<QueryPlus> capture = Capture.newInstance();
    final Capture<ResponseContext> contextCap = Capture.newInstance();
    QueryRunner mockRunner = EasyMock.createNiceMock(QueryRunner.class);
    EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.capture(contextCap))).andReturn(Sequences.empty()).anyTimes();
    EasyMock.expect(serverView.getQueryRunner(lastServer)).andReturn(mockRunner).anyTimes();
    EasyMock.replay(serverView);
    EasyMock.replay(mockRunner);
    List<SegmentDescriptor> expectedDescriptors = new ArrayList<>();
    // Narrowed down to 1 chunk
    expectedDescriptors.add(new SegmentDescriptor(interval1, "v", 3));
    // Can't filter out any chunks
    expectedDescriptors.add(new SegmentDescriptor(interval2, "v", 0));
    expectedDescriptors.add(new SegmentDescriptor(interval2, "v", 1));
    expectedDescriptors.add(new SegmentDescriptor(interval2, "v", 2));
    // Narrowed down to 1 chunk
    expectedDescriptors.add(new SegmentDescriptor(interval3, "v", 2));
    MultipleSpecificSegmentSpec expected = new MultipleSpecificSegmentSpec(expectedDescriptors);
    runner.run(QueryPlus.wrap(query)).toList();
    Assert.assertEquals(expected, ((TimeseriesQuery) capture.getValue().getQuery()).getQuerySegmentSpec());
}
Also used: MultipleSpecificSegmentSpec (org.apache.druid.query.spec.MultipleSpecificSegmentSpec), BoundDimFilter (org.apache.druid.query.filter.BoundDimFilter), ArrayList (java.util.ArrayList), TimeseriesQueryQueryToolChest (org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest), ServerSelector (org.apache.druid.client.selector.ServerSelector), SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter), SegmentDescriptor (org.apache.druid.query.SegmentDescriptor), Druids (org.apache.druid.query.Druids), ResponseContext (org.apache.druid.query.context.ResponseContext), InDimFilter (org.apache.druid.query.filter.InDimFilter), QueryPlus (org.apache.druid.query.QueryPlus), AndDimFilter (org.apache.druid.query.filter.AndDimFilter), TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery), QueryableDruidServer (org.apache.druid.client.selector.QueryableDruidServer), FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), QueryRunner (org.apache.druid.query.QueryRunner), DimFilter (org.apache.druid.query.filter.DimFilter), OrDimFilter (org.apache.druid.query.filter.OrDimFilter), Interval (org.joda.time.Interval), Test (org.junit.Test)
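
The pruning asserted above comes down to the bucket math behind HashPartitionFunction.MURMUR3_32_ABS: a row's partition-dimension values are serialized and hashed with murmur3_32, and the absolute value of the hash modulo the bucket count picks the bucket. The sketch below approximates that math with Guava's Hashing; Druid's actual key serialization differs in detail, and BucketSketch.bucketOf is a hypothetical helper, not a Druid API:

import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

public final class BucketSketch {

    // Roughly MURMUR3_32_ABS: abs(murmur3_32(key) mod numBuckets).
    public static int bucketOf(String serializedKey, int numBuckets) {
        int hash = Hashing.murmur3_32_fixed()
                          .hashString(serializedKey, StandardCharsets.UTF_8)
                          .asInt();
        return Math.abs(hash % numBuckets);
    }
}

This is why interval1 (partitioned on dim1 with 6 buckets) narrows to a single chunk: the filter pins dim1 to "a", so every matching row hashes to one bucket. For interval2 (partitioned on dim2), the bound filter e < dim2 < zzz admits many values, so no chunk can be ruled out and all three descriptors remain.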

Aggregations

QueryPlus (org.apache.druid.query.QueryPlus): 37
QueryRunner (org.apache.druid.query.QueryRunner): 37
ResponseContext (org.apache.druid.query.context.ResponseContext): 35
Test (org.junit.Test): 25
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 23
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 22
Sequence (org.apache.druid.java.util.common.guava.Sequence): 21
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 19
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 16
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 14
ArrayList (java.util.ArrayList): 13
MergeSequence (org.apache.druid.java.util.common.guava.MergeSequence): 12
DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec): 11
List (java.util.List): 10
ChainedExecutionQueryRunner (org.apache.druid.query.ChainedExecutionQueryRunner): 10
OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec): 10
TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery): 9
QuerySegmentSpec (org.apache.druid.query.spec.QuerySegmentSpec): 8
Interval (org.joda.time.Interval): 8
ImmutableMap (com.google.common.collect.ImmutableMap): 7