
Example 66 with TimeseriesQueryQueryToolChest

use of org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest in project druid by druid-io.

the class IncrementalIndexTest method testConcurrentAddRead.

@Test(timeout = 60_000L)
public void testConcurrentAddRead() throws InterruptedException, ExecutionException {
    final int dimensionCount = 5;
    final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>(dimensionCount + 1);
    ingestAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < dimensionCount; ++i) {
        ingestAggregatorFactories.add(new LongSumAggregatorFactory(StringUtils.format("sumResult%s", i), StringUtils.format("Dim_%s", i)));
        ingestAggregatorFactories.add(new DoubleSumAggregatorFactory(StringUtils.format("doubleSumResult%s", i), StringUtils.format("Dim_%s", i)));
    }
    final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>(dimensionCount + 1);
    queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < dimensionCount; ++i) {
        queryAggregatorFactories.add(new LongSumAggregatorFactory(StringUtils.format("sumResult%s", i), StringUtils.format("sumResult%s", i)));
        queryAggregatorFactories.add(new DoubleSumAggregatorFactory(StringUtils.format("doubleSumResult%s", i), StringUtils.format("doubleSumResult%s", i)));
    }
    final IncrementalIndex index = indexCreator.createIndex((Object) ingestAggregatorFactories.toArray(new AggregatorFactory[0]));
    final int concurrentThreads = 2;
    final int elementsPerThread = 10_000;
    final ListeningExecutorService indexExecutor = MoreExecutors.listeningDecorator(
        Executors.newFixedThreadPool(
            concurrentThreads,
            new ThreadFactoryBuilder()
                .setDaemon(false)
                .setNameFormat("index-executor-%d")
                .setPriority(Thread.MIN_PRIORITY)
                .build()
        )
    );
    final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(
        Executors.newFixedThreadPool(
            concurrentThreads,
            new ThreadFactoryBuilder()
                .setDaemon(false)
                .setNameFormat("query-executor-%d")
                .build()
        )
    );
    final long timestamp = System.currentTimeMillis();
    final Interval queryInterval = Intervals.of("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z");
    final List<ListenableFuture<?>> indexFutures = Lists.newArrayListWithExpectedSize(concurrentThreads);
    final List<ListenableFuture<?>> queryFutures = Lists.newArrayListWithExpectedSize(concurrentThreads);
    final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null);
    final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    final AtomicInteger currentlyRunning = new AtomicInteger(0);
    final AtomicInteger concurrentlyRan = new AtomicInteger(0);
    final AtomicInteger someoneRan = new AtomicInteger(0);
    final CountDownLatch startLatch = new CountDownLatch(1);
    final CountDownLatch readyLatch = new CountDownLatch(concurrentThreads * 2);
    final AtomicInteger queriesAccumulated = new AtomicInteger(0);
    for (int j = 0; j < concurrentThreads; j++) {
        indexFutures.add(indexExecutor.submit(new Runnable() {

            @Override
            public void run() {
                readyLatch.countDown();
                try {
                    startLatch.await();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
                currentlyRunning.incrementAndGet();
                try {
                    for (int i = 0; i < elementsPerThread; i++) {
                        index.add(getLongRow(timestamp + i, dimensionCount));
                        someoneRan.incrementAndGet();
                    }
                } catch (IndexSizeExceededException e) {
                    throw new RuntimeException(e);
                }
                currentlyRunning.decrementAndGet();
            }
        }));
        final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
            .dataSource("xxx")
            .granularity(Granularities.ALL)
            .intervals(ImmutableList.of(queryInterval))
            .aggregators(queryAggregatorFactories)
            .build();
        queryFutures.add(queryExecutor.submit(new Runnable() {

            @Override
            public void run() {
                readyLatch.countDown();
                try {
                    startLatch.await();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
                while (concurrentlyRan.get() == 0) {
                    QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(factory.createRunner(incrementalIndexSegment), factory.getToolchest());
                    Sequence<Result<TimeseriesResultValue>> sequence = runner.run(QueryPlus.wrap(query));
                    Double[] results = sequence.accumulate(new Double[0], new Accumulator<Double[], Result<TimeseriesResultValue>>() {

                        @Override
                        public Double[] accumulate(Double[] accumulated, Result<TimeseriesResultValue> in) {
                            if (currentlyRunning.get() > 0) {
                                concurrentlyRan.incrementAndGet();
                            }
                            queriesAccumulated.incrementAndGet();
                            return Lists.asList(in.getValue().getDoubleMetric("doubleSumResult0"), accumulated).toArray(new Double[0]);
                        }
                    });
                    for (Double result : results) {
                        final Integer maxValueExpected = someoneRan.get() + concurrentThreads;
                        if (maxValueExpected > 0) {
                            // Eventually consistent, but should be somewhere in that range
                            // Actual result is validated after all writes are guaranteed done.
                            Assert.assertTrue(StringUtils.format("%d >= %g >= 0 violated", maxValueExpected, result), result >= 0 && result <= maxValueExpected);
                        }
                    }
                }
            }
        }));
    }
    readyLatch.await();
    startLatch.countDown();
    List<ListenableFuture<?>> allFutures = new ArrayList<>(queryFutures.size() + indexFutures.size());
    allFutures.addAll(queryFutures);
    allFutures.addAll(indexFutures);
    Futures.allAsList(allFutures).get();
    Assert.assertTrue("Queries ran too fast", queriesAccumualted.get() > 0);
    Assert.assertTrue("Did not hit concurrency, please try again", concurrentlyRan.get() > 0);
    queryExecutor.shutdown();
    indexExecutor.shutdown();
    QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(factory.createRunner(incrementalIndexSegment), factory.getToolchest());
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("xxx")
        .granularity(Granularities.ALL)
        .intervals(ImmutableList.of(queryInterval))
        .aggregators(queryAggregatorFactories)
        .build();
    List<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
    boolean isRollup = index.isRollup();
    for (Result<TimeseriesResultValue> result : results) {
        Assert.assertEquals(elementsPerThread * (isRollup ? 1 : concurrentThreads), result.getValue().getLongMetric("rows").intValue());
        for (int i = 0; i < dimensionCount; ++i) {
            Assert.assertEquals(StringUtils.format("Failed long sum on dimension %d", i), elementsPerThread * concurrentThreads, result.getValue().getLongMetric(StringUtils.format("sumResult%s", i)).intValue());
            Assert.assertEquals(StringUtils.format("Failed double sum on dimension %d", i), elementsPerThread * concurrentThreads, result.getValue().getDoubleMetric(StringUtils.format("doubleSumResult%s", i)).intValue());
        }
    }
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) IncrementalIndexSegment(org.apache.druid.segment.IncrementalIndexSegment) ArrayList(java.util.ArrayList) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) Segment(org.apache.druid.segment.Segment) Result(org.apache.druid.query.Result) TimeseriesQueryEngine(org.apache.druid.query.timeseries.TimeseriesQueryEngine) ThreadFactoryBuilder(com.google.common.util.concurrent.ThreadFactoryBuilder) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) OnheapIncrementalIndex(org.apache.druid.segment.incremental.OnheapIncrementalIndex) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) CountDownLatch(java.util.concurrent.CountDownLatch) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) QueryRunnerFactory(org.apache.druid.query.QueryRunnerFactory) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) ListeningExecutorService(com.google.common.util.concurrent.ListeningExecutorService) Interval(org.joda.time.Interval) IndexSizeExceededException(org.apache.druid.segment.incremental.IndexSizeExceededException) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
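The two CountDownLatch fields above implement a start-gate: every indexing and query worker counts down readyLatch and then parks on startLatch, so the main thread can wait until all workers are staged and then release them at the same instant, maximizing the chance of true read/write overlap. A minimal, self-contained sketch of just that coordination pattern (the worker body is a placeholder, not Druid code):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class StartGateExample
{
  public static void main(String[] args) throws InterruptedException
  {
    final int workers = 4;
    final CountDownLatch readyLatch = new CountDownLatch(workers);
    final CountDownLatch startLatch = new CountDownLatch(1);
    final ExecutorService exec = Executors.newFixedThreadPool(workers);

    for (int i = 0; i < workers; i++) {
      exec.submit(() -> {
        readyLatch.countDown();   // signal "I am staged"
        try {
          startLatch.await();     // block until all workers are released together
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          throw new RuntimeException(e);
        }
        // ... concurrent work goes here ...
      });
    }

    readyLatch.await();           // wait until every worker is parked at the gate
    startLatch.countDown();       // open the gate for all of them at once
    exec.shutdown();
    exec.awaitTermination(10, TimeUnit.SECONDS);
  }
}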

Example 67 with TimeseriesQueryQueryToolChest

use of org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest in project druid by druid-io.

the class IncrementalIndexTest method testSingleThreadedIndexingAndQuery.

@Test
public void testSingleThreadedIndexingAndQuery() throws Exception {
    final int dimensionCount = 5;
    final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>();
    ingestAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < dimensionCount; ++i) {
        ingestAggregatorFactories.add(new LongSumAggregatorFactory(StringUtils.format("sumResult%s", i), StringUtils.format("Dim_%s", i)));
        ingestAggregatorFactories.add(new DoubleSumAggregatorFactory(StringUtils.format("doubleSumResult%s", i), StringUtils.format("Dim_%s", i)));
    }
    final IncrementalIndex index = indexCreator.createIndex((Object) ingestAggregatorFactories.toArray(new AggregatorFactory[0]));
    final long timestamp = System.currentTimeMillis();
    final int rows = 50;
    // ingest the same data twice so that some merging (rollup) happens
    for (int i = 0; i < rows; i++) {
        index.add(getLongRow(timestamp + i, dimensionCount));
    }
    for (int i = 0; i < rows; i++) {
        index.add(getLongRow(timestamp + i, dimensionCount));
    }
    // run a timeseries query on the index and verify results
    final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>();
    queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < dimensionCount; ++i) {
        queryAggregatorFactories.add(new LongSumAggregatorFactory(StringUtils.format("sumResult%s", i), StringUtils.format("sumResult%s", i)));
        queryAggregatorFactories.add(new DoubleSumAggregatorFactory(StringUtils.format("doubleSumResult%s", i), StringUtils.format("doubleSumResult%s", i)));
    }
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("xxx")
        .granularity(Granularities.ALL)
        .intervals(ImmutableList.of(Intervals.of("2000/2030")))
        .aggregators(queryAggregatorFactories)
        .build();
    final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null);
    final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    final QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(factory.createRunner(incrementalIndexSegment), factory.getToolchest());
    List<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
    Result<TimeseriesResultValue> result = Iterables.getOnlyElement(results);
    boolean isRollup = index.isRollup();
    Assert.assertEquals(rows * (isRollup ? 1 : 2), result.getValue().getLongMetric("rows").intValue());
    for (int i = 0; i < dimensionCount; ++i) {
        Assert.assertEquals("Failed long sum on dimension " + i, 2 * rows, result.getValue().getLongMetric("sumResult" + i).intValue());
        Assert.assertEquals("Failed double sum on dimension " + i, 2 * rows, result.getValue().getDoubleMetric("doubleSumResult" + i).intValue());
    }
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) OnheapIncrementalIndex(org.apache.druid.segment.incremental.OnheapIncrementalIndex) IncrementalIndexSegment(org.apache.druid.segment.IncrementalIndexSegment) ArrayList(java.util.ArrayList) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) Segment(org.apache.druid.segment.Segment) Result(org.apache.druid.query.Result) TimeseriesQueryEngine(org.apache.druid.query.timeseries.TimeseriesQueryEngine) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) QueryRunnerFactory(org.apache.druid.query.QueryRunnerFactory) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
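Both IncrementalIndexTest examples assemble the query stack the same way: a TimeseriesQueryRunnerFactory built from a TimeseriesQueryQueryToolChest and a TimeseriesQueryEngine, a per-segment runner from factory.createRunner(segment), and a FinalizeResultsQueryRunner wrapping it so aggregator results are finalized. Stripped of the test specifics, the pattern condenses to the following sketch (it assumes an IncrementalIndex named index and a TimeseriesQuery named query built as in the examples above):

// Condensed sketch of the runner assembly used by both tests above.
static List<Result<TimeseriesResultValue>> runTimeseries(IncrementalIndex index, TimeseriesQuery query)
{
  final Segment segment = new IncrementalIndexSegment(index, null);
  final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
      new TimeseriesQueryQueryToolChest(),
      new TimeseriesQueryEngine(),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER
  );
  // FinalizeResultsQueryRunner applies the toolchest's result finalization.
  final QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
      factory.createRunner(segment),
      factory.getToolchest()
  );
  // Wrap the query in a QueryPlus, run it, and materialize the lazy sequence.
  return runner.run(QueryPlus.wrap(query)).toList();
}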

Example 68 with TimeseriesQueryQueryToolChest

use of org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest in project druid by druid-io.

the class CachingClusteredClientTest method testTimeseriesCaching.

@Test
@SuppressWarnings("unchecked")
public void testTimeseriesCaching() {
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(SEG_SPEC)
        .filters(DIM_FILTER)
        .granularity(GRANULARITY)
        .aggregators(AGGS)
        .postAggregators(POST_AGGS)
        .context(CONTEXT);
    QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
    testQueryCaching(
        runner,
        builder.randomQueryId().build(),
        Intervals.of("2011-01-01/2011-01-02"),
        makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000),
        Intervals.of("2011-01-02/2011-01-03"),
        makeTimeResults(DateTimes.of("2011-01-02"), 30, 6000),
        Intervals.of("2011-01-04/2011-01-05"),
        makeTimeResults(DateTimes.of("2011-01-04"), 23, 85312),
        Intervals.of("2011-01-05/2011-01-10"),
        makeTimeResults(
            DateTimes.of("2011-01-05"), 85, 102,
            DateTimes.of("2011-01-06"), 412, 521,
            DateTimes.of("2011-01-07"), 122, 21894,
            DateTimes.of("2011-01-08"), 5, 20,
            DateTimes.of("2011-01-09"), 18, 521
        ),
        Intervals.of("2011-01-05/2011-01-10"),
        makeTimeResults(
            DateTimes.of("2011-01-05T01"), 80, 100,
            DateTimes.of("2011-01-06T01"), 420, 520,
            DateTimes.of("2011-01-07T01"), 12, 2194,
            DateTimes.of("2011-01-08T01"), 59, 201,
            DateTimes.of("2011-01-09T01"), 181, 52
        )
    );
    TimeseriesQuery query = builder.intervals("2011-01-01/2011-01-10")
        .aggregators(RENAMED_AGGS)
        .postAggregators(RENAMED_POST_AGGS)
        .randomQueryId()
        .build();
    TestHelper.assertExpectedResults(
        makeRenamedTimeResults(
            DateTimes.of("2011-01-01"), 50, 5000,
            DateTimes.of("2011-01-02"), 30, 6000,
            DateTimes.of("2011-01-04"), 23, 85312,
            DateTimes.of("2011-01-05"), 85, 102,
            DateTimes.of("2011-01-05T01"), 80, 100,
            DateTimes.of("2011-01-06"), 412, 521,
            DateTimes.of("2011-01-06T01"), 420, 520,
            DateTimes.of("2011-01-07"), 122, 21894,
            DateTimes.of("2011-01-07T01"), 12, 2194,
            DateTimes.of("2011-01-08"), 5, 20,
            DateTimes.of("2011-01-08T01"), 59, 201,
            DateTimes.of("2011-01-09"), 18, 521,
            DateTimes.of("2011-01-09T01"), 181, 52
        ),
        runner.run(QueryPlus.wrap(query))
    );
}
Also used : FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) Druids(org.apache.druid.query.Druids) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) QueryRunner(org.apache.druid.query.QueryRunner) Test(org.junit.Test)
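Note that the builder is reused: testQueryCaching seeds per-interval results, then the same Druids.TimeseriesQueryBuilder is reconfigured with the full interval and renamed aggregators, and the assertion checks that the merged results come back as if served from the cached per-segment results. For reference, a minimal stand-alone query built with the same builder API as the tests above (the data source name here is an illustrative stand-in, not one of the test's constants):

// Hypothetical stand-alone example of the Druids timeseries builder.
final List<AggregatorFactory> aggs = new ArrayList<>();
aggs.add(new CountAggregatorFactory("rows"));
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource("example")                                          // illustrative name
    .granularity(Granularities.ALL)
    .intervals(ImmutableList.of(Intervals.of("2011-01-01/2011-01-10")))
    .aggregators(aggs)
    .build();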

Example 69 with TimeseriesQueryQueryToolChest

use of org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest in project druid by druid-io.

the class CachingClusteredClientTest method testTimeSeriesWithFilter.

@Test
public void testTimeSeriesWithFilter() {
    DimFilter filter = new AndDimFilter(
        new OrDimFilter(
            new SelectorDimFilter("dim0", "1", null),
            new BoundDimFilter("dim0", "222", "333", false, false, false, null, StringComparators.LEXICOGRAPHIC)
        ),
        new AndDimFilter(
            new InDimFilter("dim1", Arrays.asList("0", "1", "2", "3", "4"), null),
            new BoundDimFilter("dim1", "0", "3", false, true, false, null, StringComparators.LEXICOGRAPHIC),
            new BoundDimFilter("dim1", "1", "9999", true, false, false, null, StringComparators.LEXICOGRAPHIC)
        )
    );
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(SEG_SPEC)
        .filters(filter)
        .granularity(GRANULARITY)
        .aggregators(AGGS)
        .postAggregators(POST_AGGS)
        .context(CONTEXT);
    QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
    /*
    For dim0 (2011-01-01/2011-01-05), the combined range is {[1,1], [222,333]}, so segments [-inf,1], [1,2], [2,3], and
    [3,4] are needed.
    For dim1 (2011-01-06/2011-01-10), the combined range for the bound filters is {(1,3)}; intersecting this with the in
    filter yields {[2,2]}, so segments [1,2] and [2,3] are needed.
    */
    List<Iterable<Result<TimeseriesResultValue>>> expectedResult = Arrays.asList(makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000, DateTimes.of("2011-01-02"), 10, 1252, DateTimes.of("2011-01-03"), 20, 6213, DateTimes.of("2011-01-04"), 30, 743), makeTimeResults(DateTimes.of("2011-01-07"), 60, 6020, DateTimes.of("2011-01-08"), 70, 250));
    testQueryCachingWithFilter(
        runner,
        3,
        builder.randomQueryId().build(),
        expectedResult,
        Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000),
        Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-02"), 10, 1252),
        Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-03"), 20, 6213),
        Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-04"), 30, 743),
        Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-05"), 40, 6000),
        Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-06"), 50, 425),
        Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-07"), 60, 6020),
        Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-08"), 70, 250),
        Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-09"), 23, 85312),
        Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-10"), 100, 512)
    );
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) BoundDimFilter(org.apache.druid.query.filter.BoundDimFilter) AndDimFilter(org.apache.druid.query.filter.AndDimFilter) MergeIterable(org.apache.druid.java.util.common.guava.MergeIterable) FunctionalIterable(org.apache.druid.java.util.common.guava.FunctionalIterable) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) QueryRunner(org.apache.druid.query.QueryRunner) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) Druids(org.apache.druid.query.Druids) OrDimFilter(org.apache.druid.query.filter.OrDimFilter) InDimFilter(org.apache.druid.query.filter.InDimFilter) DimFilter(org.apache.druid.query.filter.DimFilter) Test(org.junit.Test)
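The block comment explains which segments survive filter-based pruning, and the dim1 half can be read directly off the filter construction. A sketch of just that composite filter, with the resolved ranges spelled out as comments (constructor arguments copied from the test; the boolean arguments are lowerStrict, upperStrict, and alphaNumeric):

// dim1 must satisfy all three predicates simultaneously:
DimFilter dim1Filter = new AndDimFilter(
    new InDimFilter("dim1", Arrays.asList("0", "1", "2", "3", "4"), null),  // dim1 in {0,1,2,3,4}
    new BoundDimFilter("dim1", "0", "3", false, true, false, null,
                       StringComparators.LEXICOGRAPHIC),                    // 0 <= dim1 < 3
    new BoundDimFilter("dim1", "1", "9999", true, false, false, null,
                       StringComparators.LEXICOGRAPHIC)                     // 1 < dim1 <= 9999
);
// The bounds intersect to the open range (1,3); intersecting with the IN list
// leaves only "2", which is why only segments [1,2] and [2,3] are needed.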

Example 70 with TimeseriesQueryQueryToolChest

use of org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest in project druid by druid-io.

the class CachingClusteredClientTest method testHashBasedPruningQueryContextEnabledWithPartitionFunctionAndPartitionDimensionsDoSegmentPruning.

@Test
public void testHashBasedPruningQueryContextEnabledWithPartitionFunctionAndPartitionDimensionsDoSegmentPruning() {
    DimFilter filter = new AndDimFilter(
        new SelectorDimFilter("dim1", "a", null),
        new BoundDimFilter("dim2", "e", "zzz", true, true, false, null, StringComparators.LEXICOGRAPHIC),
        // Equivalent filter of dim3 below is InDimFilter("dim3", Arrays.asList("c"), null)
        new AndDimFilter(
            new InDimFilter("dim3", Arrays.asList("a", "c", "e", "g"), null),
            new BoundDimFilter("dim3", "aaa", "ddd", false, false, false, null, StringComparators.LEXICOGRAPHIC)
        )
    );
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .filters(filter)
        .granularity(GRANULARITY)
        .intervals(SEG_SPEC)
        .context(CONTEXT)
        .intervals("2011-01-05/2011-01-10")
        .aggregators(RENAMED_AGGS)
        .postAggregators(RENAMED_POST_AGGS)
        .randomQueryId();
    TimeseriesQuery query = builder.build();
    QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
    final Interval interval1 = Intervals.of("2011-01-06/2011-01-07");
    final Interval interval2 = Intervals.of("2011-01-07/2011-01-08");
    final Interval interval3 = Intervals.of("2011-01-08/2011-01-09");
    final DruidServer lastServer = servers[random.nextInt(servers.length)];
    List<String> partitionDimensions1 = ImmutableList.of("dim1");
    ServerSelector selector1 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 0, 6);
    ServerSelector selector2 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 1, 6);
    ServerSelector selector3 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 2, 6);
    ServerSelector selector4 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 3, 6);
    ServerSelector selector5 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 4, 6);
    ServerSelector selector6 = makeMockHashBasedSelector(lastServer, partitionDimensions1, HashPartitionFunction.MURMUR3_32_ABS, 5, 6);
    List<String> partitionDimensions2 = ImmutableList.of("dim2");
    ServerSelector selector7 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 0, 3);
    ServerSelector selector8 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 1, 3);
    ServerSelector selector9 = makeMockHashBasedSelector(lastServer, partitionDimensions2, HashPartitionFunction.MURMUR3_32_ABS, 2, 3);
    List<String> partitionDimensions3 = ImmutableList.of("dim1", "dim3");
    ServerSelector selector10 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 0, 4);
    ServerSelector selector11 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 1, 4);
    ServerSelector selector12 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 2, 4);
    ServerSelector selector13 = makeMockHashBasedSelector(lastServer, partitionDimensions3, HashPartitionFunction.MURMUR3_32_ABS, 3, 4);
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(0, 6, selector1));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(1, 6, selector2));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(2, 6, selector3));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(3, 6, selector4));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(4, 6, selector5));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(5, 6, selector6));
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(0, 3, selector7));
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(1, 3, selector8));
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(2, 3, selector9));
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(0, 4, selector10));
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(1, 4, selector11));
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(2, 4, selector12));
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(3, 4, selector13));
    final Capture<QueryPlus> capture = Capture.newInstance();
    final Capture<ResponseContext> contextCap = Capture.newInstance();
    QueryRunner mockRunner = EasyMock.createNiceMock(QueryRunner.class);
    EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.capture(contextCap))).andReturn(Sequences.empty()).anyTimes();
    EasyMock.expect(serverView.getQueryRunner(lastServer)).andReturn(mockRunner).anyTimes();
    EasyMock.replay(serverView);
    EasyMock.replay(mockRunner);
    List<SegmentDescriptor> expcetedDescriptors = new ArrayList<>();
    // Narrow down to 1 chunk
    expcetedDescriptors.add(new SegmentDescriptor(interval1, "v", 3));
    // Can't filter out any chunks
    expcetedDescriptors.add(new SegmentDescriptor(interval2, "v", 0));
    expcetedDescriptors.add(new SegmentDescriptor(interval2, "v", 1));
    expcetedDescriptors.add(new SegmentDescriptor(interval2, "v", 2));
    // Narrow down to 1 chunk
    expcetedDescriptors.add(new SegmentDescriptor(interval3, "v", 2));
    MultipleSpecificSegmentSpec expected = new MultipleSpecificSegmentSpec(expcetedDescriptors);
    runner.run(QueryPlus.wrap(query)).toList();
    Assert.assertEquals(expected, ((TimeseriesQuery) capture.getValue().getQuery()).getQuerySegmentSpec());
}
Also used : MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) BoundDimFilter(org.apache.druid.query.filter.BoundDimFilter) ArrayList(java.util.ArrayList) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) ServerSelector(org.apache.druid.client.selector.ServerSelector) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) Druids(org.apache.druid.query.Druids) ResponseContext(org.apache.druid.query.context.ResponseContext) InDimFilter(org.apache.druid.query.filter.InDimFilter) QueryPlus(org.apache.druid.query.QueryPlus) AndDimFilter(org.apache.druid.query.filter.AndDimFilter) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) QueryRunner(org.apache.druid.query.QueryRunner) DimFilter(org.apache.druid.query.filter.DimFilter) OrDimFilter(org.apache.druid.query.filter.OrDimFilter) Interval(org.joda.time.Interval) Test(org.junit.Test)
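The pruning here hinges on MURMUR3_32_ABS bucketing: when a filter pins the partition dimensions to single values (dim1 = "a" for interval1; dim1 and dim3 for interval3), the broker can hash those values and keep only the chunk whose bucket matches, while interval2 cannot be pruned because the filter only bounds dim2 to a range. Conceptually the bucket choice works like the sketch below; this is an illustrative stand-in that hashes a joined string with Guava's murmur3 directly, whereas Druid serializes the grouped partition-dimension values before hashing:

import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

public class BucketSketch
{
  // Stand-in for MURMUR3_32_ABS-style bucketing: take the absolute value of the
  // 32-bit murmur3 hash of the partition key and map it onto numBuckets.
  static int bucket(String joinedDimValues, int numBuckets)
  {
    int hash = Hashing.murmur3_32()
                      .hashString(joinedDimValues, StandardCharsets.UTF_8)
                      .asInt();
    return Math.abs(hash % numBuckets);
  }

  public static void main(String[] args)
  {
    // With dim1 pinned to "a", every matching row lands in exactly one of the
    // six buckets used for interval1, so the other five chunks can be skipped.
    System.out.println(bucket("a", 6));
  }
}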

Aggregations

TimeseriesQueryQueryToolChest (org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) 71
TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery) 62
Test (org.junit.Test) 62
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory) 41
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner) 21
TimeseriesQueryEngine (org.apache.druid.query.timeseries.TimeseriesQueryEngine) 20
TimeseriesQueryRunnerFactory (org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) 20
QueryRunner (org.apache.druid.query.QueryRunner) 18
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory) 18
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest) 18
FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator) 17
TimeseriesResultValue (org.apache.druid.query.timeseries.TimeseriesResultValue) 17
Result (org.apache.druid.query.Result) 15
FinalizingFieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FinalizingFieldAccessPostAggregator) 13
IOException (java.io.IOException) 11
Druids (org.apache.druid.query.Druids) 10
SpatialDimFilter (org.apache.druid.query.filter.SpatialDimFilter) 9
ConstantPostAggregator (org.apache.druid.query.aggregation.post.ConstantPostAggregator) 8
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory) 6
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter) 6