
Example 71 with DefaultDimensionSpec

Use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.

From the class GroupByLimitPushDownInsufficientBufferTest, the method testPartialLimitPushDownMerge:

@Test
public void testPartialLimitPushDownMerge() {
    // one segment's results use limit push down, the other doesn't because of insufficient buffer capacity
    QueryToolChest<ResultRow, GroupByQuery> toolChest = groupByFactory.getToolchest();
    QueryRunner<ResultRow> theRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(groupByFactory.mergeRunners(executorService, getRunner1())),
        (QueryToolChest) toolChest
    );
    QueryRunner<ResultRow> theRunner2 = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(tooSmallGroupByFactory.mergeRunners(executorService, getRunner2())),
        (QueryToolChest) toolChest
    );
    QueryRunner<ResultRow> theRunner3 = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(
            new QueryRunner<ResultRow>() {
                @Override
                public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext) {
                    return Sequences
                        .simple(ImmutableList.of(
                            theRunner.run(queryPlus, responseContext),
                            theRunner2.run(queryPlus, responseContext)))
                        .flatMerge(Function.identity(), queryPlus.getQuery().getResultOrdering());
                }
            }
        ),
        (QueryToolChest) toolChest
    );
    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.utc(0, 1000000)));
    GroupByQuery query = GroupByQuery.builder()
        .setDataSource("blah")
        .setQuerySegmentSpec(intervalSpec)
        .setDimensions(new DefaultDimensionSpec("dimA", null))
        .setAggregatorSpecs(new LongSumAggregatorFactory("metA", "metA"))
        .setLimitSpec(new DefaultLimitSpec(
            Collections.singletonList(new OrderByColumnSpec("dimA", OrderByColumnSpec.Direction.DESCENDING)),
            3))
        .setGranularity(Granularities.ALL)
        .build();
    Sequence<ResultRow> queryResult = theRunner3.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
    List<ResultRow> results = queryResult.toList();
    ResultRow expectedRow0 = GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "dimA", "zortaxx", "metA", 999L);
    ResultRow expectedRow1 = GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "dimA", "zebra", "metA", 180L);
    ResultRow expectedRow2 = GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "dimA", "world", "metA", 150L);
    Assert.assertEquals(3, results.size());
    Assert.assertEquals(expectedRow0, results.get(0));
    Assert.assertEquals(expectedRow1, results.get(1));
    Assert.assertEquals(expectedRow2, results.get(2));
}
Also used : DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec), QueryRunner (org.apache.druid.query.QueryRunner), FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), BySegmentQueryRunner (org.apache.druid.query.BySegmentQueryRunner), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec), ResponseContext (org.apache.druid.query.context.ResponseContext), QuerySegmentSpec (org.apache.druid.query.spec.QuerySegmentSpec), QueryPlus (org.apache.druid.query.QueryPlus), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
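In the query above, new DefaultDimensionSpec("dimA", null) passes a null output name; the output column is then expected to fall back to the input dimension name, which is why the expected rows still report the grouped value under "dimA". A minimal sketch of that fallback in isolation, assuming the two-argument constructor and the getDimension()/getOutputName() accessors behave as described (the printout reflects that assumption, not captured output):

import org.apache.druid.query.dimension.DefaultDimensionSpec;

public class DefaultDimensionSpecSketch {
    public static void main(String[] args) {
        // Null output name: the spec is expected to fall back to the dimension name.
        DefaultDimensionSpec implicitName = new DefaultDimensionSpec("dimA", null);
        // Explicit output name: the grouped column is surfaced under the new name.
        DefaultDimensionSpec explicitName = new DefaultDimensionSpec("dimA", "renamedA");
        System.out.println(implicitName.getDimension() + " -> " + implicitName.getOutputName());
        System.out.println(explicitName.getDimension() + " -> " + explicitName.getOutputName());
    }
}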

Example 72 with DefaultDimensionSpec

Use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.

From the class GroupByMultiSegmentTest, the method testHavingAndNoLimitPushDown:

@Test
public void testHavingAndNoLimitPushDown() {
    QueryToolChest<ResultRow, GroupByQuery> toolChest = groupByFactory.getToolchest();
    QueryRunner<ResultRow> theRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(groupByFactory.mergeRunners(executorService, makeGroupByMultiRunners())),
        (QueryToolChest) toolChest
    );
    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.utc(0, 1000000)));
    GroupByQuery query = GroupByQuery.builder()
        .setDataSource("blah")
        .setQuerySegmentSpec(intervalSpec)
        .setDimensions(new DefaultDimensionSpec("dimA", null))
        .setAggregatorSpecs(new LongSumAggregatorFactory("metA", "metA"))
        .setLimitSpec(new DefaultLimitSpec(
            Collections.singletonList(new OrderByColumnSpec("dimA", OrderByColumnSpec.Direction.ASCENDING)),
            1))
        .setHavingSpec(new GreaterThanHavingSpec("metA", 110))
        .setGranularity(Granularities.ALL)
        .build();
    Sequence<ResultRow> queryResult = theRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
    List<ResultRow> results = queryResult.toList();
    ResultRow expectedRow = GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "dimA", "world", "metA", 150L);
    Assert.assertEquals(1, results.size());
    Assert.assertEquals(expectedRow, results.get(0));
}
Also used : DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec), FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), GreaterThanHavingSpec (org.apache.druid.query.groupby.having.GreaterThanHavingSpec), QuerySegmentSpec (org.apache.druid.query.spec.QuerySegmentSpec), Test (org.junit.Test)
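The having spec above is applied to already-aggregated rows: GreaterThanHavingSpec("metA", 110) keeps only groups whose summed metA exceeds 110, and the ascending ordering on dimA with limit 1 then keeps a single surviving row, which is presumably why the method name pairs "having" with "no limit push down". A tiny plain-Java sketch of that filter-then-order-then-limit semantics; the per-group sums are illustrative placeholders, not the test fixture's real data:

import java.util.LinkedHashMap;
import java.util.Map;

public class HavingSemanticsSketch {
    public static void main(String[] args) {
        // Illustrative per-group sums of "metA" after aggregation (placeholder values).
        Map<String, Long> metASums = new LinkedHashMap<>();
        metASums.put("mango", 90L);
        metASums.put("world", 150L);
        metASums.put("zebra", 180L);

        // Keep groups with metA > 110, order by the group key ascending, take 1.
        metASums.entrySet().stream()
            .filter(e -> e.getValue() > 110L)
            .sorted(Map.Entry.comparingByKey())
            .limit(1)
            .forEach(e -> System.out.println(e.getKey() + " -> " + e.getValue()));
    }
}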

Example 73 with DefaultDimensionSpec

Use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.

From the class GroupByQueryQueryToolChestTest, the method testQueryCacheKeyWithLimitSpec:

@Test
public void testQueryCacheKeyWithLimitSpec() {
    final GroupByQuery query1 = GroupByQuery.builder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setDimensions(new DefaultDimensionSpec("quality", "alias"))
        .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
        .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
        .setLimitSpec(new DefaultLimitSpec(null, 100))
        .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_APPLY_LIMIT_PUSH_DOWN, "true"))
        .build();
    final GroupByQuery query2 = GroupByQuery.builder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setDimensions(new DefaultDimensionSpec("quality", "alias"))
        .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
        .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
        .setLimitSpec(new DefaultLimitSpec(null, 1000))
        .build();
    final GroupByQuery queryNoLimit = GroupByQuery.builder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setDimensions(new DefaultDimensionSpec("quality", "alias"))
        .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
        .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
        .build();
    final CacheStrategy<ResultRow, Object, GroupByQuery> strategy1 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query1);
    final CacheStrategy<ResultRow, Object, GroupByQuery> strategy2 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query2);
    Assert.assertFalse(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2)));
    Assert.assertFalse(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(queryNoLimit)));
}
Also used : DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)

Example 74 with DefaultDimensionSpec

Use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.

From the class GroupByQueryQueryToolChestTest, the method testResultsAsArraysDayGran:

@Test
public void testResultsAsArraysDayGran() {
    final GroupByQuery query = new GroupByQuery.Builder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setGranularity(Granularities.DAY)
        .setDimensions(new DefaultDimensionSpec("col", "dim"))
        .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .setAggregatorSpecs(QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS)
        .setPostAggregatorSpecs(ImmutableList.of(QueryRunnerTestHelper.CONSTANT))
        .build();
    QueryToolChestTestHelper.assertArrayResultsEquals(
        ImmutableList.of(
            new Object[] { DateTimes.of("2000-01-01").getMillis(), "foo", 1L, 2L, 3L, 1L },
            new Object[] { DateTimes.of("2000-01-02").getMillis(), "bar", 4L, 5L, 6L, 1L }
        ),
        new GroupByQueryQueryToolChest(null, null).resultsAsArrays(
            query,
            Sequences.simple(
                ImmutableList.of(
                    makeRow(query, "2000-01-01", "dim", "foo", "rows", 1L, "index", 2L, "uniques", 3L, "const", 1L),
                    makeRow(query, "2000-01-02", "dim", "bar", "rows", 4L, "index", 5L, "uniques", 6L, "const", 1L)
                )
            )
        )
    );
}
Also used : DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
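The expected Object[] rows above show the array layout resultsAsArrays produces for this DAY-granularity query: the timestamp in milliseconds first, then the "dim" value, then the "rows", "index" and "uniques" aggregators, then the "const" post-aggregator. A minimal sketch of consuming such rows; printArrays is a hypothetical helper, and the index-to-column mapping is taken from the expected rows in this test rather than from a general guarantee:

import java.util.List;

public class GroupByArrayResultsSketch {
    // Hypothetical helper: interpret Object[] rows shaped like the expected rows above.
    static void printArrays(List<Object[]> rows) {
        for (Object[] row : rows) {
            long timestampMillis = (long) row[0]; // e.g. DateTimes.of("2000-01-01").getMillis()
            String dim = (String) row[1];         // the "dim" output column
            long rowsAgg = (long) row[2];         // "rows" aggregator
            long indexAgg = (long) row[3];        // "index" aggregator
            long uniquesAgg = (long) row[4];      // "uniques" aggregator
            long constPostAgg = (long) row[5];    // "const" post-aggregator
            System.out.printf("%d %s rows=%d index=%d uniques=%d const=%d%n",
                timestampMillis, dim, rowsAgg, indexAgg, uniquesAgg, constPostAgg);
        }
    }
}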

Example 75 with DefaultDimensionSpec

Use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.

From the class GroupByQueryQueryToolChestTest, the method testQueryCacheKeyWithLimitSpecPushDownUsingContext:

@Test
public void testQueryCacheKeyWithLimitSpecPushDownUsingContext() {
    final GroupByQuery.Builder builder = GroupByQuery.builder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setDimensions(new DefaultDimensionSpec("quality", "alias"))
        .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
        .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
        .setLimitSpec(new DefaultLimitSpec(null, 100));
    final GroupByQuery query1 = builder.overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_APPLY_LIMIT_PUSH_DOWN, "true")).build();
    final GroupByQuery query2 = builder.overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_APPLY_LIMIT_PUSH_DOWN, "false")).build();
    final CacheStrategy<ResultRow, Object, GroupByQuery> strategy1 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query1);
    final CacheStrategy<ResultRow, Object, GroupByQuery> strategy2 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query2);
    Assert.assertFalse(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2)));
    Assert.assertTrue(Arrays.equals(strategy1.computeResultLevelCacheKey(query1), strategy2.computeResultLevelCacheKey(query2)));
}
Also used : DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)

Aggregations

DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 494
Test (org.junit.Test): 461
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 261
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 235
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 107
DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec): 106
OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec): 104
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 53
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 53
ExpressionVirtualColumn (org.apache.druid.segment.virtual.ExpressionVirtualColumn): 53
QueryDataSource (org.apache.druid.query.QueryDataSource): 52
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter): 43
TableDataSource (org.apache.druid.query.TableDataSource): 41
Result (org.apache.druid.query.Result): 37
ResultRow (org.apache.druid.query.groupby.ResultRow): 35
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 33
ArrayList (java.util.ArrayList): 32
HashMap (java.util.HashMap): 30
TopNQueryBuilder (org.apache.druid.query.topn.TopNQueryBuilder): 28
ExtractionDimensionSpec (org.apache.druid.query.dimension.ExtractionDimensionSpec): 26
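Taking the classes that co-occur most often in the tally above, a minimal standalone sketch of a GroupByQuery built around DefaultDimensionSpec. The data source "my_datasource" and the columns "country" and "clicks" are placeholders, and the import paths for Intervals and Granularities are assumed since they are not listed in the examples above:

import java.util.Collections;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.orderby.DefaultLimitSpec;
import org.apache.druid.query.groupby.orderby.OrderByColumnSpec;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;

public class DefaultDimensionSpecQuerySketch {
    public static GroupByQuery buildQuery() {
        // Group on "country" (a null output name falls back to the input column),
        // sum "clicks", and keep the top 5 groups by descending sum.
        return GroupByQuery.builder()
            .setDataSource("my_datasource")
            .setQuerySegmentSpec(new MultipleIntervalSegmentSpec(
                Collections.singletonList(Intervals.of("2000-01-01/2001-01-01"))))
            .setDimensions(new DefaultDimensionSpec("country", null))
            .setAggregatorSpecs(new LongSumAggregatorFactory("clicks", "clicks"))
            .setLimitSpec(new DefaultLimitSpec(
                Collections.singletonList(
                    new OrderByColumnSpec("clicks", OrderByColumnSpec.Direction.DESCENDING)),
                5))
            .setGranularity(Granularities.ALL)
            .build();
    }
}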