Use of org.apache.druid.query.QueryPlus in project druid by druid-io: class GroupByQueryRunnerTest, method testMergedHavingSpec.
@Test
public void testMergedHavingSpec() {
  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setHavingSpec(new OrHavingSpec(ImmutableList.of(
          new GreaterThanHavingSpec("rows", 2L),
          new EqualToHavingSpec("idx", 217L))));
  GroupByQuery fullQuery = builder.build();

  List<ResultRow> expectedResults = Arrays.asList(
      makeRow(fullQuery, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
      makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
      makeRow(fullQuery, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L));

  // mergeResults combines the per-day results; the having spec is evaluated against the merged rows.
  QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<ResultRow>() {
    @Override
    public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext) {
      // Simulate two daily segments by re-scoping the incoming QueryPlus to each day's interval.
      final QueryPlus queryPlus1 = queryPlus.withQuery(queryPlus.getQuery().withQuerySegmentSpec(
          new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03")))));
      final QueryPlus queryPlus2 = queryPlus.withQuery(queryPlus.getQuery().withQuerySegmentSpec(
          new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04")))));
      return new MergeSequence(
          queryPlus.getQuery().getResultOrdering(),
          Sequences.simple(Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext))));
    }
  });

  TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(QueryPlus.wrap(fullQuery)), "merged");
}
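The anonymous runner above illustrates the QueryPlus idiom shared by all of these examples: the incoming QueryPlus is never mutated, it is copied with withQuery(...) so that each simulated segment runs against its own interval. Below is a minimal sketch of that re-scoping step; the helper name scopeToInterval is chosen here for illustration only and is not part of the Druid test code.

// Hypothetical helper distilling the re-scoping idiom used in these tests:
// copy the QueryPlus, replacing only the query's segment spec with a single interval.
private static QueryPlus<ResultRow> scopeToInterval(QueryPlus<ResultRow> queryPlus, String interval)
{
  return queryPlus.withQuery(
      queryPlus.getQuery().withQuerySegmentSpec(
          new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of(interval)))));
}

// Inside a QueryRunner this would replace the two inline withQuery(...) calls, e.g.:
// final QueryPlus<ResultRow> day1 = scopeToInterval(queryPlus, "2011-04-02/2011-04-03");
// final QueryPlus<ResultRow> day2 = scopeToInterval(queryPlus, "2011-04-03/2011-04-04");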
Use of org.apache.druid.query.QueryPlus in project druid by druid-io: class GroupByQueryRunnerTest, method testMergeResultsWithLimitPushDownSortByAggWithOffset.
@Test
public void testMergeResultsWithLimitPushDownSortByAggWithOffset() {
  if (!config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V2)) {
    return;
  }
  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      // order by idx descending, skip the first 2 rows (offset), return the next 3 (limit)
      .setLimitSpec(new DefaultLimitSpec(
          Collections.singletonList(new OrderByColumnSpec("idx", OrderByColumnSpec.Direction.DESCENDING)), 2, 3))
      .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true))
      .setGranularity(Granularities.ALL);
  final GroupByQuery allGranQuery = builder.build();

  QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<ResultRow>() {
    @Override
    public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext) {
      // simulate two daily segments
      final QueryPlus<ResultRow> queryPlus1 = queryPlus.withQuery(queryPlus.getQuery().withQuerySegmentSpec(
          new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03")))));
      final QueryPlus<ResultRow> queryPlus2 = queryPlus.withQuery(queryPlus.getQuery().withQuerySegmentSpec(
          new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04")))));
      return factory.getToolchest()
          .mergeResults((queryPlus3, responseContext1) -> new MergeSequence<>(
              queryPlus3.getQuery().getResultOrdering(),
              Sequences.simple(Arrays.asList(runner.run(queryPlus1, responseContext1), runner.run(queryPlus2, responseContext1)))))
          .run(queryPlus, responseContext);
    }
  });

  List<ResultRow> allGranExpectedResults = Arrays.asList(
      makeRow(allGranQuery, "2011-04-02", "alias", "entertainment", "rows", 2L, "idx", 319L),
      makeRow(allGranQuery, "2011-04-02", "alias", "automotive", "rows", 2L, "idx", 269L),
      makeRow(allGranQuery, "2011-04-02", "alias", "travel", "rows", 2L, "idx", 243L));
  Iterable<ResultRow> results = mergedRunner.run(QueryPlus.wrap(allGranQuery)).toList();
  TestHelper.assertExpectedObjects(allGranExpectedResults, results, "merged");
}
Use of org.apache.druid.query.QueryPlus in project druid by druid-io: class GroupByQueryRunnerTest, method testMergeResultsWithLimitPushDownSortByDimDim.
@Test
public void testMergeResultsWithLimitPushDownSortByDimDim() {
  if (!config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V2)) {
    return;
  }
  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", "market"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setLimitSpec(new DefaultLimitSpec(Lists.newArrayList(
          new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.DESCENDING),
          new OrderByColumnSpec("market", OrderByColumnSpec.Direction.DESCENDING)), 5))
      .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true))
      .setGranularity(Granularities.ALL);
  final GroupByQuery allGranQuery = builder.build();

  QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<ResultRow>() {
    @Override
    public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext) {
      // simulate two daily segments
      final QueryPlus<ResultRow> queryPlus1 = queryPlus.withQuery(queryPlus.getQuery().withQuerySegmentSpec(
          new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03")))));
      final QueryPlus<ResultRow> queryPlus2 = queryPlus.withQuery(queryPlus.getQuery().withQuerySegmentSpec(
          new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04")))));
      return factory.getToolchest()
          .mergeResults((queryPlus3, responseContext1) -> new MergeSequence<>(
              queryPlus3.getQuery().getResultOrdering(),
              Sequences.simple(Arrays.asList(runner.run(queryPlus1, responseContext1), runner.run(queryPlus2, responseContext1)))))
          .run(queryPlus, responseContext);
    }
  });

  List<ResultRow> allGranExpectedResults = Arrays.asList(
      makeRow(allGranQuery, "2011-04-02", "alias", "travel", "market", "spot", "rows", 2L, "idx", 243L),
      makeRow(allGranQuery, "2011-04-02", "alias", "technology", "market", "spot", "rows", 2L, "idx", 177L),
      makeRow(allGranQuery, "2011-04-02", "alias", "premium", "market", "upfront", "rows", 2L, "idx", 1817L),
      makeRow(allGranQuery, "2011-04-02", "alias", "premium", "market", "total_market", "rows", 2L, "idx", 2342L),
      makeRow(allGranQuery, "2011-04-02", "alias", "premium", "market", "spot", "rows", 2L, "idx", 257L));
  Iterable<ResultRow> results = mergedRunner.run(QueryPlus.wrap(allGranQuery)).toList();
  TestHelper.assertExpectedObjects(allGranExpectedResults, results, "merged");
}
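Both push-down tests above guard on GroupByStrategySelector.STRATEGY_V2, since limit push-down is a v2 groupBy feature, and they request it through the query context. The fragment below is a minimal, illustrative sketch of that context wiring; the query itself is hypothetical and not taken from the test class.

// Illustrative only: a minimal groupBy query that opts in to forced limit push-down
// via the query context, mirroring the overrideContext(...) calls in the builders above.
GroupByQuery pushDownQuery = GroupByQuery.builder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setQuerySegmentSpec(new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-04"))))
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setAggregatorSpecs(new LongSumAggregatorFactory("idx", "index"))
    .setLimitSpec(new DefaultLimitSpec(
        Collections.singletonList(new OrderByColumnSpec("idx", OrderByColumnSpec.Direction.DESCENDING)), 5))
    .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true))
    .setGranularity(Granularities.ALL)
    .build();
// The entry point is the same as in the tests above:
// mergedRunner.run(QueryPlus.wrap(pushDownQuery)).toList();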
Use of org.apache.druid.query.QueryPlus in project druid by druid-io: class GroupByLimitPushDownMultiNodeMergeTest, method testDescendingNumerics.
@Test
public void testDescendingNumerics() {
  QueryToolChest<ResultRow, GroupByQuery> toolChest = groupByFactory.getToolchest();
  QueryRunner<ResultRow> theRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(groupByFactory.mergeRunners(executorService, getRunner1(2))),
      (QueryToolChest) toolChest);
  QueryRunner<ResultRow> theRunner2 = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(groupByFactory2.mergeRunners(executorService, getRunner2(3))),
      (QueryToolChest) toolChest);
  // Combine the two "node" runners, merging their sequences by the query's result ordering.
  QueryRunner<ResultRow> finalRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(new QueryRunner<ResultRow>() {
        @Override
        public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext) {
          return Sequences
              .simple(ImmutableList.of(theRunner.run(queryPlus, responseContext), theRunner2.run(queryPlus, responseContext)))
              .flatMerge(Function.identity(), queryPlus.getQuery().getResultOrdering());
        }
      }),
      (QueryToolChest) toolChest);

  QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(
      Collections.singletonList(Intervals.utc(1500000000000L, 1900000000000L)));
  DefaultLimitSpec ls2 = new DefaultLimitSpec(
      Arrays.asList(
          new OrderByColumnSpec("d0", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC),
          new OrderByColumnSpec("d1", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC),
          new OrderByColumnSpec("d2", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC)),
      100);
  // d0/d1/d2 are virtual columns extracting year, month, and day from __time.
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("blah")
      .setQuerySegmentSpec(intervalSpec)
      .setVirtualColumns(
          new ExpressionVirtualColumn("d0:v", "timestamp_extract(\"__time\",'YEAR','UTC')", ColumnType.LONG, TestExprMacroTable.INSTANCE),
          new ExpressionVirtualColumn("d1:v", "timestamp_extract(\"__time\",'MONTH','UTC')", ColumnType.LONG, TestExprMacroTable.INSTANCE),
          new ExpressionVirtualColumn("d2:v", "timestamp_extract(\"__time\",'DAY','UTC')", ColumnType.LONG, TestExprMacroTable.INSTANCE))
      .setDimensions(
          new DefaultDimensionSpec("d0:v", "d0", ColumnType.LONG),
          new DefaultDimensionSpec("d1:v", "d1", ColumnType.LONG),
          new DefaultDimensionSpec("d2:v", "d2", ColumnType.LONG))
      .setAggregatorSpecs(new CountAggregatorFactory("a0"))
      .setLimitSpec(ls2)
      .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_APPLY_LIMIT_PUSH_DOWN, true))
      .setGranularity(Granularities.ALL)
      .build();

  Sequence<ResultRow> queryResult = finalRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
  List<ResultRow> results = queryResult.toList();

  ResultRow expectedRow0 = GroupByQueryRunnerTestHelper.createExpectedRow(query, "2017-07-14T02:40:00.000Z", "d0", 2027L, "d1", 3L, "d2", 17L, "a0", 2L);
  ResultRow expectedRow1 = GroupByQueryRunnerTestHelper.createExpectedRow(query, "2017-07-14T02:40:00.000Z", "d0", 2024L, "d1", 1L, "d2", 14L, "a0", 2L);
  ResultRow expectedRow2 = GroupByQueryRunnerTestHelper.createExpectedRow(query, "2017-07-14T02:40:00.000Z", "d0", 2020L, "d1", 11L, "d2", 13L, "a0", 2L);
  ResultRow expectedRow3 = GroupByQueryRunnerTestHelper.createExpectedRow(query, "2017-07-14T02:40:00.000Z", "d0", 2017L, "d1", 9L, "d2", 13L, "a0", 2L);
  System.out.println(results);
  Assert.assertEquals(4, results.size());
  Assert.assertEquals(expectedRow0, results.get(0));
  Assert.assertEquals(expectedRow1, results.get(1));
  Assert.assertEquals(expectedRow2, results.get(2));
  Assert.assertEquals(expectedRow3, results.get(3));
}
Use of org.apache.druid.query.QueryPlus in project druid by druid-io: class GroupByLimitPushDownMultiNodeMergeTest, method testForcePushLimitDownAccuracyWhenSortHasNonGroupingFieldsHelper.
private List<ResultRow> testForcePushLimitDownAccuracyWhenSortHasNonGroupingFieldsHelper(Map<String, Object> context) {
  QueryToolChest<ResultRow, GroupByQuery> toolChest = groupByFactory.getToolchest();
  QueryRunner<ResultRow> theRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(groupByFactory.mergeRunners(executorService, getRunner1(4))),
      (QueryToolChest) toolChest);
  QueryRunner<ResultRow> theRunner2 = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(groupByFactory2.mergeRunners(executorService, getRunner2(5))),
      (QueryToolChest) toolChest);
  QueryRunner<ResultRow> finalRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(new QueryRunner<ResultRow>() {
        @Override
        public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext) {
          return Sequences
              .simple(ImmutableList.of(theRunner.run(queryPlus, responseContext), theRunner2.run(queryPlus, responseContext)))
              .flatMerge(Function.identity(), queryPlus.getQuery().getResultOrdering());
        }
      }),
      (QueryToolChest) toolChest);

  QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(
      Collections.singletonList(Intervals.utc(1500000000000L, 1900000000000L)));
  DefaultLimitSpec ls = new DefaultLimitSpec(
      Collections.singletonList(new OrderByColumnSpec("a0", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC)),
      5);
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("blah")
      .setQuerySegmentSpec(intervalSpec)
      .setDimensions(new DefaultDimensionSpec("dimA", "d0", ColumnType.STRING), new DefaultDimensionSpec("dimB", "d1", ColumnType.STRING))
      .setAggregatorSpecs(new LongSumAggregatorFactory("a0", "metA"))
      .setLimitSpec(ls)
      .setContext(context)
      .setGranularity(Granularities.ALL)
      .build();

  Sequence<ResultRow> queryResult = finalRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
  return queryResult.toList();
}
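The last two examples build their final runner the same way: results from two per-node runners are merged lazily with flatMerge, ordered by the query's own result ordering, then passed through mergeResults and FinalizeResultsQueryRunner. Below is a sketch of that combination as a standalone helper; the name combineNodes is illustrative and is not part of the test class.

// Hypothetical helper showing the "broker-style" combination used above: two node-level
// runners are merged by the query's result ordering before the toolchest's final merge.
private static QueryRunner<ResultRow> combineNodes(
    QueryToolChest<ResultRow, GroupByQuery> toolChest,
    QueryRunner<ResultRow> node1,
    QueryRunner<ResultRow> node2)
{
  return new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          (queryPlus, responseContext) -> Sequences
              .simple(ImmutableList.of(node1.run(queryPlus, responseContext), node2.run(queryPlus, responseContext)))
              .flatMerge(Function.identity(), queryPlus.getQuery().getResultOrdering())),
      (QueryToolChest) toolChest);
}

// Usage mirrors the tests above:
// combineNodes(toolChest, theRunner, theRunner2).run(QueryPlus.wrap(query), ResponseContext.createEmpty());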