Use of org.apache.druid.java.util.common.guava.MergeSequence in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testPostAggMergedHavingSpec.
@Test
public void testPostAggMergedHavingSpec()
{
  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("index", "index"))
      .setPostAggregatorSpecs(ImmutableList.of(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setHavingSpec(
          new OrHavingSpec(
              ImmutableList.of(
                  new GreaterThanHavingSpec(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, 1000L)
              )
          )
      );

  final GroupByQuery fullQuery = builder.build();

  List<ResultRow> expectedResults = Arrays.asList(
      makeRow(
          fullQuery,
          "2011-04-01",
          "alias", "mezzanine",
          "rows", 6L,
          "index", 4420L,
          QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, (double) (6L + 4420L + 1L)
      ),
      makeRow(
          fullQuery,
          "2011-04-01",
          "alias", "premium",
          "rows", 6L,
          "index", 4416L,
          QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, (double) (6L + 4416L + 1L)
      )
  );

  QueryRunner mergedRunner = factory.getToolchest().mergeResults(
      new QueryRunner<ResultRow>()
      {
        @Override
        public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext)
        {
          // simulate two daily segments
          final QueryPlus queryPlus1 = queryPlus.withQuery(
              queryPlus.getQuery().withQuerySegmentSpec(
                  new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03")))
              )
          );
          final QueryPlus queryPlus2 = queryPlus.withQuery(
              queryPlus.getQuery().withQuerySegmentSpec(
                  new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04")))
              )
          );
          return new MergeSequence(
              queryPlus.getQuery().getResultOrdering(),
              Sequences.simple(Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)))
          );
        }
      }
  );

  TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(QueryPlus.wrap(fullQuery)), "merged");
}
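The pattern shared by these tests is that MergeSequence takes an ordering plus a sequence of already-ordered sub-sequences (here, one per simulated daily segment) and interleaves them into a single ordered result. The standalone sketch below illustrates that contract outside the GroupBy machinery; the class name MergeSequenceSketch, the integer data, and the use of Guava's Ordering.natural() are illustrative assumptions, and only the MergeSequence and Sequences calls mirror the tests above.

import com.google.common.collect.Ordering;
import java.util.Arrays;
import java.util.List;
import org.apache.druid.java.util.common.guava.MergeSequence;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;

// A minimal sketch (not from the Druid test suite): each input Sequence must already be
// sorted by the given Ordering, and MergeSequence interleaves them into one sorted Sequence.
public class MergeSequenceSketch
{
  public static void main(String[] args)
  {
    // Two pre-sorted inputs, standing in for the per-segment results in the tests above.
    Sequence<Integer> segment1 = Sequences.simple(Arrays.asList(1, 3, 5));
    Sequence<Integer> segment2 = Sequences.simple(Arrays.asList(2, 4, 6));

    // Same constructor shape as new MergeSequence(query.getResultOrdering(), Sequences.simple(...)) above.
    Sequence<Integer> merged = new MergeSequence<>(
        Ordering.natural(),
        Sequences.simple(Arrays.asList(segment1, segment2))
    );

    List<Integer> result = merged.toList();
    System.out.println(result); // [1, 2, 3, 4, 5, 6]
  }
}

Because MergeSequence only interleaves, each input must already be sorted by the supplied ordering; in the tests above that invariant comes from each per-segment runner returning rows in the query's result ordering.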
Use of org.apache.druid.java.util.common.guava.MergeSequence in project druid by druid-io.
From the class GroupByQueryRunnerFactoryTest, method testMergeRunnersEnsureGroupMerging.
@Test
public void testMergeRunnersEnsureGroupMerging()
{
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(new DefaultDimensionSpec("tags", "tags"))
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .build();

  QueryRunner mergedRunner = factory.getToolchest().mergeResults(
      new QueryRunner()
      {
        @Override
        public Sequence run(QueryPlus queryPlus, ResponseContext responseContext)
        {
          return factory.getToolchest().mergeResults(
              new QueryRunner()
              {
                @Override
                public Sequence run(QueryPlus queryPlus, ResponseContext responseContext)
                {
                  final Query query = queryPlus.getQuery();
                  try {
                    return new MergeSequence(
                        query.getResultOrdering(),
                        Sequences.simple(
                            Arrays.asList(
                                factory.createRunner(createSegment()).run(queryPlus, responseContext),
                                factory.createRunner(createSegment()).run(queryPlus, responseContext)
                            )
                        )
                    );
                  }
                  catch (Exception e) {
                    throw new RuntimeException(e);
                  }
                }
              }
          ).run(queryPlus, responseContext);
        }
      }
  );

  Sequence<ResultRow> result = mergedRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());

  List<ResultRow> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970-01-01T00:00:00.000Z", "tags", "t2", "count", 4L)
  );

  TestHelper.assertExpectedObjects(expectedResults, result.toList(), "");
}
Use of org.apache.druid.java.util.common.guava.MergeSequence in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testMergeResultsWithLimitPushDownSortByAgg.
@Test
public void testMergeResultsWithLimitPushDownSortByAgg()
{
  if (!config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V2)) {
    return;
  }

  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setLimitSpec(
          new DefaultLimitSpec(
              Collections.singletonList(new OrderByColumnSpec("idx", OrderByColumnSpec.Direction.DESCENDING)),
              5
          )
      )
      .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true))
      .setGranularity(Granularities.ALL);

  final GroupByQuery allGranQuery = builder.build();

  QueryRunner mergedRunner = factory.getToolchest().mergeResults(
      new QueryRunner<ResultRow>()
      {
        @Override
        public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext)
        {
          // simulate two daily segments
          final QueryPlus<ResultRow> queryPlus1 = queryPlus.withQuery(
              queryPlus.getQuery().withQuerySegmentSpec(
                  new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03")))
              )
          );
          final QueryPlus<ResultRow> queryPlus2 = queryPlus.withQuery(
              queryPlus.getQuery().withQuerySegmentSpec(
                  new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04")))
              )
          );
          return factory.getToolchest()
                        .mergeResults(
                            (queryPlus3, responseContext1) -> new MergeSequence<>(
                                queryPlus3.getQuery().getResultOrdering(),
                                Sequences.simple(
                                    Arrays.asList(
                                        runner.run(queryPlus1, responseContext1),
                                        runner.run(queryPlus2, responseContext1)
                                    )
                                )
                            )
                        )
                        .run(queryPlus, responseContext);
        }
      }
  );

  List<ResultRow> allGranExpectedResults = Arrays.asList(
      makeRow(allGranQuery, "2011-04-02", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
      makeRow(allGranQuery, "2011-04-02", "alias", "premium", "rows", 6L, "idx", 4416L),
      makeRow(allGranQuery, "2011-04-02", "alias", "entertainment", "rows", 2L, "idx", 319L),
      makeRow(allGranQuery, "2011-04-02", "alias", "automotive", "rows", 2L, "idx", 269L),
      makeRow(allGranQuery, "2011-04-02", "alias", "travel", "rows", 2L, "idx", 243L)
  );

  Iterable<ResultRow> results = mergedRunner.run(QueryPlus.wrap(allGranQuery)).toList();
  TestHelper.assertExpectedObjects(allGranExpectedResults, results, "merged");
}
Use of org.apache.druid.java.util.common.guava.MergeSequence in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testMergedPostAggHavingSpec.
@Test
public void testMergedPostAggHavingSpec()
{
  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setPostAggregatorSpecs(
          Collections.singletonList(
              new ArithmeticPostAggregator(
                  "rows_times_10",
                  "*",
                  Arrays.asList(
                      new FieldAccessPostAggregator("rows", "rows"),
                      new ConstantPostAggregator("const", 10L)
                  )
              )
          )
      )
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setHavingSpec(
          new OrHavingSpec(
              ImmutableList.of(
                  new GreaterThanHavingSpec("rows_times_10", 20L),
                  new EqualToHavingSpec("idx", 217L)
              )
          )
      );

  GroupByQuery query = builder.build();

  // Same query, but with expressions instead of arithmetic.
  final GroupByQuery expressionQuery = query.withPostAggregatorSpecs(
      Collections.singletonList(
          new ExpressionPostAggregator("rows_times_10", "rows * 10.0", null, TestExprMacroTable.INSTANCE)
      )
  );

  List<ResultRow> expectedResults = Arrays.asList(
      makeRow(query, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L, "rows_times_10", 20.0),
      makeRow(query, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L, "rows_times_10", 60.0),
      makeRow(query, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L, "rows_times_10", 60.0)
  );

  QueryRunner mergedRunner = factory.getToolchest().mergeResults(
      new QueryRunner<ResultRow>()
      {
        @Override
        public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext)
        {
          // simulate two daily segments
          final QueryPlus queryPlus1 = queryPlus.withQuery(
              queryPlus.getQuery().withQuerySegmentSpec(
                  new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03")))
              )
          );
          final QueryPlus queryPlus2 = queryPlus.withQuery(
              queryPlus.getQuery().withQuerySegmentSpec(
                  new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04")))
              )
          );
          return new MergeSequence(
              queryPlus.getQuery().getResultOrdering(),
              Sequences.simple(Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)))
          );
        }
      }
  );

  ResponseContext context = ResponseContext.createEmpty();

  // add an extra layer of merging, simulate broker forwarding query to historical
  TestHelper.assertExpectedObjects(
      expectedResults,
      factory.getToolchest()
             .postMergeQueryDecoration(factory.getToolchest().mergeResults(factory.getToolchest().preMergeQueryDecoration(mergedRunner)))
             .run(QueryPlus.wrap(query)),
      "merged"
  );
  TestHelper.assertExpectedObjects(
      expectedResults,
      factory.getToolchest()
             .postMergeQueryDecoration(factory.getToolchest().mergeResults(factory.getToolchest().preMergeQueryDecoration(mergedRunner)))
             .run(QueryPlus.wrap(expressionQuery)),
      "merged"
  );
}
Use of org.apache.druid.java.util.common.guava.MergeSequence in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testMergeResultsWithLimitPushDownSortByDimAggDim.
@Test
public void testMergeResultsWithLimitPushDownSortByDimAggDim()
{
  if (!config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V2)) {
    return;
  }

  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(
          new DefaultDimensionSpec("quality", "alias"),
          new DefaultDimensionSpec("market", "market")
      )
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setLimitSpec(
          new DefaultLimitSpec(
              Lists.newArrayList(
                  new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.DESCENDING),
                  new OrderByColumnSpec("idx", OrderByColumnSpec.Direction.DESCENDING),
                  new OrderByColumnSpec("market", OrderByColumnSpec.Direction.DESCENDING)
              ),
              5
          )
      )
      .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true))
      .setGranularity(Granularities.ALL);

  final GroupByQuery allGranQuery = builder.build();

  QueryRunner mergedRunner = factory.getToolchest().mergeResults(
      new QueryRunner<ResultRow>()
      {
        @Override
        public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext)
        {
          // simulate two daily segments
          final QueryPlus<ResultRow> queryPlus1 = queryPlus.withQuery(
              queryPlus.getQuery().withQuerySegmentSpec(
                  new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03")))
              )
          );
          final QueryPlus<ResultRow> queryPlus2 = queryPlus.withQuery(
              queryPlus.getQuery().withQuerySegmentSpec(
                  new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04")))
              )
          );
          return factory.getToolchest()
                        .mergeResults(
                            (queryPlus3, responseContext1) -> new MergeSequence<>(
                                queryPlus3.getQuery().getResultOrdering(),
                                Sequences.simple(
                                    Arrays.asList(
                                        runner.run(queryPlus1, responseContext1),
                                        runner.run(queryPlus2, responseContext1)
                                    )
                                )
                            )
                        )
                        .run(queryPlus, responseContext);
        }
      }
  );

  List<ResultRow> allGranExpectedResults = Arrays.asList(
      makeRow(allGranQuery, "2011-04-02", "alias", "travel", "market", "spot", "rows", 2L, "idx", 243L),
      makeRow(allGranQuery, "2011-04-02", "alias", "technology", "market", "spot", "rows", 2L, "idx", 177L),
      makeRow(allGranQuery, "2011-04-02", "alias", "premium", "market", "total_market", "rows", 2L, "idx", 2342L),
      makeRow(allGranQuery, "2011-04-02", "alias", "premium", "market", "upfront", "rows", 2L, "idx", 1817L),
      makeRow(allGranQuery, "2011-04-02", "alias", "premium", "market", "spot", "rows", 2L, "idx", 257L)
  );

  Iterable<ResultRow> results = mergedRunner.run(QueryPlus.wrap(allGranQuery)).toList();
  TestHelper.assertExpectedObjects(allGranExpectedResults, results, "merged");
}