Usage example of org.apache.druid.query.QueryDataSource in the druid-io/druid project: class GroupByQueryRunnerTest, method testNestedGroupByWithLongArrays.
@Test
public void testNestedGroupByWithLongArrays() {
  // GroupBy v1 cannot group on non-STRING dimension output types, so that strategy must fail here.
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("GroupBy v1 only supports dimensions with an outputType of STRING");
  }

  // Grouping on array-typed dimensions is not vectorizable.
  cannotVectorize();

  // Inner query: group on a virtual column that produces a constant LONG_ARRAY value.
  final GroupByQuery innerQuery = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setVirtualColumns(
          new ExpressionVirtualColumn("v0", "array(1,2)", ColumnType.LONG_ARRAY, ExprMacroTable.nil())
      )
      .setDimensions(new DefaultDimensionSpec("v0", "alias", ColumnType.LONG_ARRAY))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
      .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
      .build();

  // Outer query: re-group the inner result on the array-typed "alias" dimension.
  final GroupByQuery outerQuery = makeQueryBuilder()
      .setDataSource(new QueryDataSource(innerQuery))
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("alias", "alias_outer", ColumnType.LONG_ARRAY))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
      .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
      .build();

  // A single output row is expected: the constant array [1, 2] with one inner row counted.
  final List<ResultRow> expectedResults = ImmutableList.of(
      makeRow(outerQuery, "2011-04-01", "alias_outer", new ComparableList(ImmutableList.of(1L, 2L)), "rows", 1L)
  );
  final Iterable<ResultRow> actualResults = GroupByQueryRunnerTestHelper.runQuery(factory, runner, outerQuery);
  TestHelper.assertExpectedObjects(expectedResults, actualResults, "long-nested-groupby-arrays");
}
Usage example of org.apache.druid.query.QueryDataSource in the druid-io/druid project: class GroupByQueryRunnerTest, method testDifferentGroupingSubquery.
@Test
public void testDifferentGroupingSubquery() {
  // Inner query: day-granularity groupBy on "quality" with long-sum aggregators.
  GroupByQuery innerQuery = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(
          QueryRunnerTestHelper.ROWS_COUNT,
          new LongSumAggregatorFactory("idx", "index"),
          new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
      )
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .build();

  // Outer query: no dimensions, double-max aggregators over the inner results.
  GroupByQuery outerQuery = makeQueryBuilder()
      .setDataSource(innerQuery)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setAggregatorSpecs(
          QueryRunnerTestHelper.ROWS_COUNT,
          new DoubleMaxAggregatorFactory("idx", "idx"),
          new DoubleMaxAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
      )
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .build();

  List<ResultRow> expectedRows = makeRows(
      outerQuery,
      new String[] { "__time", "rows", "idx", "indexMaxPlusTen" },
      new Object[] { "2011-04-01", 9L, 2900.0, 2930.0 },
      new Object[] { "2011-04-02", 9L, 2505.0, 2535.0 }
  );
  TestHelper.assertExpectedObjects(
      expectedRows,
      GroupByQueryRunnerTestHelper.runQuery(factory, runner, outerQuery),
      "subquery"
  );

  // Second pass: swap the inner "idx" sum to a virtual-column expression and re-run.
  innerQuery = makeQueryBuilder(innerQuery)
      .setVirtualColumns(
          new ExpressionVirtualColumn("expr", "-index + 100", ColumnType.FLOAT, TestExprMacroTable.INSTANCE)
      )
      .setAggregatorSpecs(
          QueryRunnerTestHelper.ROWS_COUNT,
          new LongSumAggregatorFactory("idx", "expr"),
          new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
      )
      .build();
  outerQuery = (GroupByQuery) outerQuery.withDataSource(new QueryDataSource(innerQuery));

  expectedRows = makeRows(
      outerQuery,
      new String[] { "__time", "rows", "idx", "indexMaxPlusTen" },
      new Object[] { "2011-04-01", 9L, 21.0, 2930.0 },
      new Object[] { "2011-04-02", 9L, 2.0, 2535.0 }
  );
  TestHelper.assertExpectedObjects(
      expectedRows,
      GroupByQueryRunnerTestHelper.runQuery(factory, runner, outerQuery),
      "subquery"
  );
}
Usage example of org.apache.druid.query.QueryDataSource in the druid-io/druid project: class DruidQuery, method computeQuery.
/**
 * Return this query as some kind of Druid query. The returned query will either be {@link TopNQuery},
 * {@link TimeseriesQuery}, {@link GroupByQuery}, {@link ScanQuery}
 *
 * @return Druid query
 */
private Query computeQuery(final QueryFeatureInspector queryFeatureInspector) {
  // If there is a subquery, then we prefer the outer query to be a groupBy if possible, since this potentially
  // enables more efficient execution. (The groupBy query toolchest can handle some subqueries by itself, without
  // requiring the Broker to inline results.)
  if (dataSource instanceof QueryDataSource) {
    final GroupByQuery groupByOverSubquery = toGroupByQuery(queryFeatureInspector);
    if (groupByOverSubquery != null) {
      return groupByOverSubquery;
    }
  }

  // Otherwise, try each query type in order of preference: timeseries, topN, groupBy, then scan.
  final TimeseriesQuery timeseries = toTimeseriesQuery(queryFeatureInspector);
  if (timeseries != null) {
    return timeseries;
  }

  final TopNQuery topN = toTopNQuery(queryFeatureInspector);
  if (topN != null) {
    return topN;
  }

  final GroupByQuery groupBy = toGroupByQuery(queryFeatureInspector);
  if (groupBy != null) {
    return groupBy;
  }

  final ScanQuery scan = toScanQuery(queryFeatureInspector);
  if (scan != null) {
    return scan;
  }

  // No native query type can represent this combination of query parts.
  throw new CannotBuildQueryException("Cannot convert query parts into an actual query");
}
Usage example of org.apache.druid.query.QueryDataSource in the druid-io/druid project: class DruidOuterQueryRel, method toDruidQuery.
@Override
public DruidQuery toDruidQuery(final boolean finalizeAggregations) {
  // Must finalize aggregations on subqueries.
  final DruidQuery innerQuery = ((DruidRel) sourceRel).toDruidQuery(true);

  // The subquery's output row signature becomes the input signature of the outer query.
  final RowSignature innerSignature = innerQuery.getOutputRowSignature();
  final QueryDataSource innerDataSource = new QueryDataSource(innerQuery.getQuery());

  return partialQuery.build(
      innerDataSource,
      innerSignature,
      getPlannerContext(),
      getCluster().getRexBuilder(),
      finalizeAggregations
  );
}
Usage example of org.apache.druid.query.QueryDataSource in the druid-io/druid project: class ClientQuerySegmentWalkerTest, method testGroupByOnGroupByOnTable.
@Test
public void testGroupByOnGroupByOnTable() {
  // Inner groupBy over the FOO table, grouping on dimension "s".
  final GroupByQuery innerQuery = (GroupByQuery) GroupByQuery.builder()
      .setDataSource(FOO)
      .setGranularity(Granularities.ALL)
      .setInterval(Collections.singletonList(INTERVAL))
      .setDimensions(DefaultDimensionSpec.of("s"))
      .build()
      .withId("queryId");

  // Outer groupBy that counts the rows produced by the inner query.
  final GroupByQuery outerQuery = (GroupByQuery) GroupByQuery.builder()
      .setDataSource(new QueryDataSource(innerQuery))
      .setGranularity(Granularities.ALL)
      .setInterval(Intervals.ONLY_ETERNITY)
      .setAggregatorSpecs(new CountAggregatorFactory("cnt"))
      .build()
      .withId(DUMMY_QUERY_ID);

  // GroupBy handles its own subqueries; only the inner one will go to the cluster. Also, it gets a subquery id
  testQuery(
      outerQuery,
      ImmutableList.of(ExpectedQuery.cluster(innerQuery.withSubQueryId("1.1"))),
      ImmutableList.of(new Object[] { 3L })
  );

  // Exactly one query should have passed through the scheduler end-to-end.
  Assert.assertEquals(1, scheduler.getTotalRun().get());
  Assert.assertEquals(1, scheduler.getTotalPrioritizedAndLaned().get());
  Assert.assertEquals(1, scheduler.getTotalAcquired().get());
  Assert.assertEquals(1, scheduler.getTotalReleased().get());
}
Aggregations