Usage of org.apache.druid.query.groupby.GroupByQuery in the druid-io/druid project: class ClientQuerySegmentWalkerTest, method testJoinOnTableErrorCantInlineTable.
@Test
public void testJoinOnTableErrorCantInlineTable() {
  // Join two concrete tables directly: neither side is a subquery, so the
  // walker presumably has nothing it can inline and must reject the structure.
  final JoinDataSource joinDataSource = JoinDataSource.create(
      new TableDataSource(FOO),
      new TableDataSource(BAR),
      "j.",
      "\"j.s\" == \"s\"",
      JoinType.INNER,
      null,
      ExprMacroTable.nil()
  );

  final GroupByQuery query =
      (GroupByQuery) GroupByQuery.builder()
                                 .setDataSource(joinDataSource)
                                 .setGranularity(Granularities.ALL)
                                 .setInterval(Intervals.ONLY_ETERNITY)
                                 .setDimensions(DefaultDimensionSpec.of("s"), DefaultDimensionSpec.of("j.s"))
                                 .setAggregatorSpecs(new CountAggregatorFactory("cnt"))
                                 .build()
                                 .withId(DUMMY_QUERY_ID);

  // Failure is expected before any per-table queries are issued, hence the
  // empty expected-query and expected-result lists.
  expectedException.expect(IllegalStateException.class);
  expectedException.expectMessage("Cannot handle subquery structure for dataSource");

  testQuery(query, ImmutableList.of(), ImmutableList.of());
}
Usage of org.apache.druid.query.groupby.GroupByQuery in the druid-io/druid project: class ClientQuerySegmentWalkerTest, method testJoinOnGroupByOnUnionOfTables.
@Test
public void testJoinOnGroupByOnUnionOfTables() {
final UnionDataSource unionDataSource = new UnionDataSource(ImmutableList.of(new TableDataSource(FOO), new TableDataSource(BAR)));
final GroupByQuery subquery = GroupByQuery.builder().setDataSource(unionDataSource).setGranularity(Granularities.ALL).setInterval(Collections.singletonList(INTERVAL)).setDimensions(DefaultDimensionSpec.of("s")).setDimFilter(new SelectorDimFilter("s", "y", null)).build();
final GroupByQuery query = (GroupByQuery) GroupByQuery.builder().setDataSource(JoinDataSource.create(unionDataSource, new QueryDataSource(subquery), "j.", "\"j.s\" == \"s\"", JoinType.INNER, null, ExprMacroTable.nil())).setGranularity(Granularities.ALL).setInterval(Intervals.ONLY_ETERNITY).setDimensions(DefaultDimensionSpec.of("s"), DefaultDimensionSpec.of("j.s")).setAggregatorSpecs(new CountAggregatorFactory("cnt")).build().withId(DUMMY_QUERY_ID);
testQuery(query, ImmutableList.of(ExpectedQuery.cluster(subquery.withDataSource(subquery.getDataSource().getChildren().get(0)).withId(DUMMY_QUERY_ID).withSubQueryId("2.1.foo.1")), ExpectedQuery.cluster(subquery.withDataSource(subquery.getDataSource().getChildren().get(1)).withId(DUMMY_QUERY_ID).withSubQueryId("2.1.bar.2")), ExpectedQuery.cluster(query.withDataSource(query.getDataSource().withChildren(ImmutableList.of(unionDataSource.getChildren().get(0), InlineDataSource.fromIterable(ImmutableList.of(new Object[] { "y" }), RowSignature.builder().add("s", ColumnType.STRING).build())))).withSubQueryId("foo.1")), ExpectedQuery.cluster(query.withDataSource(query.getDataSource().withChildren(ImmutableList.of(unionDataSource.getChildren().get(1), InlineDataSource.fromIterable(ImmutableList.of(new Object[] { "y" }), RowSignature.builder().add("s", ColumnType.STRING).build())))).withSubQueryId("bar.2"))), ImmutableList.of(new Object[] { "y", "y", 1L }));
// note: this should really be 1, but in the interim queries that are composed of multiple queries count each
// invocation of either the cluster or local walker in ClientQuerySegmentWalker
Assert.assertEquals(4, scheduler.getTotalRun().get());
Assert.assertEquals(4, scheduler.getTotalPrioritizedAndLaned().get());
Assert.assertEquals(4, scheduler.getTotalAcquired().get());
Assert.assertEquals(4, scheduler.getTotalReleased().get());
}
Usage of org.apache.druid.query.groupby.GroupByQuery in the druid-io/druid project: class ClientQuerySegmentWalkerTest, method testGroupByOnUnionOfTwoTables.
@Test
public void testGroupByOnUnionOfTwoTables() {
  // GroupBy over a union of two tables: the walker is expected to split it
  // into one cluster query per table and merge the per-table results.
  final UnionDataSource unionDataSource =
      new UnionDataSource(ImmutableList.of(new TableDataSource(FOO), new TableDataSource(BAR)));

  final GroupByQuery query = (GroupByQuery) GroupByQuery
      .builder()
      .setDataSource(unionDataSource)
      .setGranularity(Granularities.ALL)
      .setInterval(Intervals.ONLY_ETERNITY)
      .setDimensions(DefaultDimensionSpec.of("s"))
      .setAggregatorSpecs(new CountAggregatorFactory("cnt"))
      .build()
      .withId(DUMMY_QUERY_ID);

  testQuery(
      query,
      // One expected cluster query per union child, each tagged with its own subQueryId.
      ImmutableList.of(
          ExpectedQuery.cluster(query.withDataSource(new TableDataSource(FOO)).withSubQueryId("foo.1")),
          ExpectedQuery.cluster(query.withDataSource(new TableDataSource(BAR)).withSubQueryId("bar.2"))
      ),
      ImmutableList.of(
          new Object[] { "a", 2L },
          new Object[] { "b", 1L },
          new Object[] { "c", 1L },
          new Object[] { "x", 2L },
          new Object[] { "y", 1L },
          new Object[] { "z", 1L }
      )
  );

  // note: this should really be 1, but in the interim queries that are composed of multiple queries count each
  // invocation of either the cluster or local walker in ClientQuerySegmentWalker
  Assert.assertEquals(2, scheduler.getTotalRun().get());
  Assert.assertEquals(2, scheduler.getTotalPrioritizedAndLaned().get());
  Assert.assertEquals(2, scheduler.getTotalAcquired().get());
  Assert.assertEquals(2, scheduler.getTotalReleased().get());
}
Usage of org.apache.druid.query.groupby.GroupByQuery in the druid-io/druid project: class ClientQuerySegmentWalkerTest, method testGroupByOnUnionOfOneTable.
@Test
public void testGroupByOnUnionOfOneTable() {
  // Degenerate union with a single child: the walker should collapse it to a
  // plain table query (note: no subQueryId is assigned in this case).
  final GroupByQuery query = (GroupByQuery) GroupByQuery
      .builder()
      .setDataSource(new UnionDataSource(ImmutableList.of(new TableDataSource(FOO))))
      .setGranularity(Granularities.ALL)
      .setInterval(Intervals.ONLY_ETERNITY)
      .setDimensions(DefaultDimensionSpec.of("s"))
      .setAggregatorSpecs(new CountAggregatorFactory("cnt"))
      .build()
      .withId(DUMMY_QUERY_ID);

  testQuery(
      query,
      ImmutableList.of(ExpectedQuery.cluster(query.withDataSource(new TableDataSource(FOO)))),
      ImmutableList.of(
          new Object[] { "x", 2L },
          new Object[] { "y", 1L },
          new Object[] { "z", 1L }
      )
  );

  // Exactly one query reaches the scheduler since the union collapsed.
  Assert.assertEquals(1, scheduler.getTotalRun().get());
  Assert.assertEquals(1, scheduler.getTotalPrioritizedAndLaned().get());
  Assert.assertEquals(1, scheduler.getTotalAcquired().get());
  Assert.assertEquals(1, scheduler.getTotalReleased().get());
}
Usage of org.apache.druid.query.groupby.GroupByQuery in the druid-io/druid project: class VectorizedVirtualColumnTest, method testGroupBy.
/**
 * Runs a groupBy on the always-two virtual column and verifies the single
 * expected result row.
 *
 * @param capabilities column capabilities advertised by the virtual column
 * @param context      query context (e.g. vectorize settings) to apply
 * @param canVectorize whether the virtual column should report itself vectorizable
 */
private void testGroupBy(ColumnCapabilities capabilities, Map<String, Object> context, boolean canVectorize) {
  final GroupByQuery query = new GroupByQuery.Builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setGranularity(Granularities.ALL)
      .setVirtualColumns(new AlwaysTwoVectorizedVirtualColumn(ALWAYS_TWO, capabilities, canVectorize))
      .addDimension(new DefaultDimensionSpec(ALWAYS_TWO, ALWAYS_TWO, capabilities.toColumnType()))
      .setAggregatorSpecs(new AlwaysTwoCounterAggregatorFactory(COUNT, ALWAYS_TWO))
      .setInterval("2000/2030")
      .setContext(context)
      .addOrderByColumn(ALWAYS_TWO)
      .build();

  final List<ResultRow> actual = groupByTestHelper.runQueryOnSegmentsObjs(segments, query).toList();

  // Expected counts/values depend on the advertised capabilities (see getCount/getTwo).
  final List<ResultRow> expected = Collections.singletonList(
      GroupByQueryRunnerTestHelper.createExpectedRow(query, "2000", COUNT, getCount(capabilities), ALWAYS_TWO, getTwo(capabilities))
  );

  TestHelper.assertExpectedObjects(expected, actual, "failed");
}
Aggregations