Use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
Class GroupByQueryRunnerTest, method testGroupByLongColumnDescending.
@Test
public void testGroupByLongColumnDescending() {
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("GroupBy v1 only supports dimensions with an outputType of STRING.");
  }
  GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("qualityLong", "ql_alias", ColumnType.LONG))
      .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .addOrderByColumn(new OrderByColumnSpec("ql_alias", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC))
      .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
      .build();
  Assert.assertNotEquals(Functions.<Sequence<ResultRow>>identity(), query.getLimitSpec().build(query));
  List<ResultRow> expectedResults = Arrays.asList(
      makeRow(query, "2011-04-01", "ql_alias", 1700L, "rows", 2L, "idx", 175L),
      makeRow(query, "2011-04-01", "ql_alias", 1200L, "rows", 2L, "idx", 324L)
  );
  Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expectedResults, results, "long");
}
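For reference, the two pieces that make this a typed, numerically ordered grouping are the LONG-typed dimension spec and the numeric descending order column. A minimal sketch using the same constructors as the test above (the variable names are illustrative, not part of the test):

// Sketch only: a long-typed dimension spec plus numeric descending ordering,
// mirroring the constructors used in the test (variable names are illustrative).
DefaultDimensionSpec longDim = new DefaultDimensionSpec(
    "qualityLong",   // input column
    "ql_alias",      // output name referenced by the order-by below
    ColumnType.LONG  // non-STRING output types are rejected by GroupBy v1
);
OrderByColumnSpec numericDesc = new OrderByColumnSpec(
    "ql_alias",
    OrderByColumnSpec.Direction.DESCENDING,
    StringComparators.NUMERIC  // compare values numerically rather than lexicographically
);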
Use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
Class GroupByQueryRunnerTest, method testGroupByWithAggsOnNumericDimensions.
@Test
public void testGroupByWithAggsOnNumericDimensions() {
  // Cannot vectorize due to javascript aggregators.
  cannotVectorize();
  GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setDimFilter(new SelectorDimFilter("quality", "technology", null))
      .setAggregatorSpecs(
          QueryRunnerTestHelper.ROWS_COUNT,
          new LongSumAggregatorFactory("qlLong", "qualityLong"),
          new DoubleSumAggregatorFactory("qlFloat", "qualityLong"),
          new JavaScriptAggregatorFactory("qlJs", ImmutableList.of("qualityLong"), "function(a,b) { return a + b; }", "function() { return 0; }", "function(a,b) { return a + b }", JavaScriptConfig.getEnabledInstance()),
          new DoubleSumAggregatorFactory("qfFloat", "qualityFloat"),
          new LongSumAggregatorFactory("qfLong", "qualityFloat"),
          new JavaScriptAggregatorFactory("qfJs", ImmutableList.of("qualityFloat"), "function(a,b) { return a + b; }", "function() { return 0; }", "function(a,b) { return a + b }", JavaScriptConfig.getEnabledInstance())
      )
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .build();
  List<ResultRow> expectedResults = Arrays.asList(
      makeRow(query, "2011-04-01", "alias", "technology", "rows", 1L, "qlLong", 1700L, "qlFloat", 1700.0, "qlJs", 1700.0, "qfFloat", 17000.0, "qfLong", 17000L, "qfJs", 17000.0),
      makeRow(query, "2011-04-02", "alias", "technology", "rows", 1L, "qlLong", 1700L, "qlFloat", 1700.0, "qlJs", 1700.0, "qfFloat", 17000.0, "qfLong", 17000L, "qfJs", 17000.0)
  );
  Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expectedResults, results, "numeric-dims");
}
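Note that the numeric columns here are only aggregated, not grouped on; the single grouping dimension stays a string. A minimal sketch of the pattern, using the same factory constructors as the test (variable names are illustrative): the same input column can be summed under different output types by pairing the long and double sum factories.

// Sketch only: summing one numeric input column under two output types
// (constructor signatures taken from the test above; names are illustrative).
AggregatorFactory asLong = new LongSumAggregatorFactory("qlLong", "qualityLong");
AggregatorFactory asDouble = new DoubleSumAggregatorFactory("qlFloat", "qualityLong");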
Use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
Class GroupByQueryRunnerTest, method testRejectForceLimitPushDownWithHaving.
@Test
public void testRejectForceLimitPushDownWithHaving() {
  expectedException.expect(IAE.class);
  expectedException.expectMessage("Cannot force limit push down when a having spec is present.");
  final GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
      .setDimensions(new DefaultDimensionSpec(QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias"))
      .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .setLimitSpec(new DefaultLimitSpec(Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)), 2))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
      .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true))
      .setHavingSpec(new GreaterThanHavingSpec("rows", 10))
      .build();
  query.isApplyLimitPushDown();
}
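For contrast, a minimal sketch of the shape this validation is guarding against: the same forced-limit-push-down context without a having spec, which should no longer trigger the having-spec rejection. This query is illustrative only and reuses the test's helpers; it is not part of the test class.

// Sketch only: forced limit push down with no having spec attached
// (illustrative; avoids the specific rejection exercised by the test above).
final GroupByQuery pushDownOk = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
    .setDimensions(new DefaultDimensionSpec(QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias"))
    .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
    .setLimitSpec(new DefaultLimitSpec(Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)), 2))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
    .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true))
    .build();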
Use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
Class GroupByQueryRunnerTest, method testSubqueryWithOuterCountAggregator.
@Test
public void testSubqueryWithOuterCountAggregator() {
  final GroupByQuery subquery = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setLimitSpec(new DefaultLimitSpec(ImmutableList.of(new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.ASCENDING)), null))
      .build();
  final GroupByQuery query = makeQueryBuilder()
      .setDataSource(subquery)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new ArrayList<>())
      .setAggregatorSpecs(new CountAggregatorFactory("count"))
      .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
      .build();
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(ISE.class);
    expectedException.expectMessage("Unknown column in order clause");
    GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  } else {
    List<ResultRow> expectedResults = Collections.singletonList(makeRow(query, "2011-04-01", "count", 18L));
    Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
    TestHelper.assertExpectedObjects(expectedResults, results, "subquery-count-agg");
  }
}
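The outer query groups on no dimensions, so its single "count" row is simply the number of rows the inner groupBy produces within the outer interval. The subquery's limit spec is order-only: passing null for the limit keeps the ordering without truncating rows. A minimal sketch of that piece, mirroring the constructor used above (the variable name is illustrative):

// Sketch only: an order-only limit spec like the one on the subquery above;
// a null limit applies the ordering but does not truncate the result set.
DefaultLimitSpec orderOnly = new DefaultLimitSpec(
    ImmutableList.of(new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.ASCENDING)),
    null  // no row limit
);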
Use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.
Class GroupByQueryRunnerTest, method testGroupByLongTimeColumn.
@Test
public void testGroupByLongTimeColumn() {
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("GroupBy v1 only supports dimensions with an outputType of STRING.");
  }
  GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("__time", "time_alias", ColumnType.LONG))
      .setDimFilter(new SelectorDimFilter("quality", "entertainment", null))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .build();
  List<ResultRow> expectedResults = Arrays.asList(
      makeRow(query, "2011-04-01", "time_alias", 1301616000000L, "rows", 1L, "idx", 158L),
      makeRow(query, "2011-04-02", "time_alias", 1301702400000L, "rows", 1L, "idx", 166L)
  );
  Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expectedResults, results, "long");
}
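Grouping on __time is treated like any other LONG-typed dimension: the output values are epoch milliseconds, which is why the expected rows carry 1301616000000L (2011-04-01T00:00:00Z) and 1301702400000L (2011-04-02T00:00:00Z). A minimal sketch of the dimension spec, mirroring the constructor used in the test (the variable name is illustrative):

// Sketch only: grouping on the built-in timestamp column as a LONG dimension;
// output values are epoch milliseconds (variable name is illustrative).
DefaultDimensionSpec timeDim = new DefaultDimensionSpec(
    "__time",        // Druid's primary timestamp column
    "time_alias",
    ColumnType.LONG  // GroupBy v1 would reject this non-STRING output type
);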