Search in sources:

Example 26 with DefaultDimensionSpec

use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.

the class CalciteQueryTest method testSillyQuarters.

@Test
public void testSillyQuarters() throws Exception {
    // Like FLOOR(__time TO QUARTER) but silly.
    // Cannot vectorize due to virtual columns.
    cannotVectorize();
    testQuery("SELECT CAST((EXTRACT(MONTH FROM __time) - 1 ) / 3 + 1 AS INTEGER) AS quarter, COUNT(*)\n" + "FROM foo\n" + "GROUP BY CAST((EXTRACT(MONTH FROM __time) - 1 ) / 3 + 1 AS INTEGER)", ImmutableList.of(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE1).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setVirtualColumns(expressionVirtualColumn("v0", "(((timestamp_extract(\"__time\",'MONTH','UTC') - 1) / 3) + 1)", ColumnType.LONG)).setDimensions(dimensions(new DefaultDimensionSpec("v0", "d0", ColumnType.LONG))).setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))).setContext(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { 1, 6L }));
}
Also used: CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), Test (org.junit.Test)

Example 27 with DefaultDimensionSpec

use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.

the class CalciteQueryTest method testOrderByLatestLong.

@Test
public void testOrderByLatestLong() throws Exception {
    // Cannot vectorize LATEST aggregator.
    skipVectorize();
    List<Object[]> expected;
    if (NullHandling.replaceWithDefault()) {
        expected = ImmutableList.of(new Object[] { "1", 0L }, new Object[] { "2", 0L }, new Object[] { "abc", 0L }, new Object[] { "def", 0L }, new Object[] { "", 7L }, new Object[] { "10.1", 325323L });
    } else {
        expected = ImmutableList.of(new Object[] { "1", null }, new Object[] { "abc", null }, new Object[] { "def", null }, new Object[] { "2", 0L }, new Object[] { "", 7L }, new Object[] { "10.1", 325323L });
    }
    testQuery("SELECT dim1, LATEST(l1) FROM druid.numfoo GROUP BY 1 ORDER BY 2 LIMIT 10", ImmutableList.of(new TopNQueryBuilder().dataSource(CalciteTests.DATASOURCE3).intervals(querySegmentSpec(Filtration.eternity())).granularity(Granularities.ALL).dimension(new DefaultDimensionSpec("dim1", "_d0")).aggregators(aggregators(new LongLastAggregatorFactory("a0", "l1", null))).metric(new InvertedTopNMetricSpec(new NumericTopNMetricSpec("a0"))).threshold(10).context(QUERY_CONTEXT_DEFAULT).build()), expected);
}
Also used: TopNQueryBuilder (org.apache.druid.query.topn.TopNQueryBuilder), LongLastAggregatorFactory (org.apache.druid.query.aggregation.last.LongLastAggregatorFactory), InvertedTopNMetricSpec (org.apache.druid.query.topn.InvertedTopNMetricSpec), NumericTopNMetricSpec (org.apache.druid.query.topn.NumericTopNMetricSpec), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), Test (org.junit.Test)
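
A note on the ordering in this example: a TopN query's numeric metric spec ranks rows by the named aggregator in descending order, so the ascending "ORDER BY 2 LIMIT 10" is expressed by wrapping it in InvertedTopNMetricSpec. A minimal, self-contained sketch of that construction (the wrapper class name here is hypothetical; only the two metric spec classes already used in the example are assumed):

import org.apache.druid.query.topn.InvertedTopNMetricSpec;
import org.apache.druid.query.topn.NumericTopNMetricSpec;
import org.apache.druid.query.topn.TopNMetricSpec;

public class AscendingMetricSpecSketch {
    public static void main(String[] args) {
        // NumericTopNMetricSpec orders descending by the aggregator output "a0";
        // InvertedTopNMetricSpec flips that to ascending, matching ORDER BY 2 ASC.
        TopNMetricSpec ascendingByLatest = new InvertedTopNMetricSpec(new NumericTopNMetricSpec("a0"));
        System.out.println(ascendingByLatest);
    }
}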

Example 28 with DefaultDimensionSpec

use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.

the class CalciteQueryTest method testGroupingSetsNoSuperset.

@Test
public void testGroupingSetsNoSuperset() throws Exception {
    // Cannot vectorize due to virtual columns.
    cannotVectorize();
    // Note: the grouping sets are reordered in the output of this query, but this is allowed.
    testQuery("SELECT dim2, gran, SUM(cnt)\n" + "FROM (SELECT FLOOR(__time TO MONTH) AS gran, COALESCE(dim2, '') dim2, cnt FROM druid.foo) AS x\n" + "GROUP BY GROUPING SETS ( (), (dim2), (gran) )", ImmutableList.of(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE1).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setVirtualColumns(expressionVirtualColumn("v0", "case_searched(notnull(\"dim2\"),\"dim2\",'')", ColumnType.STRING), expressionVirtualColumn("v1", "timestamp_floor(\"__time\",'P1M',null,'UTC')", ColumnType.LONG)).setDimensions(dimensions(new DefaultDimensionSpec("v0", "d0"), new DefaultDimensionSpec("v1", "d1", ColumnType.LONG))).setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))).setSubtotalsSpec(ImmutableList.of(ImmutableList.of("d0"), ImmutableList.of("d1"), ImmutableList.of())).setContext(withTimestampResultContext(QUERY_CONTEXT_DEFAULT, "d1", 1, Granularities.MONTH)).build()), ImmutableList.of(new Object[] { "", null, 3L }, new Object[] { "a", null, 2L }, new Object[] { "abc", null, 1L }, new Object[] { NULL_STRING, timestamp("2000-01-01"), 3L }, new Object[] { NULL_STRING, timestamp("2001-01-01"), 3L }, new Object[] { NULL_STRING, null, 6L }));
}
Also used: LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), Test (org.junit.Test)
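
The GROUPING SETS clause in this example maps directly onto setSubtotalsSpec, expressed in the query's output dimension names ("d0" for dim2, "d1" for gran) rather than the SQL column names. A minimal, standalone sketch of that mapping (the class and method names are hypothetical; only Guava's ImmutableList is assumed):

import com.google.common.collect.ImmutableList;
import java.util.List;

public class GroupingSetsSubtotalsSketch {
    // GROUP BY GROUPING SETS ( (), (dim2), (gran) ) becomes one inner list of
    // output dimension names per grouping set, passed to setSubtotalsSpec.
    static List<List<String>> subtotalsForNoSuperset() {
        return ImmutableList.of(
                ImmutableList.of("d0"), // (dim2)
                ImmutableList.of("d1"), // (gran)
                ImmutableList.of());    // ()
    }

    public static void main(String[] args) {
        System.out.println(subtotalsForNoSuperset());
    }
}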

Example 29 with DefaultDimensionSpec

use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.

the class CalciteQueryTest method testGroupingSetsWithLimit.

@Test
public void testGroupingSetsWithLimit() throws Exception {
    // Cannot vectorize due to virtual columns.
    cannotVectorize();
    testQuery("SELECT dim2, gran, SUM(cnt)\n" + "FROM (SELECT FLOOR(__time TO MONTH) AS gran, COALESCE(dim2, '') dim2, cnt FROM druid.foo) AS x\n" + "GROUP BY GROUPING SETS ( (dim2, gran), (dim2), (gran), () ) LIMIT 100", ImmutableList.of(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE1).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setVirtualColumns(expressionVirtualColumn("v0", "case_searched(notnull(\"dim2\"),\"dim2\",'')", ColumnType.STRING), expressionVirtualColumn("v1", "timestamp_floor(\"__time\",'P1M',null,'UTC')", ColumnType.LONG)).setDimensions(dimensions(new DefaultDimensionSpec("v0", "d0"), new DefaultDimensionSpec("v1", "d1", ColumnType.LONG))).setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))).setSubtotalsSpec(ImmutableList.of(ImmutableList.of("d0", "d1"), ImmutableList.of("d0"), ImmutableList.of("d1"), ImmutableList.of())).setLimitSpec(new DefaultLimitSpec(ImmutableList.of(), 100)).setContext(withTimestampResultContext(QUERY_CONTEXT_DEFAULT, "d1", 1, Granularities.MONTH)).build()), ImmutableList.of(new Object[] { "", timestamp("2000-01-01"), 2L }, new Object[] { "", timestamp("2001-01-01"), 1L }, new Object[] { "a", timestamp("2000-01-01"), 1L }, new Object[] { "a", timestamp("2001-01-01"), 1L }, new Object[] { "abc", timestamp("2001-01-01"), 1L }, new Object[] { "", null, 3L }, new Object[] { "a", null, 2L }, new Object[] { "abc", null, 1L }, new Object[] { NULL_STRING, timestamp("2000-01-01"), 3L }, new Object[] { NULL_STRING, timestamp("2001-01-01"), 3L }, new Object[] { NULL_STRING, null, 6L }));
}
Also used: DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), Test (org.junit.Test)

Example 30 with DefaultDimensionSpec

use of org.apache.druid.query.dimension.DefaultDimensionSpec in project druid by druid-io.

the class CalciteQueryTest method testGroupByLimitWrapping.

@Test
public void testGroupByLimitWrapping() throws Exception {
    List<Object[]> expected;
    if (NullHandling.replaceWithDefault()) {
        expected = ImmutableList.of(new Object[] { "def", "abc", 1L }, new Object[] { "abc", "", 1L });
    } else {
        expected = ImmutableList.of(new Object[] { "def", "abc", 1L }, new Object[] { "abc", null, 1L });
    }
    testQuery("SELECT dim1, dim2, COUNT(*) FROM druid.foo GROUP BY dim1, dim2 ORDER BY dim1 DESC", OUTER_LIMIT_CONTEXT, ImmutableList.of(new GroupByQuery.Builder().setDataSource(CalciteTests.DATASOURCE1).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setDimensions(new DefaultDimensionSpec("dim1", "d0", ColumnType.STRING), new DefaultDimensionSpec("dim2", "d1", ColumnType.STRING)).setLimitSpec(DefaultLimitSpec.builder().orderBy(new OrderByColumnSpec("d0", Direction.DESCENDING, StringComparators.LEXICOGRAPHIC)).limit(2).build()).setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))).setContext(OUTER_LIMIT_CONTEXT).build()), expected);
}
Also used: OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), TopNQueryBuilder (org.apache.druid.query.topn.TopNQueryBuilder), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), Test (org.junit.Test)

Aggregations

DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 494
Test (org.junit.Test): 461
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 261
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 235
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 107
DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec): 106
OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec): 104
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 53
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 53
ExpressionVirtualColumn (org.apache.druid.segment.virtual.ExpressionVirtualColumn): 53
QueryDataSource (org.apache.druid.query.QueryDataSource): 52
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter): 43
TableDataSource (org.apache.druid.query.TableDataSource): 41
Result (org.apache.druid.query.Result): 37
ResultRow (org.apache.druid.query.groupby.ResultRow): 35
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 33
ArrayList (java.util.ArrayList): 32
HashMap (java.util.HashMap): 30
TopNQueryBuilder (org.apache.druid.query.topn.TopNQueryBuilder): 28
ExtractionDimensionSpec (org.apache.druid.query.dimension.ExtractionDimensionSpec): 26
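
Across these examples, DefaultDimensionSpec appears in two forms: the two-argument constructor (dimension, output name), used for string columns, and the three-argument constructor that states the output type explicitly, typically ColumnType.LONG for numeric virtual columns. A minimal standalone sketch of both (the wrapper class name is hypothetical; the constructors and accessors are the ones used in the tests above):

import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.segment.column.ColumnType;

public class DefaultDimensionSpecSketch {
    public static void main(String[] args) {
        // Group on a string column, renamed to the output name "d0".
        DefaultDimensionSpec byString = new DefaultDimensionSpec("dim1", "d0");

        // Group on a LONG-typed virtual column ("v0"), with the output type stated explicitly.
        DefaultDimensionSpec byLong = new DefaultDimensionSpec("v0", "d0", ColumnType.LONG);

        System.out.println(byString.getDimension() + " -> " + byString.getOutputName());
        System.out.println(byLong.getOutputType());
    }
}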