
Example 6 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io, in class CalciteQueryTest, method testQueryWithSelectProjectAndIdentityProjectDoesNotRename.

@Test
public void testQueryWithSelectProjectAndIdentityProjectDoesNotRename() throws Exception {
    cannotVectorize();
    requireMergeBuffers(3);
    testQuery(
        PLANNER_CONFIG_NO_HLL.withOverrides(
            ImmutableMap.of(PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, "true")),
        "SELECT\n"
            + "(SUM(CASE WHEN (TIMESTAMP '2000-01-04 17:00:00'<=__time AND __time<TIMESTAMP '2022-01-05 17:00:00') THEN 1 ELSE 0 END)*1.0/COUNT(DISTINCT CASE WHEN (TIMESTAMP '2000-01-04 17:00:00'<=__time AND __time<TIMESTAMP '2022-01-05 17:00:00') THEN dim1 END))\n"
            + "FROM druid.foo\n"
            + "GROUP BY ()",
        CalciteTests.REGULAR_USER_AUTH_RESULT,
        ImmutableList.of(
            GroupByQuery.builder()
                .setDataSource(
                    new QueryDataSource(
                        GroupByQuery.builder()
                            .setDataSource(CalciteTests.DATASOURCE1)
                            .setInterval(querySegmentSpec(Filtration.eternity()))
                            .setGranularity(Granularities.ALL)
                            .setVirtualColumns(
                                expressionVirtualColumn(
                                    "v0",
                                    "case_searched(((947005200000 <= \"__time\") && (\"__time\" < 1641402000000)),\"dim1\",null)",
                                    ColumnType.STRING))
                            .setDimensions(dimensions(new DefaultDimensionSpec("v0", "d0", ColumnType.STRING)))
                            .setAggregatorSpecs(
                                aggregators(
                                    new LongSumAggregatorFactory(
                                        "a0",
                                        null,
                                        "case_searched(((947005200000 <= \"__time\") && (\"__time\" < 1641402000000)),1,0)",
                                        ExprMacroTable.nil()),
                                    new GroupingAggregatorFactory("a1", ImmutableList.of("v0"))))
                            .setSubtotalsSpec(ImmutableList.of(ImmutableList.of("d0"), ImmutableList.of()))
                            .setContext(QUERY_CONTEXT_DEFAULT)
                            .build()))
                .setInterval(querySegmentSpec(Filtration.eternity()))
                .setGranularity(Granularities.ALL)
                .setAggregatorSpecs(
                    aggregators(
                        new FilteredAggregatorFactory(
                            new LongMinAggregatorFactory("_a0", "a0"),
                            selector("a1", "1", null)),
                        new FilteredAggregatorFactory(
                            new CountAggregatorFactory("_a1"),
                            and(not(selector("d0", null, null)), selector("a1", "0", null)))))
                .setPostAggregatorSpecs(
                    Collections.singletonList(
                        new ExpressionPostAggregator("p0", "((\"_a0\" * 1.0) / \"_a1\")", null, ExprMacroTable.nil())))
                .setContext(QUERY_CONTEXT_DEFAULT)
                .build()),
        ImmutableList.of(new Object[] { 1.0d }));
}
Also used: FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory), QueryDataSource (org.apache.druid.query.QueryDataSource), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), GroupingAggregatorFactory (org.apache.druid.query.aggregation.GroupingAggregatorFactory), ExpressionPostAggregator (org.apache.druid.query.aggregation.post.ExpressionPostAggregator), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), LongMinAggregatorFactory (org.apache.druid.query.aggregation.LongMinAggregatorFactory), Test (org.junit.Test)
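The distinctive piece of this plan is the filtered count: a CountAggregatorFactory wrapped in a FilteredAggregatorFactory so that rows are counted only where the grouping() marker "a1" is "0" and the distinct-count key "d0" is non-null. Below is a minimal standalone sketch of that construction using Druid's DimFilter classes directly instead of the test helpers selector/and/not from CalciteQueryTest; the class name is hypothetical and the column names simply mirror the plan above.

import java.util.Arrays;

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.filter.AndDimFilter;
import org.apache.druid.query.filter.NotDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

// Illustrative sketch only; class name is hypothetical.
public class ExactDistinctCountSketch {
  // Count a row only when the grouping marker "a1" equals "0" and the
  // distinct-count key "d0" is not null, mirroring the filter in the plan above.
  static AggregatorFactory filteredCount() {
    return new FilteredAggregatorFactory(
        new CountAggregatorFactory("_a1"),
        new AndDimFilter(Arrays.asList(
            new NotDimFilter(new SelectorDimFilter("d0", null, null)),
            new SelectorDimFilter("a1", "0", null))));
  }
}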

Example 7 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io, in class CalciteQueryTest, method testGroupByExpressionFromLookup.

@Test
public void testGroupByExpressionFromLookup() throws Exception {
    // Cannot vectorize direct queries on lookup tables.
    cannotVectorize();
    testQuery("SELECT SUBSTRING(v, 1, 1), COUNT(*) FROM lookup.lookyloo GROUP BY 1", ImmutableList.of(GroupByQuery.builder().setDataSource(new LookupDataSource("lookyloo")).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setDimensions(dimensions(new ExtractionDimensionSpec("v", "d0", new SubstringDimExtractionFn(0, 1)))).setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))).setContext(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { "m", 1L }, new Object[] { "x", 3L }));
}
Also used: SubstringDimExtractionFn (org.apache.druid.query.extraction.SubstringDimExtractionFn), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), LookupDataSource (org.apache.druid.query.LookupDataSource), ExtractionDimensionSpec (org.apache.druid.query.dimension.ExtractionDimensionSpec), Test (org.junit.Test)
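Outside the test scaffolding, the lookup-backed group-by above boils down to a LookupDataSource plus an ExtractionDimensionSpec applying SubstringDimExtractionFn. A minimal sketch of those two pieces follows; the class name is hypothetical and the column and output names mirror the example.

import org.apache.druid.query.LookupDataSource;
import org.apache.druid.query.dimension.ExtractionDimensionSpec;
import org.apache.druid.query.extraction.SubstringDimExtractionFn;

// Illustrative sketch only; class name is hypothetical.
public class LookupSubstringSketch {
  // Query the "lookyloo" lookup table directly as a datasource.
  static final LookupDataSource LOOKUP = new LookupDataSource("lookyloo");

  // SUBSTRING(v, 1, 1) in SQL becomes a substring extraction starting at
  // index 0 with length 1, applied to the lookup's value column "v".
  static final ExtractionDimensionSpec FIRST_CHAR_OF_V =
      new ExtractionDimensionSpec("v", "d0", new SubstringDimExtractionFn(0, 1));
}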

Example 8 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io, in class CalciteQueryTest, method testGroupByWithForceLimitPushDown.

@Test
public void testGroupByWithForceLimitPushDown() throws Exception {
    final Map<String, Object> context = new HashMap<>(QUERY_CONTEXT_DEFAULT);
    context.put(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true);
    testQuery("SELECT dim1, dim2, COUNT(*) FROM druid.foo GROUP BY dim1, dim2 limit 1", context, ImmutableList.of(new GroupByQuery.Builder().setDataSource(CalciteTests.DATASOURCE1).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setDimensions(new DefaultDimensionSpec("dim1", "d0", ColumnType.STRING), new DefaultDimensionSpec("dim2", "d1", ColumnType.STRING)).setLimitSpec(new DefaultLimitSpec(ImmutableList.of(), 1)).setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))).setContext(context).build()), ImmutableList.of(new Object[] { "", "a", 1L }));
}
Also used: DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), HashMap (java.util.HashMap), TopNQueryBuilder (org.apache.druid.query.topn.TopNQueryBuilder), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), Test (org.junit.Test)
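The push-down behavior is driven entirely by the query context flag, while the DefaultLimitSpec with no ordering columns carries the SQL's limit of 1. A minimal sketch of just those two pieces, assuming the same constants used above; the helper class and method names are hypothetical.

import com.google.common.collect.ImmutableList;
import java.util.HashMap;
import java.util.Map;
import org.apache.druid.query.groupby.GroupByQueryConfig;
import org.apache.druid.query.groupby.orderby.DefaultLimitSpec;

// Illustrative sketch only; class and method names are hypothetical.
public class ForceLimitPushDownSketch {
  static Map<String, Object> contextWithForcedPushDown(Map<String, Object> baseContext) {
    // Copy the base context and force the limit to be applied on data servers.
    final Map<String, Object> context = new HashMap<>(baseContext);
    context.put(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true);
    return context;
  }

  // No ordering columns, row limit of 1 (matches "limit 1" in the SQL).
  static final DefaultLimitSpec LIMIT_ONE = new DefaultLimitSpec(ImmutableList.of(), 1);
}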

Example 9 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io, in class CalciteQueryTest, method testNvlColumns.

@Test
public void testNvlColumns() throws Exception {
    // Cannot vectorize due to usage of expressions.
    cannotVectorize();
    testQuery("SELECT NVL(dim2, dim1), COUNT(*) FROM druid.foo GROUP BY NVL(dim2, dim1)\n", ImmutableList.of(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE1).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setVirtualColumns(expressionVirtualColumn("v0", "case_searched(notnull(\"dim2\"),\"dim2\",\"dim1\")", ColumnType.STRING)).setDimensions(dimensions(new DefaultDimensionSpec("v0", "d0", ColumnType.STRING))).setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))).setContext(QUERY_CONTEXT_DEFAULT).build()), NullHandling.replaceWithDefault() ? ImmutableList.of(new Object[] { "10.1", 1L }, new Object[] { "2", 1L }, new Object[] { "a", 2L }, new Object[] { "abc", 2L }) : ImmutableList.of(new Object[] { "", 1L }, new Object[] { "10.1", 1L }, new Object[] { "a", 2L }, new Object[] { "abc", 2L }));
}
Also used: CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), Test (org.junit.Test)
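NVL(dim2, dim1) is planned as a string virtual column evaluating case_searched(notnull("dim2"),"dim2","dim1"), which the group-by then uses as its dimension. The sketch below builds that virtual column directly, assuming the four-argument ExpressionVirtualColumn constructor (name, expression, output type, macro table) rather than the test helper expressionVirtualColumn; the class name is hypothetical.

import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;

// Illustrative sketch only; class name is hypothetical.
public class NvlVirtualColumnSketch {
  // NVL(dim2, dim1): use dim2 when it is non-null, otherwise fall back to dim1.
  static final ExpressionVirtualColumn NVL_DIM2_DIM1 = new ExpressionVirtualColumn(
      "v0",
      "case_searched(notnull(\"dim2\"),\"dim2\",\"dim1\")",
      ColumnType.STRING,
      ExprMacroTable.nil());
}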

Example 10 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io, in class CalciteQueryTest, method testFilterOnTimeExtractWithMultipleDays.

@Test
public void testFilterOnTimeExtractWithMultipleDays() throws Exception {
    // Cannot vectorize due to expression filters.
    cannotVectorize();
    testQuery("SELECT COUNT(*) FROM druid.foo\n" + "WHERE EXTRACT(YEAR FROM __time) = 2000\n" + "AND EXTRACT(DAY FROM __time) IN (2, 3, 5)", ImmutableList.of(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(querySegmentSpec(Filtration.eternity())).granularity(Granularities.ALL).virtualColumns(expressionVirtualColumn("v0", "timestamp_extract(\"__time\",'YEAR','UTC')", ColumnType.LONG), expressionVirtualColumn("v1", "timestamp_extract(\"__time\",'DAY','UTC')", ColumnType.LONG)).aggregators(aggregators(new CountAggregatorFactory("a0"))).filters(and(selector("v0", "2000", null), in("v1", ImmutableList.of("2", "3", "5"), null))).context(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { 2L }));
}
Also used: CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), Test (org.junit.Test)
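Stripped of the extraction virtual columns and filters, the native query above is a plain COUNT(*) timeseries. A minimal sketch of that skeleton using the same builder, with the hypothetical datasource name "foo" standing in for CalciteTests.DATASOURCE1 and a hypothetical class name:

import java.util.Collections;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.timeseries.TimeseriesQuery;

// Illustrative sketch only; class name and datasource name are hypothetical.
public class CountTimeseriesSketch {
  static TimeseriesQuery countAllRows() {
    // COUNT(*) over all time, ALL granularity, no filters or virtual columns.
    return Druids.newTimeseriesQueryBuilder()
        .dataSource("foo")
        .intervals(new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.ETERNITY)))
        .granularity(Granularities.ALL)
        .aggregators(Collections.singletonList(new CountAggregatorFactory("a0")))
        .build();
  }
}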

Aggregations

CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 300
Test (org.junit.Test): 249
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 103
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 81
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 62
TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery): 50
TableDataSource (org.apache.druid.query.TableDataSource): 44
QueryDataSource (org.apache.druid.query.QueryDataSource): 41
TimeseriesQueryQueryToolChest (org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest): 40
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 37
Result (org.apache.druid.query.Result): 36
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 35
FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory): 30
FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator): 30
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 28
IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment): 27
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 26
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 26
IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex): 26
Parameters (junitparams.Parameters): 24