use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
the class CalciteSelectQueryTest method testSelectCountStar.
@Test
public void testSelectCountStar() throws Exception {
// A timeseries query with ALL granularity has a single group, so it should return the
// default results for the given aggregators: 0 for count, and null (SQL-compatible mode)
// or 0.0 (default mode) for sum.
testQuery(
    PLANNER_CONFIG_DEFAULT,
    QUERY_CONTEXT_DEFAULT,
    "SELECT exp(count(*)) + 10, sum(m2) FROM druid.foo WHERE dim2 = 0",
    CalciteTests.REGULAR_USER_AUTH_RESULT,
    ImmutableList.of(
        Druids.newTimeseriesQueryBuilder()
            .dataSource(CalciteTests.DATASOURCE1)
            .intervals(querySegmentSpec(Filtration.eternity()))
            .filters(bound("dim2", "0", "0", false, false, null, StringComparators.NUMERIC))
            .granularity(Granularities.ALL)
            .aggregators(aggregators(
                new CountAggregatorFactory("a0"),
                new DoubleSumAggregatorFactory("a1", "m2")
            ))
            .postAggregators(expressionPostAgg("p0", "(exp(\"a0\") + 10)"))
            .context(QUERY_CONTEXT_DEFAULT)
            .build()
    ),
    ImmutableList.of(new Object[] { 11.0, NullHandling.defaultDoubleValue() })
);
testQuery(
    PLANNER_CONFIG_DEFAULT,
    QUERY_CONTEXT_DEFAULT,
    "SELECT exp(count(*)) + 10, sum(m2) FROM druid.foo WHERE __time >= TIMESTAMP '2999-01-01 00:00:00'",
    CalciteTests.REGULAR_USER_AUTH_RESULT,
    ImmutableList.of(
        Druids.newTimeseriesQueryBuilder()
            .dataSource(CalciteTests.DATASOURCE1)
            .intervals(querySegmentSpec(Intervals.of("2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z")))
            .granularity(Granularities.ALL)
            .aggregators(aggregators(
                new CountAggregatorFactory("a0"),
                new DoubleSumAggregatorFactory("a1", "m2")
            ))
            .postAggregators(expressionPostAgg("p0", "(exp(\"a0\") + 10)"))
            .context(QUERY_CONTEXT_DEFAULT)
            .build()
    ),
    ImmutableList.of(new Object[] { 11.0, NullHandling.defaultDoubleValue() })
);
// This behavior was not always correct, so make sure the legacy behavior can be retained by
// explicitly skipping empty buckets in the context, which causes these timeseries queries to
// return no results.
testQuery(
    PLANNER_CONFIG_DEFAULT,
    TIMESERIES_CONTEXT_BY_GRAN,
    "SELECT COUNT(*) FROM foo WHERE dim1 = 'nonexistent'",
    CalciteTests.REGULAR_USER_AUTH_RESULT,
    ImmutableList.of(
        Druids.newTimeseriesQueryBuilder()
            .dataSource(CalciteTests.DATASOURCE1)
            .intervals(querySegmentSpec(Filtration.eternity()))
            .filters(selector("dim1", "nonexistent", null))
            .granularity(Granularities.ALL)
            .aggregators(aggregators(new CountAggregatorFactory("a0")))
            .context(TIMESERIES_CONTEXT_BY_GRAN)
            .build()
    ),
    ImmutableList.of()
);
// A timeseries query with a non-ALL granularity groups by the time expression, so matching
// nothing returns no results.
testQuery(
    "SELECT COUNT(*) FROM foo WHERE dim1 = 'nonexistent' GROUP BY FLOOR(__time TO DAY)",
    ImmutableList.of(
        Druids.newTimeseriesQueryBuilder()
            .dataSource(CalciteTests.DATASOURCE1)
            .intervals(querySegmentSpec(Filtration.eternity()))
            .filters(selector("dim1", "nonexistent", null))
            .granularity(Granularities.DAY)
            .aggregators(aggregators(new CountAggregatorFactory("a0")))
            .context(getTimeseriesContextWithFloorTime(TIMESERIES_CONTEXT_BY_GRAN, "d0"))
            .build()
    ),
    ImmutableList.of()
);
}
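The asserted defaults come from Druid's null-handling mode. Below is a minimal sketch of just that behavior, assuming the druid-core artifact is on the classpath; NullHandling.initializeForTests() is the test-only initializer used throughout the Druid test suite, and the class name DefaultValueSketch is ours:

import org.apache.druid.common.config.NullHandling;

public class DefaultValueSketch {
  public static void main(String[] args) {
    // Test-only initializer; picks up the configured null-handling mode.
    NullHandling.initializeForTests();
    // null in SQL-compatible mode, 0.0 in default mode -- the value the
    // sum(m2) column above returns when the filter matches no rows.
    System.out.println("sum over zero rows -> " + NullHandling.defaultDoubleValue());
    // count over zero rows is always 0, so exp(0) + 10 = 11.0 in both modes.
  }
}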
use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
the class CalciteQueryTest method testStringEarliestInSubquery.
// This tests the off-heap (buffer) version of the EarliestAggregator (String).
@Test
public void testStringEarliestInSubquery() throws Exception {
// Cannot vectorize EARLIEST aggregator.
skipVectorize();
testQuery("SELECT SUM(val) FROM (SELECT dim2, EARLIEST(dim1, 10) AS val FROM foo GROUP BY dim2)", ImmutableList.of(GroupByQuery.builder().setDataSource(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE1).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setDimensions(dimensions(new DefaultDimensionSpec("dim2", "d0"))).setAggregatorSpecs(aggregators(new StringFirstAggregatorFactory("a0:a", "dim1", null, 10))).setPostAggregatorSpecs(ImmutableList.of(new FinalizingFieldAccessPostAggregator("a0", "a0:a"))).setContext(QUERY_CONTEXT_DEFAULT).build()).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setAggregatorSpecs(aggregators(new DoubleSumAggregatorFactory("_a0", null, "CAST(\"a0\", 'DOUBLE')", ExprMacroTable.nil()))).setContext(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(// [abc, def]
new Object[] { NullHandling.sqlCompatible() ? 12.1 : 10.1 }));
}
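For reference, the inner-query pieces the planner generates for EARLIEST(dim1, 10) can be built directly. A hedged sketch follows (the class name EarliestSketch is ours; in this constructor the third argument is the time column, where null falls back to __time, and the fourth caps the stored string bytes):

import org.apache.druid.query.aggregation.first.StringFirstAggregatorFactory;
import org.apache.druid.query.aggregation.post.FinalizingFieldAccessPostAggregator;

public class EarliestSketch {
  public static void main(String[] args) {
    // "a0:a" holds the intermediate (timestamp, value) pair for each group.
    StringFirstAggregatorFactory earliest =
        new StringFirstAggregatorFactory("a0:a", "dim1", null, 10);
    // The finalizer unwraps the pair and exposes just the string as "a0",
    // which the outer query then casts to DOUBLE and sums.
    FinalizingFieldAccessPostAggregator finalized =
        new FinalizingFieldAccessPostAggregator("a0", "a0:a");
    System.out.println(earliest.getName() + " -> " + finalized.getName());
  }
}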
use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
the class CalciteQueryTest method testExpressionAggregations.
@Test
public void testExpressionAggregations() throws Exception {
// Cannot vectorize due to expressions.
cannotVectorize();
final ExprMacroTable macroTable = CalciteTests.createExprMacroTable();
testQuery("SELECT\n" + " SUM(cnt * 3),\n" + " LN(SUM(cnt) + SUM(m1)),\n" + " MOD(SUM(cnt), 4),\n" + " SUM(CHARACTER_LENGTH(CAST(cnt * 10 AS VARCHAR))),\n" + " MAX(CHARACTER_LENGTH(dim2) + LN(m1))\n" + "FROM druid.foo", ImmutableList.of(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(querySegmentSpec(Filtration.eternity())).granularity(Granularities.ALL).aggregators(aggregators(new LongSumAggregatorFactory("a0", null, "(\"cnt\" * 3)", macroTable), new LongSumAggregatorFactory("a1", "cnt"), new DoubleSumAggregatorFactory("a2", "m1"), new LongSumAggregatorFactory("a3", null, "strlen(CAST((\"cnt\" * 10), 'STRING'))", macroTable), new DoubleMaxAggregatorFactory("a4", null, "(strlen(\"dim2\") + log(\"m1\"))", macroTable))).postAggregators(expressionPostAgg("p0", "log((\"a1\" + \"a2\"))"), expressionPostAgg("p1", "(\"a1\" % 4)")).context(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { 18L, 3.295836866004329, 2, 12L, 3f + (Math.log(5.0)) }));
}
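This test exercises both constructor shapes of the sum factories: a plain column sum and an expression sum evaluated through an ExprMacroTable. A minimal sketch of the contrast (the class name SumShapes and the doubled-m1 expression are ours):

import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;

public class SumShapes {
  public static void main(String[] args) {
    // Column form: sums the raw values of "m1".
    DoubleSumAggregatorFactory columnSum = new DoubleSumAggregatorFactory("a2", "m1");
    // Expression form: fieldName is null and each row's input is computed
    // from the expression; the macro table supplies any expression macros.
    DoubleSumAggregatorFactory exprSum =
        new DoubleSumAggregatorFactory("e0", null, "(\"m1\" * 2)", ExprMacroTable.nil());
    System.out.println(columnSum.getName() + " / " + exprSum.getName());
  }
}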
use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
the class CalciteQueryTest method testPrimitiveLatestInSubquery.
// This tests the off-heap (buffer) version of the LatestAggregator (Double/Float/Long).
@Test
public void testPrimitiveLatestInSubquery() throws Exception {
// Cannot vectorize LATEST aggregator.
skipVectorize();
testQuery("SELECT SUM(val1), SUM(val2), SUM(val3) FROM (SELECT dim2, LATEST(m1) AS val1, LATEST(cnt) AS val2, LATEST(m2) AS val3 FROM foo GROUP BY dim2)", ImmutableList.of(GroupByQuery.builder().setDataSource(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE1).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setDimensions(dimensions(new DefaultDimensionSpec("dim2", "d0"))).setAggregatorSpecs(aggregators(new FloatLastAggregatorFactory("a0:a", "m1", null), new LongLastAggregatorFactory("a1:a", "cnt", null), new DoubleLastAggregatorFactory("a2:a", "m2", null))).setPostAggregatorSpecs(ImmutableList.of(new FinalizingFieldAccessPostAggregator("a0", "a0:a"), new FinalizingFieldAccessPostAggregator("a1", "a1:a"), new FinalizingFieldAccessPostAggregator("a2", "a2:a"))).setContext(QUERY_CONTEXT_DEFAULT).build()).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setAggregatorSpecs(aggregators(new DoubleSumAggregatorFactory("_a0", "a0"), new LongSumAggregatorFactory("_a1", "a1"), new DoubleSumAggregatorFactory("_a2", "a2"))).setContext(QUERY_CONTEXT_DEFAULT).build()), NullHandling.sqlCompatible() ? ImmutableList.of(new Object[] { 18.0, 4L, 18.0 }) : ImmutableList.of(new Object[] { 15.0, 3L, 15.0 }));
}
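The plan shape here mirrors the EARLIEST test: the inner group-by keeps the latest value per dim2 behind a finalizing post-aggregator, and the outer query plainly sums the finalized column. A hedged sketch of just that pairing (the class name LatestThenSum is ours):

import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.query.aggregation.last.DoubleLastAggregatorFactory;
import org.apache.druid.query.aggregation.post.FinalizingFieldAccessPostAggregator;

public class LatestThenSum {
  public static void main(String[] args) {
    // Inner: latest m2 per group; a null time column falls back to __time.
    DoubleLastAggregatorFactory latest =
        new DoubleLastAggregatorFactory("a2:a", "m2", null);
    FinalizingFieldAccessPostAggregator finalized =
        new FinalizingFieldAccessPostAggregator("a2", "a2:a");
    // Outer: an ordinary double sum over the finalized per-group values.
    DoubleSumAggregatorFactory outerSum = new DoubleSumAggregatorFactory("_a2", "a2");
    System.out.println(latest.getName() + " -> " + finalized.getName() + " -> " + outerSum.getName());
  }
}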
use of org.apache.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
the class GroupByQueryRunnerTest method testGroupByWithAggsOnNumericDimensions.
@Test
public void testGroupByWithAggsOnNumericDimensions() {
// Cannot vectorize due to javascript aggregators.
cannotVectorize();
GroupByQuery query = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setDimFilter(new SelectorDimFilter("quality", "technology", null))
    .setAggregatorSpecs(
        QueryRunnerTestHelper.ROWS_COUNT,
        new LongSumAggregatorFactory("qlLong", "qualityLong"),
        new DoubleSumAggregatorFactory("qlFloat", "qualityLong"),
        new JavaScriptAggregatorFactory(
            "qlJs",
            ImmutableList.of("qualityLong"),
            "function(a,b) { return a + b; }",
            "function() { return 0; }",
            "function(a,b) { return a + b }",
            JavaScriptConfig.getEnabledInstance()
        ),
        new DoubleSumAggregatorFactory("qfFloat", "qualityFloat"),
        new LongSumAggregatorFactory("qfLong", "qualityFloat"),
        new JavaScriptAggregatorFactory(
            "qfJs",
            ImmutableList.of("qualityFloat"),
            "function(a,b) { return a + b; }",
            "function() { return 0; }",
            "function(a,b) { return a + b }",
            JavaScriptConfig.getEnabledInstance()
        )
    )
    .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
    .build();
List<ResultRow> expectedResults = Arrays.asList(
    makeRow(
        query,
        "2011-04-01",
        "alias", "technology",
        "rows", 1L,
        "qlLong", 1700L,
        "qlFloat", 1700.0,
        "qlJs", 1700.0,
        "qfFloat", 17000.0,
        "qfLong", 17000L,
        "qfJs", 17000.0
    ),
    makeRow(
        query,
        "2011-04-02",
        "alias", "technology",
        "rows", 1L,
        "qlLong", 1700L,
        "qlFloat", 1700.0,
        "qlJs", 1700.0,
        "qfFloat", 17000.0,
        "qfLong", 17000L,
        "qfJs", 17000.0
    )
);
Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
TestHelper.assertExpectedObjects(expectedResults, results, "numeric-dims");
}
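Note the deliberate type crossing above: the long column qualityLong is summed once as a long and once as a double, and the float column qualityFloat the other way around, which is why the expected rows carry both 1700L/1700.0 and 17000.0/17000L. A minimal sketch of that crossing (the class name CrossTypedSums is ours):

import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;

public class CrossTypedSums {
  public static void main(String[] args) {
    // Each factory coerces its input column to the aggregator's own type,
    // so a long column can back a double sum and vice versa.
    LongSumAggregatorFactory longOfLong = new LongSumAggregatorFactory("qlLong", "qualityLong");
    DoubleSumAggregatorFactory doubleOfLong = new DoubleSumAggregatorFactory("qlFloat", "qualityLong");
    DoubleSumAggregatorFactory doubleOfFloat = new DoubleSumAggregatorFactory("qfFloat", "qualityFloat");
    LongSumAggregatorFactory longOfFloat = new LongSumAggregatorFactory("qfLong", "qualityFloat");
    System.out.println(longOfLong.getName() + ", " + doubleOfLong.getName() + ", "
        + doubleOfFloat.getName() + ", " + longOfFloat.getName());
  }
}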