Search in sources:

Example 1 with DoubleMaxAggregatorFactory

Use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

The class GroupByRules, method translateAggregateCall.

/**
   * Translate an AggregateCall to Druid equivalents.
   *
   * @return translated aggregation, or null if translation failed.
   */
private static Aggregation translateAggregateCall(
        final PlannerContext plannerContext,
        final RowSignature sourceRowSignature,
        final Project project,
        final AggregateCall call,
        final DruidOperatorTable operatorTable,
        final List<Aggregation> existingAggregations,
        final int aggNumber,
        final boolean approximateCountDistinct
) {
    final List<DimFilter> filters = Lists.newArrayList();
    final List<String> rowOrder = sourceRowSignature.getRowOrder();
    final String name = aggOutputName(aggNumber);
    final SqlKind kind = call.getAggregation().getKind();
    final SqlTypeName outputType = call.getType().getSqlTypeName();
    if (call.filterArg >= 0) {
        // AGG(xxx) FILTER(WHERE yyy)
        if (project == null) {
            // We need some kind of projection to support filtered aggregations.
            return null;
        }
        final RexNode expression = project.getChildExps().get(call.filterArg);
        final DimFilter filter = Expressions.toFilter(operatorTable, plannerContext, sourceRowSignature, expression);
        if (filter == null) {
            return null;
        }
        filters.add(filter);
    }
    if (kind == SqlKind.COUNT && call.getArgList().isEmpty()) {
        // COUNT(*)
        return Aggregation.create(new CountAggregatorFactory(name)).filter(makeFilter(filters, sourceRowSignature));
    } else if (kind == SqlKind.COUNT && call.isDistinct()) {
        // COUNT(DISTINCT x)
        return approximateCountDistinct ? APPROX_COUNT_DISTINCT.toDruidAggregation(name, sourceRowSignature, operatorTable, plannerContext, existingAggregations, project, call, makeFilter(filters, sourceRowSignature)) : null;
    } else if (kind == SqlKind.COUNT || kind == SqlKind.SUM || kind == SqlKind.SUM0 || kind == SqlKind.MIN || kind == SqlKind.MAX || kind == SqlKind.AVG) {
        // Built-in agg, not distinct, not COUNT(*)
        boolean forceCount = false;
        final FieldOrExpression input;
        final int inputField = Iterables.getOnlyElement(call.getArgList());
        final RexNode rexNode = Expressions.fromFieldAccess(sourceRowSignature, project, inputField);
        final FieldOrExpression foe = FieldOrExpression.fromRexNode(operatorTable, plannerContext, rowOrder, rexNode);
        if (foe != null) {
            input = foe;
        } else if (rexNode.getKind() == SqlKind.CASE && ((RexCall) rexNode).getOperands().size() == 3) {
            // Possibly a CASE-style filtered aggregation. Styles supported:
            // A: SUM(CASE WHEN x = 'foo' THEN cnt END) => operands (x = 'foo', cnt, null)
            // B: SUM(CASE WHEN x = 'foo' THEN 1 ELSE 0 END) => operands (x = 'foo', 1, 0)
            // C: COUNT(CASE WHEN x = 'foo' THEN 'dummy' END) => operands (x = 'foo', 'dummy', null)
            // If the null and non-null args are switched, "flip" is set, which negates the filter.
            final RexCall caseCall = (RexCall) rexNode;
            final boolean flip = RexLiteral.isNullLiteral(caseCall.getOperands().get(1)) && !RexLiteral.isNullLiteral(caseCall.getOperands().get(2));
            final RexNode arg1 = caseCall.getOperands().get(flip ? 2 : 1);
            final RexNode arg2 = caseCall.getOperands().get(flip ? 1 : 2);
            // Operand 1: Filter
            final DimFilter filter = Expressions.toFilter(operatorTable, plannerContext, sourceRowSignature, caseCall.getOperands().get(0));
            if (filter == null) {
                return null;
            } else {
                filters.add(flip ? new NotDimFilter(filter) : filter);
            }
            if (call.getAggregation().getKind() == SqlKind.COUNT && arg1 instanceof RexLiteral && !RexLiteral.isNullLiteral(arg1) && RexLiteral.isNullLiteral(arg2)) {
                // Case C
                forceCount = true;
                input = null;
            } else if (call.getAggregation().getKind() == SqlKind.SUM && arg1 instanceof RexLiteral && ((Number) RexLiteral.value(arg1)).intValue() == 1 && arg2 instanceof RexLiteral && ((Number) RexLiteral.value(arg2)).intValue() == 0) {
                // Case B
                forceCount = true;
                input = null;
            } else if (RexLiteral.isNullLiteral(arg2)) {
                // Maybe case A
                input = FieldOrExpression.fromRexNode(operatorTable, plannerContext, rowOrder, arg1);
                if (input == null) {
                    return null;
                }
            } else {
                // Can't translate CASE into a filter.
                return null;
            }
        } else {
            // Can't translate operand.
            return null;
        }
        if (!forceCount) {
            Preconditions.checkNotNull(input, "WTF?! input was null for non-COUNT aggregation");
        }
        if (forceCount || kind == SqlKind.COUNT) {
            // COUNT(x)
            return Aggregation.create(new CountAggregatorFactory(name)).filter(makeFilter(filters, sourceRowSignature));
        } else {
            // Built-in aggregator that is not COUNT.
            final Aggregation retVal;
            final String fieldName = input.getFieldName();
            final String expression = input.getExpression();
            final boolean isLong = SqlTypeName.INT_TYPES.contains(outputType) || SqlTypeName.TIMESTAMP == outputType || SqlTypeName.DATE == outputType;
            if (kind == SqlKind.SUM || kind == SqlKind.SUM0) {
                retVal = isLong ? Aggregation.create(new LongSumAggregatorFactory(name, fieldName, expression)) : Aggregation.create(new DoubleSumAggregatorFactory(name, fieldName, expression));
            } else if (kind == SqlKind.MIN) {
                retVal = isLong ? Aggregation.create(new LongMinAggregatorFactory(name, fieldName, expression)) : Aggregation.create(new DoubleMinAggregatorFactory(name, fieldName, expression));
            } else if (kind == SqlKind.MAX) {
                retVal = isLong ? Aggregation.create(new LongMaxAggregatorFactory(name, fieldName, expression)) : Aggregation.create(new DoubleMaxAggregatorFactory(name, fieldName, expression));
            } else if (kind == SqlKind.AVG) {
                final String sumName = aggInternalName(aggNumber, "sum");
                final String countName = aggInternalName(aggNumber, "count");
                final AggregatorFactory sum = isLong ? new LongSumAggregatorFactory(sumName, fieldName, expression) : new DoubleSumAggregatorFactory(sumName, fieldName, expression);
                final AggregatorFactory count = new CountAggregatorFactory(countName);
                retVal = Aggregation.create(ImmutableList.of(sum, count), new ArithmeticPostAggregator(name, "quotient", ImmutableList.<PostAggregator>of(new FieldAccessPostAggregator(null, sumName), new FieldAccessPostAggregator(null, countName))));
            } else {
                // Not reached.
                throw new ISE("WTF?! Kind[%s] got into the built-in aggregator path somehow?!", kind);
            }
            return retVal.filter(makeFilter(filters, sourceRowSignature));
        }
    } else {
        // Not a built-in aggregator, check operator table.
        final SqlAggregator sqlAggregator = operatorTable.lookupAggregator(call.getAggregation().getName());
        return sqlAggregator != null ? sqlAggregator.toDruidAggregation(name, sourceRowSignature, operatorTable, plannerContext, existingAggregations, project, call, makeFilter(filters, sourceRowSignature)) : null;
    }
}
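
The CASE handling above (styles A, B and C) is easier to follow with a concrete query in mind. The sketch below is illustrative only, not the planner's actual output object: it shows roughly what a style-A aggregation such as SUM(CASE WHEN x = 'foo' THEN cnt END) amounts to on the Druid side once the filter has been peeled out of the CASE. The column names "x" and "cnt" and the output name "a0" are hypothetical, and FilteredAggregatorFactory is used here only to make the filter-plus-sum pairing explicit.

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.FilteredAggregatorFactory;
import io.druid.query.filter.SelectorDimFilter;

public class CaseFilteredSumSketch {
    // Sketch: SUM(CASE WHEN x = 'foo' THEN cnt END) -- style A above -- ends up as a
    // sum over "cnt" guarded by a selector filter on "x". Names are hypothetical.
    public static AggregatorFactory caseStyleSum() {
        return new FilteredAggregatorFactory(
            new DoubleSumAggregatorFactory("a0", "cnt"),
            new SelectorDimFilter("x", "foo", null)
        );
    }
}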
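
The AVG branch never emits an "average" aggregator: it splits the call into a sum and a count and divides them with a "quotient" ArithmeticPostAggregator. A minimal sketch of that shape, with a hypothetical input column "x" and hypothetical names ("a0:sum", "a0:count", "a0") standing in for whatever aggInternalName() and aggOutputName() would generate:

import com.google.common.collect.ImmutableList;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.aggregation.post.ArithmeticPostAggregator;
import io.druid.query.aggregation.post.FieldAccessPostAggregator;
import java.util.List;

public class AvgTranslationSketch {
    // AVG(x) is computed as sum(x) / count(); these are the two underlying aggregators.
    public static List<AggregatorFactory> avgAggregators() {
        return ImmutableList.<AggregatorFactory>of(
            new DoubleSumAggregatorFactory("a0:sum", "x"),
            new CountAggregatorFactory("a0:count")
        );
    }

    // The visible "a0" column is their quotient, computed at query time as a post-aggregator.
    public static PostAggregator avgPostAggregator() {
        return new ArithmeticPostAggregator(
            "a0",
            "quotient",
            ImmutableList.<PostAggregator>of(
                new FieldAccessPostAggregator(null, "a0:sum"),
                new FieldAccessPostAggregator(null, "a0:count")
            )
        );
    }
}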
Also used: RexLiteral(org.apache.calcite.rex.RexLiteral) ArithmeticPostAggregator(io.druid.query.aggregation.post.ArithmeticPostAggregator) DoubleMaxAggregatorFactory(io.druid.query.aggregation.DoubleMaxAggregatorFactory) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) DoubleMinAggregatorFactory(io.druid.query.aggregation.DoubleMinAggregatorFactory) LongMinAggregatorFactory(io.druid.query.aggregation.LongMinAggregatorFactory) RexCall(org.apache.calcite.rex.RexCall) Aggregation(io.druid.sql.calcite.aggregation.Aggregation) ISE(io.druid.java.util.common.ISE) LongMaxAggregatorFactory(io.druid.query.aggregation.LongMaxAggregatorFactory) NotDimFilter(io.druid.query.filter.NotDimFilter) FieldAccessPostAggregator(io.druid.query.aggregation.post.FieldAccessPostAggregator) DoubleSumAggregatorFactory(io.druid.query.aggregation.DoubleSumAggregatorFactory) PostAggregator(io.druid.query.aggregation.PostAggregator) SqlKind(org.apache.calcite.sql.SqlKind) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) PostAggregatorFactory(io.druid.sql.calcite.aggregation.PostAggregatorFactory) SqlAggregator(io.druid.sql.calcite.aggregation.SqlAggregator) ApproxCountDistinctSqlAggregator(io.druid.sql.calcite.aggregation.ApproxCountDistinctSqlAggregator) DimFilter(io.druid.query.filter.DimFilter) AndDimFilter(io.druid.query.filter.AndDimFilter) RexNode(org.apache.calcite.rex.RexNode)

Example 2 with DoubleMaxAggregatorFactory

Use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

The class GroupByQueryRunnerTest, method testDifferentGroupingSubqueryWithFilter.

@Test
public void testDifferentGroupingSubqueryWithFilter() {
    GroupByQuery subquery = GroupByQuery.builder()
        .setDataSource(QueryRunnerTestHelper.dataSource)
        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "quality")))
        .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
        .setGranularity(QueryRunnerTestHelper.dayGran)
        .build();
    GroupByQuery query = GroupByQuery.builder()
        .setDataSource(subquery)
        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
        .setAggregatorSpecs(Arrays.<AggregatorFactory>asList(new DoubleMaxAggregatorFactory("idx", "idx")))
        .setDimFilter(new OrDimFilter(Lists.<DimFilter>newArrayList(
            new SelectorDimFilter("quality", "automotive", null),
            new SelectorDimFilter("quality", "premium", null),
            new SelectorDimFilter("quality", "mezzanine", null),
            new SelectorDimFilter("quality", "business", null),
            new SelectorDimFilter("quality", "entertainment", null),
            new SelectorDimFilter("quality", "health", null),
            new SelectorDimFilter("quality", "news", null),
            new SelectorDimFilter("quality", "technology", null),
            new SelectorDimFilter("quality", "travel", null)
        )))
        .setGranularity(QueryRunnerTestHelper.dayGran)
        .build();
    List<Row> expectedResults = Arrays.asList(GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "idx", 2900.0), GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "idx", 2505.0));
    Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
    TestHelper.assertExpectedObjects(expectedResults, results, "");
}
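
The nine SelectorDimFilters OR-ed together above all test the same dimension against a fixed value set, so the same condition can be written more compactly with an InDimFilter (which already appears in this test class's imports). A minimal sketch assuming the same "quality" values; it should select the same rows, though the test itself keeps the OrDimFilter form:

import com.google.common.collect.ImmutableList;
import io.druid.query.filter.DimFilter;
import io.druid.query.filter.InDimFilter;

public class QualityInFilterSketch {
    // IN-style equivalent of OR(quality = 'automotive', quality = 'premium', ...).
    public static DimFilter qualityInFilter() {
        return new InDimFilter(
            "quality",
            ImmutableList.of(
                "automotive", "premium", "mezzanine", "business", "entertainment",
                "health", "news", "technology", "travel"
            ),
            null
        );
    }
}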
Also used: DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) RegexFilteredDimensionSpec(io.druid.query.dimension.RegexFilteredDimensionSpec) ExtractionDimensionSpec(io.druid.query.dimension.ExtractionDimensionSpec) ListFilteredDimensionSpec(io.druid.query.dimension.ListFilteredDimensionSpec) DimensionSpec(io.druid.query.dimension.DimensionSpec) DoubleMaxAggregatorFactory(io.druid.query.aggregation.DoubleMaxAggregatorFactory) SelectorDimFilter(io.druid.query.filter.SelectorDimFilter) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) OrDimFilter(io.druid.query.filter.OrDimFilter) Row(io.druid.data.input.Row) LongMaxAggregatorFactory(io.druid.query.aggregation.LongMaxAggregatorFactory) DoubleSumAggregatorFactory(io.druid.query.aggregation.DoubleSumAggregatorFactory) LongFirstAggregatorFactory(io.druid.query.aggregation.first.LongFirstAggregatorFactory) FilteredAggregatorFactory(io.druid.query.aggregation.FilteredAggregatorFactory) JavaScriptAggregatorFactory(io.druid.query.aggregation.JavaScriptAggregatorFactory) CardinalityAggregatorFactory(io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) HyperUniquesAggregatorFactory(io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) LongLastAggregatorFactory(io.druid.query.aggregation.last.LongLastAggregatorFactory) JavaScriptDimFilter(io.druid.query.filter.JavaScriptDimFilter) SearchQueryDimFilter(io.druid.query.filter.SearchQueryDimFilter) ExtractionDimFilter(io.druid.query.filter.ExtractionDimFilter) RegexDimFilter(io.druid.query.filter.RegexDimFilter) BoundDimFilter(io.druid.query.filter.BoundDimFilter) InDimFilter(io.druid.query.filter.InDimFilter) DimFilter(io.druid.query.filter.DimFilter) AndDimFilter(io.druid.query.filter.AndDimFilter) Test(org.junit.Test)

Example 3 with DoubleMaxAggregatorFactory

Use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

The class GroupByQueryRunnerTest, method testDifferentGroupingSubqueryMultipleAggregatorsOnSameField.

@Test
public void testDifferentGroupingSubqueryMultipleAggregatorsOnSameField() {
    GroupByQuery subquery = GroupByQuery.builder()
        .setDataSource(QueryRunnerTestHelper.dataSource)
        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
        .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
        .setPostAggregatorSpecs(Lists.<PostAggregator>newArrayList(
            new ArithmeticPostAggregator("post_agg", "+", Lists.<PostAggregator>newArrayList(
                new FieldAccessPostAggregator("idx", "idx"),
                new FieldAccessPostAggregator("idx", "idx")
            ))
        ))
        .setGranularity(QueryRunnerTestHelper.dayGran)
        .build();
    GroupByQuery query = GroupByQuery.builder()
        .setDataSource(subquery)
        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
        .setAggregatorSpecs(Arrays.<AggregatorFactory>asList(
            new DoubleMaxAggregatorFactory("idx1", "idx"),
            new DoubleMaxAggregatorFactory("idx2", "idx"),
            new DoubleMaxAggregatorFactory("idx3", "post_agg"),
            new DoubleMaxAggregatorFactory("idx4", "post_agg")
        ))
        .setGranularity(QueryRunnerTestHelper.dayGran)
        .build();
    List<Row> expectedResults = Arrays.asList(GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "idx1", 2900.0, "idx2", 2900.0, "idx3", 5800.0, "idx4", 5800.0), GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "idx1", 2505.0, "idx2", 2505.0, "idx3", 5010.0, "idx4", 5010.0));
    Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
    TestHelper.assertExpectedObjects(expectedResults, results, "");
}
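
Since the subquery's post_agg is simply idx + idx, the outer maxima over post_agg are exactly twice the maxima over idx (5800.0 = 2 × 2900.0 and 5010.0 = 2 × 2505.0), which is a quick sanity check on the expected rows. For reference, a sketch of the doubling post-aggregator on its own, mirroring the one built inline above:

import com.google.common.collect.ImmutableList;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.aggregation.post.ArithmeticPostAggregator;
import io.druid.query.aggregation.post.FieldAccessPostAggregator;

public class DoubledPostAggSketch {
    // post_agg = idx + idx, so any max over post_agg is twice the max over idx.
    public static PostAggregator doubledIdx() {
        return new ArithmeticPostAggregator(
            "post_agg",
            "+",
            ImmutableList.<PostAggregator>of(
                new FieldAccessPostAggregator("idx", "idx"),
                new FieldAccessPostAggregator("idx", "idx")
            )
        );
    }
}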
Also used: ArithmeticPostAggregator(io.druid.query.aggregation.post.ArithmeticPostAggregator) FieldAccessPostAggregator(io.druid.query.aggregation.post.FieldAccessPostAggregator) DoubleMaxAggregatorFactory(io.druid.query.aggregation.DoubleMaxAggregatorFactory) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) Row(io.druid.data.input.Row) DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) Test(org.junit.Test)

Example 4 with DoubleMaxAggregatorFactory

Use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

The class GroupByQueryRunnerTest, method testGroupByNestedWithInnerQueryNumericsWithLongTime.

@Test
public void testGroupByNestedWithInnerQueryNumericsWithLongTime() {
    if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
        expectedException.expect(UnsupportedOperationException.class);
        expectedException.expectMessage("GroupBy v1 only supports dimensions with an outputType of STRING.");
    }
    GroupByQuery subQuery = GroupByQuery.builder()
        .setDataSource(QueryRunnerTestHelper.dataSource)
        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
        .setDimensions(Lists.<DimensionSpec>newArrayList(
            new DefaultDimensionSpec("market", "alias"),
            new DefaultDimensionSpec("__time", "time_alias", ValueType.LONG),
            new DefaultDimensionSpec("index", "index_alias", ValueType.FLOAT)
        ))
        .setAggregatorSpecs(Arrays.<AggregatorFactory>asList(QueryRunnerTestHelper.rowsCount))
        .setGranularity(QueryRunnerTestHelper.allGran)
        .build();
    GroupByQuery outerQuery = GroupByQuery.builder()
        .setDataSource(subQuery)
        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
        .setDimensions(Lists.<DimensionSpec>newArrayList(
            new DefaultDimensionSpec("alias", "market"),
            new DefaultDimensionSpec("time_alias", "time_alias2", ValueType.LONG)
        ))
        .setAggregatorSpecs(Arrays.<AggregatorFactory>asList(
            new LongMaxAggregatorFactory("time_alias_max", "time_alias"),
            new DoubleMaxAggregatorFactory("index_alias_max", "index_alias")
        ))
        .setGranularity(QueryRunnerTestHelper.allGran)
        .build();
    List<Row> expectedResults = Arrays.asList(
        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "market", "spot", "time_alias2", 1301616000000L, "time_alias_max", 1301616000000L, "index_alias_max", 158.74722290039062),
        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "market", "spot", "time_alias2", 1301702400000L, "time_alias_max", 1301702400000L, "index_alias_max", 166.01605224609375),
        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "market", "total_market", "time_alias2", 1301616000000L, "time_alias_max", 1301616000000L, "index_alias_max", 1522.043701171875),
        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "market", "total_market", "time_alias2", 1301702400000L, "time_alias_max", 1301702400000L, "index_alias_max", 1321.375),
        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "market", "upfront", "time_alias2", 1301616000000L, "time_alias_max", 1301616000000L, "index_alias_max", 1447.3411865234375),
        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "market", "upfront", "time_alias2", 1301702400000L, "time_alias_max", 1301702400000L, "index_alias_max", 1144.3424072265625)
    );
    Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, outerQuery);
    TestHelper.assertExpectedObjects(expectedResults, results, "");
}
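
The inner query groups on typed dimension outputs, which is why the test expects an UnsupportedOperationException under the v1 strategy. Below is a minimal sketch contrasting the plain string form of DefaultDimensionSpec with the typed form used above; that the two-argument constructor yields a STRING output is an assumption inferred from how this test class uses it, not quoted from the dimension-spec source.

import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec;
import io.druid.segment.column.ValueType;

public class TypedDimensionSpecSketch {
    // Two-argument form: output is treated as a STRING dimension (assumption noted above),
    // which both groupBy strategies accept.
    public static DimensionSpec stringDim() {
        return new DefaultDimensionSpec("market", "alias");
    }

    // Three-argument form requests a typed (LONG) output; per the expected exception above,
    // only the v2 groupBy strategy accepts non-STRING dimension outputs.
    public static DimensionSpec longTimeDim() {
        return new DefaultDimensionSpec("__time", "time_alias", ValueType.LONG);
    }
}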
Also used: DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) RegexFilteredDimensionSpec(io.druid.query.dimension.RegexFilteredDimensionSpec) ExtractionDimensionSpec(io.druid.query.dimension.ExtractionDimensionSpec) ListFilteredDimensionSpec(io.druid.query.dimension.ListFilteredDimensionSpec) DimensionSpec(io.druid.query.dimension.DimensionSpec) DoubleMaxAggregatorFactory(io.druid.query.aggregation.DoubleMaxAggregatorFactory) Row(io.druid.data.input.Row) LongMaxAggregatorFactory(io.druid.query.aggregation.LongMaxAggregatorFactory) Test(org.junit.Test)

Example 5 with DoubleMaxAggregatorFactory

Use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

The class GroupByTimeseriesQueryRunnerTest, method testFullOnTimeseriesMaxMin.

// GroupBy handles timestamps differently when granularity is ALL
@Test
public void testFullOnTimeseriesMaxMin() {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource(QueryRunnerTestHelper.dataSource)
        .granularity(Granularities.ALL)
        .intervals(QueryRunnerTestHelper.fullOnInterval)
        .aggregators(Arrays.asList(
            new DoubleMaxAggregatorFactory("maxIndex", "index"),
            new DoubleMinAggregatorFactory("minIndex", "index")
        ))
        .descending(descending)
        .build();
    DateTime expectedEarliest = new DateTime("1970-01-01");
    DateTime expectedLast = new DateTime("2011-04-15");
    Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(runner.run(query, CONTEXT), Lists.<Result<TimeseriesResultValue>>newArrayList());
    Result<TimeseriesResultValue> result = results.iterator().next();
    Assert.assertEquals(expectedEarliest, result.getTimestamp());
    Assert.assertFalse(String.format("Timestamp[%s] > expectedLast[%s]", result.getTimestamp(), expectedLast), result.getTimestamp().isAfter(expectedLast));
    final TimeseriesResultValue value = result.getValue();
    Assert.assertEquals(result.toString(), 1870.06103515625, value.getDoubleMetric("maxIndex"), 0.0);
    Assert.assertEquals(result.toString(), 59.02102279663086, value.getDoubleMetric("minIndex"), 0.0);
}
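
Tying this back to Example 1: a SQL MAX/MIN over a non-integer column would fall into the MIN/MAX branches there and come out as roughly this same DoubleMax/DoubleMin pair. A minimal sketch, with hypothetical output names ("a0", "a1") in place of the generated aggOutputName() values:

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.aggregation.DoubleMinAggregatorFactory;
import java.util.Arrays;
import java.util.List;

public class MaxMinTranslationSketch {
    // Roughly what MAX("index") and MIN("index") reduce to when "index" is not an
    // integer/timestamp type: the double variants of the max/min aggregators.
    public static List<AggregatorFactory> maxMinAggregators() {
        return Arrays.<AggregatorFactory>asList(
            new DoubleMaxAggregatorFactory("a0", "index"),
            new DoubleMinAggregatorFactory("a1", "index")
        );
    }
}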
Also used: TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) DoubleMaxAggregatorFactory(io.druid.query.aggregation.DoubleMaxAggregatorFactory) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) DoubleMinAggregatorFactory(io.druid.query.aggregation.DoubleMinAggregatorFactory) DateTime(org.joda.time.DateTime) Result(io.druid.query.Result) TimeseriesQueryRunnerTest(io.druid.query.timeseries.TimeseriesQueryRunnerTest) Test(org.junit.Test)

Aggregations

DoubleMaxAggregatorFactory (io.druid.query.aggregation.DoubleMaxAggregatorFactory): 41 uses
Test (org.junit.Test): 35 uses
DoubleMinAggregatorFactory (io.druid.query.aggregation.DoubleMinAggregatorFactory): 33 uses
Result (io.druid.query.Result): 30 uses
PostAggregator (io.druid.query.aggregation.PostAggregator): 26 uses
DateTime (org.joda.time.DateTime): 26 uses
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec): 16 uses
HyperUniqueFinalizingPostAggregator (io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator): 15 uses
ExtractionDimensionSpec (io.druid.query.dimension.ExtractionDimensionSpec): 11 uses
HashMap (java.util.HashMap): 11 uses
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 10 uses
ArithmeticPostAggregator (io.druid.query.aggregation.post.ArithmeticPostAggregator): 9 uses
FieldAccessPostAggregator (io.druid.query.aggregation.post.FieldAccessPostAggregator): 9 uses
LookupExtractionFn (io.druid.query.lookup.LookupExtractionFn): 8 uses
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 7 uses
ConstantPostAggregator (io.druid.query.aggregation.post.ConstantPostAggregator): 7 uses
ListFilteredDimensionSpec (io.druid.query.dimension.ListFilteredDimensionSpec): 7 uses
LinkedHashMap (java.util.LinkedHashMap): 7 uses
Row (io.druid.data.input.Row): 6 uses
DoubleSumAggregatorFactory (io.druid.query.aggregation.DoubleSumAggregatorFactory): 5 uses