Example 21 with DoubleMaxAggregatorFactory

use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

the class GroupByRules method translateAggregateCall.

/**
   * Translate an AggregateCall to Druid equivalents.
   *
   * @return translated aggregation, or null if translation failed.
   */
private static Aggregation translateAggregateCall(
    final PlannerContext plannerContext,
    final RowSignature sourceRowSignature,
    final Project project,
    final AggregateCall call,
    final DruidOperatorTable operatorTable,
    final List<Aggregation> existingAggregations,
    final int aggNumber,
    final boolean approximateCountDistinct
) {
    final List<DimFilter> filters = Lists.newArrayList();
    final List<String> rowOrder = sourceRowSignature.getRowOrder();
    final String name = aggOutputName(aggNumber);
    final SqlKind kind = call.getAggregation().getKind();
    final SqlTypeName outputType = call.getType().getSqlTypeName();
    if (call.filterArg >= 0) {
        // AGG(xxx) FILTER(WHERE yyy)
        if (project == null) {
            // We need some kind of projection to support filtered aggregations.
            return null;
        }
        final RexNode expression = project.getChildExps().get(call.filterArg);
        final DimFilter filter = Expressions.toFilter(operatorTable, plannerContext, sourceRowSignature, expression);
        if (filter == null) {
            return null;
        }
        filters.add(filter);
    }
    if (kind == SqlKind.COUNT && call.getArgList().isEmpty()) {
        // COUNT(*)
        return Aggregation.create(new CountAggregatorFactory(name)).filter(makeFilter(filters, sourceRowSignature));
    } else if (kind == SqlKind.COUNT && call.isDistinct()) {
        // COUNT(DISTINCT x)
        return approximateCountDistinct ? APPROX_COUNT_DISTINCT.toDruidAggregation(name, sourceRowSignature, operatorTable, plannerContext, existingAggregations, project, call, makeFilter(filters, sourceRowSignature)) : null;
    } else if (kind == SqlKind.COUNT || kind == SqlKind.SUM || kind == SqlKind.SUM0 || kind == SqlKind.MIN || kind == SqlKind.MAX || kind == SqlKind.AVG) {
        // Built-in agg, not distinct, not COUNT(*)
        boolean forceCount = false;
        final FieldOrExpression input;
        final int inputField = Iterables.getOnlyElement(call.getArgList());
        final RexNode rexNode = Expressions.fromFieldAccess(sourceRowSignature, project, inputField);
        final FieldOrExpression foe = FieldOrExpression.fromRexNode(operatorTable, plannerContext, rowOrder, rexNode);
        if (foe != null) {
            input = foe;
        } else if (rexNode.getKind() == SqlKind.CASE && ((RexCall) rexNode).getOperands().size() == 3) {
            // Possibly a CASE-style filtered aggregation. Styles supported:
            // A: SUM(CASE WHEN x = 'foo' THEN cnt END) => operands (x = 'foo', cnt, null)
            // B: SUM(CASE WHEN x = 'foo' THEN 1 ELSE 0 END) => operands (x = 'foo', 1, 0)
            // C: COUNT(CASE WHEN x = 'foo' THEN 'dummy' END) => operands (x = 'foo', 'dummy', null)
            // If the null and non-null args are switched, "flip" is set, which negates the filter.
            final RexCall caseCall = (RexCall) rexNode;
            final boolean flip = RexLiteral.isNullLiteral(caseCall.getOperands().get(1)) && !RexLiteral.isNullLiteral(caseCall.getOperands().get(2));
            final RexNode arg1 = caseCall.getOperands().get(flip ? 2 : 1);
            final RexNode arg2 = caseCall.getOperands().get(flip ? 1 : 2);
            // Operand 1: Filter
            final DimFilter filter = Expressions.toFilter(operatorTable, plannerContext, sourceRowSignature, caseCall.getOperands().get(0));
            if (filter == null) {
                return null;
            } else {
                filters.add(flip ? new NotDimFilter(filter) : filter);
            }
            if (call.getAggregation().getKind() == SqlKind.COUNT && arg1 instanceof RexLiteral && !RexLiteral.isNullLiteral(arg1) && RexLiteral.isNullLiteral(arg2)) {
                // Case C
                forceCount = true;
                input = null;
            } else if (call.getAggregation().getKind() == SqlKind.SUM && arg1 instanceof RexLiteral && ((Number) RexLiteral.value(arg1)).intValue() == 1 && arg2 instanceof RexLiteral && ((Number) RexLiteral.value(arg2)).intValue() == 0) {
                // Case B
                forceCount = true;
                input = null;
            } else if (RexLiteral.isNullLiteral(arg2)) {
                // Maybe case A
                input = FieldOrExpression.fromRexNode(operatorTable, plannerContext, rowOrder, arg1);
                if (input == null) {
                    return null;
                }
            } else {
                // Can't translate CASE into a filter.
                return null;
            }
        } else {
            // Can't translate operand.
            return null;
        }
        if (!forceCount) {
            Preconditions.checkNotNull(input, "WTF?! input was null for non-COUNT aggregation");
        }
        if (forceCount || kind == SqlKind.COUNT) {
            // COUNT(x)
            return Aggregation.create(new CountAggregatorFactory(name)).filter(makeFilter(filters, sourceRowSignature));
        } else {
            // Built-in aggregator that is not COUNT.
            final Aggregation retVal;
            final String fieldName = input.getFieldName();
            final String expression = input.getExpression();
            final boolean isLong = SqlTypeName.INT_TYPES.contains(outputType) || SqlTypeName.TIMESTAMP == outputType || SqlTypeName.DATE == outputType;
            if (kind == SqlKind.SUM || kind == SqlKind.SUM0) {
                retVal = isLong ? Aggregation.create(new LongSumAggregatorFactory(name, fieldName, expression)) : Aggregation.create(new DoubleSumAggregatorFactory(name, fieldName, expression));
            } else if (kind == SqlKind.MIN) {
                retVal = isLong ? Aggregation.create(new LongMinAggregatorFactory(name, fieldName, expression)) : Aggregation.create(new DoubleMinAggregatorFactory(name, fieldName, expression));
            } else if (kind == SqlKind.MAX) {
                retVal = isLong ? Aggregation.create(new LongMaxAggregatorFactory(name, fieldName, expression)) : Aggregation.create(new DoubleMaxAggregatorFactory(name, fieldName, expression));
            } else if (kind == SqlKind.AVG) {
                final String sumName = aggInternalName(aggNumber, "sum");
                final String countName = aggInternalName(aggNumber, "count");
                final AggregatorFactory sum = isLong ? new LongSumAggregatorFactory(sumName, fieldName, expression) : new DoubleSumAggregatorFactory(sumName, fieldName, expression);
                final AggregatorFactory count = new CountAggregatorFactory(countName);
                retVal = Aggregation.create(ImmutableList.of(sum, count), new ArithmeticPostAggregator(name, "quotient", ImmutableList.<PostAggregator>of(new FieldAccessPostAggregator(null, sumName), new FieldAccessPostAggregator(null, countName))));
            } else {
                // Not reached.
                throw new ISE("WTF?! Kind[%s] got into the built-in aggregator path somehow?!", kind);
            }
            return retVal.filter(makeFilter(filters, sourceRowSignature));
        }
    } else {
        // Not a built-in aggregator, check operator table.
        final SqlAggregator sqlAggregator = operatorTable.lookupAggregator(call.getAggregation().getName());
        return sqlAggregator != null ? sqlAggregator.toDruidAggregation(name, sourceRowSignature, operatorTable, plannerContext, existingAggregations, project, call, makeFilter(filters, sourceRowSignature)) : null;
    }
}
Also used : RexLiteral(org.apache.calcite.rex.RexLiteral) ArithmeticPostAggregator(io.druid.query.aggregation.post.ArithmeticPostAggregator) DoubleMaxAggregatorFactory(io.druid.query.aggregation.DoubleMaxAggregatorFactory) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) DoubleMinAggregatorFactory(io.druid.query.aggregation.DoubleMinAggregatorFactory) LongMinAggregatorFactory(io.druid.query.aggregation.LongMinAggregatorFactory) RexCall(org.apache.calcite.rex.RexCall) Aggregation(io.druid.sql.calcite.aggregation.Aggregation) ISE(io.druid.java.util.common.ISE) LongMaxAggregatorFactory(io.druid.query.aggregation.LongMaxAggregatorFactory) NotDimFilter(io.druid.query.filter.NotDimFilter) FieldAccessPostAggregator(io.druid.query.aggregation.post.FieldAccessPostAggregator) DoubleSumAggregatorFactory(io.druid.query.aggregation.DoubleSumAggregatorFactory) PostAggregator(io.druid.query.aggregation.PostAggregator) SqlKind(org.apache.calcite.sql.SqlKind) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) PostAggregatorFactory(io.druid.sql.calcite.aggregation.PostAggregatorFactory) SqlAggregator(io.druid.sql.calcite.aggregation.SqlAggregator) ApproxCountDistinctSqlAggregator(io.druid.sql.calcite.aggregation.ApproxCountDistinctSqlAggregator) DimFilter(io.druid.query.filter.DimFilter) AndDimFilter(io.druid.query.filter.AndDimFilter) RexNode(org.apache.calcite.rex.RexNode)
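The tail of the MAX branch above is where DoubleMaxAggregatorFactory comes in: the isLong check on the SQL output type decides between the long and double variants. A minimal sketch of the two outcomes, using made-up aggregator and column names ("a0", "a1", "m1", "cnt") rather than anything produced by the planner:

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.aggregation.LongMaxAggregatorFactory;

public class MaxFactorySelectionSketch {
    public static void main(String[] args) {
        // Non-integer SQL output type (e.g. DOUBLE): isLong is false, so the double factory is built.
        // The third argument is the expression; it stays null when a plain field reference is aggregated.
        AggregatorFactory doubleMax = new DoubleMaxAggregatorFactory("a0", "m1", null);

        // INT_TYPES, TIMESTAMP, and DATE output types take the long variant instead.
        AggregatorFactory longMax = new LongMaxAggregatorFactory("a1", "cnt", null);

        System.out.println(doubleMax.getName() + " / " + longMax.getName());
    }
}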

Example 22 with DoubleMaxAggregatorFactory

use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

the class SchemalessTestFullTest method testFullOnTimeseries.

private void testFullOnTimeseries(QueryRunner runner, List<Result<TimeseriesResultValue>> expectedResults, String failMsg) {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource(dataSource)
        .granularity(allGran)
        .intervals(fullOnInterval)
        .aggregators(
            Lists.<AggregatorFactory>newArrayList(
                Iterables.concat(
                    commonAggregators,
                    Lists.newArrayList(
                        new DoubleMaxAggregatorFactory("maxIndex", "index"),
                        new DoubleMinAggregatorFactory("minIndex", "index")
                    )
                )
            )
        )
        .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
        .build();
    failMsg += " timeseries ";
    HashMap<String, Object> context = new HashMap<>();
    Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
    TestHelper.assertExpectedResults(expectedResults, actualResults, failMsg);
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) DoubleMaxAggregatorFactory(io.druid.query.aggregation.DoubleMaxAggregatorFactory) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) PostAggregator(io.druid.query.aggregation.PostAggregator) FieldAccessPostAggregator(io.druid.query.aggregation.post.FieldAccessPostAggregator) ArithmeticPostAggregator(io.druid.query.aggregation.post.ArithmeticPostAggregator) ConstantPostAggregator(io.druid.query.aggregation.post.ConstantPostAggregator) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) DoubleMinAggregatorFactory(io.druid.query.aggregation.DoubleMinAggregatorFactory) Result(io.druid.query.Result)

Example 23 with DoubleMaxAggregatorFactory

use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

the class SchemalessTestFullTest method testFilteredTimeseries.

private void testFilteredTimeseries(QueryRunner runner, List<Result<TimeseriesResultValue>> expectedResults, String failMsg) {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource(dataSource)
        .granularity(allGran)
        .intervals(fullOnInterval)
        .filters(marketDimension, "spot")
        .aggregators(
            Lists.<AggregatorFactory>newArrayList(
                Iterables.concat(
                    commonAggregators,
                    Lists.newArrayList(
                        new DoubleMaxAggregatorFactory("maxIndex", "index"),
                        new DoubleMinAggregatorFactory("minIndex", "index")
                    )
                )
            )
        )
        .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
        .build();
    failMsg += " filtered timeseries ";
    HashMap<String, Object> context = new HashMap<>();
    Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
    TestHelper.assertExpectedResults(expectedResults, actualResults, failMsg);
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) DoubleMaxAggregatorFactory(io.druid.query.aggregation.DoubleMaxAggregatorFactory) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) PostAggregator(io.druid.query.aggregation.PostAggregator) FieldAccessPostAggregator(io.druid.query.aggregation.post.FieldAccessPostAggregator) ArithmeticPostAggregator(io.druid.query.aggregation.post.ArithmeticPostAggregator) ConstantPostAggregator(io.druid.query.aggregation.post.ConstantPostAggregator) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) DoubleMinAggregatorFactory(io.druid.query.aggregation.DoubleMinAggregatorFactory) Result(io.druid.query.Result)
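Examples 22 and 23 differ only in the extra .filters(marketDimension, "spot") call; both append the same max/min pair over the index column to the shared commonAggregators list. Pulled out of the test harness, that pair is just the following (a sketch; none of the test fixtures are needed for it):

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.aggregation.DoubleMinAggregatorFactory;

import java.util.Arrays;
import java.util.List;

public class IndexBoundsAggregators {
    public static void main(String[] args) {
        // The two aggregators concatenated onto commonAggregators in both timeseries tests.
        List<AggregatorFactory> aggs = Arrays.asList(
            new DoubleMaxAggregatorFactory("maxIndex", "index"),
            new DoubleMinAggregatorFactory("minIndex", "index")
        );
        for (AggregatorFactory agg : aggs) {
            // requiredFields() reports the input columns each aggregator reads ("index" for both).
            System.out.println(agg.getName() + " reads " + agg.requiredFields());
        }
    }
}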

Example 24 with DoubleMaxAggregatorFactory

use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

the class SchemalessTestSimpleTest method testFullOnTopN.

//  @Test TODO: Handling of null values is inconsistent right now, need to make it all consistent and re-enable test
// TODO: Complain to Eric when you see this.  It shouldn't be like this...
public void testFullOnTopN() {
    TopNQuery query = new TopNQueryBuilder()
        .dataSource(dataSource)
        .granularity(allGran)
        .dimension(marketDimension)
        .metric(indexMetric)
        .threshold(3)
        .intervals(fullOnInterval)
        .aggregators(
            Lists.<AggregatorFactory>newArrayList(
                Iterables.concat(
                    commonAggregators,
                    Lists.newArrayList(
                        new DoubleMaxAggregatorFactory("maxIndex", "index"),
                        new DoubleMinAggregatorFactory("minIndex", "index")
                    )
                )
            )
        )
        .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
        .build();
    List<Result<TopNResultValue>> expectedResults = Arrays.asList(
        new Result<TopNResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(
                Arrays.<DimensionAndMetricValueExtractor>asList(
                    new DimensionAndMetricValueExtractor(
                        ImmutableMap.<String, Object>builder()
                            .put("market", "spot")
                            .put("rows", 4L)
                            .put("index", 400.0D)
                            .put("addRowsIndexConstant", 405.0D)
                            .put("uniques", 1.0002442201269182D)
                            .put("maxIndex", 100.0)
                            .put("minIndex", 100.0)
                            .build()
                    ),
                    new DimensionAndMetricValueExtractor(
                        ImmutableMap.<String, Object>builder()
                            .put("market", "")
                            .put("rows", 2L)
                            .put("index", 200.0D)
                            .put("addRowsIndexConstant", 203.0D)
                            .put("uniques", 0.0)
                            .put("maxIndex", 100.0D)
                            .put("minIndex", 100.0D)
                            .build()
                    ),
                    new DimensionAndMetricValueExtractor(
                        ImmutableMap.<String, Object>builder()
                            .put("market", "total_market")
                            .put("rows", 2L)
                            .put("index", 200.0D)
                            .put("addRowsIndexConstant", 203.0D)
                            .put("uniques", 1.0002442201269182D)
                            .put("maxIndex", 100.0D)
                            .put("minIndex", 100.0D)
                            .build()
                    )
                )
            )
        )
    );
    QueryRunner runner = TestQueryRunners.makeTopNQueryRunner(segment);
    HashMap<String, Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
}
Also used : TopNQueryBuilder(io.druid.query.topn.TopNQueryBuilder) TopNResultValue(io.druid.query.topn.TopNResultValue) DoubleMaxAggregatorFactory(io.druid.query.aggregation.DoubleMaxAggregatorFactory) PostAggregator(io.druid.query.aggregation.PostAggregator) FieldAccessPostAggregator(io.druid.query.aggregation.post.FieldAccessPostAggregator) ArithmeticPostAggregator(io.druid.query.aggregation.post.ArithmeticPostAggregator) ConstantPostAggregator(io.druid.query.aggregation.post.ConstantPostAggregator) HashMap(java.util.HashMap) DoubleMinAggregatorFactory(io.druid.query.aggregation.DoubleMinAggregatorFactory) DateTime(org.joda.time.DateTime) QueryRunner(io.druid.query.QueryRunner) Result(io.druid.query.Result) TopNQuery(io.druid.query.topn.TopNQuery) DimensionAndMetricValueExtractor(io.druid.query.topn.DimensionAndMetricValueExtractor)
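The TopN above ranks on indexMetric, but the metric can just as well name the max aggregation itself. A minimal sketch of that variant, assuming placeholder datasource, dimension, and interval strings (none of them come from the test fixtures) and the Granularities class from this version of the codebase:

import io.druid.java.util.common.granularity.Granularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.topn.TopNQuery;
import io.druid.query.topn.TopNQueryBuilder;

import java.util.Collections;

public class TopNByMaxIndexSketch {
    public static TopNQuery build() {
        return new TopNQueryBuilder()
            // Placeholder datasource, dimension, and interval; substitute real ones.
            .dataSource("testing")
            .granularity(Granularities.ALL)
            .dimension("market")
            // Rank results by the output of the max aggregator declared below.
            .metric("maxIndex")
            .threshold(3)
            .intervals("2011-01-12/2011-01-14")
            .aggregators(Collections.<AggregatorFactory>singletonList(
                new DoubleMaxAggregatorFactory("maxIndex", "index")))
            .build();
    }
}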

Example 25 with DoubleMaxAggregatorFactory

use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

the class MetadataTest method testMerge.

@Test
public void testMerge() {
    Assert.assertNull(Metadata.merge(null, null));
    Assert.assertNull(Metadata.merge(ImmutableList.<Metadata>of(), null));
    List<Metadata> metadataToBeMerged = new ArrayList<>();
    metadataToBeMerged.add(null);
    Assert.assertNull(Metadata.merge(metadataToBeMerged, null));
    //sanity merge check
    AggregatorFactory[] aggs = new AggregatorFactory[] { new LongMaxAggregatorFactory("n", "f") };
    Metadata m1 = new Metadata();
    m1.put("k", "v");
    m1.setAggregators(aggs);
    m1.setTimestampSpec(new TimestampSpec("ds", "auto", null));
    m1.setQueryGranularity(Granularities.ALL);
    m1.setRollup(Boolean.FALSE);
    Metadata m2 = new Metadata();
    m2.put("k", "v");
    m2.setAggregators(aggs);
    m2.setTimestampSpec(new TimestampSpec("ds", "auto", null));
    m2.setQueryGranularity(Granularities.ALL);
    m2.setRollup(Boolean.FALSE);
    Metadata merged = new Metadata();
    merged.put("k", "v");
    merged.setAggregators(new AggregatorFactory[] { new LongMaxAggregatorFactory("n", "n") });
    merged.setTimestampSpec(new TimestampSpec("ds", "auto", null));
    merged.setRollup(Boolean.FALSE);
    merged.setQueryGranularity(Granularities.ALL);
    Assert.assertEquals(merged, Metadata.merge(ImmutableList.of(m1, m2), null));
    //merge check with one metadata being null
    metadataToBeMerged.clear();
    metadataToBeMerged.add(m1);
    metadataToBeMerged.add(m2);
    metadataToBeMerged.add(null);
    merged.setAggregators(null);
    merged.setTimestampSpec(null);
    merged.setQueryGranularity(null);
    merged.setRollup(null);
    Assert.assertEquals(merged, Metadata.merge(metadataToBeMerged, null));
    //merge check with client explicitly providing merged aggregators
    AggregatorFactory[] explicitAggs = new AggregatorFactory[] { new DoubleMaxAggregatorFactory("x", "y") };
    merged.setAggregators(explicitAggs);
    Assert.assertEquals(merged, Metadata.merge(metadataToBeMerged, explicitAggs));
    merged.setTimestampSpec(new TimestampSpec("ds", "auto", null));
    merged.setQueryGranularity(Granularities.ALL);
    m1.setRollup(Boolean.TRUE);
    Assert.assertEquals(merged, Metadata.merge(ImmutableList.of(m1, m2), explicitAggs));
}
Also used : DoubleMaxAggregatorFactory(io.druid.query.aggregation.DoubleMaxAggregatorFactory) ArrayList(java.util.ArrayList) TimestampSpec(io.druid.data.input.impl.TimestampSpec) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) LongMaxAggregatorFactory(io.druid.query.aggregation.LongMaxAggregatorFactory) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) Test(org.junit.Test)
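The final assertions above exercise the explicit-override path of Metadata.merge: when the caller supplies a non-null aggregator array, it is used as the merged aggregators rather than anything derived from the individual segment metadata. A stripped-down sketch of that call shape (the empty Metadata instances are placeholders, and the io.druid.segment package for Metadata is assumed):

import com.google.common.collect.ImmutableList;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.segment.Metadata;

public class ExplicitAggregatorMergeSketch {
    public static void main(String[] args) {
        Metadata m1 = new Metadata();
        Metadata m2 = new Metadata();

        // Caller-provided aggregators override whatever the segments would merge to.
        AggregatorFactory[] explicitAggs = new AggregatorFactory[]{
            new DoubleMaxAggregatorFactory("x", "y")
        };

        Metadata merged = Metadata.merge(ImmutableList.of(m1, m2), explicitAggs);
        System.out.println(merged);
    }
}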

Aggregations

DoubleMaxAggregatorFactory (io.druid.query.aggregation.DoubleMaxAggregatorFactory): 41
Test (org.junit.Test): 35
DoubleMinAggregatorFactory (io.druid.query.aggregation.DoubleMinAggregatorFactory): 33
Result (io.druid.query.Result): 30
PostAggregator (io.druid.query.aggregation.PostAggregator): 26
DateTime (org.joda.time.DateTime): 26
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec): 16
HyperUniqueFinalizingPostAggregator (io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator): 15
ExtractionDimensionSpec (io.druid.query.dimension.ExtractionDimensionSpec): 11
HashMap (java.util.HashMap): 11
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 10
ArithmeticPostAggregator (io.druid.query.aggregation.post.ArithmeticPostAggregator): 9
FieldAccessPostAggregator (io.druid.query.aggregation.post.FieldAccessPostAggregator): 9
LookupExtractionFn (io.druid.query.lookup.LookupExtractionFn): 8
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 7
ConstantPostAggregator (io.druid.query.aggregation.post.ConstantPostAggregator): 7
ListFilteredDimensionSpec (io.druid.query.dimension.ListFilteredDimensionSpec): 7
LinkedHashMap (java.util.LinkedHashMap): 7
Row (io.druid.data.input.Row): 6
DoubleSumAggregatorFactory (io.druid.query.aggregation.DoubleSumAggregatorFactory): 5