Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlAggFunction in project drill by apache.
The class WindowPrule, method onMatch:
@Override
public void onMatch(RelOptRuleCall call) {
  final DrillWindowRel window = call.rel(0);
  RelNode input = call.rel(1);

  // TODO: Order window based on existing partition by
  // input.getTraitSet().subsumes()
  boolean partitionby = false;
  boolean addMerge = false;

  // The start index of the constant fields of DrillWindowRel
  final int startConstantsIndex = window.getInput().getRowType().getFieldCount();

  int constantShiftIndex = 0;
  for (final Ord<Window.Group> w : Ord.zip(window.groups)) {
    Window.Group windowBase = w.getValue();
    RelTraitSet traits = call.getPlanner().emptyTraitSet().plus(Prel.DRILL_PHYSICAL);

    // For empty Over-Clause
    if (windowBase.keys.isEmpty() && windowBase.orderKeys.getFieldCollations().isEmpty()) {
      DrillDistributionTrait distEmptyKeys =
          new DrillDistributionTrait(DrillDistributionTrait.DistributionType.SINGLETON);
      traits = traits.plus(distEmptyKeys);
    } else if (windowBase.keys.size() > 0) {
      DrillDistributionTrait distOnAllKeys =
          new DrillDistributionTrait(DrillDistributionTrait.DistributionType.HASH_DISTRIBUTED,
              ImmutableList.copyOf(getDistributionFields(windowBase)));
      partitionby = true;
      traits = traits.plus(distOnAllKeys);
    } else if (windowBase.orderKeys.getFieldCollations().size() > 0) {
      // if only the order-by clause is specified, there is a single partition
      // consisting of all the rows, so we do a distributed sort followed by a
      // single merge as the input of the window operator
      DrillDistributionTrait distKeys =
          new DrillDistributionTrait(DrillDistributionTrait.DistributionType.HASH_DISTRIBUTED,
              ImmutableList.copyOf(getDistributionFieldsFromCollation(windowBase)));
      traits = traits.plus(distKeys);
      if (!isSingleMode(call)) {
        addMerge = true;
      }
    }

    // Add collation trait if either partition-by or order-by is specified.
    if (partitionby || windowBase.orderKeys.getFieldCollations().size() > 0) {
      RelCollation collation = getCollation(windowBase);
      traits = traits.plus(collation);
    }

    RelNode convertedInput = convert(input, traits);

    if (addMerge) {
      traits = traits.plus(DrillDistributionTrait.SINGLETON);
      convertedInput = new SingleMergeExchangePrel(window.getCluster(), traits,
          convertedInput, windowBase.collation());
    }

    List<RelDataTypeField> newRowFields = Lists.newArrayList();
    newRowFields.addAll(convertedInput.getRowType().getFieldList());

    Iterable<RelDataTypeField> newWindowFields =
        Iterables.filter(window.getRowType().getFieldList(), new Predicate<RelDataTypeField>() {
          @Override
          public boolean apply(RelDataTypeField relDataTypeField) {
            return relDataTypeField.getName().startsWith("w" + w.i + "$");
          }
        });

    for (RelDataTypeField newField : newWindowFields) {
      newRowFields.add(newField);
    }

    RelDataType rowType = new RelRecordType(newRowFields);

    List<Window.RexWinAggCall> newWinAggCalls = Lists.newArrayList();
    for (Ord<Window.RexWinAggCall> aggOrd : Ord.zip(windowBase.aggCalls)) {
      Window.RexWinAggCall aggCall = aggOrd.getValue();

      // If the argument points at the constant and
      // additional fields have been generated by the Window below,
      // the index of constants will be shifted
      final List<RexNode> newOperandsOfWindowFunction = Lists.newArrayList();
      for (RexNode operand : aggCall.getOperands()) {
        if (operand instanceof RexInputRef) {
          final RexInputRef rexInputRef = (RexInputRef) operand;
          final int refIndex = rexInputRef.getIndex();

          // Check if this RexInputRef points at the constants
          if (rexInputRef.getIndex() >= startConstantsIndex) {
            operand = new RexInputRef(refIndex + constantShiftIndex,
                window.constants.get(refIndex - startConstantsIndex).getType());
          }
        }
        newOperandsOfWindowFunction.add(operand);
      }

      aggCall = new Window.RexWinAggCall(
          (SqlAggFunction) aggCall.getOperator(),
          aggCall.getType(),
          newOperandsOfWindowFunction,
          aggCall.ordinal,
          aggCall.distinct);

      newWinAggCalls.add(new Window.RexWinAggCall(
          (SqlAggFunction) aggCall.getOperator(),
          aggCall.getType(),
          aggCall.getOperands(),
          aggOrd.i,
          aggCall.distinct));
    }

    windowBase = new Window.Group(windowBase.keys, windowBase.isRows, windowBase.lowerBound,
        windowBase.upperBound, windowBase.orderKeys, newWinAggCalls);

    input = new WindowPrel(window.getCluster(), window.getTraitSet().merge(traits),
        convertedInput, window.getConstants(), rowType, windowBase);

    constantShiftIndex += windowBase.aggCalls.size();
  }

  call.transformTo(input);
}
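
The branching above is easiest to see in isolation. Below is a minimal, self-contained sketch (hypothetical class and helper names, no Drill or Calcite dependencies) of the per-group decision the rule makes: an empty OVER clause forces a SINGLETON distribution, a PARTITION BY hash-distributes on the partition keys, and an ORDER BY alone hash-distributes on the sort keys and additionally asks for a single merge below the window unless the plan is already in single mode.

// Simplified model of WindowPrule's distribution choice; not Drill's API.
import java.util.List;

class WindowDistributionSketch {
  enum Distribution { SINGLETON, HASH_DISTRIBUTED }

  static final class Decision {
    final Distribution distribution;
    final List<Integer> distributionKeys;
    final boolean needsSingleMerge;
    Decision(Distribution d, List<Integer> keys, boolean merge) {
      this.distribution = d;
      this.distributionKeys = keys;
      this.needsSingleMerge = merge;
    }
    @Override public String toString() {
      return distribution + " on " + distributionKeys + (needsSingleMerge ? " + single merge" : "");
    }
  }

  static Decision decide(List<Integer> partitionKeys, List<Integer> orderKeys, boolean singleMode) {
    if (partitionKeys.isEmpty() && orderKeys.isEmpty()) {
      // empty OVER(): everything goes to one fragment
      return new Decision(Distribution.SINGLETON, List.of(), false);
    } else if (!partitionKeys.isEmpty()) {
      // PARTITION BY: hash-distribute on the partition keys
      return new Decision(Distribution.HASH_DISTRIBUTED, partitionKeys, false);
    } else {
      // only ORDER BY: distributed sort, then merge into a single stream unless already single mode
      return new Decision(Distribution.HASH_DISTRIBUTED, orderKeys, !singleMode);
    }
  }

  public static void main(String[] args) {
    System.out.println(decide(List.of(), List.of(), false));    // SINGLETON
    System.out.println(decide(List.of(1), List.of(2), false));  // hash on [1]
    System.out.println(decide(List.of(), List.of(2), false));   // hash on [2] + single merge
  }
}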
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlAggFunction in project drill by apache.
The class DrillReduceAggregatesRule, method reduceAgg:
private RexNode reduceAgg(
    Aggregate oldAggRel,
    AggregateCall oldCall,
    List<AggregateCall> newCalls,
    Map<AggregateCall, RexNode> aggCallMapping,
    List<RexNode> inputExprs) {
  final SqlAggFunction sqlAggFunction =
      DrillCalciteWrapperUtility.extractSqlOperatorFromWrapper(oldCall.getAggregation());
  if (sqlAggFunction instanceof SqlSumAggFunction) {
    // replace original SUM(x) with
    // case COUNT(x) when 0 then null else SUM0(x) end
    return reduceSum(oldAggRel, oldCall, newCalls, aggCallMapping);
  }
  if (sqlAggFunction instanceof SqlAvgAggFunction) {
    // for the DECIMAL type keep the original AVG call: rewriting it through
    // SUM / COUNT causes the loss of the scale
    if (oldCall.getType().getSqlTypeName() == SqlTypeName.DECIMAL) {
      return oldAggRel.getCluster().getRexBuilder().addAggCall(
          oldCall, oldAggRel.getGroupCount(), newCalls, aggCallMapping,
          ImmutableList.of(getFieldType(oldAggRel.getInput(), oldCall.getArgList().get(0))));
    }
    final SqlKind subtype = sqlAggFunction.getKind();
    switch (subtype) {
      case AVG:
        // replace original AVG(x) with SUM(x) / COUNT(x)
        return reduceAvg(oldAggRel, oldCall, newCalls, aggCallMapping);
      case STDDEV_POP:
        // replace original STDDEV_POP(x) with
        //   SQRT((SUM(x * x) - SUM(x) * SUM(x) / COUNT(x)) / COUNT(x))
        return reduceStddev(oldAggRel, oldCall, true, true, newCalls, aggCallMapping, inputExprs);
      case STDDEV_SAMP:
        // replace original STDDEV_SAMP(x) with
        //   SQRT((SUM(x * x) - SUM(x) * SUM(x) / COUNT(x))
        //     / CASE COUNT(x) WHEN 1 THEN NULL ELSE COUNT(x) - 1 END)
        return reduceStddev(oldAggRel, oldCall, false, true, newCalls, aggCallMapping, inputExprs);
      case VAR_POP:
        // replace original VAR_POP(x) with
        //   (SUM(x * x) - SUM(x) * SUM(x) / COUNT(x)) / COUNT(x)
        return reduceStddev(oldAggRel, oldCall, true, false, newCalls, aggCallMapping, inputExprs);
      case VAR_SAMP:
        // replace original VAR_SAMP(x) with
        //   (SUM(x * x) - SUM(x) * SUM(x) / COUNT(x))
        //     / CASE COUNT(x) WHEN 1 THEN NULL ELSE COUNT(x) - 1 END
        return reduceStddev(oldAggRel, oldCall, false, false, newCalls, aggCallMapping, inputExprs);
      default:
        throw Util.unexpected(subtype);
    }
  } else {
    // anything else: preserve original call
    RexBuilder rexBuilder = oldAggRel.getCluster().getRexBuilder();
    final int nGroups = oldAggRel.getGroupCount();
    List<RelDataType> oldArgTypes = new ArrayList<>();
    List<Integer> ordinals = oldCall.getArgList();
    assert ordinals.size() <= inputExprs.size();
    for (int ordinal : ordinals) {
      oldArgTypes.add(inputExprs.get(ordinal).getType());
    }
    // if an equivalent call was already registered but with a
    // different RelDataType, add a fresh call and reference it directly
    if (aggCallMapping.containsKey(oldCall)
        && !aggCallMapping.get(oldCall).getType().equals(oldCall.getType())) {
      int index = newCalls.size() + nGroups;
      newCalls.add(oldCall);
      return rexBuilder.makeInputRef(oldCall.getType(), index);
    }
    return rexBuilder.addAggCall(oldCall, nGroups, newCalls, aggCallMapping, oldArgTypes);
  }
}
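
For reference, the rewrites that reduceStddev produces compute variance from SUM(x * x), SUM(x) and COUNT(x), dividing by COUNT(x) for the population variants and by COUNT(x) - 1 (NULL when the count is 1) for the sample variants. A small numeric illustration of those formulas (plain Java with a hypothetical class name, not Drill code):

import java.util.List;

class VarianceRewriteSketch {
  // Assumes a non-empty input; the sample variant returns null for a single row,
  // mirroring CASE COUNT(x) WHEN 1 THEN NULL ELSE COUNT(x) - 1 END.
  static Double variance(List<Double> xs, boolean biased) {
    long count = xs.size();
    double sum = 0, sumSq = 0;
    for (double x : xs) {
      sum += x;
      sumSq += x * x;
    }
    double numerator = sumSq - sum * sum / count;   // SUM(x*x) - SUM(x)*SUM(x)/COUNT(x)
    if (!biased && count <= 1) {
      return null;
    }
    long denominator = biased ? count : count - 1;
    return numerator / denominator;
  }

  public static void main(String[] args) {
    List<Double> xs = List.of(2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0);
    System.out.println("VAR_POP    = " + variance(xs, true));             // 4.0
    System.out.println("VAR_SAMP   = " + variance(xs, false));            // ~4.571
    System.out.println("STDDEV_POP = " + Math.sqrt(variance(xs, true)));  // 2.0
  }
}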
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlAggFunction in project drill by apache.
The class DrillReduceAggregatesRule, method reduceSum:
private RexNode reduceSum(
    Aggregate oldAggRel,
    AggregateCall oldCall,
    List<AggregateCall> newCalls,
    Map<AggregateCall, RexNode> aggCallMapping) {
  final PlannerSettings plannerSettings =
      (PlannerSettings) oldAggRel.getCluster().getPlanner().getContext();
  final boolean isInferenceEnabled = plannerSettings.isTypeInferenceEnabled();
  final int nGroups = oldAggRel.getGroupCount();
  RelDataTypeFactory typeFactory = oldAggRel.getCluster().getTypeFactory();
  RexBuilder rexBuilder = oldAggRel.getCluster().getRexBuilder();
  int arg = oldCall.getArgList().get(0);
  RelDataType argType = getFieldType(oldAggRel.getInput(), arg);

  final RelDataType sumType;
  final SqlAggFunction sumZeroAgg;
  if (isInferenceEnabled) {
    sumType = oldCall.getType();
  } else {
    sumType = typeFactory.createTypeWithNullability(oldCall.getType(), argType.isNullable());
  }
  sumZeroAgg = new DrillCalciteSqlAggFunctionWrapper(new SqlSumEmptyIsZeroAggFunction(), sumType);
  AggregateCall sumZeroCall = AggregateCall.create(sumZeroAgg, oldCall.isDistinct(),
      oldCall.isApproximate(), oldCall.getArgList(), -1, sumType, null);
  final SqlCountAggFunction countAgg = (SqlCountAggFunction) SqlStdOperatorTable.COUNT;
  final RelDataType countType = countAgg.getReturnType(typeFactory);
  AggregateCall countCall = AggregateCall.create(countAgg, oldCall.isDistinct(),
      oldCall.isApproximate(), oldCall.getArgList(), -1, countType, null);

  // NOTE: these references are with respect to the output
  // of newAggRel
  RexNode sumZeroRef = rexBuilder.addAggCall(sumZeroCall, nGroups, newCalls, aggCallMapping,
      ImmutableList.of(argType));
  if (!oldCall.getType().isNullable()) {
    // If SUM(x) is not nullable, the validator must have determined that nulls are
    // impossible (because the group is never empty and x is never null).
    // Therefore we translate to SUM0(x).
    return sumZeroRef;
  }
  RexNode countRef = rexBuilder.addAggCall(countCall, nGroups, newCalls, aggCallMapping,
      ImmutableList.of(argType));
  return rexBuilder.makeCall(SqlStdOperatorTable.CASE,
      rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
          countRef, rexBuilder.makeExactLiteral(BigDecimal.ZERO)),
      rexBuilder.constantNull(),
      sumZeroRef);
}
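
The effect of this rewrite can be sketched at the value level: a nullable SUM(x) becomes CASE WHEN COUNT(x) = 0 THEN NULL ELSE $SUM0(x) END, where $SUM0 returns 0 rather than NULL on empty input. A plain-Java sketch (hypothetical class name, not planner code) of that behaviour:

import java.util.Arrays;
import java.util.List;
import java.util.Objects;

class SumRewriteSketch {
  static long sumEmptyIsZero(List<Integer> xs) {    // $SUM0: nulls ignored, empty input -> 0
    return xs.stream().filter(Objects::nonNull).mapToLong(Integer::longValue).sum();
  }

  static long countNonNull(List<Integer> xs) {      // COUNT(x): nulls ignored
    return xs.stream().filter(Objects::nonNull).count();
  }

  static Long nullableSum(List<Integer> xs) {       // SUM(x) via the CASE rewrite
    long count = countNonNull(xs);
    return count == 0 ? null : sumEmptyIsZero(xs);
  }

  public static void main(String[] args) {
    System.out.println(nullableSum(Arrays.asList(1, null, 3)));      // 4
    System.out.println(nullableSum(Arrays.asList((Integer) null)));  // null: no non-null values
  }
}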
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlAggFunction in project drill by apache.
The class AggPrelBase, method createKeysAndExprs:
protected void createKeysAndExprs() {
  final List<String> childFields = getInput().getRowType().getFieldNames();
  final List<String> fields = getRowType().getFieldNames();

  for (int group : BitSets.toIter(groupSet)) {
    FieldReference fr = FieldReference.getWithQuotedRef(childFields.get(group));
    keys.add(new NamedExpression(fr, fr));
  }

  for (Ord<AggregateCall> aggCall : Ord.zip(aggCalls)) {
    int aggExprOrdinal = groupSet.cardinality() + aggCall.i;
    FieldReference ref = FieldReference.getWithQuotedRef(fields.get(aggExprOrdinal));
    LogicalExpression expr = toDrill(aggCall.e, childFields);
    NamedExpression ne = new NamedExpression(expr, ref);
    aggExprs.add(ne);

    if (getOperatorPhase() == OperatorPhase.PHASE_1of2) {
      if (aggCall.e.getAggregation().getName().equals("COUNT")) {
        // If we are doing a COUNT aggregate in Phase1of2, then in Phase2of2 we should
        // SUM the partial COUNTs produced by Phase1of2.
        SqlAggFunction sumAggFun = new SqlSumCountAggFunction(aggCall.e.getType());
        AggregateCall newAggCall = AggregateCall.create(sumAggFun, aggCall.e.isDistinct(),
            aggCall.e.isApproximate(), Collections.singletonList(aggExprOrdinal),
            aggCall.e.filterArg, aggCall.e.getType(), aggCall.e.getName());
        phase2AggCallList.add(newAggCall);
      } else {
        AggregateCall newAggCall = AggregateCall.create(aggCall.e.getAggregation(),
            aggCall.e.isDistinct(), aggCall.e.isApproximate(),
            Collections.singletonList(aggExprOrdinal), aggCall.e.filterArg,
            aggCall.e.getType(), aggCall.e.getName());
        phase2AggCallList.add(newAggCall);
      }
    }
  }
}
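
The phase-2 call list built here follows the usual two-phase scheme: phase 1 computes partial aggregates per fragment, and phase 2 combines them, which for COUNT means summing the partial counts rather than counting again. A simplified sketch (plain Java, hypothetical class and data, not Drill's operators):

import java.util.List;
import java.util.stream.Collectors;

class TwoPhaseCountSketch {
  static long phase1Count(List<String> fragmentRows) {       // PHASE_1of2: local COUNT(*) per fragment
    return fragmentRows.size();
  }

  static long phase2SumOfCounts(List<Long> partialCounts) {  // PHASE_2of2: SUM of the partial COUNTs
    return partialCounts.stream().mapToLong(Long::longValue).sum();
  }

  public static void main(String[] args) {
    List<List<String>> fragments =
        List.of(List.of("a", "b"), List.of("c"), List.of("d", "e", "f"));
    List<Long> partials = fragments.stream()
        .map(TwoPhaseCountSketch::phase1Count)
        .collect(Collectors.toList());
    System.out.println(phase2SumOfCounts(partials));         // 6, same as COUNT(*) over all rows
  }
}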
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlAggFunction in project flink by apache.
The class HiveParserUtils, method makeOver:
/**
* Proxy to {@link RexBuilder#makeOver(RelDataType, SqlAggFunction, List, List,
* com.google.common.collect.ImmutableList, RexWindowBound, RexWindowBound, boolean, boolean,
* boolean, boolean, boolean)}.
*/
public static RexNode makeOver(
    RexBuilder rexBuilder, RelDataType type, SqlAggFunction operator, List<RexNode> exprs,
    List<RexNode> partitionKeys, List<RexFieldCollation> orderKeys, RexWindowBound lowerBound,
    RexWindowBound upperBound, boolean physical, boolean allowPartial, boolean nullWhenCountZero,
    boolean distinct, boolean ignoreNulls) {
  Preconditions.checkState(immutableListClz != null || shadedImmutableListClz != null,
      "Neither original nor shaded guava class can be found");
  Method method = null;
  final String methodName = "makeOver";
  final int orderKeysIndex = 4;
  Class[] argTypes = new Class[] {
      RelDataType.class, SqlAggFunction.class, List.class, List.class, null,
      RexWindowBound.class, RexWindowBound.class, boolean.class, boolean.class,
      boolean.class, boolean.class, boolean.class};
  if (immutableListClz != null) {
    argTypes[orderKeysIndex] = immutableListClz;
    method = HiveReflectionUtils.tryGetMethod(rexBuilder.getClass(), methodName, argTypes);
  }
  if (method == null) {
    Preconditions.checkState(shadedImmutableListClz != null,
        String.format("Shaded guava class not found, but method %s takes shaded parameter", methodName));
    argTypes[orderKeysIndex] = shadedImmutableListClz;
    method = HiveReflectionUtils.tryGetMethod(rexBuilder.getClass(), methodName, argTypes);
  }
  Preconditions.checkState(method != null, "Neither original nor shaded method can be found");
  Object orderKeysArg = toImmutableList(orderKeys);
  Object[] args = new Object[] {type, operator, exprs, partitionKeys, orderKeysArg, lowerBound,
      upperBound, physical, allowPartial, nullWhenCountZero, distinct, ignoreNulls};
  try {
    return (RexNode) method.invoke(rexBuilder, args);
  } catch (InvocationTargetException | IllegalAccessException e) {
    throw new RuntimeException("Failed to invoke " + methodName, e);
  }
}
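
The reflective lookup above exists because the ImmutableList parameter of RexBuilder#makeOver may come from either the original guava or a relocated (shaded) copy. A condensed sketch of the same pattern (hypothetical class name, simplified error handling, and an example shaded package prefix that may not match any given build):

import java.lang.reflect.Method;

class ShadedMethodLookupSketch {
  // Returns the first class that can be loaded from the candidate names, or null.
  static Class<?> loadFirstAvailable(String... classNames) {
    for (String name : classNames) {
      try {
        return Class.forName(name);
      } catch (ClassNotFoundException ignored) {
        // try the next candidate
      }
    }
    return null;
  }

  public static void main(String[] args) throws Exception {
    // The shaded name below is only an example of a relocated package prefix.
    Class<?> listClz = loadFirstAvailable(
        "com.google.common.collect.ImmutableList",
        "org.apache.flink.calcite.shaded.com.google.common.collect.ImmutableList");
    if (listClz == null) {
      throw new IllegalStateException("Neither original nor shaded guava class can be found");
    }
    // Resolve a method whose parameter types depend on which guava variant is on the classpath.
    Method copyOf = listClz.getMethod("copyOf", Iterable.class);
    System.out.println("Resolved " + copyOf + " from " + listClz.getName());
  }
}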