Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveExcept in project hive by apache.
The class HiveExceptRewriteRule, method onMatch: it rewrites an EXCEPT / EXCEPT ALL set operation into aggregates over a tagged union of the two branches, followed by either a filter (DISTINCT) or a row-replicating UDTF (ALL).
// ~ Methods ----------------------------------------------------------------
public void onMatch(RelOptRuleCall call) {
  final HiveExcept hiveExcept = call.rel(0);
  final RelOptCluster cluster = hiveExcept.getCluster();
  final RexBuilder rexBuilder = cluster.getRexBuilder();
  Builder<RelNode> bldr = new ImmutableList.Builder<RelNode>();
  // 1st level GB: create a GB (all keys + VCol + count(1) as c) for each branch
  try {
    bldr.add(createFirstGB(hiveExcept.getInputs().get(0), true, cluster, rexBuilder));
    bldr.add(createFirstGB(hiveExcept.getInputs().get(1), false, cluster, rexBuilder));
  } catch (CalciteSemanticException e) {
    LOG.debug(e.toString());
    throw new RuntimeException(e);
  }
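  // createFirstGB tags each branch with a constant VCol (2 for the first input,
  // 1 for the second) so rows from the two branches can be told apart after the union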
  // create a union above all the branches;
  // the schema of the union is: all keys + VCol + c
  HiveRelNode union = new HiveUnion(cluster, TraitsUtil.getDefaultTraitSet(cluster), bldr.build());
  // 2nd level GB: create a GB (all keys + sum(c) as a + sum(VCol*c) as b)
  // over the union
  final List<RexNode> gbChildProjLst = Lists.newArrayList();
  final List<Integer> groupSetPositions = Lists.newArrayList();
  int unionColumnSize = union.getRowType().getFieldList().size();
  for (int cInd = 0; cInd < unionColumnSize; cInd++) {
    gbChildProjLst.add(rexBuilder.makeInputRef(union, cInd));
    // the last 2 columns are VCol and c
    if (cInd < unionColumnSize - 2) {
      groupSetPositions.add(cInd);
    }
  }
  try {
    gbChildProjLst.add(multiply(rexBuilder.makeInputRef(union, unionColumnSize - 2),
        rexBuilder.makeInputRef(union, unionColumnSize - 1), cluster, rexBuilder));
  } catch (CalciteSemanticException e) {
    LOG.debug(e.toString());
    throw new RuntimeException(e);
  }
  RelNode gbInputRel = null;
  try {
    // Here we create a project for two reasons:
    // (1) a GB only accepts its aggregate args as positions in the input, and we need to sum on VCol*c
    // (2) it lets us reuse the helper createSingleArgAggCall
    gbInputRel = HiveProject.create(union, gbChildProjLst, null);
  } catch (CalciteSemanticException e) {
    LOG.debug(e.toString());
    throw new RuntimeException(e);
  }
  // gbInputRel's schema is: all keys + VCol + c + VCol*c
  List<AggregateCall> aggregateCalls = Lists.newArrayList();
  RelDataType aggFnRetType = TypeConverter.convert(TypeInfoFactory.longTypeInfo,
      cluster.getTypeFactory());
  // sum(c)
  AggregateCall aggregateCall = HiveCalciteUtil.createSingleArgAggCall("sum", cluster,
      TypeInfoFactory.longTypeInfo, unionColumnSize - 1, aggFnRetType);
  aggregateCalls.add(aggregateCall);
  // sum(VCol*c)
  aggregateCall = HiveCalciteUtil.createSingleArgAggCall("sum", cluster,
      TypeInfoFactory.longTypeInfo, unionColumnSize, aggFnRetType);
  aggregateCalls.add(aggregateCall);
  final ImmutableBitSet groupSet = ImmutableBitSet.of(groupSetPositions);
  HiveRelNode aggregateRel = new HiveAggregate(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
      gbInputRel, groupSet, null, aggregateCalls);
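  // aggregateRel's schema is: all keys + a + b. If a key occurs m times in the
  // first branch and n times in the second, then a = m + n and b = 2m + n,
  // so m = b - a and n = 2a - b; both branches below decode m and n this way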
  if (!hiveExcept.all) {
    RelNode filterRel = null;
    try {
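      // EXCEPT DISTINCT keeps a key exactly once iff it appears in the first
      // branch but not the second, i.e. m > 0 and n == 0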
      filterRel = new HiveFilter(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), aggregateRel,
          makeFilterExprForExceptDistinct(aggregateRel, unionColumnSize, cluster, rexBuilder));
    } catch (CalciteSemanticException e) {
      LOG.debug(e.toString());
      throw new RuntimeException(e);
    }
    // finally add a project to project out the last 2 columns
    Set<Integer> projectOutColumnPositions = new HashSet<>();
    projectOutColumnPositions.add(filterRel.getRowType().getFieldList().size() - 2);
    projectOutColumnPositions.add(filterRel.getRowType().getFieldList().size() - 1);
    try {
      call.transformTo(
          HiveCalciteUtil.createProjectWithoutColumn(filterRel, projectOutColumnPositions));
    } catch (CalciteSemanticException e) {
      LOG.debug(e.toString());
      throw new RuntimeException(e);
    }
  } else {
    List<RexNode> originalInputRefs = Lists.transform(aggregateRel.getRowType().getFieldList(),
        new Function<RelDataTypeField, RexNode>() {
          @Override
          public RexNode apply(RelDataTypeField input) {
            return new RexInputRef(input.getIndex(), input.getType());
          }
        });
    List<RexNode> copyInputRefs = new ArrayList<>();
    try {
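      // for EXCEPT ALL each key must be emitted m - n times; this expression
      // computes that replication count from a and b for the UDTF below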
      copyInputRefs.add(makeExprForExceptAll(aggregateRel, unionColumnSize, cluster, rexBuilder));
    } catch (CalciteSemanticException e) {
      LOG.debug(e.toString());
      throw new RuntimeException(e);
    }
    for (int i = 0; i < originalInputRefs.size() - 2; i++) {
      copyInputRefs.add(originalInputRefs.get(i));
    }
    RelNode srcRel = null;
    try {
      srcRel = HiveProject.create(aggregateRel, copyInputRefs, null);
      HiveTableFunctionScan udtf = HiveCalciteUtil.createUDTFForSetOp(cluster, srcRel);
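      // the set-op UDTF re-emits each input row as many times as the count in
      // its first column, restoring EXCEPT ALL multiplicities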
      // finally add a project to project out the 1st column (the replication count)
      Set<Integer> projectOutColumnPositions = new HashSet<>();
      projectOutColumnPositions.add(0);
      call.transformTo(HiveCalciteUtil.createProjectWithoutColumn(udtf, projectOutColumnPositions));
    } catch (SemanticException e) {
      LOG.debug(e.toString());
      throw new RuntimeException(e);
    }
  }
}
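
Why sum(c) and sum(VCol*c) are enough to recover both branch counts is easiest to see numerically. Below is a minimal standalone sketch (not Hive code; the class name and sample data are made up) of the counting scheme, assuming VCol is 2 for the first branch and 1 for the second as produced by createFirstGB:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ExceptCountSketch {
  public static void main(String[] args) {
    List<String> r1 = List.of("x", "x", "y"); // first EXCEPT branch
    List<String> r2 = List.of("x", "z");      // second EXCEPT branch

    // per key: a = sum(c) = m + n, b = sum(VCol * c) = 2m + n,
    // where m / n are the key's row counts in r1 / r2
    Map<String, long[]> ab = new LinkedHashMap<>();
    for (String k : r1) { long[] v = ab.computeIfAbsent(k, x -> new long[2]); v[0]++; v[1] += 2; }
    for (String k : r2) { long[] v = ab.computeIfAbsent(k, x -> new long[2]); v[0]++; v[1] += 1; }

    for (Map.Entry<String, long[]> e : ab.entrySet()) {
      long a = e.getValue()[0], b = e.getValue()[1];
      long m = b - a;       // rows in the first branch
      long n = 2 * a - b;   // rows in the second branch
      boolean keepDistinct = m > 0 && n == 0; // EXCEPT DISTINCT keeps the key once
      long copiesAll = Math.max(m - n, 0);    // EXCEPT ALL emits m - n copies
      System.out.printf("%s: m=%d n=%d distinct=%b all=%d%n",
          e.getKey(), m, n, keepDistinct, copiesAll);
    }
  }
}

For {x, x, y} EXCEPT {x, z} this prints distinct=true only for y, and one copy each of x and y under ALL, matching EXCEPT DISTINCT and EXCEPT ALL semantics.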