Use of org.apache.calcite.rel.core.Project in project drill (by axbaretto): class DrillPushProjIntoScan, method onMatch.
/**
 * Attempts to push the projection's column list down into the underlying scan,
 * replacing the (Project, TableScan) pair with Drill-logical equivalents.
 */
@Override
public void onMatch(RelOptRuleCall call) {
  final Project proj = call.rel(0);
  final TableScan scan = call.rel(1);
  try {
    final ProjectPushInfo columnInfo =
        PrelUtil.getColumns(scan.getRowType(), proj.getProjects());

    // Locate the DrillTable: it is either wrapped directly in the RelOptTable
    // or reachable through a DrillTranslatableTable.
    DrillTable table = scan.getTable().unwrap(DrillTable.class);
    if (table == null) {
      table = scan.getTable().unwrap(DrillTranslatableTable.class).getDrillTable();
    }

    // Bail out unless the group scan can actually absorb the projected columns.
    final boolean pushable = columnInfo != null
        && !columnInfo.isStarQuery()
        && table.getGroupScan().canPushdownProjects(columnInfo.columns);
    if (!pushable) {
      return;
    }

    final DrillScanRel newScan = new DrillScanRel(
        scan.getCluster(),
        scan.getTraitSet().plus(DrillRel.DRILL_LOGICAL),
        scan.getTable(),
        columnInfo.createNewRowType(proj.getInput().getCluster().getTypeFactory()),
        columnInfo.columns);

    // Rewrite every projection expression against the narrowed scan schema.
    final List<RexNode> rewritten = Lists.newArrayList();
    for (RexNode expr : proj.getChildExps()) {
      rewritten.add(expr.accept(columnInfo.getInputRewriter()));
    }

    final DrillProjectRel newProj = new DrillProjectRel(
        proj.getCluster(),
        proj.getTraitSet().plus(DrillRel.DRILL_LOGICAL),
        newScan,
        rewritten,
        proj.getRowType());

    // Drop the projection entirely when it is a pure identity mapping.
    call.transformTo(ProjectRemoveRule.isTrivial(newProj) ? newScan : newProj);
  } catch (IOException e) {
    throw new DrillRuntimeException(e);
  }
}
Use of org.apache.calcite.rel.core.Project in project drill (by axbaretto): class DrillProjectRule, method onMatch.
/**
 * Converts a logical Project into its Drill-logical counterpart, first
 * converting the child node to the Drill-logical calling convention.
 */
@Override
public void onMatch(RelOptRuleCall call) {
  final Project project = call.rel(0);
  final RelNode child = project.getInput();

  // Both the new project and its converted input carry the Drill-logical trait.
  final RelTraitSet projectTraits = project.getTraitSet().plus(DrillRel.DRILL_LOGICAL);
  final RelNode convertedChild =
      convert(child, child.getTraitSet().plus(DrillRel.DRILL_LOGICAL).simplify());

  final DrillProjectRel drillProject = new DrillProjectRel(
      project.getCluster(),
      projectTraits,
      convertedChild,
      project.getProjects(),
      project.getRowType());
  call.transformTo(drillProject);
}
Use of org.apache.calcite.rel.core.Project in project hive (by apache): class ASTConverter, method convert.
/**
 * Converts the RelNode tree rooted at {@code root} into a Hive {@link ASTNode}
 * query AST, assembling the clauses in standard SQL order:
 * FROM, WHERE, GROUP BY, HAVING, SELECT, then ORDER BY / LIMIT.
 *
 * Relies on instance fields ({@code from}, {@code where}, {@code groupBy},
 * {@code having}, {@code select}, {@code orderLimit}) populated by the
 * QBVisitor walk in step 1.
 *
 * @return the assembled AST for this query block
 * @throws CalciteSemanticException if the aggregate's group type is not recognized
 */
private ASTNode convert() throws CalciteSemanticException {
    /*
     * 1. Walk RelNode Graph; note from, where, gBy.. nodes.
     */
    new QBVisitor().go(root);
    /*
     * 2. convert from node.
     */
    QueryBlockInfo qb = convertSource(from);
    // The source's schema becomes the name-resolution context for later clauses.
    schema = qb.schema;
    hiveAST.from = ASTBuilder.construct(HiveParser.TOK_FROM, "TOK_FROM").add(qb.ast).node();
    /*
     * 3. convert filterNode
     */
    if (where != null) {
        ASTNode cond = where.getCondition().accept(new RexVisitor(schema, false, root.getCluster().getRexBuilder()));
        hiveAST.where = ASTBuilder.where(cond);
        // Link both the raw filter and its tree signature so the plan mapper
        // can correlate this AST condition with the RelNode later.
        planMapper.link(cond, where);
        planMapper.link(cond, RelTreeSignature.of(where));
    }
    /*
     * 4. GBy
     */
    if (groupBy != null) {
        ASTBuilder b;
        boolean groupingSetsExpression = false;
        Group aggregateType = groupBy.getGroupType();
        // SIMPLE group-bys emit TOK_GROUPBY; ROLLUP/CUBE/OTHER are all
        // rendered as explicit grouping-sets expressions.
        switch(aggregateType) {
            case SIMPLE:
                b = ASTBuilder.construct(HiveParser.TOK_GROUPBY, "TOK_GROUPBY");
                break;
            case ROLLUP:
            case CUBE:
            case OTHER:
                b = ASTBuilder.construct(HiveParser.TOK_GROUPING_SETS, "TOK_GROUPING_SETS");
                groupingSetsExpression = true;
                break;
            default:
                throw new CalciteSemanticException("Group type not recognized");
        }
        HiveAggregate hiveAgg = (HiveAggregate) groupBy;
        if (hiveAgg.getAggregateColumnsOrder() != null) {
            // Aggregation columns may have been sorted in specific order
            for (int pos : hiveAgg.getAggregateColumnsOrder()) {
                addRefToBuilder(b, groupBy.getGroupSet().nth(pos));
            }
            // Then append the remaining group columns that were not part of
            // the explicit ordering, in their natural position order.
            for (int pos = 0; pos < groupBy.getGroupCount(); pos++) {
                if (!hiveAgg.getAggregateColumnsOrder().contains(pos)) {
                    addRefToBuilder(b, groupBy.getGroupSet().nth(pos));
                }
            }
        } else {
            // Aggregation columns have not been reordered
            for (int i : groupBy.getGroupSet()) {
                addRefToBuilder(b, i);
            }
        }
        // Grouping sets expressions
        if (groupingSetsExpression) {
            for (ImmutableBitSet groupSet : groupBy.getGroupSets()) {
                ASTBuilder expression = ASTBuilder.construct(HiveParser.TOK_GROUPING_SETS_EXPRESSION, "TOK_GROUPING_SETS_EXPRESSION");
                for (int i : groupSet) {
                    addRefToBuilder(expression, i);
                }
                b.add(expression);
            }
        }
        // An empty group set (e.g. a global aggregate) yields no GROUP BY clause.
        if (!groupBy.getGroupSet().isEmpty()) {
            hiveAST.groupBy = b.node();
        }
        // Re-root the schema on the aggregate's output for HAVING/SELECT resolution.
        schema = new Schema(schema, groupBy);
    }
    /*
     * 5. Having
     */
    if (having != null) {
        ASTNode cond = having.getCondition().accept(new RexVisitor(schema, false, root.getCluster().getRexBuilder()));
        hiveAST.having = ASTBuilder.having(cond);
    }
    /*
     * 6. Project
     */
    ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_SELECT, "TOK_SELECT");
    if (select instanceof Project) {
        List<RexNode> childExps = ((Project) select).getProjects();
        if (childExps.isEmpty()) {
            // An empty projection list is rendered as the constant "SELECT 1".
            RexLiteral r = select.getCluster().getRexBuilder().makeExactLiteral(new BigDecimal(1));
            ASTNode selectExpr = ASTBuilder.selectExpr(ASTBuilder.literal(r), "1");
            b.add(selectExpr);
        } else {
            int i = 0;
            for (RexNode r : childExps) {
                // Literal expressions get the "use literal as-is" RexVisitor mode.
                ASTNode expr = r.accept(new RexVisitor(schema, r instanceof RexLiteral, select.getCluster().getRexBuilder()));
                // Output aliases come from the Project's row type, positionally.
                String alias = select.getRowType().getFieldNames().get(i++);
                ASTNode selectExpr = ASTBuilder.selectExpr(expr, alias);
                b.add(selectExpr);
            }
        }
        hiveAST.select = b.node();
    } else {
        // select is UDTF
        HiveTableFunctionScan udtf = (HiveTableFunctionScan) select;
        List<ASTNode> children = new ArrayList<>();
        RexCall call = (RexCall) udtf.getCall();
        // Convert each UDTF operand, then wrap the call in a single TOK_SELEXPR
        // carrying the output column aliases.
        for (RexNode r : call.getOperands()) {
            ASTNode expr = r.accept(new RexVisitor(schema, r instanceof RexLiteral, select.getCluster().getRexBuilder()));
            children.add(expr);
        }
        ASTBuilder sel = ASTBuilder.construct(HiveParser.TOK_SELEXPR, "TOK_SELEXPR");
        ASTNode function = buildUDTFAST(call.getOperator().getName(), children);
        sel.add(function);
        for (String alias : udtf.getRowType().getFieldNames()) {
            sel.add(HiveParser.Identifier, alias);
        }
        b.add(sel);
        hiveAST.select = b.node();
    }
    /*
     * 7. Order Use in Order By from the block above. RelNode has no pointer to
     * parent hence we need to go top down; but OB at each block really belong
     * to its src/from. Hence the need to pass in sort for each block from
     * its parent.
     * 8. Limit
     */
    convertOrderToASTNode(orderLimit);
    return hiveAST.getAST();
}
Use of org.apache.calcite.rel.core.Project in project hive (by apache): class PlanModifierForASTConv, method convertOpTree.
/**
 * Rewrites the optimized Calcite plan into a shape that can be translated back
 * into a Hive AST: guarantees a Project/Sort/Exchange at the root, recursively
 * normalizes the tree, optionally aligns column order, and fixes up the
 * top-level SELECT's schema and column names against {@code resultSchema}.
 *
 * @param rel          root of the optimized plan
 * @param resultSchema expected output schema of the query
 * @param alignColumns whether to propagate column-order alignment through the plan
 * @return the rewritten plan root
 * @throws CalciteSemanticException if a rewrite step fails semantically
 */
public static RelNode convertOpTree(RelNode rel, List<FieldSchema> resultSchema, boolean alignColumns) throws CalciteSemanticException {
  RelNode topNode = rel;
  debugPlan("Original plan for PlanModifier", topNode);

  // A root that is not a Project/Sort/Exchange is wrapped in a derived table
  // so that the resulting AST always has a SELECT at its top.
  final boolean needsDerivedTable = !(topNode instanceof Project)
      && !(topNode instanceof Sort)
      && !(topNode instanceof Exchange);
  if (needsDerivedTable) {
    topNode = introduceDerivedTable(topNode);
    debugPlan("Plan after top-level introduceDerivedTable", topNode);
  }

  // Recursively normalize the rest of the tree in place.
  convertOpTree(topNode, (RelNode) null);
  debugPlan("Plan after nested convertOpTree", topNode);

  if (alignColumns) {
    HiveRelColumnsAlignment alignment = new HiveRelColumnsAlignment(
        HiveRelFactories.HIVE_BUILDER.create(topNode.getCluster(), null));
    topNode = alignment.align(topNode);
    debugPlan("Plan after propagating order", topNode);
  }

  Pair<RelNode, RelNode> topSelparentPair = HiveCalciteUtil.getTopLevelSelect(topNode);
  PlanModifierUtil.fixTopOBSchema(topNode, topSelparentPair, resultSchema, true);
  debugPlan("Plan after fixTopOBSchema", topNode);

  // Re-locate the top-level select: fixTopOBSchema may have changed the tree.
  topSelparentPair = HiveCalciteUtil.getTopLevelSelect(topNode);
  topNode = renameTopLevelSelectInResultSchema(topNode, topSelparentPair, resultSchema);
  debugPlan("Final plan after modifier", topNode);
  return topNode;
}

/** Logs the current shape of the plan at debug level, guarded to avoid the toString cost. */
private static void debugPlan(String header, RelNode plan) {
  if (LOG.isDebugEnabled()) {
    LOG.debug(header + "\n " + RelOptUtil.toString(plan));
  }
}
Use of org.apache.calcite.rel.core.Project in project hive (by apache): class HiveMaterializedViewUtils, method deriveGroupingSetsMaterializedViews.
/**
 * If a materialization does not contain grouping sets, it returns the materialization
 * itself. Otherwise, it will create one materialization for each grouping set.
 * For each grouping set, the query for the materialization will consist of the group by
 * columns in the grouping set, followed by a projection to recreate the literal null
 * values. The view scan will consist of the scan over the materialization followed by a
 * filter on the grouping id value corresponding to that grouping set.
 *
 * @param materialization the materialized view to expand
 * @return the original materialization, or one derived materialization per grouping set
 */
public static List<HiveRelOptMaterialization> deriveGroupingSetsMaterializedViews(HiveRelOptMaterialization materialization) {
    final RelNode query = materialization.queryRel;
    final Project project;
    final Aggregate aggregate;
    // Recognize the two supported query shapes: Aggregate, or Project-over-Aggregate.
    if (query instanceof Aggregate) {
        project = null;
        aggregate = (Aggregate) query;
    } else if (query instanceof Project && query.getInput(0) instanceof Aggregate) {
        project = (Project) query;
        aggregate = (Aggregate) query.getInput(0);
    } else {
        project = null;
        aggregate = null;
    }
    if (aggregate == null) {
        // Not an aggregate materialized view, return original materialization
        return Collections.singletonList(materialization);
    }
    if (aggregate.getGroupType() == Group.SIMPLE) {
        // Not a grouping sets materialized view, return original materialization
        return Collections.singletonList(materialization);
    }
    // Find the output position of the GROUPING__ID pseudo-aggregate; group-by
    // columns come first, so agg call i lands at groupCount + i.
    int aggregateGroupingIdIndex = -1;
    for (int i = 0; i < aggregate.getAggCallList().size(); i++) {
        if (aggregate.getAggCallList().get(i).getAggregation() == HiveGroupingID.INSTANCE) {
            aggregateGroupingIdIndex = aggregate.getGroupCount() + i;
            break;
        }
    }
    // A grouping-sets aggregate is expected to always carry a grouping id call.
    Preconditions.checkState(aggregateGroupingIdIndex != -1);
    int projectGroupingIdIndex = -1;
    if (project != null) {
        // Locate where the top Project forwards the grouping id column, if at all.
        for (int i = 0; i < project.getProjects().size(); i++) {
            RexNode expr = project.getProjects().get(i);
            if (expr instanceof RexInputRef) {
                RexInputRef ref = (RexInputRef) expr;
                if (ref.getIndex() == aggregateGroupingIdIndex) {
                    // Grouping id is present
                    projectGroupingIdIndex = i;
                    break;
                }
            }
        }
        if (projectGroupingIdIndex == -1) {
            // Grouping id is not present, return original materialization
            return Collections.singletonList(materialization);
        }
    }
    // Create multiple materializations
    final List<HiveRelOptMaterialization> materializationList = new ArrayList<>();
    final RelBuilder builder = HiveRelFactories.HIVE_BUILDER.create(aggregate.getCluster(), null);
    final RexBuilder rexBuilder = aggregate.getCluster().getRexBuilder();
    // Copy the agg calls and drop the grouping id call: each derived
    // materialization targets a single grouping set and no longer needs it.
    final List<AggregateCall> aggregateCalls = new ArrayList<>(aggregate.getAggCallList());
    aggregateCalls.remove(aggregateGroupingIdIndex - aggregate.getGroupCount());
    for (ImmutableBitSet targetGroupSet : aggregate.getGroupSets()) {
        // Compute the grouping id value
        long groupingIdValue = convert(targetGroupSet, aggregate.getGroupSet());
        // First we modify the MV query
        Aggregate newAggregate = aggregate.copy(aggregate.getTraitSet(), aggregate.getInput(), targetGroupSet, null, aggregateCalls);
        builder.push(newAggregate);
        // Rebuild the original output row: group columns missing from this
        // grouping set are replaced by typed NULL literals.
        List<RexNode> exprs = new ArrayList<>();
        for (int pos = 0; pos < aggregate.getGroupCount(); pos++) {
            int ref = aggregate.getGroupSet().nth(pos);
            if (targetGroupSet.get(ref)) {
                exprs.add(rexBuilder.makeInputRef(newAggregate, targetGroupSet.indexOf(ref)));
            } else {
                exprs.add(rexBuilder.makeNullLiteral(aggregate.getRowType().getFieldList().get(pos).getType()));
            }
        }
        // Aggregate outputs start right after this grouping set's columns.
        int pos = targetGroupSet.cardinality();
        for (AggregateCall aggregateCall : aggregate.getAggCallList()) {
            if (aggregateCall.getAggregation() == HiveGroupingID.INSTANCE) {
                // Recreate the grouping id as a constant for this grouping set.
                exprs.add(rexBuilder.makeBigintLiteral(new BigDecimal(groupingIdValue)));
            } else {
                exprs.add(rexBuilder.makeInputRef(newAggregate, pos++));
            }
        }
        if (project != null) {
            // Include projections from top operator
            Project bottomProject = (Project) builder.project(exprs, ImmutableList.of(), true).build();
            List<RexNode> newNodes = RelOptUtil.pushPastProject(project.getProjects(), bottomProject);
            builder.push(bottomProject.getInput()).project(newNodes);
        } else {
            builder.project(exprs);
        }
        final RelNode newQueryRel = builder.build();
        // Second we modify the MV scan
        builder.push(materialization.tableRel);
        // Filter the MV scan down to the rows belonging to this grouping set,
        // matching on the grouping id column.
        RexNode condition = rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, rexBuilder.makeInputRef(materialization.tableRel, project != null ? projectGroupingIdIndex : aggregateGroupingIdIndex), rexBuilder.makeBigintLiteral(new BigDecimal(groupingIdValue)));
        builder.filter(condition);
        final RelNode newTableRel = builder.build();
        final Table scanTable = extractTable(materialization);
        // The "#<n>" suffix disambiguates the derived materializations' names.
        materializationList.add(new HiveRelOptMaterialization(newTableRel, newQueryRel, null, ImmutableList.of(scanTable.getDbName(), scanTable.getTableName(), "#" + materializationList.size()), materialization.getScope(), materialization.getRebuildMode()));
    }
    return materializationList;
}
Aggregations