Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.util.Pair in project hive by apache.
In class HiveRelDecorrelator, method decorrelateRel:
public Frame decorrelateRel(HiveProject rel) throws SemanticException {
  //
  // Rewrite logic:
  //
  // 1. Pass along any correlated variables coming from the input.
  //
  final RelNode oldInput = rel.getInput();
  Frame frame = getInvoke(oldInput, rel);
  if (frame == null) {
    // If input has not been rewritten, do not rewrite this rel.
    return null;
  }
  final List<RexNode> oldProjects = rel.getProjects();
  final List<RelDataTypeField> relOutput = rel.getRowType().getFieldList();

  // LogicalProject projects the original expressions,
  // plus any correlated variables the input wants to pass along.
  final List<Pair<RexNode, String>> projects = Lists.newArrayList();

  // If this Project has a correlated reference, create a value generator
  // and produce the correlated variables in the new output.
  if (cm.mapRefRelToCorRef.containsKey(rel)) {
    frame = decorrelateInputWithValueGenerator(rel);
  }

  // Project the original expressions, decorrelated.
  final Map<Integer, Integer> mapOldToNewOutputs = new HashMap<>();
  int newPos;
  for (newPos = 0; newPos < oldProjects.size(); newPos++) {
    projects.add(newPos,
        Pair.of(decorrelateExpr(oldProjects.get(newPos)), relOutput.get(newPos).getName()));
    mapOldToNewOutputs.put(newPos, newPos);
  }

  // Project any correlated variables the input wants to pass along.
  final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>();
  for (Map.Entry<CorDef, Integer> entry : frame.corDefOutputs.entrySet()) {
    projects.add(RexInputRef.of2(entry.getValue(), frame.r.getRowType().getFieldList()));
    corDefOutputs.put(entry.getKey(), newPos);
    newPos++;
  }

  RelNode newProject = HiveProject.create(frame.r, Pair.left(projects),
      SqlValidatorUtil.uniquify(Pair.right(projects)));

  return register(rel, newProject, mapOldToNewOutputs, corDefOutputs);
}
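The method above leans on Calcite's Pair utility to keep each projected expression aligned with its output name, then splits the pairs back apart with Pair.left and Pair.right when building the HiveProject. Below is a minimal, self-contained sketch of that pattern, using plain strings in place of RexNode expressions and the unvendored org.apache.calcite.util.Pair (the Beam-vendored package simply relocates the same class); the expression and name values are made up for illustration.

import java.util.ArrayList;
import java.util.List;

import org.apache.calcite.util.Pair;

public class PairSplitSketch {
  public static void main(String[] args) {
    // Stand-ins for the decorrelated expressions and their field names.
    final List<Pair<String, String>> projects = new ArrayList<>();
    projects.add(Pair.of("decorrelateExpr($0)", "deptno"));
    projects.add(Pair.of("decorrelateExpr($1)", "empid"));
    projects.add(Pair.of("$cor0.deptno", "corVar0"));

    // Pair.left/Pair.right return views over the two halves of the list,
    // the same split HiveProject.create(...) consumes above.
    List<String> exprs = Pair.left(projects);
    List<String> names = Pair.right(projects);

    System.out.println(exprs); // [decorrelateExpr($0), decorrelateExpr($1), $cor0.deptno]
    System.out.println(names); // [deptno, empid, corVar0]
  }
}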
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.util.Pair in project hive by apache.
In class HiveRelDecorrelator, method aggregateCorrelatorOutput:
/**
* Pulls a {@link Project} above a {@link Correlate} from its RHS input.
* Enforces nullability for join output.
*
* @param correlate Correlate
* @param project the original project as the RHS input of the join
* @param isCount Positions which are calls to the <code>COUNT</code>
* aggregation function
* @return the subtree with the new LogicalProject at the root
*/
private RelNode aggregateCorrelatorOutput(Correlate correlate, LogicalProject project, Set<Integer> isCount) {
  final RelNode left = correlate.getLeft();
  final JoinRelType joinType = correlate.getJoinType().toJoinType();

  // now create the new project
  final List<Pair<RexNode, String>> newProjects = Lists.newArrayList();

  // Project everything from the LHS and then those from the original
  // project
  final List<RelDataTypeField> leftInputFields = left.getRowType().getFieldList();
  for (int i = 0; i < leftInputFields.size(); i++) {
    newProjects.add(RexInputRef.of2(i, leftInputFields));
  }

  // Mark where the projected expr is coming from so that the types will
  // become nullable for the original projections which are now coming out
  // of the nullable side of the OJ.
  boolean projectPulledAboveLeftCorrelator = joinType.generatesNullsOnRight();
  for (Pair<RexNode, String> pair : project.getNamedProjects()) {
    RexNode newProjExpr = removeCorrelationExpr(pair.left, projectPulledAboveLeftCorrelator, isCount);
    newProjects.add(Pair.of(newProjExpr, pair.right));
  }

  return RelOptUtil.createProject(correlate, newProjects, false);
}
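aggregateCorrelatorOutput copies every LHS field into the new project via RexInputRef.of2, which returns a Pair of the input reference and the field's name. The following sketch shows that call in isolation, assuming only calcite-core on the classpath; the row type ("deptno", "dname") is hypothetical and chosen purely for illustration.

import java.util.Arrays;
import java.util.List;

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.Pair;

public class RexInputRefOf2Sketch {
  public static void main(String[] args) {
    SqlTypeFactoryImpl typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    RelDataType intType = typeFactory.createSqlType(SqlTypeName.INTEGER);
    RelDataType strType = typeFactory.createSqlType(SqlTypeName.VARCHAR, 20);

    // Hypothetical LHS row type: (deptno INTEGER, dname VARCHAR(20)).
    List<RelDataTypeField> leftInputFields = Arrays.asList(
        new RelDataTypeFieldImpl("deptno", 0, intType),
        new RelDataTypeFieldImpl("dname", 1, strType));

    // of2 bundles the input reference ($i) with the field's name,
    // which is exactly what the newProjects list above stores.
    for (int i = 0; i < leftInputFields.size(); i++) {
      Pair<RexNode, String> p = RexInputRef.of2(i, leftInputFields);
      System.out.println(p.left + " AS " + p.right); // $0 AS deptno, $1 AS dname
    }
  }
}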
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.util.Pair in project hive by apache.
In class HiveRelDecorrelator, method decorrelateRel:
/**
* Rewrites a {@link LogicalAggregate}.
*
* @param rel Aggregate to rewrite
*/
public Frame decorrelateRel(LogicalAggregate rel) throws SemanticException {
  if (rel.getGroupType() != Aggregate.Group.SIMPLE) {
    throw new AssertionError(Bug.CALCITE_461_FIXED);
  }
  // Aggregate itself should not reference cor vars.
  assert !cm.mapRefRelToCorRef.containsKey(rel);

  final RelNode oldInput = rel.getInput();
  final Frame frame = getInvoke(oldInput, rel);
  if (frame == null) {
    // If input has not been rewritten, do not rewrite this rel.
    return null;
  }
  final RelNode newInput = frame.r;

  // map from newInput output positions to newProject output positions
  Map<Integer, Integer> mapNewInputToProjOutputs = new HashMap<>();
  final int oldGroupKeyCount = rel.getGroupSet().cardinality();

  // Project projects the original expressions,
  // plus any correlated variables the input wants to pass along.
  final List<Pair<RexNode, String>> projects = Lists.newArrayList();
  List<RelDataTypeField> newInputOutput = newInput.getRowType().getFieldList();
  int newPos = 0;

  // oldInput has the original group by keys in the front.
  final NavigableMap<Integer, RexLiteral> omittedConstants = new TreeMap<>();
  for (int i = 0; i < oldGroupKeyCount; i++) {
    final RexLiteral constant = projectedLiteral(newInput, i);
    if (constant != null) {
      // Exclude constants. Aggregate({true}) occurs because Aggregate({})
      // would generate 1 row even when applied to an empty table.
      omittedConstants.put(i, constant);
      continue;
    }
    int newInputPos = frame.oldToNewOutputs.get(i);
    projects.add(RexInputRef.of2(newInputPos, newInputOutput));
    mapNewInputToProjOutputs.put(newInputPos, newPos);
    newPos++;
  }

  final SortedMap<CorDef, Integer> corDefOutputs = new TreeMap<>();
  if (!frame.corDefOutputs.isEmpty()) {
    // Add the correlated variables from the input, starting at
    // position oldGroupKeyCount.
    for (Map.Entry<CorDef, Integer> entry : frame.corDefOutputs.entrySet()) {
      projects.add(RexInputRef.of2(entry.getValue(), newInputOutput));
      corDefOutputs.put(entry.getKey(), newPos);
      mapNewInputToProjOutputs.put(entry.getValue(), newPos);
      newPos++;
    }
  }

  // add the remaining fields
  final int newGroupKeyCount = newPos;
  for (int i = 0; i < newInputOutput.size(); i++) {
    if (!mapNewInputToProjOutputs.containsKey(i)) {
      projects.add(RexInputRef.of2(i, newInputOutput));
      mapNewInputToProjOutputs.put(i, newPos);
      newPos++;
    }
  }
  assert newPos == newInputOutput.size();

  // This Project will be what the old input maps to,
  // replacing any previous mapping from old input.
  RelNode newProject = HiveProject.create(newInput, Pair.left(projects), Pair.right(projects));

  // update mappings:
  // oldInput ----> newInput
  //
  //                newProject
  //                   |
  // oldInput ----> newInput
  //
  // is transformed to
  //
  // oldInput ----> newProject
  //                   |
  //                newInput
  Map<Integer, Integer> combinedMap = Maps.newHashMap();
  for (Integer oldInputPos : frame.oldToNewOutputs.keySet()) {
    combinedMap.put(oldInputPos, mapNewInputToProjOutputs.get(frame.oldToNewOutputs.get(oldInputPos)));
  }
  register(oldInput, newProject, combinedMap, corDefOutputs);

  // now it's time to rewrite the Aggregate
  final ImmutableBitSet newGroupSet = ImmutableBitSet.range(newGroupKeyCount);
  List<AggregateCall> newAggCalls = Lists.newArrayList();
  List<AggregateCall> oldAggCalls = rel.getAggCallList();

  int oldInputOutputFieldCount = rel.getGroupSet().cardinality();
  int newInputOutputFieldCount = newGroupSet.cardinality();

  int i = -1;
  for (AggregateCall oldAggCall : oldAggCalls) {
    ++i;
    List<Integer> oldAggArgs = oldAggCall.getArgList();
    List<Integer> aggArgs = Lists.newArrayList();

    // Adjust the aggregate argument positions.
    // Note the aggregate does not change input ordering, so the input
    // output position mapping can be used to derive the new positions
    // for the argument.
    for (int oldPos : oldAggArgs) {
      aggArgs.add(combinedMap.get(oldPos));
    }
    final int filterArg = oldAggCall.filterArg < 0 ? oldAggCall.filterArg
        : combinedMap.get(oldAggCall.filterArg);

    newAggCalls.add(oldAggCall.adaptTo(newProject, aggArgs, filterArg, oldGroupKeyCount, newGroupKeyCount));

    // The old to new output position mapping will be the same as that
    // of newProject, plus any aggregates that the oldAgg produces.
    combinedMap.put(oldInputOutputFieldCount + i, newInputOutputFieldCount + i);
  }

  relBuilder.push(LogicalAggregate.create(newProject, false, newGroupSet, null, newAggCalls));

  if (!omittedConstants.isEmpty()) {
    final List<RexNode> postProjects = new ArrayList<>(relBuilder.fields());
    for (Map.Entry<Integer, RexLiteral> entry : omittedConstants.descendingMap().entrySet()) {
      postProjects.add(entry.getKey() + frame.corDefOutputs.size(), entry.getValue());
    }
    relBuilder.project(postProjects);
  }

  // The aggregate does not change input ordering, so corVars will be
  // located at the same position as in the input newProject.
  return register(rel, relBuilder.build(), combinedMap, corDefOutputs);
}
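The combinedMap built above is just the composition of two position maps: frame.oldToNewOutputs (old input position to newInput position) followed by mapNewInputToProjOutputs (newInput position to newProject position). A tiny sketch of that composition, with made-up positions:

import java.util.HashMap;
import java.util.Map;

public class PositionMapCompositionSketch {
  public static void main(String[] args) {
    // Hypothetical mappings, mirroring the two maps built in the method above.
    Map<Integer, Integer> oldToNewOutputs = new HashMap<>();          // old input pos -> newInput pos
    oldToNewOutputs.put(0, 2);
    oldToNewOutputs.put(1, 0);

    Map<Integer, Integer> mapNewInputToProjOutputs = new HashMap<>(); // newInput pos -> newProject pos
    mapNewInputToProjOutputs.put(0, 1);
    mapNewInputToProjOutputs.put(2, 0);

    // combinedMap: old input pos -> newProject pos, obtained by composing the two maps.
    Map<Integer, Integer> combinedMap = new HashMap<>();
    for (Integer oldPos : oldToNewOutputs.keySet()) {
      combinedMap.put(oldPos, mapNewInputToProjOutputs.get(oldToNewOutputs.get(oldPos)));
    }
    System.out.println(combinedMap); // {0=0, 1=1}
  }
}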
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.util.Pair in project hive by apache.
In class HiveSubQRemoveRelBuilder, method join:
public HiveSubQRemoveRelBuilder join(JoinRelType joinType, RexNode condition,
    Set<CorrelationId> variablesSet, boolean createSemiJoin) {
  Frame right = stack.pop();
  final Frame left = stack.pop();
  final RelNode join;
  final boolean correlate = variablesSet.size() == 1;
  RexNode postCondition = literal(true);
  if (correlate) {
    final CorrelationId id = Iterables.getOnlyElement(variablesSet);
    final ImmutableBitSet requiredColumns = RelOptUtil.correlationColumns(id, right.rel);
    if (!RelOptUtil.notContainsCorrelation(left.rel, id, Litmus.IGNORE)) {
      throw new IllegalArgumentException("variable " + id + " must not be used by left input to correlation");
    }
    switch (joinType) {
      case LEFT:
        // Correlate does not have an ON clause.
        // For a LEFT correlate, predicate must be evaluated first.
        // For INNER, we can defer.
        stack.push(right);
        filter(condition.accept(new Shifter(left.rel, id, right.rel)));
        right = stack.pop();
        break;
      default:
        postCondition = condition;
    }
    if (createSemiJoin) {
      join = correlateFactory.createCorrelate(left.rel, right.rel, id, requiredColumns, SemiJoinType.SEMI);
    } else {
      join = correlateFactory.createCorrelate(left.rel, right.rel, id, requiredColumns, SemiJoinType.of(joinType));
    }
  } else {
    join = joinFactory.createJoin(left.rel, right.rel, condition, variablesSet, joinType, false);
  }
  final List<Pair<String, RelDataType>> pairs = new ArrayList<>();
  pairs.addAll(left.right);
  pairs.addAll(right.right);
  stack.push(new Frame(join, ImmutableList.copyOf(pairs)));
  filter(postCondition);
  return this;
}
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.util.Pair in project drill by axbaretto.
In class DefaultSqlHandler, method validateNode:
protected Pair<SqlNode, RelDataType> validateNode(SqlNode sqlNode)
    throws ValidationException, RelConversionException, ForemanSetupException {
  final SqlNode sqlNodeValidated = config.getConverter().validate(sqlNode);
  final Pair<SqlNode, RelDataType> typedSqlNode =
      new Pair<>(sqlNodeValidated, config.getConverter().getOutputType(sqlNodeValidated));

  // Check whether any unsupported functionality is used
  UnsupportedOperatorsVisitor visitor = UnsupportedOperatorsVisitor.createVisitor(context);
  try {
    sqlNodeValidated.accept(visitor);
  } catch (UnsupportedOperationException ex) {
    // If the exception is due to unsupported functionality, convert it
    visitor.convertException();
    // If it is not, let the exception move forward to higher-level logic
    throw ex;
  }
  return typedSqlNode;
}
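Callers of validateNode get the validated statement and its row type back as a single Pair, which they can unpack through the public left/right fields or the Map.Entry accessors the class implements. A small sketch of that usage, with plain strings standing in for the SqlNode and RelDataType values:

import org.apache.calcite.util.Pair;

public class TypedNodeSketch {
  public static void main(String[] args) {
    // Stand-ins for the validated SqlNode and its output RelDataType.
    Pair<String, String> typedSqlNode =
        new Pair<>("SELECT deptno FROM emp", "RecordType(INTEGER deptno)");

    // Pair exposes its halves both as public fields and via Map.Entry accessors,
    // so callers can read the node and its row type however they prefer.
    System.out.println(typedSqlNode.left);       // the validated statement
    System.out.println(typedSqlNode.getValue()); // its output row type
  }
}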