use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexInputRef in project beam by apache.
the class BeamIOPushDownRule method constructNodesWithPushDown.
/**
* Construct a new {@link BeamIOSourceRel} with predicate and/or project pushed-down and a new
* {@code Calc} to do field reordering/field duplication/complex projects.
*
* @param resolved A descriptor of fields used by a {@code Calc}.
* @param relBuilder A {@code RelBuilder} for constructing {@code Project} and {@code Filter} Rel
* nodes with operations unsupported by the IO.
* @param ioSourceRel Original {@code BeamIOSourceRel} we are attempting to perform push-down for.
* @param tableFilter A class containing information about IO predicate push-down capabilities.
* @param calcDataType A Calcite output schema of an original {@code Calc}.
* @param calcProjects A list of projected {@code RexNode}s by a {@code Calc}.
* @return An alternative {@code RelNode} with supported filters/projects pushed-down to IO Rel.
*/
private RelNode constructNodesWithPushDown(
    FieldAccessDescriptor resolved,
    RelBuilder relBuilder,
    BeamIOSourceRel ioSourceRel,
    BeamSqlTableFilter tableFilter,
    RelDataType calcDataType,
    List<RexNode> calcProjects) {
  Schema newSchema =
      SelectHelpers.getOutputSchema(ioSourceRel.getBeamSqlTable().getSchema(), resolved);
  RelDataType calcInputType =
      CalciteUtils.toCalciteRowType(newSchema, ioSourceRel.getCluster().getTypeFactory());

  BeamIOSourceRel newIoSourceRel =
      ioSourceRel.createPushDownRel(calcInputType, newSchema.getFieldNames(), tableFilter);
  relBuilder.push(newIoSourceRel);

  List<RexNode> newProjects = new ArrayList<>();
  List<RexNode> newFilter = new ArrayList<>();
  // Ex: let's say the original fields are (number before each element is the index):
  // {0:unused1, 1:id, 2:name, 3:unused2},
  // where only 'id' and 'name' are being used. Then the new calcInputType should be as follows:
  // {0:id, 1:name}.
  // The mapping list will contain 2 entries: {0:1, 1:2},
  // i.e. for each field of the new (pushed-down) schema, the index of that field in the
  // original schema.
  List<Integer> mapping =
      resolved.getFieldsAccessed().stream()
          .map(FieldDescriptor::getFieldId)
          .collect(Collectors.toList());

  // Map filters to new RexInputRef.
  for (RexNode filter : tableFilter.getNotSupported()) {
    newFilter.add(reMapRexNodeToNewInputs(filter, mapping));
  }
  // Map projects to new RexInputRef.
  for (RexNode project : calcProjects) {
    newProjects.add(reMapRexNodeToNewInputs(project, mapping));
  }

  if (RexUtil.isIdentity(newProjects, newIoSourceRel.getRowType())) {
    // Force a rename prior to filter for identity function.
    relBuilder.project(newProjects, calcDataType.getFieldNames(), true);
  }

  relBuilder.filter(newFilter);
  relBuilder.project(newProjects, calcDataType.getFieldNames());

  return relBuilder.build();
}
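To make the index remapping described in the comment above concrete, here is a minimal, self-contained sketch. It is plain Java with no Calcite or Beam types; the class and variable names are invented for illustration. It shows how a reference to a field of the original schema is rewritten against the pushed-down schema, which is conceptually what reMapRexNodeToNewInputs does with the mapping list:

import java.util.Arrays;
import java.util.List;

public class PushDownMappingSketch {
  public static void main(String[] args) {
    // Original schema: {0:unused1, 1:id, 2:name, 3:unused2}; only 'id' and 'name' are used.
    List<String> originalFields = Arrays.asList("unused1", "id", "name", "unused2");
    // For each field of the pushed-down schema, the index of that field in the original schema.
    List<Integer> mapping = Arrays.asList(1, 2); // new schema is {0:id, 1:name}

    // A RexInputRef pointing at original index 2 ('name') must now point at new index 1.
    int originalIndex = 2;
    int newIndex = mapping.indexOf(originalIndex);
    System.out.println(
        "'" + originalFields.get(originalIndex) + "' moves from index " + originalIndex
            + " to index " + newIndex); // prints: 'name' moves from index 2 to index 1
  }
}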
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexInputRef in project beam by apache.
the class CalcRelSplitter method chooseLevels.
/**
* Figures out which expressions to calculate at which level.
*
* @param exprs Array of expressions
* @param conditionOrdinal Ordinal of the condition expression, or -1 if no condition
* @param exprLevels Level ordinal for each expression (output)
* @param levelTypeOrdinals The type of each level (output)
* @return Number of levels required
*/
private int chooseLevels(
    final RexNode[] exprs, int conditionOrdinal, int[] exprLevels, int[] levelTypeOrdinals) {
  final int inputFieldCount = program.getInputRowType().getFieldCount();
  int levelCount = 0;
  final MaxInputFinder maxInputFinder = new MaxInputFinder(exprLevels);
  boolean[] relTypesPossibleForTopLevel = new boolean[relTypes.length];
  Arrays.fill(relTypesPossibleForTopLevel, true);

  // Compute the order in which to visit expressions.
  final List<Set<Integer>> cohorts = getCohorts();
  final List<Integer> permutation = computeTopologicalOrdering(exprs, cohorts);

  for (int i : permutation) {
    RexNode expr = exprs[i];
    final boolean condition = i == conditionOrdinal;

    if (i < inputFieldCount) {
      assert expr instanceof RexInputRef;
      exprLevels[i] = -1;
      continue;
    }

    // Deduce the minimum level of the expression. An expression must
    // be at a level greater than or equal to all of its inputs.
    int level = maxInputFinder.maxInputFor(expr);

    // If the expression is in a cohort, it can occur no lower than the
    // levels of other expressions in the same cohort.
    Set<Integer> cohort = findCohort(cohorts, i);
    if (cohort != null) {
      for (Integer exprOrdinal : cohort) {
        if (exprOrdinal == i) {
          // This is the expression itself; it is a waste of effort to repeat.
          continue;
        }
        final RexNode cohortExpr = exprs[exprOrdinal];
        int cohortLevel = maxInputFinder.maxInputFor(cohortExpr);
        if (cohortLevel > level) {
          level = cohortLevel;
        }
      }
    }

    // Try to implement this expression at the minimum level.
    // If that is not possible, try to implement it at higher levels.
    levelLoop:
    for (; ; ++level) {
      if (level >= levelCount) {
        // This is a new level. We can use any type we like.
        for (int relTypeOrdinal = 0; relTypeOrdinal < relTypes.length; relTypeOrdinal++) {
          if (!relTypesPossibleForTopLevel[relTypeOrdinal]) {
            continue;
          }
          if (relTypes[relTypeOrdinal].canImplement(expr, condition)) {
            // Success. We have found a type where we can
            // implement this expression.
            exprLevels[i] = level;
            levelTypeOrdinals[level] = relTypeOrdinal;
            assert (level == 0) || (levelTypeOrdinals[level - 1] != levelTypeOrdinals[level])
                : "successive levels of same type";

            // Previous reltypes are not possible.
            for (int j = 0; j < relTypeOrdinal; ++j) {
              relTypesPossibleForTopLevel[j] = false;
            }

            // Successive reltypes may be possible.
            for (int j = relTypeOrdinal + 1; j < relTypes.length; ++j) {
              if (relTypesPossibleForTopLevel[j]) {
                relTypesPossibleForTopLevel[j] = relTypes[j].canImplement(expr, condition);
              }
            }

            // Move to next level.
            levelTypeOrdinals[levelCount] = firstSet(relTypesPossibleForTopLevel);
            ++levelCount;
            Arrays.fill(relTypesPossibleForTopLevel, true);
            break levelLoop;
          }
        }

        // None of the rel types still possible for this level can implement the expression.
        // But maybe we can succeed with a brand-new level, with all options open?
        if (count(relTypesPossibleForTopLevel) >= relTypes.length) {
          // Cannot implement for any type.
          throw new AssertionError("cannot implement " + expr);
        }
        levelTypeOrdinals[levelCount] = firstSet(relTypesPossibleForTopLevel);
        ++levelCount;
        Arrays.fill(relTypesPossibleForTopLevel, true);
      } else {
        final int levelTypeOrdinal = levelTypeOrdinals[level];
        if (!relTypes[levelTypeOrdinal].canImplement(expr, condition)) {
          // This level's type cannot implement the expression;
          // continue to next level.
          continue;
        }
        exprLevels[i] = level;
        break;
      }
    }
  }

  if (levelCount == 0) {
    // At least one level is always required.
    levelCount = 1;
  }
  return levelCount;
}
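The shape of the levelLoop above is easier to see on a toy model. The following sketch is not Calcite code: the expression graph, the implementability predicate, and all names are invented, and the bookkeeping around relTypesPossibleForTopLevel is omitted. It only illustrates the core rule that an expression is placed at the lowest level that is at or above the levels of all its inputs and whose implementor can handle it:

import java.util.Arrays;
import java.util.List;
import java.util.function.IntPredicate;

public class LevelAssignmentSketch {
  public static void main(String[] args) {
    // Three "expressions": 0 and 1 are plain input refs, 2 reads both of them.
    // Input refs get level -1, exactly as in chooseLevels.
    int[] exprLevels = {-1, -1, Integer.MIN_VALUE};
    List<List<Integer>> inputsOf =
        Arrays.asList(Arrays.<Integer>asList(), Arrays.<Integer>asList(), Arrays.asList(0, 1));

    // Pretend the implementor chosen for level 0 cannot handle expression 2
    // (say it contains a call that only a different Calc flavor supports),
    // but the implementor for level 1 can.
    IntPredicate levelCanImplementExpr2 = level -> level >= 1;

    int i = 2;
    // Minimum level: no lower than the level of any input.
    int level =
        Math.max(0, inputsOf.get(i).stream().mapToInt(j -> exprLevels[j]).max().orElse(-1));
    // Walk upward until some level's implementor accepts the expression.
    while (!levelCanImplementExpr2.test(level)) {
      ++level;
    }
    exprLevels[i] = level;
    System.out.println("expression 2 assigned to level " + exprLevels[i]); // prints: ... level 1
  }
}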
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexInputRef in project beam by apache.
the class MongoDbTable method translateRexNodeToBson.
/**
* Recursively translates a single RexNode to a MongoDB Bson filter. Supports simple comparison
* operations, negation, and nested conjunction/disjunction. Boolean fields are translated as an
* `$eq` operation with a boolean `true`.
*
* @param node {@code RexNode} to translate.
* @return {@code Bson} filter.
*/
private Bson translateRexNodeToBson(RexNode node) {
  final IntFunction<String> fieldIdToName = i -> getSchema().getField(i).getName();
  // Supported operations are described in MongoDbFilter#isSupported.
  if (node instanceof RexCall) {
    RexCall compositeNode = (RexCall) node;
    List<RexLiteral> literals = new ArrayList<>();
    List<RexInputRef> inputRefs = new ArrayList<>();

    for (RexNode operand : compositeNode.getOperands()) {
      if (operand instanceof RexLiteral) {
        literals.add((RexLiteral) operand);
      } else if (operand instanceof RexInputRef) {
        inputRefs.add((RexInputRef) operand);
      }
    }

    // Operation is a comparison, since one of the operands is a field reference.
    if (inputRefs.size() == 1) {
      RexInputRef inputRef = inputRefs.get(0);
      String inputFieldName = fieldIdToName.apply(inputRef.getIndex());
      if (literals.size() > 0) {
        // Convert literal value to the same Java type as the field we are comparing to.
        Object literal = convertToExpectedType(inputRef, literals.get(0));

        switch (node.getKind()) {
          case IN:
            return Filters.in(inputFieldName, convertToExpectedType(inputRef, literals));
          case EQUALS:
            return Filters.eq(inputFieldName, literal);
          case NOT_EQUALS:
            return Filters.not(Filters.eq(inputFieldName, literal));
          case LESS_THAN:
            return Filters.lt(inputFieldName, literal);
          case GREATER_THAN:
            return Filters.gt(inputFieldName, literal);
          case GREATER_THAN_OR_EQUAL:
            return Filters.gte(inputFieldName, literal);
          case LESS_THAN_OR_EQUAL:
            return Filters.lte(inputFieldName, literal);
          default:
            // Encountered an unexpected node kind, RuntimeException below.
            break;
        }
      } else if (node.getKind().equals(SqlKind.NOT)) {
        // Ex: `where not boolean_field`
        return Filters.not(translateRexNodeToBson(inputRef));
      } else {
        throw new RuntimeException(
            "Cannot create a filter for an unsupported node: " + node.toString());
      }
    } else {
      // Operation is a conjunction/disjunction.
      switch (node.getKind()) {
        case AND:
          // Recursively construct a filter for each operand of the conjunction.
          return Filters.and(
              compositeNode.getOperands().stream()
                  .map(this::translateRexNodeToBson)
                  .collect(Collectors.toList()));
        case OR:
          // Recursively construct a filter for each operand of the disjunction.
          return Filters.or(
              compositeNode.getOperands().stream()
                  .map(this::translateRexNodeToBson)
                  .collect(Collectors.toList()));
        default:
          // Encountered an unexpected node kind, RuntimeException below.
          break;
      }
    }
    throw new RuntimeException(
        "Encountered an unexpected node kind: " + node.getKind().toString());
  } else if (node instanceof RexInputRef
      && node.getType().getSqlTypeName().equals(SqlTypeName.BOOLEAN)) {
    // Boolean field, must be true. Ex: `select * from table where bool_field`
    return Filters.eq(fieldIdToName.apply(((RexInputRef) node).getIndex()), true);
  }

  throw new RuntimeException(
      "Was expecting a RexCall or a boolean RexInputRef, but received: "
          + node.getClass().getSimpleName());
}
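As a concrete illustration of the output, the sketch below builds by hand the kind of filter this method would produce for a predicate like `age > 21 AND name = 'Alice' AND deleted <> TRUE`. It uses only the MongoDB driver's com.mongodb.client.model.Filters API; the class name, field names, and values are made up for the example:

import com.mongodb.MongoClientSettings;
import com.mongodb.client.model.Filters;
import org.bson.BsonDocument;
import org.bson.conversions.Bson;

public class BsonFilterSketch {
  public static void main(String[] args) {
    Bson filter =
        Filters.and(
            Filters.gt("age", 21),                      // age > 21
            Filters.eq("name", "Alice"),                // name = 'Alice'
            Filters.not(Filters.eq("deleted", true)));  // deleted <> TRUE
    // Render the filter as JSON; the result is roughly:
    // {"$and": [{"age": {"$gt": 21}}, {"name": "Alice"}, {"deleted": {"$not": {"$eq": true}}}]}
    System.out.println(
        filter.toBsonDocument(BsonDocument.class, MongoClientSettings.getDefaultCodecRegistry())
            .toJson());
  }
}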
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexInputRef in project beam by apache.
the class BigQueryFilter method isSupported.
/**
* Check whether a {@code RexNode} is supported. Currently, BigQuery supports: 1. Complex
* predicates (both conjunction and disjunction). 2. Comparison between a column and a literal.
*
* <p>TODO: Check if comparison between two columns is supported. Also over a boolean field.
*
* @param node A node to check for predicate push-down support.
* @return A pair containing a boolean whether an expression is supported and the number of input
* references used by the expression.
*/
private Pair<Boolean, Integer> isSupported(RexNode node) {
  int numberOfInputRefs = 0;
  boolean isSupported = true;

  if (node instanceof RexCall) {
    RexCall compositeNode = (RexCall) node;

    // Only operations in SUPPORTED_OPS are pushed down for now;
    // CAST, TRIM? and REVERSE? should be supported as well.
    if (!node.getKind().belongsTo(SUPPORTED_OPS)) {
      isSupported = false;
    } else {
      for (RexNode operand : compositeNode.getOperands()) {
        // All operands must be supported for a parent node to be supported.
        Pair<Boolean, Integer> childSupported = isSupported(operand);
        // Input refs are only accumulated for simple comparisons, not for the operands
        // of complex predicates (AND, OR).
        if (!node.getKind().belongsTo(ImmutableSet.of(AND, OR))) {
          numberOfInputRefs += childSupported.getRight();
        }
        // Predicate functions where more than one field is involved are unsupported.
        isSupported = numberOfInputRefs < 2 && childSupported.getLeft();
      }
    }
  } else if (node instanceof RexInputRef) {
    numberOfInputRefs = 1;
  } else if (node instanceof RexLiteral) {
    // RexLiterals are expected, but no action is needed.
  } else {
    throw new UnsupportedOperationException(
        "Encountered an unexpected node type: " + node.getClass().getSimpleName());
  }

  return Pair.of(isSupported, numberOfInputRefs);
}
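The counting rule is easier to see in isolation. The toy model below is not Beam code: Node, its kind strings, and the example predicates are invented, and org.apache.commons.lang3.tuple.Pair is used only for convenience. It mirrors the rule that AND/OR do not accumulate their children's input-ref counts, while a single comparison touching two columns is rejected:

import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang3.tuple.Pair;

public class PredicateSupportSketch {
  /** A toy predicate node: a column reference, a literal, or a call over child nodes. */
  static class Node {
    final String kind; // "AND", "OR", "COMPARISON", "INPUT_REF", "LITERAL"
    final List<Node> operands;

    Node(String kind, Node... operands) {
      this.kind = kind;
      this.operands = Arrays.asList(operands);
    }
  }

  /** Mirrors the field-counting logic of BigQueryFilter#isSupported on the toy model. */
  static Pair<Boolean, Integer> isSupported(Node node) {
    if (node.kind.equals("INPUT_REF")) {
      return Pair.of(true, 1);
    }
    if (node.kind.equals("LITERAL")) {
      return Pair.of(true, 0);
    }
    int numberOfInputRefs = 0;
    boolean supported = true;
    for (Node operand : node.operands) {
      Pair<Boolean, Integer> child = isSupported(operand);
      // AND/OR do not accumulate their children's counts; comparisons do.
      if (!node.kind.equals("AND") && !node.kind.equals("OR")) {
        numberOfInputRefs += child.getRight();
      }
      supported = numberOfInputRefs < 2 && child.getLeft();
    }
    return Pair.of(supported, numberOfInputRefs);
  }

  public static void main(String[] args) {
    Node a = new Node("INPUT_REF");
    Node b = new Node("INPUT_REF");
    Node five = new Node("LITERAL");
    // a = 5 AND b > 5: each comparison touches one column -> supported.
    Node pushable =
        new Node("AND", new Node("COMPARISON", a, five), new Node("COMPARISON", b, five));
    // a = b: one comparison touches two columns -> not supported.
    Node notPushable = new Node("COMPARISON", a, b);
    System.out.println(isSupported(pushable));    // (true,0)
    System.out.println(isSupported(notPushable)); // (false,2)
  }
}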
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexInputRef in project druid by druid-io.
the class DruidQuery method computeSorting.
@Nonnull
private static Sorting computeSorting(
    final PartialDruidQuery partialQuery,
    final PlannerContext plannerContext,
    final RowSignature rowSignature,
    @Nullable final VirtualColumnRegistry virtualColumnRegistry) {
  final Sort sort = Preconditions.checkNotNull(partialQuery.getSort(), "sort");
  final Project sortProject = partialQuery.getSortProject();

  // Extract limit and offset.
  final OffsetLimit offsetLimit = OffsetLimit.fromSort(sort);

  // Extract orderBy column specs.
  final List<OrderByColumnSpec> orderBys = new ArrayList<>(sort.getChildExps().size());
  for (int sortKey = 0; sortKey < sort.getChildExps().size(); sortKey++) {
    final RexNode sortExpression = sort.getChildExps().get(sortKey);
    final RelFieldCollation collation = sort.getCollation().getFieldCollations().get(sortKey);
    final OrderByColumnSpec.Direction direction;
    final StringComparator comparator;

    if (collation.getDirection() == RelFieldCollation.Direction.ASCENDING) {
      direction = OrderByColumnSpec.Direction.ASCENDING;
    } else if (collation.getDirection() == RelFieldCollation.Direction.DESCENDING) {
      direction = OrderByColumnSpec.Direction.DESCENDING;
    } else {
      throw new ISE("Don't know what to do with direction[%s]", collation.getDirection());
    }

    final SqlTypeName sortExpressionType = sortExpression.getType().getSqlTypeName();
    if (SqlTypeName.NUMERIC_TYPES.contains(sortExpressionType)
        || SqlTypeName.TIMESTAMP == sortExpressionType
        || SqlTypeName.DATE == sortExpressionType) {
      comparator = StringComparators.NUMERIC;
    } else {
      comparator = StringComparators.LEXICOGRAPHIC;
    }

    if (sortExpression.isA(SqlKind.INPUT_REF)) {
      final RexInputRef ref = (RexInputRef) sortExpression;
      final String fieldName = rowSignature.getColumnName(ref.getIndex());
      orderBys.add(new OrderByColumnSpec(fieldName, direction, comparator));
    } else {
      // We don't support sorting by anything other than refs which actually appear in the query result.
      throw new CannotBuildQueryException(sort, sortExpression);
    }
  }

  // Extract any post-sort Projection.
  final Projection projection;
  if (sortProject == null) {
    projection = null;
  } else if (partialQuery.getAggregate() == null) {
    if (virtualColumnRegistry == null) {
      throw new ISE("Must provide 'virtualColumnRegistry' for pre-aggregation Projection!");
    }
    projection =
        Projection.preAggregation(sortProject, plannerContext, rowSignature, virtualColumnRegistry);
  } else {
    projection = Projection.postAggregation(sortProject, plannerContext, rowSignature, "s");
  }

  return Sorting.create(orderBys, offsetLimit, projection);
}
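For reference, the sketch below constructs by hand the OrderByColumnSpec list this method would produce for an "ORDER BY cnt DESC, name ASC" over a numeric 'cnt' column and a string 'name' column. The class name and column names are invented; the constructor and comparator usage follow the snippet above, and the package locations are assumed to be those of recent Druid versions:

import java.util.Arrays;
import java.util.List;
import org.apache.druid.query.groupby.orderby.OrderByColumnSpec;
import org.apache.druid.query.ordering.StringComparators;

public class SortingSketch {
  public static void main(String[] args) {
    List<OrderByColumnSpec> orderBys =
        Arrays.asList(
            // Numeric sort key -> NUMERIC comparator, descending collation.
            new OrderByColumnSpec(
                "cnt", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC),
            // String sort key -> LEXICOGRAPHIC comparator, ascending collation.
            new OrderByColumnSpec(
                "name", OrderByColumnSpec.Direction.ASCENDING, StringComparators.LEXICOGRAPHIC));
    orderBys.forEach(System.out::println);
  }
}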