use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Function in project beam by apache.
the class ExpressionConverter method convertRexNodeFromComputedColumnWithFieldList.
  private RexNode convertRexNodeFromComputedColumnWithFieldList(
      ResolvedComputedColumn column,
      List<ResolvedColumn> columnList,
      List<RelDataTypeField> fieldList,
      int windowFieldIndex) {
    if (column.getExpr().nodeKind() != RESOLVED_FUNCTION_CALL) {
      return convertRexNodeFromResolvedExpr(column.getExpr(), columnList, fieldList, ImmutableMap.of());
    }
    ResolvedFunctionCall functionCall = (ResolvedFunctionCall) column.getExpr();
    // TODO: is there any other illegal case?
    if (functionCall.getFunction().getName().equals(FIXED_WINDOW)
        || functionCall.getFunction().getName().equals(SLIDING_WINDOW)
        || functionCall.getFunction().getName().equals(SESSION_WINDOW)) {
      throw new ZetaSqlException(
          functionCall.getFunction().getName() + " shouldn't appear in SELECT exprlist.");
    }
    if (!functionCall.getFunction().getGroup().equals(PRE_DEFINED_WINDOW_FUNCTIONS)) {
      // Non-window functions still go through the normal FunctionCall conversion process.
      return convertRexNodeFromResolvedExpr(column.getExpr(), columnList, fieldList, ImmutableMap.of());
    }
    // ONLY window_start and window_end should arrive here.
    // TODO: add extra verification here to make sure window start/end functions have the same
    // parameters as the window function.
    List<RexNode> operands = new ArrayList<>();
    switch (functionCall.getFunction().getName()) {
      case FIXED_WINDOW_START:
      case SLIDING_WINDOW_START:
      case SESSION_WINDOW_START:
        // In the Calcite representation a session window's start is treated as equal to its end,
        // so SESSION_WINDOW_END falls through to the same input reference.
      case SESSION_WINDOW_END:
        return rexBuilder().makeInputRef(fieldList.get(windowFieldIndex).getType(), windowFieldIndex);
      case FIXED_WINDOW_END:
        operands.add(rexBuilder().makeInputRef(fieldList.get(windowFieldIndex).getType(), windowFieldIndex));
        // TODO: check that window_end's duration is the same as its aggregate window.
        operands.add(convertIntervalToRexIntervalLiteral((ResolvedLiteral) functionCall.getArgumentList().get(0)));
        return rexBuilder().makeCall(SqlOperators.ZETASQL_TIMESTAMP_ADD, operands);
      case SLIDING_WINDOW_END:
        operands.add(rexBuilder().makeInputRef(fieldList.get(windowFieldIndex).getType(), windowFieldIndex));
        operands.add(convertIntervalToRexIntervalLiteral((ResolvedLiteral) functionCall.getArgumentList().get(1)));
        return rexBuilder().makeCall(SqlOperators.ZETASQL_TIMESTAMP_ADD, operands);
      default:
        throw new UnsupportedOperationException(
            "Does not support window start/end: " + functionCall.getFunction().getName());
    }
  }
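The switch above reduces to a simple rule: window start functions (and SESSION_WINDOW_END) are rewritten to a direct reference to the projected window field, while FIXED_WINDOW_END and SLIDING_WINDOW_END add the window size interval to that field through a ZETASQL_TIMESTAMP_ADD call. Below is a minimal, self-contained sketch of that arithmetic using java.time; the class and method names are illustrative and are not part of the Beam code.
import java.time.Duration;
import java.time.Instant;

public class WindowBoundSketch {
  // *_START (and SESSION_WINDOW_END): just reference the window field as-is.
  static Instant windowStart(Instant windowFieldValue) {
    return windowFieldValue;
  }

  // FIXED_WINDOW_END / SLIDING_WINDOW_END: window field plus the window size interval,
  // which the converter expresses as a ZETASQL_TIMESTAMP_ADD call.
  static Instant windowEnd(Instant windowFieldValue, Duration windowSize) {
    return windowFieldValue.plus(windowSize);
  }

  public static void main(String[] args) {
    Instant start = Instant.parse("2024-01-01T00:00:00Z");
    System.out.println(windowEnd(start, Duration.ofMinutes(1))); // 2024-01-01T00:01:00Z
  }
}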
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Function in project beam by apache.
the class ExpressionConverter method convertTableValuedFunction.
  /**
   * Converts a TableValuedFunction in ZetaSQL to a RexCall in Calcite.
   */
  public RexCall convertTableValuedFunction(
      RelNode input,
      TableValuedFunction tvf,
      List<ResolvedNodes.ResolvedFunctionArgument> argumentList,
      List<ResolvedColumn> inputTableColumns) {
    ResolvedColumn wmCol;
    // Handle built-in windowing TVFs.
    switch (tvf.getName()) {
      case TVFStreamingUtils.FIXED_WINDOW_TVF:
        // TUMBLE's second argument is the watermark column descriptor.
        wmCol = extractWatermarkColumnFromDescriptor(argumentList.get(1).getDescriptorArg());
        return (RexCall)
            rexBuilder()
                .makeCall(
                    new SqlWindowTableFunction(SqlKind.TUMBLE.name()),
                    convertRelNodeToRexRangeRef(input),
                    convertResolvedColumnToRexInputRef(wmCol, inputTableColumns),
                    convertIntervalToRexIntervalLiteral((ResolvedLiteral) argumentList.get(2).getExpr()));
      case TVFStreamingUtils.SLIDING_WINDOW_TVF:
        // HOP's second argument is the watermark column descriptor.
        wmCol = extractWatermarkColumnFromDescriptor(argumentList.get(1).getDescriptorArg());
        return (RexCall)
            rexBuilder()
                .makeCall(
                    new SqlWindowTableFunction(SqlKind.HOP.name()),
                    convertRelNodeToRexRangeRef(input),
                    convertResolvedColumnToRexInputRef(wmCol, inputTableColumns),
                    convertIntervalToRexIntervalLiteral((ResolvedLiteral) argumentList.get(2).getExpr()),
                    convertIntervalToRexIntervalLiteral((ResolvedLiteral) argumentList.get(3).getExpr()));
      case TVFStreamingUtils.SESSION_WINDOW_TVF:
        // SESSION's second argument is the watermark column descriptor.
        wmCol = extractWatermarkColumnFromDescriptor(argumentList.get(1).getDescriptorArg());
        // SESSION's third argument is the session key descriptor.
        List<ResolvedColumn> keyCol =
            extractSessionKeyColumnFromDescriptor(argumentList.get(2).getDescriptorArg());
        List<RexNode> operands = new ArrayList<>();
        operands.add(convertRelNodeToRexRangeRef(input));
        operands.add(convertResolvedColumnToRexInputRef(wmCol, inputTableColumns));
        operands.add(convertIntervalToRexIntervalLiteral((ResolvedLiteral) argumentList.get(3).getExpr()));
        operands.addAll(convertResolvedColumnsToRexInputRef(keyCol, inputTableColumns));
        return (RexCall) rexBuilder().makeCall(new SqlWindowTableFunction(SqlKind.SESSION.name()), operands);
    }
    if (tvf instanceof FixedOutputSchemaTVF) {
      FixedOutputSchemaTVF fixedOutputSchemaTVF = (FixedOutputSchemaTVF) tvf;
      return (RexCall)
          rexBuilder()
              .makeCall(
                  new ZetaSqlUserDefinedSQLNativeTableValuedFunction(
                      new SqlIdentifier(tvf.getName(), SqlParserPos.ZERO),
                      opBinding -> {
                        List<RelDataTypeField> relDataTypeFields =
                            convertTVFRelationColumnsToRelDataTypeFields(
                                fixedOutputSchemaTVF.getOutputSchema().getColumns());
                        return new RelRecordType(relDataTypeFields);
                      },
                      null, null, null, null));
    }
    throw new UnsupportedOperationException("Does not support table-valued function: " + tvf.getName());
  }
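For the SESSION case, the operand list is built in a fixed order: the input range reference, the watermark column, the gap interval, and finally the session key columns. The following stand-alone sketch mirrors that ordering with plain strings instead of RexNodes; the column names and interval value are made up for illustration.
import java.util.ArrayList;
import java.util.List;

public class SessionTvfOperandOrderSketch {
  public static void main(String[] args) {
    List<String> operands = new ArrayList<>();
    operands.add("$input");               // convertRelNodeToRexRangeRef(input)
    operands.add("event_ts");             // watermark column from the second argument's descriptor
    operands.add("INTERVAL '60' SECOND"); // gap interval from the fourth argument
    operands.addAll(List.of("user_id"));  // session key columns from the third argument's descriptor
    System.out.println(operands);         // [$input, event_ts, INTERVAL '60' SECOND, user_id]
  }
}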
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Function in project beam by apache.
the class AggregateScanConverter method convertAggregateScanInputScanToLogicalProject.
  private LogicalProject convertAggregateScanInputScanToLogicalProject(ResolvedAggregateScan node, RelNode input) {
    // AggregateScan's input is the source of data (e.g. TableScan), which is different from the
    // design of Calcite SQL, in which the LogicalAggregate's input is a LogicalProject, whose
    // input is a LogicalTableScan. When AggregateScan's input is a WithRefScan, the WithRefScan
    // is equivalent to a LogicalTableScan. So it's still required to build another LogicalProject
    // as the input of LogicalAggregate.
    List<RexNode> projects = new ArrayList<>();
    List<String> fieldNames = new ArrayList<>();
    // Project the GROUP BY columns first so they become the leading fields of the input to
    // LogicalAggregate.
    for (ResolvedComputedColumn computedColumn : node.getGroupByList()) {
      projects.add(
          getExpressionConverter()
              .convertRexNodeFromResolvedExpr(
                  computedColumn.getExpr(),
                  node.getInputScan().getColumnList(),
                  input.getRowType().getFieldList(),
                  ImmutableMap.of()));
      fieldNames.add(getTrait().resolveAlias(computedColumn.getColumn()));
    }
    // TODO: remove duplicate columns in projects.
    for (ResolvedComputedColumn resolvedComputedColumn : node.getAggregateList()) {
      // Create Calcite's RexInputRef from the ResolvedColumn of each ResolvedComputedColumn.
      // TODO: handle aggregate functions with more than one argument and handle OVER.
      // TODO: is there a general way to track and deduplicate column references for aggregation?
      ResolvedAggregateFunctionCall aggregateFunctionCall =
          (ResolvedAggregateFunctionCall) resolvedComputedColumn.getExpr();
      if (aggregateFunctionCall.getArgumentList() != null
          && aggregateFunctionCall.getArgumentList().size() >= 1) {
        ResolvedExpr resolvedExpr = aggregateFunctionCall.getArgumentList().get(0);
        for (int i = 0; i < aggregateFunctionCall.getArgumentList().size(); i++) {
          if (i == 0) {
            // TODO: assumes the aggregate function's input is either a ColumnRef or a
            // cast(ColumnRef); a user might also apply multiple CASTs, which is a rare case that
            // still needs handling.
            projects.add(
                getExpressionConverter()
                    .convertRexNodeFromResolvedExpr(
                        resolvedExpr,
                        node.getInputScan().getColumnList(),
                        input.getRowType().getFieldList(),
                        ImmutableMap.of()));
          } else {
            projects.add(
                getExpressionConverter()
                    .convertRexNodeFromResolvedExpr(aggregateFunctionCall.getArgumentList().get(i)));
          }
          fieldNames.add(getTrait().resolveAlias(resolvedComputedColumn.getColumn()));
        }
      }
    }
    return LogicalProject.create(input, ImmutableList.of(), projects, fieldNames);
  }
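The net effect is a projection whose leading fields are the GROUP BY expressions, followed by one field per aggregate-call argument, each named after the computed column's alias. A stand-alone sketch of that layout, using invented column names and plain strings rather than RexNodes, is shown below.
import java.util.ArrayList;
import java.util.List;

public class AggregateInputProjectionSketch {
  public static void main(String[] args) {
    List<String> groupByExprs = List.of("user_id");                               // GROUP BY columns
    List<List<String>> aggregateArgs = List.of(List.of("amount"), List.of("ts")); // one list per aggregate call

    List<String> projects = new ArrayList<>(groupByExprs); // grouping keys come first
    aggregateArgs.forEach(projects::addAll);               // then the aggregate inputs
    System.out.println(projects);                          // [user_id, amount, ts]
  }
}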
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Function in project beam by apache.
the class SqlCoalesceOperatorRewriter method apply.
  @Override
  public RexNode apply(RexBuilder rexBuilder, List<RexNode> operands) {
    Preconditions.checkArgument(
        operands.size() >= 1, "COALESCE should have at least one argument in function call.");
    // No need for a case operator if there's only one operand.
    if (operands.size() == 1) {
      return operands.get(0);
    }
    SqlOperator op = SqlStdOperatorTable.CASE;
    List<RexNode> newOperands = new ArrayList<>();
    for (RexNode operand : Util.skipLast(operands)) {
      newOperands.add(rexBuilder.makeCall(SqlStdOperatorTable.IS_NOT_NULL, ImmutableList.of(operand)));
      newOperands.add(operand);
    }
    newOperands.add(Util.last(operands));
    return rexBuilder.makeCall(op, newOperands);
  }
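The rewriter interleaves an IS NOT NULL check and the operand itself for every argument except the last, which becomes the ELSE branch of the CASE call, so COALESCE(a, b, c) is rewritten to CASE WHEN a IS NOT NULL THEN a WHEN b IS NOT NULL THEN b ELSE c END. A self-contained sketch of that operand interleaving, using strings in place of RexNodes, follows.
import java.util.ArrayList;
import java.util.List;

public class CoalesceRewriteSketch {
  static List<String> caseOperands(List<String> coalesceOperands) {
    List<String> out = new ArrayList<>();
    for (int i = 0; i < coalesceOperands.size() - 1; i++) {
      String operand = coalesceOperands.get(i);
      out.add(operand + " IS NOT NULL"); // WHEN condition
      out.add(operand);                  // THEN value
    }
    out.add(coalesceOperands.get(coalesceOperands.size() - 1)); // ELSE branch
    return out;
  }

  public static void main(String[] args) {
    // Prints [a IS NOT NULL, a, b IS NOT NULL, b, c]
    System.out.println(caseOperands(List.of("a", "b", "c")));
  }
}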
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Function in project beam by apache.
the class BeamZetaSqlCatalogTest method loadsUserDefinedFunctionsFromSchema.
  @Test
  public void loadsUserDefinedFunctionsFromSchema() throws NoSuchMethodException {
    JdbcConnection jdbcConnection = createJdbcConnection();
    SchemaPlus calciteSchema = jdbcConnection.getCurrentSchemaPlus();
    Method method = IncrementFn.class.getMethod("eval", Long.class);
    calciteSchema.add("increment", ScalarFunctionImpl.create(method));
    BeamZetaSqlCatalog beamCatalog =
        BeamZetaSqlCatalog.create(
            calciteSchema, jdbcConnection.getTypeFactory(), SqlAnalyzer.baseAnalyzerOptions());
    assertNotNull(
        "ZetaSQL catalog contains function signature.",
        beamCatalog
            .getZetaSqlCatalog()
            .getFunctionByFullName(USER_DEFINED_JAVA_SCALAR_FUNCTIONS + ":increment"));
    assertEquals(
        "Beam catalog contains function definition.",
        UserFunctionDefinitions.JavaScalarFunction.create(method, ""),
        beamCatalog.getUserFunctionDefinitions().javaScalarFunctions().get(ImmutableList.of("increment")));
  }
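The test registers the eval(Long) method of IncrementFn as a Calcite scalar function and then checks that it is visible both in the ZetaSQL catalog (by fully qualified name) and in Beam's own user function definitions. Below is a sketch of the kind of UDF class the test assumes; the real IncrementFn is defined in the Beam test sources and may differ in detail.
// Hypothetical stand-in for the IncrementFn referenced above.
public class IncrementFn {
  // ScalarFunctionImpl.create(method) registers this eval method as the UDF body.
  public Long eval(Long i) {
    return i + 1;
  }
}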