Search in sources :

Example 96 with RexBuilder

use of org.apache.calcite.rex.RexBuilder in project flink by apache.

the class HiveParserDMLHelper method replaceProjectForStaticPart.

private RelNode replaceProjectForStaticPart(Project project, Map<String, String> staticPartSpec, Table destTable, Map<String, RelDataType> targetColToType) {
    List<RexNode> exprs = project.getProjects();
    List<RexNode> extendedExprs = new ArrayList<>(exprs);
    // Static partition columns are inserted right before the dynamic partition
    // columns, which always sit at the end of the projection.
    int numDynmPart = destTable.getTTable().getPartitionKeys().size() - staticPartSpec.size();
    int insertIndex = extendedExprs.size() - numDynmPart;
    RexBuilder rexBuilder = plannerContext.getCluster().getRexBuilder();
    for (Map.Entry<String, String> spec : staticPartSpec.entrySet()) {
        // Turn each static partition value into a CHAR literal, then cast it
        // to the target column's type.
        RexNode toAdd = rexBuilder.makeCharLiteral(HiveParserUtils.asUnicodeString(spec.getValue()));
        toAdd = rexBuilder.makeAbstractCast(targetColToType.get(spec.getKey()), toAdd);
        extendedExprs.add(insertIndex++, toAdd);
    }
    RelNode res = LogicalProject.create(project.getInput(), Collections.emptyList(), extendedExprs, (List<String>) null);
    // Detach the old project from its input so the new project replaces it.
    project.replaceInput(0, null);
    return res;
}
Also used : RelNode(org.apache.calcite.rel.RelNode) ArrayList(java.util.ArrayList) RexBuilder(org.apache.calcite.rex.RexBuilder) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) RexNode(org.apache.calcite.rex.RexNode)
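
The pattern worth noting here is makeCharLiteral followed by makeAbstractCast: the static partition value enters the plan as a CHAR literal and is then cast to the target column type. A minimal standalone sketch of that pattern against vanilla Calcite; the JavaTypeFactoryImpl, the sample value, and the VARCHAR(20) target type are illustrative assumptions, not taken from the Flink code (the NlsString construction mirrors what HiveParserUtils.asUnicodeString does):

import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlCollation;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.ConversionUtil;
import org.apache.calcite.util.NlsString;

public class StaticPartitionLiteralSketch {
    public static void main(String[] args) {
        RexBuilder rexBuilder = new RexBuilder(new JavaTypeFactoryImpl());
        // Wrap the raw partition value in an NlsString with an explicit
        // UTF-16 charset, as asUnicodeString does in the example above.
        NlsString partValue = new NlsString(
                "2024-01-01", ConversionUtil.NATIVE_UTF16_CHARSET_NAME, SqlCollation.IMPLICIT);
        RexNode literal = rexBuilder.makeCharLiteral(partValue);
        // Cast the CHAR literal to the partition column's type; VARCHAR(20)
        // stands in for targetColToType.get(...).
        RelDataType targetType =
                rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, 20);
        RexNode casted = rexBuilder.makeAbstractCast(targetType, literal);
        System.out.println(casted);
    }
}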

Example 97 with RexBuilder

use of org.apache.calcite.rex.RexBuilder in project flink by apache.

the class HiveParserRexNodeConverter method convertConstant.

public static RexNode convertConstant(ExprNodeConstantDesc literal, RelOptCluster cluster) throws SemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    RelDataType calciteDataType = HiveParserTypeConverter.convert(hiveType, dtFactory);
    PrimitiveObjectInspector.PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
    RexNode calciteLiteral;
    HiveShim hiveShim = HiveParserUtils.getSessionHiveShim();
    // If value is null, the type should also be VOID.
    if (value == null) {
        hiveTypeCategory = PrimitiveObjectInspector.PrimitiveCategory.VOID;
    }
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch(hiveTypeCategory) {
        case BOOLEAN:
            calciteLiteral = rexBuilder.makeLiteral((Boolean) value);
            break;
        case BYTE:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
            break;
        case SHORT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
            break;
        case INT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
            break;
        case LONG:
            calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
            break;
        // TODO: is Decimal an exact numeric or approximate numeric?
        case DECIMAL:
            if (value instanceof HiveDecimal) {
                value = ((HiveDecimal) value).bigDecimalValue();
            } else if (value instanceof Decimal128) {
                value = ((Decimal128) value).toBigDecimal();
            }
            if (value == null) {
                // For now, we will not run CBO in the presence of invalid decimal literals.
                throw new SemanticException("Expression " + literal.getExprString() + " is not a valid decimal");
            // TODO: return createNullLiteral(literal);
            }
            BigDecimal bd = (BigDecimal) value;
            BigInteger unscaled = bd.unscaledValue();
            if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
                calciteLiteral = rexBuilder.makeExactLiteral(bd);
            } else {
                // CBO doesn't support unlimited precision decimals. In practice, this
                // will work...
                // An alternative would be to throw CboSemanticException and fall back
                // to no CBO.
                RelDataType relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, unscaled.toString().length(), bd.scale());
                calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
            }
            break;
        case FLOAT:
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Float.toString((Float) value)), calciteDataType);
            break;
        case DOUBLE:
            // TODO: The best solution is to support NaN in expression reduction.
            if (Double.isNaN((Double) value)) {
                throw new SemanticException("NaN");
            }
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Double.toString((Double) value)), calciteDataType);
            break;
        case CHAR:
            if (value instanceof HiveChar) {
                value = ((HiveChar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case VARCHAR:
            if (value instanceof HiveVarchar) {
                value = ((HiveVarchar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case STRING:
            Object constantDescVal = literal.getValue();
            constantDescVal = constantDescVal instanceof NlsString ? constantDescVal : asUnicodeString((String) value);
            // Calcite treats string literals as CHAR; treat them as STRING here, just as Hive does
            RelDataType type = HiveParserTypeConverter.convert(hiveType, dtFactory);
            // if we get here, the value is not null
            type = dtFactory.createTypeWithNullability(type, false);
            calciteLiteral = rexBuilder.makeLiteral(constantDescVal, type, true);
            break;
        case DATE:
            LocalDate localDate = HiveParserUtils.getSessionHiveShim().toFlinkDate(value);
            DateString dateString = new DateString(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth());
            calciteLiteral = rexBuilder.makeDateLiteral(dateString);
            break;
        case TIMESTAMP:
            TimestampString timestampString;
            if (value instanceof Calendar) {
                timestampString = TimestampString.fromCalendarFields((Calendar) value);
            } else {
                LocalDateTime localDateTime = HiveParserUtils.getSessionHiveShim().toFlinkTimestamp(value);
                timestampString = new TimestampString(localDateTime.getYear(), localDateTime.getMonthValue(), localDateTime.getDayOfMonth(), localDateTime.getHour(), localDateTime.getMinute(), localDateTime.getSecond());
                timestampString = timestampString.withNanos(localDateTime.getNano());
            }
            // Hive always treats timestamps as having precision 9
            calciteLiteral = rexBuilder.makeTimestampLiteral(timestampString, 9);
            break;
        case VOID:
            calciteLiteral = cluster.getRexBuilder().makeLiteral(null, dtFactory.createSqlType(SqlTypeName.NULL), true);
            break;
        case BINARY:
        case UNKNOWN:
        default:
            if (hiveShim.isIntervalYearMonthType(hiveTypeCategory)) {
                // Calcite represents a year-month interval literal as total months in a BigDecimal
                BigDecimal totalMonths = BigDecimal.valueOf(((HiveParserIntervalYearMonth) value).getTotalMonths());
                calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths, new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            } else if (hiveShim.isIntervalDayTimeType(hiveTypeCategory)) {
                // Calcite represents a day-time interval literal as milliseconds in a BigDecimal
                // Seconds converted to millis
                BigDecimal secsValueBd = BigDecimal.valueOf(((HiveParserIntervalDayTime) value).getTotalSeconds() * 1000);
                // Nanos converted to millis
                BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveParserIntervalDayTime) value).getNanos(), 6);
                calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd), new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
            } else {
                throw new RuntimeException("UnSupported Literal type " + hiveTypeCategory);
            }
    }
    return calciteLiteral;
}
Also used : LocalDateTime(java.time.LocalDateTime) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) RelDataType(org.apache.calcite.rel.type.RelDataType) LocalDate(java.time.LocalDate) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) RexBuilder(org.apache.calcite.rex.RexBuilder) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) Calendar(java.util.Calendar) Decimal128(org.apache.hadoop.hive.common.type.Decimal128) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) BigDecimal(java.math.BigDecimal) DateString(org.apache.calcite.util.DateString) BigInteger(java.math.BigInteger) NlsString(org.apache.calcite.util.NlsString) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) TimestampString(org.apache.calcite.util.TimestampString) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) HiveParserIntervalDayTime(org.apache.flink.table.planner.delegation.hive.copy.HiveParserIntervalDayTime) RexNode(org.apache.calcite.rex.RexNode)
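
To see what a few of the switch arms produce in isolation, here is a minimal sketch against vanilla Calcite. The concrete values and the JavaTypeFactoryImpl are illustrative assumptions; note that the Flink code above passes timestamp precision 9, while the default Calcite type system caps datetime precision at 3, so the sketch uses 3:

import java.math.BigDecimal;

import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimestampString;

public class LiteralConversionSketch {
    public static void main(String[] args) {
        RexBuilder rexBuilder = new RexBuilder(new JavaTypeFactoryImpl());

        // INT arm: exact numeric literal from a BigDecimal.
        RexNode intLit = rexBuilder.makeExactLiteral(new BigDecimal(42));

        // DOUBLE arm: approximate literal with an explicit DOUBLE type.
        RexNode doubleLit = rexBuilder.makeApproxLiteral(
                new BigDecimal(Double.toString(3.14)),
                rexBuilder.getTypeFactory().createSqlType(SqlTypeName.DOUBLE));

        // DATE arm: DateString takes year/month/day directly.
        RexNode dateLit = rexBuilder.makeDateLiteral(new DateString(2024, 1, 31));

        // TIMESTAMP arm: sub-second digits go in via withNanos; precision 3
        // here (the default type system's maximum), where Hive would use 9.
        TimestampString ts = new TimestampString(2024, 1, 31, 12, 0, 0).withNanos(123456789);
        RexNode tsLit = rexBuilder.makeTimestampLiteral(ts, 3);

        System.out.println(intLit + " / " + doubleLit + " / " + dateLit + " / " + tsLit);
    }
}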

Example 98 with RexBuilder

use of org.apache.calcite.rex.RexBuilder in project flink by apache.

the class HiveParserBaseSemanticAnalyzer method genValues.

public static RelNode genValues(String tabAlias, Table tmpTable, HiveParserRowResolver rowResolver, RelOptCluster cluster, List<List<String>> values) {
    List<TypeInfo> tmpTableTypes = tmpTable.getCols().stream().map(f -> TypeInfoUtils.getTypeInfoFromTypeString(f.getType())).collect(Collectors.toList());
    RexBuilder rexBuilder = cluster.getRexBuilder();
    // calcite types for each field
    List<RelDataType> calciteTargetTypes = tmpTableTypes.stream().map(ti -> HiveParserTypeConverter.convert((PrimitiveTypeInfo) ti, rexBuilder.getTypeFactory())).collect(Collectors.toList());
    // calcite field names
    List<String> calciteFieldNames = IntStream.range(0, calciteTargetTypes.size()).mapToObj(SqlUtil::deriveAliasFromOrdinal).collect(Collectors.toList());
    // calcite type for each row
    List<RelDataType> calciteRowTypes = new ArrayList<>();
    List<List<RexLiteral>> rows = new ArrayList<>();
    for (List<String> value : values) {
        Preconditions.checkArgument(value.size() == tmpTableTypes.size(), String.format("Values table col length (%d) and data length (%d) mismatch", tmpTableTypes.size(), value.size()));
        List<RexLiteral> row = new ArrayList<>();
        for (int i = 0; i < tmpTableTypes.size(); i++) {
            PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) tmpTableTypes.get(i);
            RelDataType calciteType = calciteTargetTypes.get(i);
            String col = value.get(i);
            if (col == null) {
                row.add(rexBuilder.makeNullLiteral(calciteType));
            } else {
                switch(primitiveTypeInfo.getPrimitiveCategory()) {
                    case BYTE:
                    case SHORT:
                    case INT:
                    case LONG:
                        row.add(rexBuilder.makeExactLiteral(new BigDecimal(col), calciteType));
                        break;
                    case DECIMAL:
                        BigDecimal bigDec = new BigDecimal(col);
                        row.add(SqlTypeUtil.isValidDecimalValue(bigDec, calciteType) ? rexBuilder.makeExactLiteral(bigDec, calciteType) : rexBuilder.makeNullLiteral(calciteType));
                        break;
                    case FLOAT:
                    case DOUBLE:
                        row.add(rexBuilder.makeApproxLiteral(new BigDecimal(col), calciteType));
                        break;
                    case BOOLEAN:
                        row.add(rexBuilder.makeLiteral(Boolean.parseBoolean(col)));
                        break;
                    default:
                        row.add(rexBuilder.makeCharLiteral(HiveParserUtils.asUnicodeString(col)));
                }
            }
        }
        calciteRowTypes.add(rexBuilder.getTypeFactory().createStructType(row.stream().map(RexLiteral::getType).collect(Collectors.toList()), calciteFieldNames));
        rows.add(row);
    }
    // compute the final row type
    RelDataType calciteRowType = rexBuilder.getTypeFactory().leastRestrictive(calciteRowTypes);
    for (int i = 0; i < calciteFieldNames.size(); i++) {
        ColumnInfo colInfo = new ColumnInfo(calciteFieldNames.get(i), HiveParserTypeConverter.convert(calciteRowType.getFieldList().get(i).getType()), tabAlias, false);
        rowResolver.put(tabAlias, calciteFieldNames.get(i), colInfo);
    }
    return HiveParserUtils.genValuesRelNode(cluster, rexBuilder.getTypeFactory().createStructType(calciteRowType.getFieldList()), rows);
}
Also used : Tuple2(org.apache.flink.api.java.tuple.Tuple2) Order(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order) LoggerFactory(org.slf4j.LoggerFactory) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) FunctionRegistry(org.apache.hadoop.hive.ql.exec.FunctionRegistry) OrderSpec(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderSpec) StringUtils(org.apache.commons.lang3.StringUtils) HiveParserUtils(org.apache.flink.table.planner.delegation.hive.HiveParserUtils) SqlCall(org.apache.calcite.sql.SqlCall) BigDecimal(java.math.BigDecimal) CorrelationId(org.apache.calcite.rel.core.CorrelationId) SqlNode(org.apache.calcite.sql.SqlNode) HiveParserErrorMsg(org.apache.flink.table.planner.delegation.hive.parse.HiveParserErrorMsg) SqlUtil(org.apache.calcite.sql.SqlUtil) Pair(org.apache.commons.lang3.tuple.Pair) HiveParserTypeCheckProcFactory(org.apache.flink.table.planner.delegation.hive.HiveParserTypeCheckProcFactory) RexNode(org.apache.calcite.rex.RexNode) Map(java.util.Map) HiveParserUtils.removeASTChild(org.apache.flink.table.planner.delegation.hive.HiveParserUtils.removeASTChild) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) RelOptCluster(org.apache.calcite.plan.RelOptCluster) RexWindowBound(org.apache.calcite.rex.RexWindowBound) ImmutableBitSet(org.apache.calcite.util.ImmutableBitSet) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) HiveParserDDLSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.parse.HiveParserDDLSemanticAnalyzer) SqlKind(org.apache.calcite.sql.SqlKind) TreeVisitor(org.antlr.runtime.tree.TreeVisitor) RexLiteral(org.apache.calcite.rex.RexLiteral) serdeConstants(org.apache.hadoop.hive.serde.serdeConstants) Set(java.util.Set) Preconditions(org.apache.flink.util.Preconditions) SessionState(org.apache.hadoop.hive.ql.session.SessionState) Collectors(java.util.stream.Collectors) PartitionSpec(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitionSpec) Serializable(java.io.Serializable) VirtualColumn(org.apache.hadoop.hive.ql.metadata.VirtualColumn) List(java.util.List) GenericUDAFEvaluator(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator) ObjectInspectorConverters(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters) UnsupportedEncodingException(java.io.UnsupportedEncodingException) HiveASTParser(org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser) ErrorMsg(org.apache.hadoop.hive.ql.ErrorMsg) OrderExpression(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression) RexCall(org.apache.calcite.rex.RexCall) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) IntStream(java.util.stream.IntStream) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) PartitioningSpec(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitioningSpec) HashMap(java.util.HashMap) Deque(java.util.Deque) NullOrder(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.NullOrder) ArrayList(java.util.ArrayList) Utilities(org.apache.hadoop.hive.ql.exec.Utilities) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) RexFieldCollation(org.apache.calcite.rex.RexFieldCollation) SqlLiteral(org.apache.calcite.sql.SqlLiteral) PartitionExpression(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitionExpression) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) SqlWindow(org.apache.calcite.sql.SqlWindow) TreeVisitorAction(org.antlr.runtime.tree.TreeVisitorAction) PlanUtils(org.apache.hadoop.hive.ql.plan.PlanUtils) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RelDataType(org.apache.calcite.rel.type.RelDataType) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) Hive(org.apache.hadoop.hive.ql.metadata.Hive) WindowingSpec(org.apache.hadoop.hive.ql.parse.WindowingSpec) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) Logger(org.slf4j.Logger) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) TypeInfoFactory(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory) Iterator(java.util.Iterator) RexBuilder(org.apache.calcite.rex.RexBuilder) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) GroupByDesc(org.apache.hadoop.hive.ql.plan.GroupByDesc) HiveConf(org.apache.hadoop.hive.conf.HiveConf) Table(org.apache.hadoop.hive.ql.metadata.Table) SqlTypeUtil(org.apache.calcite.sql.type.SqlTypeUtil) RelNode(org.apache.calcite.rel.RelNode) FrameworkConfig(org.apache.calcite.tools.FrameworkConfig) Node(org.apache.hadoop.hive.ql.lib.Node) Partition(org.apache.hadoop.hive.ql.metadata.Partition) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) Tree(org.antlr.runtime.tree.Tree) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) HiveParserRexNodeConverter(org.apache.flink.table.planner.delegation.hive.HiveParserRexNodeConverter) ObjectPair(org.apache.hadoop.hive.common.ObjectPair) HiveParserConstants(org.apache.flink.table.planner.delegation.hive.HiveParserConstants) BitSet(java.util.BitSet) ArrayDeque(java.util.ArrayDeque) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException) Collections(java.util.Collections)
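
The distinctive step in genValues is rexBuilder.getTypeFactory().leastRestrictive, which widens the per-row struct types into one common row type for the whole VALUES clause. A minimal sketch of that step, assuming vanilla Calcite and two hypothetical one-column rows whose types disagree:

import java.util.Arrays;

import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.type.SqlTypeName;

public class LeastRestrictiveSketch {
    public static void main(String[] args) {
        RelDataTypeFactory typeFactory = new JavaTypeFactoryImpl();
        // Two per-row struct types whose single column disagrees:
        // INTEGER in one row, DECIMAL(10, 2) in the other. "EXPR$0" mimics
        // the ordinal-derived aliases used in the example above.
        RelDataType rowA = typeFactory.createStructType(
                Arrays.asList(typeFactory.createSqlType(SqlTypeName.INTEGER)),
                Arrays.asList("EXPR$0"));
        RelDataType rowB = typeFactory.createStructType(
                Arrays.asList(typeFactory.createSqlType(SqlTypeName.DECIMAL, 10, 2)),
                Arrays.asList("EXPR$0"));
        // leastRestrictive widens each field, as genValues does across rows.
        RelDataType common = typeFactory.leastRestrictive(Arrays.asList(rowA, rowB));
        System.out.println(common.getFullTypeString());
    }
}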

Example 99 with RexBuilder

use of org.apache.calcite.rex.RexBuilder in project flink by apache.

the class HiveParserTypeConverter method getType.

public static RelDataType getType(RelOptCluster cluster, HiveParserRowResolver rr, List<String> neededCols) throws SemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    RowSchema rs = rr.getRowSchema();
    List<RelDataType> fieldTypes = new LinkedList<>();
    List<String> fieldNames = new LinkedList<>();
    for (ColumnInfo ci : rs.getSignature()) {
        if (neededCols == null || neededCols.contains(ci.getInternalName())) {
            fieldTypes.add(convert(ci.getType(), dtFactory));
            fieldNames.add(ci.getInternalName());
        }
    }
    return dtFactory.createStructType(fieldTypes, fieldNames);
}
Also used : RowSchema(org.apache.hadoop.hive.ql.exec.RowSchema) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) RexBuilder(org.apache.calcite.rex.RexBuilder) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RelDataType(org.apache.calcite.rel.type.RelDataType) LinkedList(java.util.LinkedList)
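
The method is essentially a filter-then-createStructType loop: keep the columns named in neededCols, convert each Hive type, and build a struct row type. A minimal sketch of the same shape, with a hypothetical schema and neededCols list standing in for the HiveParserRowResolver:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.type.SqlTypeName;

public class ProjectedRowTypeSketch {
    public static void main(String[] args) {
        RelDataTypeFactory dtFactory = new JavaTypeFactoryImpl();
        // Hypothetical column names and (already converted) Calcite types.
        List<String> allNames = Arrays.asList("id", "name", "price");
        List<RelDataType> allTypes = Arrays.asList(
                dtFactory.createSqlType(SqlTypeName.BIGINT),
                dtFactory.createSqlType(SqlTypeName.VARCHAR, 64),
                dtFactory.createSqlType(SqlTypeName.DECIMAL, 10, 2));
        List<String> neededCols = Arrays.asList("id", "price");

        List<RelDataType> fieldTypes = new ArrayList<>();
        List<String> fieldNames = new ArrayList<>();
        for (int i = 0; i < allNames.size(); i++) {
            // Keep only the requested columns, like the neededCols check above.
            if (neededCols == null || neededCols.contains(allNames.get(i))) {
                fieldTypes.add(allTypes.get(i));
                fieldNames.add(allNames.get(i));
            }
        }
        System.out.println(dtFactory.createStructType(fieldTypes, fieldNames));
    }
}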

Example 100 with RexBuilder

use of org.apache.calcite.rex.RexBuilder in project flink by apache.

the class CalcPythonCorrelateTransposeRule method onMatch.

@Override
public void onMatch(RelOptRuleCall call) {
    FlinkLogicalCorrelate correlate = call.rel(0);
    FlinkLogicalCalc right = call.rel(2);
    RexBuilder rexBuilder = call.builder().getRexBuilder();
    FlinkLogicalCalc mergedCalc = StreamPhysicalCorrelateRule.getMergedCalc(right);
    FlinkLogicalTableFunctionScan tableScan = StreamPhysicalCorrelateRule.getTableScan(mergedCalc);
    RexProgram mergedCalcProgram = mergedCalc.getProgram();
    InputRefRewriter inputRefRewriter = new InputRefRewriter(correlate.getRowType().getFieldCount() - mergedCalc.getRowType().getFieldCount());
    List<RexNode> correlateFilters = RelOptUtil.conjunctions(mergedCalcProgram.expandLocalRef(mergedCalcProgram.getCondition())).stream().map(x -> x.accept(inputRefRewriter)).collect(Collectors.toList());
    FlinkLogicalCorrelate newCorrelate = new FlinkLogicalCorrelate(correlate.getCluster(), correlate.getTraitSet(), correlate.getLeft(), tableScan, correlate.getCorrelationId(), correlate.getRequiredColumns(), correlate.getJoinType());
    RexNode topCalcCondition = RexUtil.composeConjunction(rexBuilder, correlateFilters);
    RexProgram rexProgram = new RexProgramBuilder(newCorrelate.getRowType(), rexBuilder).getProgram();
    FlinkLogicalCalc newTopCalc = new FlinkLogicalCalc(newCorrelate.getCluster(), newCorrelate.getTraitSet(), newCorrelate, RexProgram.create(newCorrelate.getRowType(), rexProgram.getExprList(), topCalcCondition, newCorrelate.getRowType(), rexBuilder));
    call.transformTo(newTopCalc);
}
Also used : PythonUtil(org.apache.flink.table.planner.plan.utils.PythonUtil) RexProgram(org.apache.calcite.rex.RexProgram) RexBuilder(org.apache.calcite.rex.RexBuilder) FlinkLogicalTableFunctionScan(org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalTableFunctionScan) FlinkLogicalCalc(org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalCalc) RelOptUtil(org.apache.calcite.plan.RelOptUtil) Collectors(java.util.stream.Collectors) RelOptRuleCall(org.apache.calcite.plan.RelOptRuleCall) FlinkLogicalRel(org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalRel) RelOptRule(org.apache.calcite.plan.RelOptRule) RexProgramBuilder(org.apache.calcite.rex.RexProgramBuilder) RexUtil(org.apache.calcite.rex.RexUtil) List(java.util.List) StreamPhysicalCorrelateRule(org.apache.flink.table.planner.plan.rules.physical.stream.StreamPhysicalCorrelateRule) RexNode(org.apache.calcite.rex.RexNode) JoinRelType(org.apache.calcite.rel.core.JoinRelType) FlinkLogicalCorrelate(org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalCorrelate) FlinkLogicalCorrelate(org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalCorrelate) FlinkLogicalCalc(org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalCalc) RexProgram(org.apache.calcite.rex.RexProgram) RexBuilder(org.apache.calcite.rex.RexBuilder) FlinkLogicalTableFunctionScan(org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalTableFunctionScan) RexProgramBuilder(org.apache.calcite.rex.RexProgramBuilder) RexNode(org.apache.calcite.rex.RexNode)
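
RexUtil.composeConjunction does the heavy lifting when the rewritten filters are reassembled on top of the new correlate: it ANDs the conditions together, flattening nested ANDs and dropping redundant TRUE operands. A minimal sketch with two hypothetical input-ref filters (the field indices and literals are illustrative):

import java.math.BigDecimal;
import java.util.Arrays;

import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeName;

public class ComposeConjunctionSketch {
    public static void main(String[] args) {
        RexBuilder rexBuilder = new RexBuilder(new JavaTypeFactoryImpl());
        RelDataType intType =
                rexBuilder.getTypeFactory().createSqlType(SqlTypeName.INTEGER);
        // Two hypothetical filters over input fields $0 and $1.
        RexNode f0 = rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN,
                rexBuilder.makeInputRef(intType, 0),
                rexBuilder.makeExactLiteral(BigDecimal.ZERO));
        RexNode f1 = rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN,
                rexBuilder.makeInputRef(intType, 1),
                rexBuilder.makeExactLiteral(BigDecimal.TEN));
        // Combine them into one condition: AND(>($0, 0), <($1, 10)).
        RexNode condition = RexUtil.composeConjunction(rexBuilder, Arrays.asList(f0, f1));
        System.out.println(condition);
    }
}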

Aggregations

RexBuilder (org.apache.calcite.rex.RexBuilder): 314
RexNode (org.apache.calcite.rex.RexNode): 248
ArrayList (java.util.ArrayList): 151
RelNode (org.apache.calcite.rel.RelNode): 121
RelDataType (org.apache.calcite.rel.type.RelDataType): 121
RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField): 77
ImmutableBitSet (org.apache.calcite.util.ImmutableBitSet): 59
RexInputRef (org.apache.calcite.rex.RexInputRef): 49
RelBuilder (org.apache.calcite.tools.RelBuilder): 49
AggregateCall (org.apache.calcite.rel.core.AggregateCall): 48
List (java.util.List): 41
RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory): 41
RelOptCluster (org.apache.calcite.plan.RelOptCluster): 36
HashMap (java.util.HashMap): 33
RelOptPredicateList (org.apache.calcite.plan.RelOptPredicateList): 24
Project (org.apache.calcite.rel.core.Project): 24
RexCall (org.apache.calcite.rex.RexCall): 24
BigDecimal (java.math.BigDecimal): 23
Collectors (java.util.stream.Collectors): 23
RelMetadataQuery (org.apache.calcite.rel.metadata.RelMetadataQuery): 22