
Example 1 with FlinkRelBuilder

Use of org.apache.flink.table.planner.calcite.FlinkRelBuilder in the Apache Flink project.

From the class SubQueryDecorrelator, method decorrelateQuery.

/**
 * Decorrelates a subquery.
 *
 * <p>This is the main entry point to {@code SubQueryDecorrelator}.
 *
 * @param rootRel The root node of the plan that contains SubQuery expressions.
 * @return The decorrelation result, {@code Result.EMPTY} if there is no correlation, or
 *     {@code null} if the query cannot be decorrelated.
 */
public static Result decorrelateQuery(RelNode rootRel) {
    int maxCnfNodeCount = FlinkRelOptUtil.getMaxCnfNodeCount(rootRel);
    final CorelMapBuilder builder = new CorelMapBuilder(maxCnfNodeCount);
    final CorelMap corelMap = builder.build(rootRel);
    if (builder.hasNestedCorScope || builder.hasUnsupportedCorCondition) {
        // nested correlated scopes or unsupported correlated conditions: give up decorrelation
        return null;
    }
    if (!corelMap.hasCorrelation()) {
        // no correlation at all, nothing to decorrelate
        return Result.EMPTY;
    }
    RelOptCluster cluster = rootRel.getCluster();
    RelBuilder relBuilder = new FlinkRelBuilder(cluster.getPlanner().getContext(), cluster, null);
    RexBuilder rexBuilder = cluster.getRexBuilder();
    final SubQueryDecorrelator decorrelator = new SubQueryDecorrelator(new SubQueryRelDecorrelator(corelMap, relBuilder, rexBuilder, maxCnfNodeCount), relBuilder);
    rootRel.accept(decorrelator);
    return new Result(decorrelator.subQueryMap);
}
Also used : RelOptCluster(org.apache.calcite.plan.RelOptCluster) RelBuilder(org.apache.calcite.tools.RelBuilder) FlinkRelBuilder(org.apache.flink.table.planner.calcite.FlinkRelBuilder) RexBuilder(org.apache.calcite.rex.RexBuilder)
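
A caller has to distinguish the three outcomes above: a populated Result, Result.EMPTY for plans without correlation, and null for unsupported correlation patterns. The sketch below is a hypothetical guard method, not Flink's actual call site; the class and method names are assumptions, and it relies only on the return contract visible in the snippet.

import org.apache.calcite.rel.RelNode;

/** Hypothetical helper; illustrates only the return contract shown in the snippet above. */
final class DecorrelationGuard {

    /** Returns true if the plan either has no correlation or can be fully decorrelated. */
    static boolean canHandle(RelNode rootRel) {
        // decorrelateQuery returns null for unsupported correlation patterns
        // (nested correlated scopes or unsupported correlated conditions),
        // and Result.EMPTY when the plan contains no correlation at all.
        final SubQueryDecorrelator.Result result = SubQueryDecorrelator.decorrelateQuery(rootRel);
        return result != null;
    }
}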

Example 2 with FlinkRelBuilder

Use of org.apache.flink.table.planner.calcite.FlinkRelBuilder in the Apache Flink project.

From the class DynamicSourceUtils, method pushGeneratedProjection.

/**
 * Creates a projection that adds computed columns and finalizes the table schema.
 */
private static void pushGeneratedProjection(FlinkRelBuilder relBuilder, ResolvedSchema schema) {
    final ExpressionConverter converter = new ExpressionConverter(relBuilder);
    final List<RexNode> projection = schema.getColumns().stream().map(c -> {
        if (c instanceof ComputedColumn) {
            final ComputedColumn computedColumn = (ComputedColumn) c;
            return computedColumn.getExpression().accept(converter);
        } else {
            return relBuilder.field(c.getName());
        }
    }).collect(Collectors.toList());
    relBuilder.projectNamed(projection, schema.getColumns().stream().map(Column::getName).collect(Collectors.toList()), true);
}
Also used : DataType(org.apache.flink.table.types.DataType) ScanRuntimeProvider(org.apache.flink.table.connector.source.ScanTableSource.ScanRuntimeProvider) ChangelogMode(org.apache.flink.table.connector.ChangelogMode) Column(org.apache.flink.table.catalog.Column) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) ShortcutUtils(org.apache.flink.table.planner.utils.ShortcutUtils) RowType(org.apache.flink.table.types.logical.RowType) ScanTableSource(org.apache.flink.table.connector.source.ScanTableSource) FlinkRelBuilder(org.apache.flink.table.planner.calcite.FlinkRelBuilder) MetadataColumn(org.apache.flink.table.catalog.Column.MetadataColumn) ReadableConfig(org.apache.flink.configuration.ReadableConfig) RexNode(org.apache.calcite.rex.RexNode) RowField(org.apache.flink.table.types.logical.RowType.RowField) RelHint(org.apache.calcite.rel.hint.RelHint) Map(java.util.Map) LogicalTypeCasts.supportsExplicitCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsExplicitCast) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) RelDataType(org.apache.calcite.rel.type.RelDataType) DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource) TableConfig(org.apache.flink.table.api.TableConfig) WatermarkSpec(org.apache.flink.table.catalog.WatermarkSpec) RexBuilder(org.apache.calcite.rex.RexBuilder) TableException(org.apache.flink.table.api.TableException) Set(java.util.Set) ExpressionConverter(org.apache.flink.table.planner.expressions.converter.ExpressionConverter) RelNode(org.apache.calcite.rel.RelNode) Collectors(java.util.stream.Collectors) SourceAbilitySpec(org.apache.flink.table.planner.plan.abilities.source.SourceAbilitySpec) TableSourceTable(org.apache.flink.table.planner.plan.schema.TableSourceTable) ComputedColumn(org.apache.flink.table.catalog.Column.ComputedColumn) DataStream(org.apache.flink.streaming.api.datastream.DataStream) List(java.util.List) Stream(java.util.stream.Stream) LogicalType(org.apache.flink.table.types.logical.LogicalType) FlinkStatistic(org.apache.flink.table.planner.plan.stats.FlinkStatistic) RowKind(org.apache.flink.types.RowKind) ValidationException(org.apache.flink.table.api.ValidationException) SupportsReadingMetadata(org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata) Internal(org.apache.flink.annotation.Internal) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) ExecutionConfigOptions(org.apache.flink.table.api.config.ExecutionConfigOptions) Collections(java.util.Collections) ScanRuntimeProviderContext(org.apache.flink.table.runtime.connector.source.ScanRuntimeProviderContext) LogicalTableScan(org.apache.calcite.rel.logical.LogicalTableScan)
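
The method above mixes plain field references (for physical columns) with converted expressions (for computed columns) and then forces the final names with projectNamed. The sketch below reproduces the same pattern with a plain Calcite RelBuilder; the column names and the PLUS expression are assumptions chosen only to illustrate the projectNamed call.

import java.util.Arrays;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.tools.RelBuilder;

/** Hypothetical sketch of the projectNamed pattern; column names are illustrative only. */
final class GeneratedProjectionSketch {

    /** Appends a derived column "total" = amount + fee on top of the builder's current input. */
    static void appendComputedColumn(RelBuilder relBuilder) {
        final RexNode amount = relBuilder.field("amount"); // physical column, kept as a field reference
        final RexNode fee = relBuilder.field("fee");       // physical column, kept as a field reference
        final RexNode total =                              // "computed column": a derived expression
                relBuilder.call(SqlStdOperatorTable.PLUS, amount, fee);
        // force = true mirrors the call in pushGeneratedProjection: the projection is
        // emitted even if it only renames or reorders the input fields.
        relBuilder.projectNamed(
                Arrays.asList(amount, fee, total),
                Arrays.asList("amount", "fee", "total"),
                true);
    }
}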

Example 3 with FlinkRelBuilder

Use of org.apache.flink.table.planner.calcite.FlinkRelBuilder in the Apache Flink project.

From the class PlannerContext, method createRelBuilder.

/**
 * Creates a configured {@link FlinkRelBuilder} for a planning session.
 *
 * @param currentCatalog the current default catalog in which objects are looked up first during planning.
 * @param currentDatabase the current default database in which objects are looked up first during planning.
 * @return a configured rel builder
 */
public FlinkRelBuilder createRelBuilder(String currentCatalog, String currentDatabase) {
    FlinkCalciteCatalogReader relOptSchema = createCatalogReader(false, currentCatalog, currentDatabase);
    Context chain = Contexts.of(context, // Sets up the ViewExpander explicitly for FlinkRelBuilder.
    createFlinkPlanner(currentCatalog, currentDatabase).createToRelContext());
    return new FlinkRelBuilder(chain, cluster, relOptSchema);
}
Also used : Context(org.apache.calcite.plan.Context) FlinkContext(org.apache.flink.table.planner.calcite.FlinkContext) FlinkRelBuilder(org.apache.flink.table.planner.calcite.FlinkRelBuilder) FlinkCalciteCatalogReader(org.apache.flink.table.planner.plan.FlinkCalciteCatalogReader)
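
The notable detail above is the Contexts.of chain: the planner's own context and a freshly created ToRelContext (which carries the ViewExpander) are composed into a single Context that the FlinkRelBuilder can later unwrap. The sketch below shows only that chaining mechanism with Calcite's Context API; the class and method names are assumptions.

import org.apache.calcite.plan.Context;
import org.apache.calcite.plan.Contexts;

/** Hypothetical sketch of the Contexts.of chaining used in createRelBuilder. */
final class ContextChainSketch {

    /** Composes two contexts into one; lookups via unwrap(...) consult them in order. */
    static Context chain(Context plannerContext, Context toRelContext) {
        // Contexts.of builds a composite Context. A later chained.unwrap(SomeType.class)
        // returns the first chained entry assignable to SomeType, which is how the
        // FlinkRelBuilder recovers the ViewExpander set up above.
        return Contexts.of(plannerContext, toRelContext);
    }
}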

Example 4 with FlinkRelBuilder

Use of org.apache.flink.table.planner.calcite.FlinkRelBuilder in the Apache Flink project.

From the class InConverter, method convert.

@Override
public RexNode convert(CallExpression call, CallExpressionConvertRule.ConvertContext context) {
    checkArgument(call, call.getChildren().size() > 1);
    Expression headExpr = call.getChildren().get(1);
    if (headExpr instanceof TableReferenceExpression) {
        // IN against a sub-query: translate the referenced QueryOperation into a RelNode
        // via FlinkRelBuilder and wrap it in a RexSubQuery.
        QueryOperation tableOperation = ((TableReferenceExpression) headExpr).getQueryOperation();
        RexNode child = context.toRexNode(call.getChildren().get(0));
        return RexSubQuery.in(((FlinkRelBuilder) context.getRelBuilder()).queryOperation(tableOperation).build(), ImmutableList.of(child));
    } else {
        // IN against a list of values: convert all operands and build a plain IN call.
        List<RexNode> child = toRexNodes(context, call.getChildren());
        return context.getRelBuilder().getRexBuilder().makeIn(child.get(0), child.subList(1, child.size()));
    }
}
Also used : CallExpression(org.apache.flink.table.expressions.CallExpression) Expression(org.apache.flink.table.expressions.Expression) TableReferenceExpression(org.apache.flink.table.expressions.TableReferenceExpression) FlinkRelBuilder(org.apache.flink.table.planner.calcite.FlinkRelBuilder) QueryOperation(org.apache.flink.table.operations.QueryOperation) RexNode(org.apache.calcite.rex.RexNode)
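
The two branches above correspond to the two shapes of IN in the Table API: IN against another table becomes a RexSubQuery, while IN against a list of values becomes a plain IN call. The sketch below shows hypothetical Table API calls that would exercise each branch; the table and column names are assumptions for illustration.

import static org.apache.flink.table.api.Expressions.$;

import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

/** Hypothetical Table API usage; table and column names are illustrative only. */
final class InConversionSketch {

    static void buildFilters(TableEnvironment tableEnv) {
        final Table orders = tableEnv.from("Orders");
        final Table vipIds = tableEnv.from("VipCustomers").select($("id"));

        // IN against another table: the TableReferenceExpression branch above,
        // converted to RexSubQuery.in(...).
        final Table subQueryIn = orders.filter($("customer_id").in(vipIds));

        // IN against literal values: the value-list branch above,
        // converted via RexBuilder.makeIn(...).
        final Table valueListIn = orders.filter($("status").in("NEW", "PAID", "SHIPPED"));
    }
}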

Example 5 with FlinkRelBuilder

Use of org.apache.flink.table.planner.calcite.FlinkRelBuilder in the Apache Flink project.

From the class ProjectWatermarkAssignerTransposeRule, method onMatch.

@Override
public void onMatch(RelOptRuleCall call) {
    LogicalProject project = call.rel(0);
    LogicalWatermarkAssigner watermarkAssigner = call.rel(1);
    // NOTES: DON'T use the nestedSchema datatype to build the transposed project.
    NestedSchema nestedSchema = getUsedFieldsInTopLevelProjectAndWatermarkAssigner(project, watermarkAssigner);
    FlinkRelBuilder builder = (FlinkRelBuilder) call.builder().push(watermarkAssigner.getInput());
    List<RexInputRef> transposedProjects = new LinkedList<>();
    List<String> usedNames = new LinkedList<>();
    // add the RexInputRef and name of every used column to the lists
    for (NestedColumn column : nestedSchema.columns().values()) {
        // record the column's position in the transposed project and mark it as a used leaf
        column.setIndexOfLeafInNewSchema(transposedProjects.size());
        column.markLeaf();
        usedNames.add(column.name());
        transposedProjects.add(builder.field(column.indexInOriginSchema()));
    }
    // get the rowtime field index in the transposed project
    String rowTimeName = watermarkAssigner.getRowType().getFieldNames().get(watermarkAssigner.rowtimeFieldIndex());
    int indexOfRowTimeInTransposedProject;
    if (nestedSchema.columns().get(rowTimeName) == null) {
        // push the RexInputRef of the rowtime into the list
        int rowTimeIndexInInput = watermarkAssigner.rowtimeFieldIndex();
        indexOfRowTimeInTransposedProject = transposedProjects.size();
        transposedProjects.add(builder.field(rowTimeIndexInInput));
        usedNames.add(rowTimeName);
    } else {
        // find rowtime ref in the list and mark the location
        indexOfRowTimeInTransposedProject = nestedSchema.columns().get(rowTimeName).indexOfLeafInNewSchema();
    }
    // build the transposed project below the watermark assigner (the rowtime column carries no rowtime indicator yet at this point)
    builder.project(transposedProjects, usedNames);
    // rewrite the top level field reference
    RexNode newWatermarkExpr = watermarkAssigner.watermarkExpr().accept(new RexShuttle() {

        @Override
        public RexNode visitInputRef(RexInputRef inputRef) {
            String fieldName = watermarkAssigner.getRowType().getFieldNames().get(inputRef.getIndex());
            return builder.field(nestedSchema.columns().get(fieldName).indexOfLeafInNewSchema());
        }
    });
    builder.watermark(indexOfRowTimeInTransposedProject, newWatermarkExpr);
    List<RexNode> newProjects = NestedProjectionUtil.rewrite(project.getProjects(), nestedSchema, call.builder().getRexBuilder());
    RelNode newProject = builder.project(newProjects, project.getRowType().getFieldNames()).build();
    call.transformTo(newProject);
}
Also used : RexShuttle(org.apache.calcite.rex.RexShuttle) NestedColumn(org.apache.flink.table.planner.plan.utils.NestedColumn) LinkedList(java.util.LinkedList) RelNode(org.apache.calcite.rel.RelNode) LogicalWatermarkAssigner(org.apache.flink.table.planner.plan.nodes.calcite.LogicalWatermarkAssigner) FlinkRelBuilder(org.apache.flink.table.planner.calcite.FlinkRelBuilder) RexInputRef(org.apache.calcite.rex.RexInputRef) LogicalProject(org.apache.calcite.rel.logical.LogicalProject) NestedSchema(org.apache.flink.table.planner.plan.utils.NestedSchema) RexNode(org.apache.calcite.rex.RexNode)
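
The rule fires on a LogicalProject sitting directly on a LogicalWatermarkAssigner and pushes a narrowed projection below the assigner, keeping the rowtime column available and rewriting the watermark expression against the new field order. The snippet below is a hypothetical end-to-end scenario that produces such a plan; the table definition, connector, and query are assumptions for illustration.

import org.apache.flink.table.api.TableEnvironment;

/** Hypothetical scenario that produces a Project-on-WatermarkAssigner plan. */
final class WatermarkTransposeScenario {

    static void run(TableEnvironment tableEnv) {
        // A watermarked source; the connector and schema are illustrative only.
        tableEnv.executeSql(
                "CREATE TABLE Events ("
                        + "  id BIGINT,"
                        + "  payload ROW<a INT, b STRING>,"
                        + "  ts TIMESTAMP(3),"
                        + "  WATERMARK FOR ts AS ts - INTERVAL '5' SECOND"
                        + ") WITH ('connector' = 'datagen')");

        // The projection reads only `id` and the nested field `payload.a`. During
        // optimization the rule above pushes a projection with those fields (plus the
        // rowtime column `ts`, which the watermark expression still needs) below the
        // LogicalWatermarkAssigner and rewrites the watermark expression accordingly.
        System.out.println(tableEnv.sqlQuery("SELECT id, payload.a FROM Events").explain());
    }
}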

Aggregations

FlinkRelBuilder (org.apache.flink.table.planner.calcite.FlinkRelBuilder) 8
RexNode (org.apache.calcite.rex.RexNode) 5
RelNode (org.apache.calcite.rel.RelNode) 4
RelHint (org.apache.calcite.rel.hint.RelHint) 4
RexBuilder (org.apache.calcite.rex.RexBuilder) 4
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable) 4
Collections (java.util.Collections) 3
List (java.util.List) 3
Map (java.util.Map) 3
Set (java.util.Set) 3
Collectors (java.util.stream.Collectors) 3
Stream (java.util.stream.Stream) 3
RelDataType (org.apache.calcite.rel.type.RelDataType) 3
Internal (org.apache.flink.annotation.Internal) 3
ReadableConfig (org.apache.flink.configuration.ReadableConfig) 3
DataStream (org.apache.flink.streaming.api.datastream.DataStream) 3
TableException (org.apache.flink.table.api.TableException) 3
ValidationException (org.apache.flink.table.api.ValidationException) 3
ExecutionConfigOptions (org.apache.flink.table.api.config.ExecutionConfigOptions) 3
Column (org.apache.flink.table.catalog.Column) 3