
Example 16 with FlinkTypeFactory

Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.

From the class PushProjectIntoTableSourceScanRule, method getPrimaryKeyProjections:

private List<RexNode> getPrimaryKeyProjections(LogicalTableScan scan) {
    final TableSourceTable source = scan.getTable().unwrap(TableSourceTable.class);
    final ResolvedSchema schema = source.contextResolvedTable().getResolvedSchema();
    if (!schema.getPrimaryKey().isPresent()) {
        return Collections.emptyList();
    }
    final FlinkTypeFactory typeFactory = unwrapTypeFactory(scan);
    final UniqueConstraint primaryKey = schema.getPrimaryKey().get();
    return primaryKey.getColumns().stream()
            .map(columnName -> {
                // Resolve the key column's position within the scan's row type
                final int idx = scan.getRowType().getFieldNames().indexOf(columnName);
                final Column column = schema.getColumn(idx)
                        .orElseThrow(() -> new TableException(
                                String.format("Column at index %d not found.", idx)));
                // Reference the key column using its Calcite field type
                return new RexInputRef(
                        idx,
                        typeFactory.createFieldTypeFromLogicalType(
                                column.getDataType().getLogicalType()));
            })
            .collect(Collectors.toList());
}
Also used : IntStream(java.util.stream.IntStream) NestedProjectionUtil(org.apache.flink.table.planner.plan.utils.NestedProjectionUtil) Arrays(java.util.Arrays) ShortcutUtils.unwrapTypeFactory(org.apache.flink.table.planner.utils.ShortcutUtils.unwrapTypeFactory) SourceAbilityContext(org.apache.flink.table.planner.plan.abilities.source.SourceAbilityContext) Column(org.apache.flink.table.catalog.Column) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) RexNodeExtractor(org.apache.flink.table.planner.plan.utils.RexNodeExtractor) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) RowType(org.apache.flink.table.types.logical.RowType) SupportsProjectionPushDown(org.apache.flink.table.connector.source.abilities.SupportsProjectionPushDown) ArrayList(java.util.ArrayList) RexNode(org.apache.calcite.rex.RexNode) NestedSchema(org.apache.flink.table.planner.plan.utils.NestedSchema) Projection(org.apache.flink.table.connector.Projection) ProjectRemoveRule(org.apache.calcite.rel.rules.ProjectRemoveRule) DynamicSourceUtils.createProducedType(org.apache.flink.table.planner.connectors.DynamicSourceUtils.createProducedType) RelDataType(org.apache.calcite.rel.type.RelDataType) DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource) TableConfig(org.apache.flink.table.api.TableConfig) LogicalProject(org.apache.calcite.rel.logical.LogicalProject) ProjectPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.ProjectPushDownSpec) TableException(org.apache.flink.table.api.TableException) ShortcutUtils.unwrapContext(org.apache.flink.table.planner.utils.ShortcutUtils.unwrapContext) RelRule(org.apache.calcite.plan.RelRule) NestedColumn(org.apache.flink.table.planner.plan.utils.NestedColumn) Collectors(java.util.stream.Collectors) DynamicSourceUtils.createRequiredMetadataKeys(org.apache.flink.table.planner.connectors.DynamicSourceUtils.createRequiredMetadataKeys) SourceAbilitySpec(org.apache.flink.table.planner.plan.abilities.source.SourceAbilitySpec) TableSourceTable(org.apache.flink.table.planner.plan.schema.TableSourceTable) RelOptRuleCall(org.apache.calcite.plan.RelOptRuleCall) RexInputRef(org.apache.calcite.rex.RexInputRef) Objects(java.util.Objects) DynamicSourceUtils(org.apache.flink.table.planner.connectors.DynamicSourceUtils) RelOptRule(org.apache.calcite.plan.RelOptRule) List(java.util.List) Stream(java.util.stream.Stream) UniqueConstraint(org.apache.flink.table.catalog.UniqueConstraint) SupportsReadingMetadata(org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata) ReadingMetadataSpec(org.apache.flink.table.planner.plan.abilities.source.ReadingMetadataSpec) Internal(org.apache.flink.annotation.Internal) Collections(java.util.Collections) LogicalTableScan(org.apache.calcite.rel.logical.LogicalTableScan)
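
The heart of this example is FlinkTypeFactory#createFieldTypeFromLogicalType, which maps a Flink LogicalType to a Calcite RelDataType. A minimal standalone sketch of that conversion, assuming a typeFactory obtained via ShortcutUtils.unwrapTypeFactory; the field names and types here are illustrative, not taken from the rule above:

private static RelDataType toCalciteRowType(FlinkTypeFactory typeFactory) {
    // Hypothetical field types; any LogicalType converts the same way.
    final RelDataType idType = typeFactory.createFieldTypeFromLogicalType(new IntType(false));
    final RelDataType nameType = typeFactory.createFieldTypeFromLogicalType(VarCharType.STRING_TYPE);
    // Assemble a Calcite row type from the converted field types.
    return typeFactory.builder().add("id", idType).add("name", nameType).build();
}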

Example 17 with FlinkTypeFactory

Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.

From the class DynamicSinkUtils, method convertSinkToRel:

private static RelNode convertSinkToRel(
        FlinkRelBuilder relBuilder,
        RelNode input,
        Map<String, String> dynamicOptions,
        ContextResolvedTable contextResolvedTable,
        Map<String, String> staticPartitions,
        boolean isOverwrite,
        DynamicTableSink sink) {
    final DataTypeFactory dataTypeFactory = unwrapContext(relBuilder).getCatalogManager().getDataTypeFactory();
    final FlinkTypeFactory typeFactory = unwrapTypeFactory(relBuilder);
    final ResolvedSchema schema = contextResolvedTable.getResolvedSchema();
    final String tableDebugName = contextResolvedTable.getIdentifier().asSummaryString();
    List<SinkAbilitySpec> sinkAbilitySpecs = new ArrayList<>();
    // 1. prepare table sink
    prepareDynamicSink(tableDebugName, staticPartitions, isOverwrite, sink, contextResolvedTable.getResolvedTable(), sinkAbilitySpecs);
    sinkAbilitySpecs.forEach(spec -> spec.apply(sink));
    // 2. validate the query schema to the sink's table schema and apply cast if possible
    final RelNode query = validateSchemaAndApplyImplicitCast(input, schema, tableDebugName, dataTypeFactory, typeFactory);
    relBuilder.push(query);
    // 3. convert the sink's table schema to the consumed data type of the sink
    final List<Integer> metadataColumns = extractPersistedMetadataColumns(schema);
    if (!metadataColumns.isEmpty()) {
        pushMetadataProjection(relBuilder, typeFactory, schema, sink);
    }
    List<RelHint> hints = new ArrayList<>();
    if (!dynamicOptions.isEmpty()) {
        hints.add(RelHint.builder("OPTIONS").hintOptions(dynamicOptions).build());
    }
    final RelNode finalQuery = relBuilder.build();
    return LogicalSink.create(finalQuery, hints, contextResolvedTable, sink, staticPartitions, sinkAbilitySpecs.toArray(new SinkAbilitySpec[0]));
}
Also used : FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) RelNode(org.apache.calcite.rel.RelNode) ArrayList(java.util.ArrayList) DataTypeFactory(org.apache.flink.table.catalog.DataTypeFactory) SinkAbilitySpec(org.apache.flink.table.planner.plan.abilities.sink.SinkAbilitySpec) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) RelHint(org.apache.calcite.rel.hint.RelHint)
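
The hint-building step above wraps the dynamic table options in a Calcite OPTIONS hint before the plan is handed to LogicalSink.create. A reduced sketch of just that step; the option key and value are made-up examples, not taken from the Flink source:

// Attach dynamic options as an OPTIONS hint, as convertSinkToRel does.
final Map<String, String> dynamicOptions = Collections.singletonMap("sink.parallelism", "4");
final List<RelHint> hints = new ArrayList<>();
if (!dynamicOptions.isEmpty()) {
    hints.add(RelHint.builder("OPTIONS").hintOptions(dynamicOptions).build());
}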

Example 18 with FlinkTypeFactory

Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.

From the class DynamicSinkUtils, method pushMetadataProjection:

/**
 * Creates a projection that reorders physical and metadata columns according to the consumed
 * data type of the sink. It casts metadata columns into the expected data type.
 *
 * @see SupportsWritingMetadata
 */
private static void pushMetadataProjection(FlinkRelBuilder relBuilder, FlinkTypeFactory typeFactory, ResolvedSchema schema, DynamicTableSink sink) {
    final RexBuilder rexBuilder = relBuilder.getRexBuilder();
    final List<Column> columns = schema.getColumns();
    final List<Integer> physicalColumns = extractPhysicalColumns(schema);
    final Map<String, Integer> keyToMetadataColumn =
            extractPersistedMetadataColumns(schema).stream()
                    .collect(Collectors.toMap(
                            pos -> {
                                // Key each persisted metadata column by its metadata key,
                                // falling back to the column name
                                final MetadataColumn metadataColumn = (MetadataColumn) columns.get(pos);
                                return metadataColumn.getMetadataKey().orElse(metadataColumn.getName());
                            },
                            Function.identity()));
    final List<Integer> metadataColumns =
            createRequiredMetadataKeys(schema, sink).stream()
                    .map(keyToMetadataColumn::get)
                    .collect(Collectors.toList());
    final List<String> fieldNames =
            Stream.concat(
                            physicalColumns.stream().map(columns::get).map(Column::getName),
                            metadataColumns.stream()
                                    .map(columns::get)
                                    .map(MetadataColumn.class::cast)
                                    .map(c -> c.getMetadataKey().orElse(c.getName())))
                    .collect(Collectors.toList());
    final Map<String, DataType> metadataMap = extractMetadataMap(sink);
    final List<RexNode> fieldNodes = Stream.concat(physicalColumns.stream().map(pos -> {
        final int posAdjusted = adjustByVirtualColumns(columns, pos);
        return relBuilder.field(posAdjusted);
    }), metadataColumns.stream().map(pos -> {
        final MetadataColumn metadataColumn = (MetadataColumn) columns.get(pos);
        final String metadataKey = metadataColumn.getMetadataKey().orElse(metadataColumn.getName());
        final LogicalType expectedType = metadataMap.get(metadataKey).getLogicalType();
        final RelDataType expectedRelDataType = typeFactory.createFieldTypeFromLogicalType(expectedType);
        final int posAdjusted = adjustByVirtualColumns(columns, pos);
        return rexBuilder.makeAbstractCast(expectedRelDataType, relBuilder.field(posAdjusted));
    })).collect(Collectors.toList());
    relBuilder.projectNamed(fieldNodes, fieldNames, true);
}
Also used : DataType(org.apache.flink.table.types.DataType) Schema(org.apache.flink.table.api.Schema) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) SupportsWritingMetadata(org.apache.flink.table.connector.sink.abilities.SupportsWritingMetadata) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) TableConfigOptions(org.apache.flink.table.api.config.TableConfigOptions) CollectSinkOperatorFactory(org.apache.flink.streaming.api.operators.collect.CollectSinkOperatorFactory) MetadataColumn(org.apache.flink.table.catalog.Column.MetadataColumn) SupportsPartitioning(org.apache.flink.table.connector.sink.abilities.SupportsPartitioning) RexNode(org.apache.calcite.rex.RexNode) RowField(org.apache.flink.table.types.logical.RowType.RowField) RelHint(org.apache.calcite.rel.hint.RelHint) Map(java.util.Map) LogicalTypeCasts.supportsExplicitCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsExplicitCast) LogicalTypeCasts.supportsAvoidingCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsAvoidingCast) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) DynamicTableSink(org.apache.flink.table.connector.sink.DynamicTableSink) Set(java.util.Set) OverwriteSpec(org.apache.flink.table.planner.plan.abilities.sink.OverwriteSpec) Collectors(java.util.stream.Collectors) ZoneId(java.time.ZoneId) SinkAbilitySpec(org.apache.flink.table.planner.plan.abilities.sink.SinkAbilitySpec) List(java.util.List) Stream(java.util.stream.Stream) LogicalType(org.apache.flink.table.types.logical.LogicalType) ValidationException(org.apache.flink.table.api.ValidationException) CollectModifyOperation(org.apache.flink.table.operations.CollectModifyOperation) TableResult(org.apache.flink.table.api.TableResult) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) LogicalTypeCasts.supportsImplicitCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsImplicitCast) DataTypeFactory(org.apache.flink.table.catalog.DataTypeFactory) IntStream(java.util.stream.IntStream) ShortcutUtils.unwrapTypeFactory(org.apache.flink.table.planner.utils.ShortcutUtils.unwrapTypeFactory) WritingMetadataSpec(org.apache.flink.table.planner.plan.abilities.sink.WritingMetadataSpec) Column(org.apache.flink.table.catalog.Column) RowType(org.apache.flink.table.types.logical.RowType) RelOptUtil(org.apache.calcite.plan.RelOptUtil) Function(java.util.function.Function) FlinkRelBuilder(org.apache.flink.table.planner.calcite.FlinkRelBuilder) ArrayList(java.util.ArrayList) ReadableConfig(org.apache.flink.configuration.ReadableConfig) SupportsOverwrite(org.apache.flink.table.connector.sink.abilities.SupportsOverwrite) ExternalModifyOperation(org.apache.flink.table.operations.ExternalModifyOperation) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) LogicalSink(org.apache.flink.table.planner.plan.nodes.calcite.LogicalSink) DataTypeUtils(org.apache.flink.table.types.utils.DataTypeUtils) RelDataType(org.apache.calcite.rel.type.RelDataType) RexBuilder(org.apache.calcite.rex.RexBuilder) TableException(org.apache.flink.table.api.TableException) ShortcutUtils.unwrapContext(org.apache.flink.table.planner.utils.ShortcutUtils.unwrapContext) TypeTransformations(org.apache.flink.table.types.inference.TypeTransformations) RelNode(org.apache.calcite.rel.RelNode) DataStream(org.apache.flink.streaming.api.datastream.DataStream) ExternalCatalogTable(org.apache.flink.table.catalog.ExternalCatalogTable) Internal(org.apache.flink.annotation.Internal) ExecutionConfigOptions(org.apache.flink.table.api.config.ExecutionConfigOptions) Collections(java.util.Collections)
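
The metadata branch above uses RexBuilder#makeAbstractCast to coerce each persisted metadata column into the data type the sink consumes. A reduced sketch of that cast in isolation, assuming a rexBuilder, relBuilder, and typeFactory from the surrounding planner context; the TIMESTAMP(3) target type and field index 0 are illustrative:

// Cast input field 0 to the type declared in the sink's metadata map.
// TimestampType(3) stands in for whatever LogicalType the sink expects.
final RelDataType expectedRelDataType =
        typeFactory.createFieldTypeFromLogicalType(new TimestampType(3));
final RexNode castedField =
        rexBuilder.makeAbstractCast(expectedRelDataType, relBuilder.field(0));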

Aggregations

FlinkTypeFactory (org.apache.flink.table.planner.calcite.FlinkTypeFactory): 18
RelDataType (org.apache.calcite.rel.type.RelDataType): 13
RexNode (org.apache.calcite.rex.RexNode): 9
LogicalType (org.apache.flink.table.types.logical.LogicalType): 8
ArrayList (java.util.ArrayList): 7
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema): 6
List (java.util.List): 5
RexBuilder (org.apache.calcite.rex.RexBuilder): 5
RowType (org.apache.flink.table.types.logical.RowType): 4
BigDecimal (java.math.BigDecimal): 3
Arrays (java.util.Arrays): 3
Collections (java.util.Collections): 3
Collectors (java.util.stream.Collectors): 3
LogicalTableScan (org.apache.calcite.rel.logical.LogicalTableScan): 3
RexInputRef (org.apache.calcite.rex.RexInputRef): 3
TableException (org.apache.flink.table.api.TableException): 3
Map (java.util.Map): 2
Optional (java.util.Optional): 2
Set (java.util.Set): 2
Function (java.util.function.Function): 2