Use of org.apache.flink.table.planner.plan.abilities.sink.SinkAbilitySpec in project flink by apache.
The convertSinkToRel method of the DynamicSinkUtils class.
private static RelNode convertSinkToRel(
        FlinkRelBuilder relBuilder, RelNode input, Map<String, String> dynamicOptions,
        ContextResolvedTable contextResolvedTable, Map<String, String> staticPartitions,
        boolean isOverwrite, DynamicTableSink sink) {
    final DataTypeFactory dataTypeFactory =
            unwrapContext(relBuilder).getCatalogManager().getDataTypeFactory();
    final FlinkTypeFactory typeFactory = unwrapTypeFactory(relBuilder);
    final ResolvedSchema schema = contextResolvedTable.getResolvedSchema();
    final String tableDebugName = contextResolvedTable.getIdentifier().asSummaryString();
    List<SinkAbilitySpec> sinkAbilitySpecs = new ArrayList<>();

    // 1. prepare table sink
    prepareDynamicSink(
            tableDebugName, staticPartitions, isOverwrite, sink,
            contextResolvedTable.getResolvedTable(), sinkAbilitySpecs);
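    // Applying a spec mutates the sink through the matching ability interface;
    // for example, an OverwriteSpec calls SupportsOverwrite#applyOverwrite on the sink.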
    sinkAbilitySpecs.forEach(spec -> spec.apply(sink));
    // 2. validate the query schema against the sink's table schema and apply casts where possible
    final RelNode query =
            validateSchemaAndApplyImplicitCast(
                    input, schema, tableDebugName, dataTypeFactory, typeFactory);
    relBuilder.push(query);

    // 3. convert the sink's table schema to the consumed data type of the sink
    final List<Integer> metadataColumns = extractPersistedMetadataColumns(schema);
    if (!metadataColumns.isEmpty()) {
        pushMetadataProjection(relBuilder, typeFactory, schema, sink);
    }
    List<RelHint> hints = new ArrayList<>();
    if (!dynamicOptions.isEmpty()) {
        hints.add(RelHint.builder("OPTIONS").hintOptions(dynamicOptions).build());
    }

    final RelNode finalQuery = relBuilder.build();
    return LogicalSink.create(
            finalQuery, hints, contextResolvedTable, sink, staticPartitions,
            sinkAbilitySpecs.toArray(new SinkAbilitySpec[0]));
}
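For context, here is a minimal, self-contained sketch of the ability mechanism that step 1 relies on. The ExampleOverwritableSink class is purely illustrative and not part of Flink; only OverwriteSpec, SinkAbilitySpec, and the connector interfaces come from the framework. Applying an OverwriteSpec dispatches to SupportsOverwrite#applyOverwrite and mutates the sink in place; convertSinkToRel additionally hands the collected specs to LogicalSink.create so they stay attached to the sink node.

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.sink.abilities.SupportsOverwrite;
import org.apache.flink.table.planner.plan.abilities.sink.OverwriteSpec;
import org.apache.flink.table.planner.plan.abilities.sink.SinkAbilitySpec;

public class SinkAbilitySpecDemo {

    // Illustrative sink, not part of Flink: it only records the overwrite flag.
    static class ExampleOverwritableSink implements DynamicTableSink, SupportsOverwrite {
        boolean overwrite = false;

        @Override
        public void applyOverwrite(boolean overwrite) {
            // OverwriteSpec#apply dispatches here when the spec is applied to this sink.
            this.overwrite = overwrite;
        }

        @Override
        public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
            return ChangelogMode.insertOnly();
        }

        @Override
        public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
            throw new UnsupportedOperationException("illustrative sketch only");
        }

        @Override
        public DynamicTableSink copy() {
            ExampleOverwritableSink copy = new ExampleOverwritableSink();
            copy.overwrite = overwrite;
            return copy;
        }

        @Override
        public String asSummaryString() {
            return "ExampleOverwritableSink";
        }
    }

    public static void main(String[] args) {
        ExampleOverwritableSink sink = new ExampleOverwritableSink();

        // Mirrors step 1 of convertSinkToRel: collect specs, then apply each to the sink.
        List<SinkAbilitySpec> sinkAbilitySpecs = new ArrayList<>();
        sinkAbilitySpecs.add(new OverwriteSpec(true)); // e.g. collected for an INSERT OVERWRITE
        sinkAbilitySpecs.forEach(spec -> spec.apply(sink));

        System.out.println("overwrite applied: " + sink.overwrite); // prints: true
    }
}

Because each spec is a plain serializable description of an applied ability, the planner can both apply the specs to the sink and keep them on the LogicalSink, which is presumably why convertSinkToRel passes the collected array into LogicalSink.create rather than discarding it after application.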