
Example 11 with Schema

Use of org.apache.flink.table.api.Schema in project flink by apache.

From class DataStreamJavaITCase, method createTableFromElements.

private void createTableFromElements(StreamTableEnvironment tableEnv, String name, ChangelogMode changelogMode, Schema schema, List<TypeInformation<?>> fieldTypeInfo, Row... elements) {
    // Derive the field names from the (still unresolved) schema columns.
    final String[] fieldNames = schema.getColumns().stream().map(Schema.UnresolvedColumn::getName).toArray(String[]::new);
    final TypeInformation<?>[] fieldTypes = fieldTypeInfo.toArray(new TypeInformation[0]);
    // 'env' is the StreamExecutionEnvironment field of the surrounding test class.
    final DataStream<Row> dataStream = env.fromElements(elements).returns(Types.ROW_NAMED(fieldNames, fieldTypes));
    final Table table = tableEnv.fromChangelogStream(dataStream, schema, changelogMode);
    tableEnv.createTemporaryView(name, table);
}
Also used : Table(org.apache.flink.table.api.Table) Schema(org.apache.flink.table.api.Schema) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) Row(org.apache.flink.types.Row) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation)
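
For illustration, a minimal call site could look like the following sketch. The view name, column names, and rows are hypothetical and only show how the helper above might be invoked; Schema.newBuilder, ChangelogMode.insertOnly, Types, and Row.of are the standard Flink APIs:

final Schema schema = Schema.newBuilder()
        .column("id", DataTypes.INT())
        .column("name", DataTypes.STRING())
        .build();
// Register two insert-only rows under the temporary view "people" (hypothetical name).
createTableFromElements(
        tableEnv,
        "people",
        ChangelogMode.insertOnly(),
        schema,
        Arrays.asList(Types.INT, Types.STRING),
        Row.of(1, "Alice"),
        Row.of(2, "Bob"));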

Example 12 with Schema

Use of org.apache.flink.table.api.Schema in project flink by apache.

From class SqlToOperationConverterTest, method testCreateTableWithFullDataTypes.

// TODO: tweak the tests when FLINK-13604 is fixed.
@Test
public void testCreateTableWithFullDataTypes() {
    final List<TestItem> testItems = Arrays.asList(
            createTestItem("CHAR", DataTypes.CHAR(1)),
            createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()),
            createTestItem("CHAR NULL", DataTypes.CHAR(1)),
            createTestItem("CHAR(33)", DataTypes.CHAR(33)),
            createTestItem("VARCHAR", DataTypes.STRING()),
            createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)),
            createTestItem("STRING", DataTypes.STRING()),
            createTestItem("BOOLEAN", DataTypes.BOOLEAN()),
            createTestItem("BINARY", DataTypes.BINARY(1)),
            createTestItem("BINARY(33)", DataTypes.BINARY(33)),
            createTestItem("VARBINARY", DataTypes.BYTES()),
            createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)),
            createTestItem("BYTES", DataTypes.BYTES()),
            createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)),
            createTestItem("DEC", DataTypes.DECIMAL(10, 0)),
            createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)),
            createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)),
            createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)),
            createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)),
            createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)),
            createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)),
            createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)),
            createTestItem("TINYINT", DataTypes.TINYINT()),
            createTestItem("SMALLINT", DataTypes.SMALLINT()),
            createTestItem("INTEGER", DataTypes.INT()),
            createTestItem("INT", DataTypes.INT()),
            createTestItem("BIGINT", DataTypes.BIGINT()),
            createTestItem("FLOAT", DataTypes.FLOAT()),
            createTestItem("DOUBLE", DataTypes.DOUBLE()),
            createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()),
            createTestItem("DATE", DataTypes.DATE()),
            createTestItem("TIME", DataTypes.TIME()),
            createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()),
            // Expect to be TIME(3).
            createTestItem("TIME(3)", DataTypes.TIME()),
            // Expect to be TIME(3).
            createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()),
            createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)),
            createTestItem("TIMESTAMP WITHOUT TIME ZONE", DataTypes.TIMESTAMP(6)),
            createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)),
            createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)),
            createTestItem("TIMESTAMP WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)),
            createTestItem("TIMESTAMP(3) WITH LOCAL TIME ZONE", DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)),
            createTestItem("ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>", DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))),
            createTestItem("ARRAY<INT NOT NULL>", DataTypes.ARRAY(DataTypes.INT().notNull())),
            createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())),
            createTestItem("INT NOT NULL ARRAY", DataTypes.ARRAY(DataTypes.INT().notNull())),
            createTestItem("INT ARRAY NOT NULL", DataTypes.ARRAY(DataTypes.INT()).notNull()),
            createTestItem("MULTISET<INT NOT NULL>", DataTypes.MULTISET(DataTypes.INT().notNull())),
            createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())),
            createTestItem("INT NOT NULL MULTISET", DataTypes.MULTISET(DataTypes.INT().notNull())),
            createTestItem("INT MULTISET NOT NULL", DataTypes.MULTISET(DataTypes.INT()).notNull()),
            createTestItem("MAP<BIGINT, BOOLEAN>", DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())),
            // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
            createTestItem("ROW<f0 INT NOT NULL, f1 BOOLEAN>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
            // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
            createTestItem("ROW(f0 INT NOT NULL, f1 BOOLEAN)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
            createTestItem("ROW<`f0` INT>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
            createTestItem("ROW(`f0` INT)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
            createTestItem("ROW<>", DataTypes.ROW()),
            createTestItem("ROW()", DataTypes.ROW()),
            // Expect to be ROW<`f0` INT NOT NULL '...', `f1` BOOLEAN '...'>.
            createTestItem("ROW<f0 INT NOT NULL 'This is a comment.'," + " f1 BOOLEAN 'This as well.'>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
            createTestItem("ARRAY<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.ARRAY(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
            createTestItem("ROW<f0 INT, f1 BOOLEAN> MULTISET", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
            createTestItem("MULTISET<ROW<f0 INT, f1 BOOLEAN>>", DataTypes.MULTISET(DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()), DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
            createTestItem("ROW<f0 Row<f00 INT, f01 BOOLEAN>, " + "f1 INT ARRAY, " + "f2 BOOLEAN MULTISET>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.ROW(DataTypes.FIELD("f00", DataTypes.INT()), DataTypes.FIELD("f01", DataTypes.BOOLEAN()))), DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())), DataTypes.FIELD("f2", DataTypes.MULTISET(DataTypes.BOOLEAN())))));
    StringBuilder buffer = new StringBuilder("create table t1(\n");
    for (int i = 0; i < testItems.size(); i++) {
        buffer.append("f").append(i).append(" ").append(testItems.get(i).testExpr);
        if (i == testItems.size() - 1) {
            buffer.append(")");
        } else {
            buffer.append(",\n");
        }
    }
    final String sql = buffer.toString();
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    TableSchema schema = ((CreateTableOperation) operation).getCatalogTable().getSchema();
    Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray();
    assertThat(schema.getFieldDataTypes()).isEqualTo(expectedDataTypes);
}
Also used : FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) FlinkPlannerImpl(org.apache.flink.table.planner.calcite.FlinkPlannerImpl) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ExplainDetail(org.apache.flink.table.api.ExplainDetail) OperationMatchers.isCreateTableOperation(org.apache.flink.table.planner.utils.OperationMatchers.isCreateTableOperation) Map(java.util.Map) SqlCreateTable(org.apache.flink.sql.parser.ddl.SqlCreateTable) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) ModuleManager(org.apache.flink.table.module.ModuleManager) TableConfig(org.apache.flink.table.api.TableConfig) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) Set(java.util.Set) TableSchema(org.apache.flink.table.api.TableSchema) JavaUserDefinedScalarFunctions(org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) ExpressionResolverMocks(org.apache.flink.table.utils.ExpressionResolverMocks) CatalogFunction(org.apache.flink.table.catalog.CatalogFunction) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl) ParserImpl(org.apache.flink.table.planner.delegation.ParserImpl) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) ShowJarsOperation(org.apache.flink.table.operations.command.ShowJarsOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) Func8$(org.apache.flink.table.planner.expressions.utils.Func8$) CatalogFunctionImpl(org.apache.flink.table.catalog.CatalogFunctionImpl) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) EndStatementSetOperation(org.apache.flink.table.operations.EndStatementSetOperation) Func0$(org.apache.flink.table.planner.expressions.utils.Func0$) Supplier(java.util.function.Supplier) UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) TableColumn(org.apache.flink.table.api.TableColumn) Catalog(org.apache.flink.table.catalog.Catalog) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) TestManagedTableFactory(org.apache.flink.table.factories.TestManagedTableFactory) Nullable(javax.annotation.Nullable) Before(org.junit.Before) OperationMatchers.withSchema(org.apache.flink.table.planner.utils.OperationMatchers.withSchema) FunctionScope(org.apache.flink.table.operations.ShowFunctionsOperation.FunctionScope) Func1$(org.apache.flink.table.planner.expressions.utils.Func1$) Test(org.junit.Test) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) TreeMap(java.util.TreeMap) CatalogManagerMocks(org.apache.flink.table.utils.CatalogManagerMocks) CatalogManagerCalciteSchema(org.apache.flink.table.planner.catalog.CatalogManagerCalciteSchema) Schema(org.apache.flink.table.api.Schema) 
Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) OperationMatchers.partitionedBy(org.apache.flink.table.planner.utils.OperationMatchers.partitionedBy) CalciteParser(org.apache.flink.table.planner.parse.CalciteParser) SqlNode(org.apache.calcite.sql.SqlNode) SetOperation(org.apache.flink.table.operations.command.SetOperation) After(org.junit.After) ExecutionOptions(org.apache.flink.configuration.ExecutionOptions) ComputedColumn(org.apache.flink.table.api.TableColumn.ComputedColumn) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) Operation(org.apache.flink.table.operations.Operation) InstanceOfAssertFactories.type(org.assertj.core.api.InstanceOfAssertFactories.type) Collectors(java.util.stream.Collectors) CalciteSchemaBuilder.asRootSchema(org.apache.calcite.jdbc.CalciteSchemaBuilder.asRootSchema) List(java.util.List) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) SqlRichExplain(org.apache.flink.sql.parser.dql.SqlRichExplain) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) ValidationException(org.apache.flink.table.api.ValidationException) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) RemoveJarOperation(org.apache.flink.table.operations.command.RemoveJarOperation) CatalogManager(org.apache.flink.table.catalog.CatalogManager) HamcrestCondition(org.assertj.core.api.HamcrestCondition) OperationMatchers.entry(org.apache.flink.table.planner.utils.OperationMatchers.entry) BeginStatementSetOperation(org.apache.flink.table.operations.BeginStatementSetOperation) HashMap(java.util.HashMap) FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) AddJarOperation(org.apache.flink.table.operations.command.AddJarOperation) ObjectPath(org.apache.flink.table.catalog.ObjectPath) AtomicReference(java.util.concurrent.atomic.AtomicReference) OperationMatchers.withOptions(org.apache.flink.table.planner.utils.OperationMatchers.withOptions) PlannerContext(org.apache.flink.table.planner.delegation.PlannerContext) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) HashSet(java.util.HashSet) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) ResetOperation(org.apache.flink.table.operations.command.ResetOperation) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) Configuration(org.apache.flink.configuration.Configuration) Parser(org.apache.flink.table.delegation.Parser) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) DataTypes(org.apache.flink.table.api.DataTypes) SqlDialect(org.apache.flink.table.api.SqlDialect) RuntimeExecutionMode(org.apache.flink.api.common.RuntimeExecutionMode) Collections(java.util.Collections)
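
The loop above simply concatenates every test expression into one DDL statement, so the SQL string handed to the parser starts like this (abbreviated with "..."):

create table t1(
f0 CHAR,
f1 CHAR NOT NULL,
f2 CHAR NULL,
f3 CHAR(33),
f4 VARCHAR,
...)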

Example 13 with Schema

Use of org.apache.flink.table.api.Schema in project flink by apache.

From class CommonExecSinkITCase, method testStreamRecordTimestampInserterDataStreamSinkProvider.

@Test
public void testStreamRecordTimestampInserterDataStreamSinkProvider() throws ExecutionException, InterruptedException {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final SharedReference<List<Long>> timestamps = sharedObjects.add(new ArrayList<>());
    final List<Row> rows = Arrays.asList(Row.of(1, "foo", Instant.parse("2020-11-10T11:34:56.123Z")), Row.of(2, "foo", Instant.parse("2020-11-10T12:34:56.789Z")), Row.of(3, "foo", Instant.parse("2020-11-11T10:11:22.777Z")), Row.of(4, "foo", Instant.parse("2020-11-11T10:11:23.888Z")));
    final SinkFunction<RowData> sinkFunction = new SinkFunction<RowData>() {

        @Override
        public void invoke(RowData value, Context context) {
            addElement(timestamps, context.timestamp());
        }
    };
    // schemaStreamRecordTimestampInserter(..), TestSource, assertPlan(..), assertTimestampResults(..),
    // and addElement(..) are helpers defined elsewhere in this test class.
    final TableDescriptor sourceDescriptor = TableFactoryHarness.newBuilder().schema(schemaStreamRecordTimestampInserter(true)).source(new TestSource(rows)).sink(new TableFactoryHarness.SinkBase() {


        @Override
        public DataStreamSinkProvider getSinkRuntimeProvider(DynamicTableSink.Context context) {
            return (providerContext, dataStream) -> dataStream.addSink(sinkFunction);
        }
    }).build();
    tableEnv.createTable("T1", sourceDescriptor);
    final String sqlStmt = "INSERT INTO T1 SELECT * FROM T1";
    assertPlan(tableEnv, sqlStmt, true);
    tableEnv.executeSql(sqlStmt).await();
    Collections.sort(timestamps.get());
    assertTimestampResults(timestamps, rows);
}
Also used : StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Arrays(java.util.Arrays) Schema(org.apache.flink.table.api.Schema) TableDescriptor(org.apache.flink.table.api.TableDescriptor) SinkV1Adapter(org.apache.flink.streaming.api.transformations.SinkV1Adapter) SharedObjects(org.apache.flink.testutils.junit.SharedObjects) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) ExplainDetail(org.apache.flink.table.api.ExplainDetail) ExceptionUtils(org.apache.flink.util.ExceptionUtils) TestSink(org.apache.flink.streaming.runtime.operators.sink.TestSink) Parameterized(org.junit.runners.Parameterized) AbstractTestBase(org.apache.flink.test.util.AbstractTestBase) TableFactoryHarness(org.apache.flink.table.planner.factories.TableFactoryHarness) DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource) SinkFunction(org.apache.flink.streaming.api.functions.sink.SinkFunction) DynamicTableSink(org.apache.flink.table.connector.sink.DynamicTableSink) Collection(java.util.Collection) Table(org.apache.flink.table.api.Table) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) List(java.util.List) DataStreamSinkProvider(org.apache.flink.table.connector.sink.DataStreamSinkProvider) ValidationException(org.apache.flink.table.api.ValidationException) TableResult(org.apache.flink.table.api.TableResult) Row(org.apache.flink.types.Row) NotNull(org.jetbrains.annotations.NotNull) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) Assert.assertThrows(org.junit.Assert.assertThrows) RunWith(org.junit.runner.RunWith) Watermark(org.apache.flink.streaming.api.watermark.Watermark) ScanTableSource(org.apache.flink.table.connector.source.ScanTableSource) ArrayList(java.util.ArrayList) SinkV2Provider(org.apache.flink.table.connector.sink.SinkV2Provider) TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER(org.apache.flink.table.api.config.ExecutionConfigOptions.TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER) SourceFunction(org.apache.flink.streaming.api.functions.source.SourceFunction) TABLE_EXEC_SINK_NOT_NULL_ENFORCER(org.apache.flink.table.api.config.ExecutionConfigOptions.TABLE_EXEC_SINK_NOT_NULL_ENFORCER) SourceFunctionProvider(org.apache.flink.table.connector.source.SourceFunctionProvider) SharedReference(org.apache.flink.testutils.junit.SharedReference) INT(org.apache.flink.table.api.DataTypes.INT) Before(org.junit.Before) RowData(org.apache.flink.table.data.RowData) DataTypes(org.apache.flink.table.api.DataTypes) SinkProvider(org.apache.flink.table.connector.sink.SinkProvider) Test(org.junit.Test) ExecutionException(java.util.concurrent.ExecutionException) Rule(org.junit.Rule) ExecutionConfigOptions(org.apache.flink.table.api.config.ExecutionConfigOptions) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals)
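
The schemaStreamRecordTimestampInserter(true) helper is defined elsewhere in the test class and is not shown on this page. A minimal sketch of what such a schema could look like, assuming hypothetical column names and watermark expression, is a schema with a TIMESTAMP_LTZ(3) rowtime column plus a watermark, built with the public Schema builder:

// Hypothetical sketch, not the actual Flink test helper: a schema whose rowtime
// column lets the planner insert the StreamRecord timestamp before the sink.
static Schema schemaWithRowtime(boolean withWatermark) {
    final Schema.Builder builder = Schema.newBuilder()
            .column("a", DataTypes.INT())
            .column("b", DataTypes.STRING())
            .column("ts", DataTypes.TIMESTAMP_LTZ(3));
    if (withWatermark) {
        // The watermark expression is illustrative only.
        builder.watermark("ts", "ts - INTERVAL '5' SECOND");
    }
    return builder.build();
}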

Example 14 with Schema

Use of org.apache.flink.table.api.Schema in project flink by apache.

From class DynamicSinkUtils, method pushMetadataProjection.

/**
 * Creates a projection that reorders physical and metadata columns according to the consumed
 * data type of the sink. It casts metadata columns into the expected data type.
 *
 * @see SupportsWritingMetadata
 */
private static void pushMetadataProjection(FlinkRelBuilder relBuilder, FlinkTypeFactory typeFactory, ResolvedSchema schema, DynamicTableSink sink) {
    final RexBuilder rexBuilder = relBuilder.getRexBuilder();
    final List<Column> columns = schema.getColumns();
    final List<Integer> physicalColumns = extractPhysicalColumns(schema);
    final Map<String, Integer> keyToMetadataColumn = extractPersistedMetadataColumns(schema).stream().collect(Collectors.toMap(pos -> {
        final MetadataColumn metadataColumn = (MetadataColumn) columns.get(pos);
        return metadataColumn.getMetadataKey().orElse(metadataColumn.getName());
    }, Function.identity()));
    final List<Integer> metadataColumns = createRequiredMetadataKeys(schema, sink).stream().map(keyToMetadataColumn::get).collect(Collectors.toList());
    final List<String> fieldNames = Stream.concat(physicalColumns.stream().map(columns::get).map(Column::getName), metadataColumns.stream().map(columns::get).map(MetadataColumn.class::cast).map(c -> c.getMetadataKey().orElse(c.getName()))).collect(Collectors.toList());
    final Map<String, DataType> metadataMap = extractMetadataMap(sink);
    final List<RexNode> fieldNodes = Stream.concat(physicalColumns.stream().map(pos -> {
        final int posAdjusted = adjustByVirtualColumns(columns, pos);
        return relBuilder.field(posAdjusted);
    }), metadataColumns.stream().map(pos -> {
        final MetadataColumn metadataColumn = (MetadataColumn) columns.get(pos);
        final String metadataKey = metadataColumn.getMetadataKey().orElse(metadataColumn.getName());
        final LogicalType expectedType = metadataMap.get(metadataKey).getLogicalType();
        final RelDataType expectedRelDataType = typeFactory.createFieldTypeFromLogicalType(expectedType);
        final int posAdjusted = adjustByVirtualColumns(columns, pos);
        return rexBuilder.makeAbstractCast(expectedRelDataType, relBuilder.field(posAdjusted));
    })).collect(Collectors.toList());
    relBuilder.projectNamed(fieldNodes, fieldNames, true);
}
Also used : DataType(org.apache.flink.table.types.DataType) Schema(org.apache.flink.table.api.Schema) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) SupportsWritingMetadata(org.apache.flink.table.connector.sink.abilities.SupportsWritingMetadata) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) TableConfigOptions(org.apache.flink.table.api.config.TableConfigOptions) CollectSinkOperatorFactory(org.apache.flink.streaming.api.operators.collect.CollectSinkOperatorFactory) MetadataColumn(org.apache.flink.table.catalog.Column.MetadataColumn) SupportsPartitioning(org.apache.flink.table.connector.sink.abilities.SupportsPartitioning) RexNode(org.apache.calcite.rex.RexNode) RowField(org.apache.flink.table.types.logical.RowType.RowField) RelHint(org.apache.calcite.rel.hint.RelHint) Map(java.util.Map) LogicalTypeCasts.supportsExplicitCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsExplicitCast) LogicalTypeCasts.supportsAvoidingCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsAvoidingCast) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) DynamicTableSink(org.apache.flink.table.connector.sink.DynamicTableSink) Set(java.util.Set) OverwriteSpec(org.apache.flink.table.planner.plan.abilities.sink.OverwriteSpec) Collectors(java.util.stream.Collectors) ZoneId(java.time.ZoneId) SinkAbilitySpec(org.apache.flink.table.planner.plan.abilities.sink.SinkAbilitySpec) List(java.util.List) Stream(java.util.stream.Stream) LogicalType(org.apache.flink.table.types.logical.LogicalType) ValidationException(org.apache.flink.table.api.ValidationException) CollectModifyOperation(org.apache.flink.table.operations.CollectModifyOperation) TableResult(org.apache.flink.table.api.TableResult) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) LogicalTypeCasts.supportsImplicitCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsImplicitCast) DataTypeFactory(org.apache.flink.table.catalog.DataTypeFactory) IntStream(java.util.stream.IntStream) ShortcutUtils.unwrapTypeFactory(org.apache.flink.table.planner.utils.ShortcutUtils.unwrapTypeFactory) WritingMetadataSpec(org.apache.flink.table.planner.plan.abilities.sink.WritingMetadataSpec) Column(org.apache.flink.table.catalog.Column) RowType(org.apache.flink.table.types.logical.RowType) RelOptUtil(org.apache.calcite.plan.RelOptUtil) Function(java.util.function.Function) FlinkRelBuilder(org.apache.flink.table.planner.calcite.FlinkRelBuilder) ArrayList(java.util.ArrayList) ReadableConfig(org.apache.flink.configuration.ReadableConfig) SupportsOverwrite(org.apache.flink.table.connector.sink.abilities.SupportsOverwrite) ExternalModifyOperation(org.apache.flink.table.operations.ExternalModifyOperation) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) LogicalSink(org.apache.flink.table.planner.plan.nodes.calcite.LogicalSink) DataTypeUtils(org.apache.flink.table.types.utils.DataTypeUtils) RelDataType(org.apache.calcite.rel.type.RelDataType) RexBuilder(org.apache.calcite.rex.RexBuilder) TableException(org.apache.flink.table.api.TableException) ShortcutUtils.unwrapContext(org.apache.flink.table.planner.utils.ShortcutUtils.unwrapContext) TypeTransformations(org.apache.flink.table.types.inference.TypeTransformations) RelNode(org.apache.calcite.rel.RelNode) DataStream(org.apache.flink.streaming.api.datastream.DataStream) 
ExternalCatalogTable(org.apache.flink.table.catalog.ExternalCatalogTable) Internal(org.apache.flink.annotation.Internal) ExecutionConfigOptions(org.apache.flink.table.api.config.ExecutionConfigOptions) Collections(java.util.Collections)
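
For context on the metadata handling above: persisted metadata columns are declared on the Schema builder and later resolved into the MetadataColumn instances this method reorders and casts. A minimal sketch, where the column names and the "timestamp" metadata key are illustrative and not taken from this page:

final Schema schema = Schema.newBuilder()
        // Physical columns; pushMetadataProjection keeps these first.
        .column("user_id", DataTypes.BIGINT())
        .column("message", DataTypes.STRING())
        // Persisted metadata column; the sink consumes it under the key "timestamp".
        .columnByMetadata("event_time", DataTypes.TIMESTAMP_LTZ(3), "timestamp")
        .build();

After resolution, pushMetadataProjection places such columns behind the physical ones and casts them to the data types the sink advertises via SupportsWritingMetadata.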

Aggregations

Schema (org.apache.flink.table.api.Schema): 14
DataType (org.apache.flink.table.types.DataType): 8
Test (org.junit.Test): 7
Collections (java.util.Collections): 5
List (java.util.List): 5
ArrayList (java.util.ArrayList): 4
Collectors (java.util.stream.Collectors): 4
DataTypes (org.apache.flink.table.api.DataTypes): 4
ValidationException (org.apache.flink.table.api.ValidationException): 4
AbstractDataType (org.apache.flink.table.types.AbstractDataType): 4
Arrays (java.util.Arrays): 3
UnresolvedColumn (org.apache.flink.table.api.Schema.UnresolvedColumn): 3
Row (org.apache.flink.types.Row): 3
ZoneId (java.time.ZoneId): 2
HashMap (java.util.HashMap): 2
Map (java.util.Map): 2
Set (java.util.Set): 2
Nullable (javax.annotation.Nullable): 2
TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation): 2
ExplainDetail (org.apache.flink.table.api.ExplainDetail): 2