Example 81 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class SqlCreateTableConverter, the method createCatalogTable:

private CatalogTable createCatalogTable(SqlCreateTable sqlCreateTable) {
    final TableSchema sourceTableSchema;
    final List<String> sourcePartitionKeys;
    final List<SqlTableLike.SqlTableLikeOption> likeOptions;
    final Map<String, String> sourceProperties;
    if (sqlCreateTable.getTableLike().isPresent()) {
        // CREATE TABLE ... LIKE: resolve the source table and take over its
        // schema, partition keys, and options as the starting point for merging.
        SqlTableLike sqlTableLike = sqlCreateTable.getTableLike().get();
        CatalogTable table = lookupLikeSourceTable(sqlTableLike);
        sourceTableSchema =
                TableSchema.fromResolvedSchema(
                        table.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        sourcePartitionKeys = table.getPartitionKeys();
        likeOptions = sqlTableLike.getOptions();
        sourceProperties = table.getOptions();
    } else {
        // Plain CREATE TABLE: start from an empty source definition.
        sourceTableSchema = TableSchema.builder().build();
        sourcePartitionKeys = Collections.emptyList();
        likeOptions = Collections.emptyList();
        sourceProperties = Collections.emptyMap();
    }
    Map<SqlTableLike.FeatureOption, SqlTableLike.MergingStrategy> mergingStrategies =
            mergeTableLikeUtil.computeMergingStrategies(likeOptions);
    Map<String, String> mergedOptions =
            mergeOptions(sqlCreateTable, sourceProperties, mergingStrategies);
    Optional<SqlTableConstraint> primaryKey =
            sqlCreateTable.getFullConstraints().stream()
                    .filter(SqlTableConstraint::isPrimaryKey)
                    .findAny();
    // Merge source and new definitions feature by feature (columns, watermark,
    // primary key) according to the per-feature merging strategies.
    TableSchema mergedSchema =
            mergeTableLikeUtil.mergeTables(
                    mergingStrategies,
                    sourceTableSchema,
                    sqlCreateTable.getColumnList().getList(),
                    sqlCreateTable.getWatermark().map(Collections::singletonList).orElseGet(Collections::emptyList),
                    primaryKey.orElse(null));
    List<String> partitionKeys =
            mergePartitions(sourcePartitionKeys, sqlCreateTable.getPartitionKeyList(), mergingStrategies);
    verifyPartitioningColumnsExist(mergedSchema, partitionKeys);
    String tableComment =
            sqlCreateTable.getComment().map(comment -> comment.getNlsString().getValue()).orElse(null);
    return new CatalogTableImpl(mergedSchema, partitionKeys, mergedOptions, tableComment);
}
Also used : CatalogManager(org.apache.flink.table.catalog.CatalogManager) Arrays(java.util.Arrays) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) SqlTableOption(org.apache.flink.sql.parser.ddl.SqlTableOption) CatalogTable(org.apache.flink.table.catalog.CatalogTable) HashMap(java.util.HashMap) Function(java.util.function.Function) SqlNode(org.apache.calcite.sql.SqlNode) Map(java.util.Map) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier) SqlCreateTable(org.apache.flink.sql.parser.ddl.SqlCreateTable) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) Operation(org.apache.flink.table.operations.Operation) SqlTableConstraint(org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint) TableSchema(org.apache.flink.table.api.TableSchema) Collectors(java.util.stream.Collectors) Consumer(java.util.function.Consumer) SqlTableLike(org.apache.flink.sql.parser.ddl.SqlTableLike) List(java.util.List) ValidationException(org.apache.flink.table.api.ValidationException) FlinkCalciteSqlValidator(org.apache.flink.table.planner.calcite.FlinkCalciteSqlValidator) Optional(java.util.Optional) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) SqlNodeList(org.apache.calcite.sql.SqlNodeList) Collections(java.util.Collections)
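This converter backs Flink's CREATE TABLE ... LIKE statement: the source table supplies the initial schema, partition keys, and options, which are then merged with the new table's own definition. A minimal usage sketch of a statement that exercises this path, assuming a Flink 1.14+ style TableEnvironment; the table names and connector options below are illustrative, not from the source:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

// Hypothetical tables and options, chosen only to exercise the LIKE merge path.
TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
tEnv.executeSql(
        "CREATE TABLE base_table (id BIGINT, name STRING)"
                + " WITH ('connector' = 'datagen')");
// derived_table inherits base_table's schema; EXCLUDING OPTIONS drops the
// source options, so only the new connector option survives the merge.
tEnv.executeSql(
        "CREATE TABLE derived_table WITH ('connector' = 'blackhole')"
                + " LIKE base_table (EXCLUDING OPTIONS)");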

Example 82 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class DescriptorPropertiesTest, the method testLegacyTableSchema:

@Test
public void testLegacyTableSchema() {
    DescriptorProperties properties = new DescriptorProperties();
    Map<String, String> map = new HashMap<>();
    map.put("schema.0.name", "f0");
    map.put("schema.0.type", "VARCHAR");
    map.put("schema.1.name", "f1");
    map.put("schema.1.type", "BIGINT");
    map.put("schema.2.name", "f2");
    map.put("schema.2.type", "DECIMAL");
    map.put("schema.3.name", "f3");
    map.put("schema.3.type", "TIMESTAMP");
    map.put("schema.4.name", "f4");
    map.put("schema.4.type", "MAP<TINYINT, SMALLINT>");
    map.put("schema.5.name", "f5");
    map.put("schema.5.type", "ANY<java.lang.Class>");
    map.put("schema.6.name", "f6");
    map.put("schema.6.type", "PRIMITIVE_ARRAY<DOUBLE>");
    map.put("schema.7.name", "f7");
    map.put("schema.7.type", "OBJECT_ARRAY<TIME>");
    map.put("schema.8.name", "f8");
    map.put("schema.8.type", "ROW<q1 VARCHAR, q2 DATE>");
    map.put("schema.9.name", "f9");
    map.put("schema.9.type", "POJO<org.apache.flink.table.types.LogicalTypeParserTest$MyPojo>");
    properties.putProperties(map);
    TableSchema restored = properties.getTableSchema("schema");
    TableSchema expected =
            TableSchema.builder()
                    .field("f0", Types.STRING)
                    .field("f1", Types.LONG)
                    .field("f2", Types.BIG_DEC)
                    .field("f3", Types.SQL_TIMESTAMP)
                    .field("f4", Types.MAP(Types.BYTE, Types.SHORT))
                    .field("f5", Types.GENERIC(Class.class))
                    .field("f6", Types.PRIMITIVE_ARRAY(Types.DOUBLE))
                    .field("f7", Types.OBJECT_ARRAY(Types.SQL_TIME))
                    .field("f8", Types.ROW_NAMED(new String[] { "q1", "q2" }, Types.STRING, Types.SQL_DATE))
                    .field("f9", Types.POJO(LogicalTypeParserTest.MyPojo.class))
                    .build();
    assertEquals(expected, restored);
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) HashMap(java.util.HashMap) LogicalTypeParserTest(org.apache.flink.table.types.LogicalTypeParserTest) Test(org.junit.Test)
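The same restore path in isolation, as a minimal standalone sketch for a single field; the key names follow the test above, and note that the legacy "schema.N.type" keys use the pre-1.9 type-string syntax rather than "schema.N.data-type":

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;

Map<String, String> legacy = new HashMap<>();
legacy.put("schema.0.name", "id");
legacy.put("schema.0.type", "BIGINT");  // legacy type string, maps to Types.LONG

DescriptorProperties props = new DescriptorProperties();
props.putProperties(legacy);
// Restores a one-column schema from the flat property map.
TableSchema schema = props.getTableSchema("schema");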

Example 83 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class DescriptorPropertiesTest, the method testTableSchema:

@Test
public void testTableSchema() {
    TableSchema schema =
            TableSchema.builder()
                    .add(TableColumn.physical("f0", DataTypes.BIGINT().notNull()))
                    .add(TableColumn.physical(
                            "f1",
                            DataTypes.ROW(
                                    DataTypes.FIELD("q1", DataTypes.STRING()),
                                    DataTypes.FIELD("q2", DataTypes.TIMESTAMP(9)))))
                    .add(TableColumn.physical("f2", DataTypes.STRING().notNull()))
                    .add(TableColumn.computed("f3", DataTypes.BIGINT().notNull(), "f0 + 1"))
                    .add(TableColumn.physical("f4", DataTypes.DECIMAL(10, 3)))
                    .add(TableColumn.metadata("f5", DataTypes.DECIMAL(10, 3)))
                    .add(TableColumn.metadata("f6", DataTypes.INT(), "other.key"))
                    .add(TableColumn.metadata("f7", DataTypes.INT(), true))
                    .add(TableColumn.metadata("f8", DataTypes.INT(), "other.key", true))
                    .watermark("f1.q2", "`f1`.`q2` - INTERVAL '5' SECOND", DataTypes.TIMESTAMP(3))
                    .primaryKey("constraint1", new String[] { "f0", "f2" })
                    .build();
    DescriptorProperties properties = new DescriptorProperties();
    properties.putTableSchema("schema", schema);
    Map<String, String> actual = properties.asMap();
    Map<String, String> expected = new HashMap<>();
    expected.put("schema.0.name", "f0");
    expected.put("schema.0.data-type", "BIGINT NOT NULL");
    expected.put("schema.1.name", "f1");
    expected.put("schema.1.data-type", "ROW<`q1` VARCHAR(2147483647), `q2` TIMESTAMP(9)>");
    expected.put("schema.2.name", "f2");
    expected.put("schema.2.data-type", "VARCHAR(2147483647) NOT NULL");
    expected.put("schema.3.name", "f3");
    expected.put("schema.3.data-type", "BIGINT NOT NULL");
    expected.put("schema.3.expr", "f0 + 1");
    expected.put("schema.4.name", "f4");
    expected.put("schema.4.data-type", "DECIMAL(10, 3)");
    expected.put("schema.5.name", "f5");
    expected.put("schema.5.data-type", "DECIMAL(10, 3)");
    expected.put("schema.5.metadata", "f5");
    expected.put("schema.5.virtual", "false");
    expected.put("schema.6.name", "f6");
    expected.put("schema.6.data-type", "INT");
    expected.put("schema.6.metadata", "other.key");
    expected.put("schema.6.virtual", "false");
    expected.put("schema.7.name", "f7");
    expected.put("schema.7.data-type", "INT");
    expected.put("schema.7.metadata", "f7");
    expected.put("schema.7.virtual", "true");
    expected.put("schema.8.name", "f8");
    expected.put("schema.8.data-type", "INT");
    expected.put("schema.8.metadata", "other.key");
    expected.put("schema.8.virtual", "true");
    expected.put("schema.watermark.0.rowtime", "f1.q2");
    expected.put("schema.watermark.0.strategy.expr", "`f1`.`q2` - INTERVAL '5' SECOND");
    expected.put("schema.watermark.0.strategy.data-type", "TIMESTAMP(3)");
    expected.put("schema.primary-key.name", "constraint1");
    expected.put("schema.primary-key.columns", "f0,f2");
    assertEquals(expected, actual);
    TableSchema restored = properties.getTableSchema("schema");
    assertEquals(schema, restored);
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) HashMap(java.util.HashMap) Test(org.junit.Test) LogicalTypeParserTest(org.apache.flink.table.types.LogicalTypeParserTest)
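On the consumer side, a factory that only holds the flat property map can rebuild the schema the same way. A short sketch reusing the "expected" map built in the test above:

// Sketch: rebuild the schema from the raw map alone, as a table factory would.
DescriptorProperties consumer = new DescriptorProperties();
consumer.putProperties(expected);  // "expected" is the flat map from the test
TableSchema fromMap = consumer.getTableSchema("schema");
// fromMap equals the original schema, including computed and metadata columns,
// the watermark spec, and the primary key.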

Example 84 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class CatalogConstraintTest, the method testWithoutPrimaryKey:

@Test
public void testWithoutPrimaryKey() throws Exception {
    TableSchema tableSchema =
            TableSchema.builder()
                    .fields(
                            new String[] { "a", "b", "c" },
                            new DataType[] { DataTypes.BIGINT(), DataTypes.STRING(), DataTypes.INT() })
                    .build();
    Map<String, String> properties = buildCatalogTableProperties(tableSchema);
    catalog.createTable(
            new ObjectPath(databaseName, "T1"), new CatalogTableImpl(tableSchema, properties, ""), false);
    RelNode t1 = TableTestUtil.toRelNode(tEnv.sqlQuery("select * from T1"));
    FlinkRelMetadataQuery mq = FlinkRelMetadataQuery.reuseOrCreate(t1.getCluster().getMetadataQuery());
    // Without a declared primary key, the metadata query reports no unique keys.
    assertEquals(ImmutableSet.of(), mq.getUniqueKeys(t1));
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableSchema(org.apache.flink.table.api.TableSchema) RelNode(org.apache.calcite.rel.RelNode) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) DataType(org.apache.flink.table.types.DataType) FlinkRelMetadataQuery(org.apache.flink.table.planner.plan.metadata.FlinkRelMetadataQuery) Test(org.junit.Test)
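The helper buildCatalogTableProperties is referenced but not shown in this excerpt. A plausible minimal sketch, under the assumption that it only needs to supply connector properties to store alongside the schema; the method body and the legacy descriptor keys below are hypothetical, not Flink's actual test code:

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.api.TableSchema;

// Hypothetical reconstruction of the missing helper: it merely provides the
// connector options stored with the schema in the CatalogTableImpl.
private Map<String, String> buildCatalogTableProperties(TableSchema schema) {
    Map<String, String> properties = new HashMap<>();
    properties.put("connector.type", "filesystem");  // illustrative legacy keys
    properties.put("connector.path", "/tmp/t1");
    properties.put("format.type", "csv");
    return properties;
}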

Example 85 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class CatalogConstraintTest, the method testWithPrimaryKey:

@Test
public void testWithPrimaryKey() throws Exception {
    TableSchema tableSchema =
            TableSchema.builder()
                    .fields(
                            new String[] { "a", "b", "c" },
                            new DataType[] { DataTypes.STRING(), DataTypes.BIGINT().notNull(), DataTypes.INT() })
                    .primaryKey("b")
                    .build();
    Map<String, String> properties = buildCatalogTableProperties(tableSchema);
    catalog.createTable(
            new ObjectPath(databaseName, "T1"), new CatalogTableImpl(tableSchema, properties, ""), false);
    RelNode t1 = TableTestUtil.toRelNode(tEnv.sqlQuery("select * from T1"));
    FlinkRelMetadataQuery mq = FlinkRelMetadataQuery.reuseOrCreate(t1.getCluster().getMetadataQuery());
    // The primary key on "b" (column index 1) surfaces as a unique key.
    assertEquals(ImmutableSet.of(ImmutableBitSet.of(1)), mq.getUniqueKeys(t1));
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableSchema(org.apache.flink.table.api.TableSchema) RelNode(org.apache.calcite.rel.RelNode) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) FlinkRelMetadataQuery(org.apache.flink.table.planner.plan.metadata.FlinkRelMetadataQuery) Test(org.junit.Test)
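For reference, the same constraint can be declared directly in DDL instead of through the TableSchema builder. A hedged sketch of an equivalent statement; the connector choice is illustrative, and NOT ENFORCED is required because Flink does not validate primary key constraints itself:

tEnv.executeSql(
        "CREATE TABLE T1 ("
                + "  a STRING,"
                + "  b BIGINT NOT NULL,"
                + "  c INT,"
                + "  PRIMARY KEY (b) NOT ENFORCED"
                + ") WITH ('connector' = 'datagen')");
// The unique-key metadata then reports column index 1 (field "b"), matching
// the ImmutableBitSet.of(1) assertion above.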

Aggregations

TableSchema (org.apache.flink.table.api.TableSchema): 86
Test (org.junit.Test): 54
HashMap (java.util.HashMap): 26
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 21
SqlNode (org.apache.calcite.sql.SqlNode): 19
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 18
DataType (org.apache.flink.table.types.DataType): 16
ValidationException (org.apache.flink.table.api.ValidationException): 14
TableColumn (org.apache.flink.table.api.TableColumn): 10
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 10
ArrayList (java.util.ArrayList): 9
List (java.util.List): 9
Map (java.util.Map): 9
FeatureOption (org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption): 9
MergingStrategy (org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy): 9
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 8
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 8
Arrays (java.util.Arrays): 7
Configuration (org.apache.flink.configuration.Configuration): 7