Search in sources:

Example 66 with TableSchema

Use of org.apache.flink.table.api.TableSchema in the Apache Flink project.

From class MergeTableLikeUtilTest, method mergeExcludingGeneratedColumnsDuplicate:

@Test
public void mergeExcludingGeneratedColumnsDuplicate() {
    // Source table declares a physical column plus a computed column "two".
    TableSchema source =
            TableSchema.builder()
                    .add(TableColumn.physical("one", DataTypes.INT()))
                    .add(TableColumn.computed("two", DataTypes.INT(), "one + 1"))
                    .build();
    // The derived table redefines "two" with a different expression.
    List<SqlNode> derived = Collections.singletonList(computedColumn("two", plus("one", "3")));
    // With EXCLUDING for generated columns, the source's computed column is
    // dropped, so the derived definition wins without a duplicate conflict.
    Map<FeatureOption, MergingStrategy> strategies = getDefaultMergingStrategies();
    strategies.put(FeatureOption.GENERATED, MergingStrategy.EXCLUDING);
    TableSchema merged = util.mergeTables(strategies, source, derived, Collections.emptyList(), null);
    TableSchema expected =
            TableSchema.builder()
                    .add(TableColumn.physical("one", DataTypes.INT()))
                    .add(TableColumn.computed("two", DataTypes.INT(), "`one` + 3"))
                    .build();
    assertThat(merged, equalTo(expected));
}
Also used : FeatureOption(org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption) TableSchema(org.apache.flink.table.api.TableSchema) MergingStrategy(org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy) SqlNode(org.apache.calcite.sql.SqlNode) Test(org.junit.Test)

Example 67 with TableSchema

Use of org.apache.flink.table.api.TableSchema in the Apache Flink project.

From class MergeTableLikeUtilTest, method mergeOverwritingPhysicalColumnWithGeneratedColumn:

@Test
public void mergeOverwritingPhysicalColumnWithGeneratedColumn() {
    // Source table has two physical columns; the derived table tries to
    // redefine physical column "two" as a computed column.
    TableSchema source =
            TableSchema.builder()
                    .add(TableColumn.physical("one", DataTypes.INT()))
                    .add(TableColumn.physical("two", DataTypes.INT()))
                    .build();
    List<SqlNode> derived = Collections.singletonList(computedColumn("two", plus("one", "3")));
    Map<FeatureOption, MergingStrategy> strategies = getDefaultMergingStrategies();
    strategies.put(FeatureOption.GENERATED, MergingStrategy.OVERWRITING);
    // OVERWRITING only applies to generated columns overriding generated
    // columns — a computed column may not shadow a physical one.
    thrown.expect(ValidationException.class);
    thrown.expectMessage("A column named 'two' already exists in the table. " + "Duplicate columns exist in the compute column and regular column. ");
    util.mergeTables(strategies, source, derived, Collections.emptyList(), null);
}
Also used : FeatureOption(org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption) TableSchema(org.apache.flink.table.api.TableSchema) MergingStrategy(org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy) SqlNode(org.apache.calcite.sql.SqlNode) Test(org.junit.Test)

Example 68 with TableSchema

Use of org.apache.flink.table.api.TableSchema in the Apache Flink project.

From class MergeTableLikeUtilTest, method mergeGeneratedColumns:

@Test
public void mergeGeneratedColumns() {
    // Source table contributes one physical and one computed column.
    TableSchema source =
            TableSchema.builder()
                    .add(TableColumn.physical("one", DataTypes.INT()))
                    .add(TableColumn.computed("two", DataTypes.INT(), "one + 1"))
                    .build();
    // Derived table appends a new regular column and a new computed column;
    // names do not collide, so the default strategies merge all four.
    List<SqlNode> derived =
            Arrays.asList(
                    regularColumn("three", DataTypes.INT()),
                    computedColumn("four", plus("one", "3")));
    TableSchema merged =
            util.mergeTables(
                    getDefaultMergingStrategies(), source, derived, Collections.emptyList(), null);
    TableSchema expected =
            TableSchema.builder()
                    .add(TableColumn.physical("one", DataTypes.INT()))
                    .add(TableColumn.computed("two", DataTypes.INT(), "one + 1"))
                    .add(TableColumn.physical("three", DataTypes.INT()))
                    .add(TableColumn.computed("four", DataTypes.INT(), "`one` + 3"))
                    .build();
    assertThat(merged, equalTo(expected));
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) SqlNode(org.apache.calcite.sql.SqlNode) Test(org.junit.Test)

Example 69 with TableSchema

Use of org.apache.flink.table.api.TableSchema in the Apache Flink project.

From class MergeTableLikeUtilTest, method mergeOverwritingMetadataColumnsDuplicate:

@Test
public void mergeOverwritingMetadataColumnsDuplicate() {
    // Source table declares metadata column "two" as INT.
    TableSchema source =
            TableSchema.builder()
                    .add(TableColumn.physical("one", DataTypes.INT()))
                    .add(TableColumn.metadata("two", DataTypes.INT()))
                    .build();
    // Derived table redeclares "two" as a virtual BOOLEAN metadata column.
    List<SqlNode> derived = Collections.singletonList(metadataColumn("two", DataTypes.BOOLEAN(), true));
    // OVERWRITING for METADATA lets the derived declaration replace the
    // source's metadata column instead of raising a duplicate error.
    Map<FeatureOption, MergingStrategy> strategies = getDefaultMergingStrategies();
    strategies.put(FeatureOption.METADATA, MergingStrategy.OVERWRITING);
    TableSchema merged = util.mergeTables(strategies, source, derived, Collections.emptyList(), null);
    TableSchema expected =
            TableSchema.builder()
                    .add(TableColumn.physical("one", DataTypes.INT()))
                    .add(TableColumn.metadata("two", DataTypes.BOOLEAN(), true))
                    .build();
    assertThat(merged, equalTo(expected));
}
Also used : FeatureOption(org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption) TableSchema(org.apache.flink.table.api.TableSchema) MergingStrategy(org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy) SqlNode(org.apache.calcite.sql.SqlNode) Test(org.junit.Test)

Example 70 with TableSchema

Use of org.apache.flink.table.api.TableSchema in the Apache Flink project.

From class MergeTableLikeUtilTest, method mergeIncludingConstraintsFailsOnDuplicate:

@Test
public void mergeIncludingConstraintsFailsOnDuplicate() {
    // Source table already carries a primary key over (one, two).
    TableSchema source =
            TableSchema.builder()
                    .add(TableColumn.physical("one", DataTypes.INT().notNull()))
                    .add(TableColumn.physical("two", DataTypes.STRING().notNull()))
                    .add(TableColumn.physical("three", DataTypes.FLOAT()))
                    .primaryKey("constraint-42", new String[] { "one", "two" })
                    .build();
    // Declaring a second primary key under the default (INCLUDING)
    // constraints strategy must be rejected.
    thrown.expect(ValidationException.class);
    thrown.expectMessage("The base table already has a primary key. You might want to specify " + "EXCLUDING CONSTRAINTS.");
    util.mergeTables(
            getDefaultMergingStrategies(),
            source,
            Collections.emptyList(),
            Collections.emptyList(),
            primaryKey("one", "two"));
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) Test(org.junit.Test)

Aggregations

TableSchema (org.apache.flink.table.api.TableSchema)86 Test (org.junit.Test)54 HashMap (java.util.HashMap)26 CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl)21 SqlNode (org.apache.calcite.sql.SqlNode)19 ObjectPath (org.apache.flink.table.catalog.ObjectPath)19 CatalogTable (org.apache.flink.table.catalog.CatalogTable)18 DataType (org.apache.flink.table.types.DataType)16 ValidationException (org.apache.flink.table.api.ValidationException)14 TableColumn (org.apache.flink.table.api.TableColumn)10 UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint)10 ArrayList (java.util.ArrayList)9 List (java.util.List)9 Map (java.util.Map)9 FeatureOption (org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption)9 MergingStrategy (org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy)9 CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)8 ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)8 Arrays (java.util.Arrays)7 Configuration (org.apache.flink.configuration.Configuration)7