Use of org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy in project flink by apache. From the class MergeTableLikeUtilTest, method mergeExcludingMetadataColumnsDuplicate: with the METADATA feature set to EXCLUDING, the metadata column "two" redefined in the derived table replaces the source table's metadata column of the same name instead of being rejected as a duplicate.
@Test
public void mergeExcludingMetadataColumnsDuplicate() {
    TableSchema sourceSchema = TableSchema.builder()
            .add(TableColumn.physical("one", DataTypes.INT()))
            .add(TableColumn.metadata("two", DataTypes.INT()))
            .build();
    List<SqlNode> derivedColumns =
            Collections.singletonList(metadataColumn("two", DataTypes.BOOLEAN(), false));
    Map<FeatureOption, MergingStrategy> mergingStrategies = getDefaultMergingStrategies();
    mergingStrategies.put(FeatureOption.METADATA, MergingStrategy.EXCLUDING);
    TableSchema mergedSchema = util.mergeTables(
            mergingStrategies, sourceSchema, derivedColumns, Collections.emptyList(), null);
    TableSchema expectedSchema = TableSchema.builder()
            .add(TableColumn.physical("one", DataTypes.INT()))
            .add(TableColumn.metadata("two", DataTypes.BOOLEAN()))
            .build();
    assertThat(mergedSchema, equalTo(expectedSchema));
}
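The test relies on helpers defined elsewhere in MergeTableLikeUtilTest and not shown in this listing: metadataColumn(...) builds the SqlNode for the derived column, and getDefaultMergingStrategies() supplies the starting strategy map. A minimal sketch of the latter, assuming it simply asks the same util instance for the strategies implied by an empty LIKE option list:

// Sketch only: the actual helper in MergeTableLikeUtilTest may differ.
// Assumes `util` is the MergeTableLikeUtil instance used throughout these tests.
private Map<FeatureOption, MergingStrategy> getDefaultMergingStrategies() {
    // No explicit LIKE options: every FeatureOption falls back to its default strategy.
    return util.computeMergingStrategies(Collections.emptyList());
}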
Use of org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy in project flink by apache. From the class MergeTableLikeUtilTest, method mergeExcludingWatermarksDuplicate: with WATERMARKS set to EXCLUDING, the watermark declared on "timestamp" in the derived table (10-second bound) replaces the source table's 5-second watermark on the same column.
@Test
public void mergeExcludingWatermarksDuplicate() {
    TableSchema sourceSchema = TableSchema.builder()
            .add(TableColumn.physical("one", DataTypes.INT()))
            .add(TableColumn.physical("timestamp", DataTypes.TIMESTAMP()))
            .watermark("timestamp", "timestamp - INTERVAL '5' SECOND", DataTypes.TIMESTAMP())
            .build();
    List<SqlWatermark> derivedWatermarkSpecs = Collections.singletonList(
            new SqlWatermark(
                    SqlParserPos.ZERO, identifier("timestamp"), boundedStrategy("timestamp", "10")));
    Map<FeatureOption, MergingStrategy> mergingStrategies = getDefaultMergingStrategies();
    mergingStrategies.put(FeatureOption.WATERMARKS, MergingStrategy.EXCLUDING);
    TableSchema mergedSchema = util.mergeTables(
            mergingStrategies, sourceSchema, Collections.emptyList(), derivedWatermarkSpecs, null);
    TableSchema expectedSchema = TableSchema.builder()
            .add(TableColumn.physical("one", DataTypes.INT()))
            .add(TableColumn.physical("timestamp", DataTypes.TIMESTAMP()))
            .watermark("timestamp", "`timestamp` - INTERVAL '10' SECOND", DataTypes.TIMESTAMP())
            .build();
    assertThat(mergedSchema, equalTo(expectedSchema));
}
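The derived watermark above is assembled with the identifier(...) and boundedStrategy(...) helpers from the test class. A rough, hedged sketch of what they might look like; the Calcite calls used below exist, but the helper bodies themselves are assumptions and the real helpers may build the expression differently (for instance via the parser):

// Sketch only: these bodies are guesses at what MergeTableLikeUtilTest defines.
private SqlIdentifier identifier(String name) {
    return new SqlIdentifier(name, SqlParserPos.ZERO);
}

private SqlNode boundedStrategy(String rowtimeColumn, String delaySeconds) {
    // Builds `rowtimeColumn - INTERVAL 'delaySeconds' SECOND`, a bounded-out-of-orderness watermark.
    return SqlStdOperatorTable.MINUS.createCall(
            SqlParserPos.ZERO,
            identifier(rowtimeColumn),
            SqlLiteral.createInterval(
                    1,
                    delaySeconds,
                    new SqlIntervalQualifier(TimeUnit.SECOND, null, SqlParserPos.ZERO),
                    SqlParserPos.ZERO));
}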
Use of org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy in project flink by apache. From the class MergeTableLikeUtilTest, method mergeExcludingConstraintsOnDuplicate: with CONSTRAINTS set to EXCLUDING, the source table's named primary key is dropped and only the primary key declared on the derived table survives the merge.
@Test
public void mergeExcludingConstraintsOnDuplicate() {
    TableSchema sourceSchema = TableSchema.builder()
            .add(TableColumn.physical("one", DataTypes.INT().notNull()))
            .add(TableColumn.physical("two", DataTypes.STRING().notNull()))
            .add(TableColumn.physical("three", DataTypes.FLOAT().notNull()))
            .primaryKey("constraint-42", new String[] {"one", "two", "three"})
            .build();
    Map<FeatureOption, MergingStrategy> mergingStrategies = getDefaultMergingStrategies();
    mergingStrategies.put(FeatureOption.CONSTRAINTS, MergingStrategy.EXCLUDING);
    TableSchema mergedSchema = util.mergeTables(
            mergingStrategies, sourceSchema, Collections.emptyList(), Collections.emptyList(), primaryKey("one", "two"));
    TableSchema expectedSchema = TableSchema.builder()
            .add(TableColumn.physical("one", DataTypes.INT().notNull()))
            .add(TableColumn.physical("two", DataTypes.STRING().notNull()))
            .add(TableColumn.physical("three", DataTypes.FLOAT().notNull()))
            .primaryKey("PK_3531879", new String[] {"one", "two"})
            .build();
    assertThat(mergedSchema, equalTo(expectedSchema));
}
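Note the expected constraint name PK_3531879: the derived key built by the primaryKey("one", "two") helper carries no explicit name, so the merge assigns a generated, deterministic one. A small illustration (not part of the original test) of checking the key without depending on that generated name, using the standard TableSchema and UniqueConstraint accessors:

// Illustration only: verifies the primary-key columns while ignoring the generated name.
Optional<UniqueConstraint> primaryKey = mergedSchema.getPrimaryKey();
assertTrue(primaryKey.isPresent());
assertThat(primaryKey.get().getColumns(), equalTo(Arrays.asList("one", "two")));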
Use of org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy in project flink by apache. From the class MergeTableLikeUtilTest, method includingAllOverwriteOptionsMergeStrategyExpansion: EXCLUDING ALL is expanded to EXCLUDING for every feature, and the more specific INCLUDING CONSTRAINTS option then overrides the strategy for constraints only.
@Test
public void includingAllOverwriteOptionsMergeStrategyExpansion() {
    List<SqlTableLikeOption> inputOptions = Arrays.asList(
            new SqlTableLikeOption(MergingStrategy.EXCLUDING, FeatureOption.ALL),
            new SqlTableLikeOption(MergingStrategy.INCLUDING, FeatureOption.CONSTRAINTS));
    Map<FeatureOption, MergingStrategy> mergingStrategies = util.computeMergingStrategies(inputOptions);
    assertThat(mergingStrategies.get(FeatureOption.OPTIONS), is(MergingStrategy.EXCLUDING));
    assertThat(mergingStrategies.get(FeatureOption.PARTITIONS), is(MergingStrategy.EXCLUDING));
    assertThat(mergingStrategies.get(FeatureOption.CONSTRAINTS), is(MergingStrategy.INCLUDING));
    assertThat(mergingStrategies.get(FeatureOption.GENERATED), is(MergingStrategy.EXCLUDING));
    assertThat(mergingStrategies.get(FeatureOption.METADATA), is(MergingStrategy.EXCLUDING));
    assertThat(mergingStrategies.get(FeatureOption.WATERMARKS), is(MergingStrategy.EXCLUDING));
}
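These options correspond to a LIKE clause of the form LIKE base_table (EXCLUDING ALL, INCLUDING CONSTRAINTS) in Flink SQL. A hedged sketch of an assertion helper that expresses the same expectation without listing each feature by hand; assertExpansion is a hypothetical name, not part of the test class, and it assumes the computed map holds an entry for every concrete feature:

// Sketch of a reusable check: every feature inherits the expanded default unless overridden.
private static void assertExpansion(
        Map<FeatureOption, MergingStrategy> strategies,
        MergingStrategy expandedDefault,
        Map<FeatureOption, MergingStrategy> overrides) {
    for (FeatureOption option : FeatureOption.values()) {
        if (option == FeatureOption.ALL) {
            continue; // ALL is only the DDL shorthand, never a merged feature itself
        }
        assertThat(strategies.get(option), is(overrides.getOrDefault(option, expandedDefault)));
    }
}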
Use of org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy in project flink by apache. From the class MergeTableLikeUtilTest, method includingAllMergeStrategyExpansion: a single INCLUDING ALL option is expanded to INCLUDING for every individual feature.
@Test
public void includingAllMergeStrategyExpansion() {
    List<SqlTableLikeOption> inputOptions = Collections.singletonList(
            new SqlTableLikeOption(MergingStrategy.INCLUDING, FeatureOption.ALL));
    Map<FeatureOption, MergingStrategy> mergingStrategies = util.computeMergingStrategies(inputOptions);
    assertThat(mergingStrategies.get(FeatureOption.OPTIONS), is(MergingStrategy.INCLUDING));
    assertThat(mergingStrategies.get(FeatureOption.PARTITIONS), is(MergingStrategy.INCLUDING));
    assertThat(mergingStrategies.get(FeatureOption.CONSTRAINTS), is(MergingStrategy.INCLUDING));
    assertThat(mergingStrategies.get(FeatureOption.GENERATED), is(MergingStrategy.INCLUDING));
    assertThat(mergingStrategies.get(FeatureOption.WATERMARKS), is(MergingStrategy.INCLUDING));
}
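Tying the two halves of this listing together: the strategy map computed from the LIKE options is exactly what the earlier examples pass into mergeTables(...). A short usage sketch under the same assumptions, where sourceSchema stands for a TableSchema built as in the snippets above and the derived table declares nothing of its own:

// Usage sketch: compute strategies for INCLUDING ALL, then merge with an existing schema.
Map<FeatureOption, MergingStrategy> strategies = util.computeMergingStrategies(
        Collections.singletonList(
                new SqlTableLikeOption(MergingStrategy.INCLUDING, FeatureOption.ALL)));
TableSchema merged = util.mergeTables(
        strategies,
        sourceSchema,             // schema of the table referenced in the LIKE clause
        Collections.emptyList(),  // no additional columns declared on the derived table
        Collections.emptyList(),  // no additional watermarks
        null);                    // no additional primary key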