Use of org.apache.flink.table.api.TableSchema in project flink by apache.
The class SqlToOperationConverterTest, method testCreateTableWithFullDataTypes.
// TODO: tweak the tests when FLINK-13604 is fixed.
@Test
public void testCreateTableWithFullDataTypes() {
    final List<TestItem> testItems = Arrays.asList(
            createTestItem("CHAR", DataTypes.CHAR(1)),
            createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()),
            createTestItem("CHAR NULL", DataTypes.CHAR(1)),
            createTestItem("CHAR(33)", DataTypes.CHAR(33)),
            createTestItem("VARCHAR", DataTypes.STRING()),
            createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)),
            createTestItem("STRING", DataTypes.STRING()),
            createTestItem("BOOLEAN", DataTypes.BOOLEAN()),
            createTestItem("BINARY", DataTypes.BINARY(1)),
            createTestItem("BINARY(33)", DataTypes.BINARY(33)),
            createTestItem("VARBINARY", DataTypes.BYTES()),
            createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)),
            createTestItem("BYTES", DataTypes.BYTES()),
            createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)),
            createTestItem("DEC", DataTypes.DECIMAL(10, 0)),
            createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)),
            createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)),
            createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)),
            createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)),
            createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)),
            createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)),
            createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)),
            createTestItem("TINYINT", DataTypes.TINYINT()),
            createTestItem("SMALLINT", DataTypes.SMALLINT()),
            createTestItem("INTEGER", DataTypes.INT()),
            createTestItem("INT", DataTypes.INT()),
            createTestItem("BIGINT", DataTypes.BIGINT()),
            createTestItem("FLOAT", DataTypes.FLOAT()),
            createTestItem("DOUBLE", DataTypes.DOUBLE()),
            createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()),
            createTestItem("DATE", DataTypes.DATE()),
            createTestItem("TIME", DataTypes.TIME()),
            createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()),
            // Expect to be TIME(3).
            createTestItem("TIME(3)", DataTypes.TIME()),
            // Expect to be TIME(3).
            createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()),
            createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)),
            createTestItem("TIMESTAMP WITHOUT TIME ZONE", DataTypes.TIMESTAMP(6)),
            createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)),
            createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)),
            createTestItem(
                    "TIMESTAMP WITH LOCAL TIME ZONE",
                    DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)),
            createTestItem(
                    "TIMESTAMP(3) WITH LOCAL TIME ZONE",
                    DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)),
            createTestItem(
                    "ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>",
                    DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))),
            createTestItem("ARRAY<INT NOT NULL>", DataTypes.ARRAY(DataTypes.INT().notNull())),
            createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())),
            createTestItem("INT NOT NULL ARRAY", DataTypes.ARRAY(DataTypes.INT().notNull())),
            createTestItem("INT ARRAY NOT NULL", DataTypes.ARRAY(DataTypes.INT()).notNull()),
            createTestItem("MULTISET<INT NOT NULL>", DataTypes.MULTISET(DataTypes.INT().notNull())),
            createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())),
            createTestItem("INT NOT NULL MULTISET", DataTypes.MULTISET(DataTypes.INT().notNull())),
            createTestItem("INT MULTISET NOT NULL", DataTypes.MULTISET(DataTypes.INT()).notNull()),
            createTestItem("MAP<BIGINT, BOOLEAN>", DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())),
            // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
            createTestItem(
                    "ROW<f0 INT NOT NULL, f1 BOOLEAN>",
                    DataTypes.ROW(
                            DataTypes.FIELD("f0", DataTypes.INT()),
                            DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
            // Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
            createTestItem(
                    "ROW(f0 INT NOT NULL, f1 BOOLEAN)",
                    DataTypes.ROW(
                            DataTypes.FIELD("f0", DataTypes.INT()),
                            DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
            createTestItem("ROW<`f0` INT>", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
            createTestItem("ROW(`f0` INT)", DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
            createTestItem("ROW<>", DataTypes.ROW()),
            createTestItem("ROW()", DataTypes.ROW()),
            // Expect to be ROW<`f0` INT NOT NULL '...', `f1` BOOLEAN '...'>.
            createTestItem(
                    "ROW<f0 INT NOT NULL 'This is a comment.', f1 BOOLEAN 'This as well.'>",
                    DataTypes.ROW(
                            DataTypes.FIELD("f0", DataTypes.INT()),
                            DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
            createTestItem(
                    "ARRAY<ROW<f0 INT, f1 BOOLEAN>>",
                    DataTypes.ARRAY(
                            DataTypes.ROW(
                                    DataTypes.FIELD("f0", DataTypes.INT()),
                                    DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
            createTestItem(
                    "ROW<f0 INT, f1 BOOLEAN> MULTISET",
                    DataTypes.MULTISET(
                            DataTypes.ROW(
                                    DataTypes.FIELD("f0", DataTypes.INT()),
                                    DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
            createTestItem(
                    "MULTISET<ROW<f0 INT, f1 BOOLEAN>>",
                    DataTypes.MULTISET(
                            DataTypes.ROW(
                                    DataTypes.FIELD("f0", DataTypes.INT()),
                                    DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
            createTestItem(
                    "ROW<f0 Row<f00 INT, f01 BOOLEAN>, f1 INT ARRAY, f2 BOOLEAN MULTISET>",
                    DataTypes.ROW(
                            DataTypes.FIELD(
                                    "f0",
                                    DataTypes.ROW(
                                            DataTypes.FIELD("f00", DataTypes.INT()),
                                            DataTypes.FIELD("f01", DataTypes.BOOLEAN()))),
                            DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())),
                            DataTypes.FIELD("f2", DataTypes.MULTISET(DataTypes.BOOLEAN())))));
    StringBuilder buffer = new StringBuilder("create table t1(\n");
    for (int i = 0; i < testItems.size(); i++) {
        buffer.append("f").append(i).append(" ").append(testItems.get(i).testExpr);
        if (i == testItems.size() - 1) {
            buffer.append(")");
        } else {
            buffer.append(",\n");
        }
    }
    final String sql = buffer.toString();
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    TableSchema schema = ((CreateTableOperation) operation).getCatalogTable().getSchema();
    Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray();
    assertThat(schema.getFieldDataTypes()).isEqualTo(expectedDataTypes);
}
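The createTestItem helper and the TestItem holder are defined elsewhere in SqlToOperationConverterTest and are not part of this excerpt. Based only on the usages above (item.testExpr, item.expectedType), a minimal sketch of what they plausibly look like (org.apache.flink.table.types.DataType import assumed):

private static class TestItem {
    // SQL type string as it appears in the generated DDL, e.g. "TIMESTAMP(3)".
    final String testExpr;
    // DataType the converted schema is expected to report for that column.
    final DataType expectedType;

    TestItem(String testExpr, DataType expectedType) {
        this.testExpr = testExpr;
        this.expectedType = expectedType;
    }
}

private static TestItem createTestItem(String testExpr, DataType expectedType) {
    return new TestItem(testExpr, expectedType);
}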
Use of org.apache.flink.table.api.TableSchema in project flink by apache.
The class BatchOperatorNameTest, method testLegacySourceSink.
@Test
public void testLegacySourceSink() {
    TableSchema schema = TestLegacyFilterableTableSource.defaultSchema();
    TestLegacyFilterableTableSource.createTemporaryTable(
            tEnv,
            schema,
            "MySource",
            true,
            TestLegacyFilterableTableSource.defaultRows().toList(),
            TestLegacyFilterableTableSource.defaultFilterableFields());
    TableSink<Row> sink =
            ((BatchTableTestUtil) util)
                    .createCollectTableSink(
                            schema.getFieldNames(),
                            schema.getTableColumns().stream()
                                    .map(col -> col.getType().getLogicalType())
                                    .toArray(LogicalType[]::new));
    util.testingTableEnv().registerTableSinkInternal("MySink", sink);
    verifyInsert("insert into MySink select * from MySource");
}
Use of org.apache.flink.table.api.TableSchema in project flink by apache.
The class StreamOperatorNameTest, method testLegacySourceSink.
/**
* Verify LegacySource and LegacySink.
*/
@Test
public void testLegacySourceSink() {
    TableSchema schema = TestLegacyFilterableTableSource.defaultSchema();
    TestLegacyFilterableTableSource.createTemporaryTable(
            tEnv,
            schema,
            "MySource",
            true,
            TestLegacyFilterableTableSource.defaultRows().toList(),
            TestLegacyFilterableTableSource.defaultFilterableFields());
    TableSink<Row> sink =
            util.createAppendTableSink(
                    schema.getFieldNames(),
                    schema.getTableColumns().stream()
                            .map(col -> col.getType().getLogicalType())
                            .toArray(LogicalType[]::new));
    util.testingTableEnv().registerTableSinkInternal("MySink", sink);
    verifyInsert("insert into MySink select * from MySource");
}
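Both testLegacySourceSink variants derive the sink's physical schema from the source TableSchema the same way: field names via getFieldNames(), and logical types unwrapped from each TableColumn's DataType. A self-contained sketch of that extraction pattern (the two-column schema here is illustrative, not the one returned by TestLegacyFilterableTableSource.defaultSchema()):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.types.logical.LogicalType;

public class SchemaExtractionSketch {
    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.BIGINT())
                .field("name", DataTypes.STRING())
                .build();

        // Column names in declaration order.
        String[] fieldNames = schema.getFieldNames();

        // Unwrap each column's DataType down to its LogicalType,
        // mirroring the sink construction in the tests above.
        LogicalType[] fieldTypes = schema.getTableColumns().stream()
                .map(col -> col.getType().getLogicalType())
                .toArray(LogicalType[]::new);

        System.out.println(fieldNames.length + " columns, " + fieldTypes.length + " types");
    }
}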
Use of org.apache.flink.table.api.TableSchema in project flink by apache.
The class MergeTableLikeUtilTest, method mergeOverwritingComputedColumnWithMetadataColumn.
@Test
public void mergeOverwritingComputedColumnWithMetadataColumn() {
    TableSchema sourceSchema =
            TableSchema.builder()
                    .add(TableColumn.physical("one", DataTypes.INT()))
                    .add(TableColumn.computed("two", DataTypes.INT(), "one + 3"))
                    .build();
    List<SqlNode> derivedColumns =
            Collections.singletonList(metadataColumn("two", DataTypes.BOOLEAN(), false));
    Map<FeatureOption, MergingStrategy> mergingStrategies = getDefaultMergingStrategies();
    mergingStrategies.put(FeatureOption.METADATA, MergingStrategy.OVERWRITING);
    thrown.expect(ValidationException.class);
    thrown.expectMessage(
            "A column named 'two' already exists in the base table."
                    + " Metadata columns can only overwrite other metadata columns.");
    util.mergeTables(
            mergingStrategies, sourceSchema, derivedColumns, Collections.emptyList(), null);
}
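The sourceSchema above is assembled from TableColumn's static factory methods. For reference, a small sketch of the (legacy) builder API, including the metadata variant that a merged metadata column would correspond to; the "ts" column and its "timestamp" metadata key are made up for illustration:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;

public class TableColumnSketch {
    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
                // Physical column backed directly by the source.
                .add(TableColumn.physical("one", DataTypes.INT()))
                // Computed column defined by a SQL expression over other columns.
                .add(TableColumn.computed("two", DataTypes.INT(), "one + 3"))
                // Metadata column reading the connector's "timestamp" key;
                // 'true' marks it virtual, i.e. excluded when writing.
                .add(TableColumn.metadata("ts", DataTypes.TIMESTAMP(3), "timestamp", true))
                .build();
        System.out.println(schema);
    }
}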
Use of org.apache.flink.table.api.TableSchema in project flink by apache.
The class MergeTableLikeUtilTest, method mergeWithIncludeFailsOnDuplicateRegularColumnAndMetadataColumn.
@Test
public void mergeWithIncludeFailsOnDuplicateRegularColumnAndMetadataColumn() {
    TableSchema sourceSchema =
            TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT())).build();
    List<SqlNode> derivedColumns =
            Arrays.asList(
                    metadataColumn("two", DataTypes.INT(), true),
                    computedColumn("three", plus("two", "3")),
                    regularColumn("two", DataTypes.INT()),
                    regularColumn("four", DataTypes.STRING()));
    thrown.expect(ValidationException.class);
    thrown.expectMessage(
            "A column named 'two' already exists in the table. "
                    + "Duplicate columns exist in the metadata column and regular column. ");
    util.mergeTables(
            getDefaultMergingStrategies(), sourceSchema, derivedColumns, Collections.emptyList(), null);
}