Use of org.apache.flink.table.api.TableSchema in project flink by apache.
From the class SqlToOperationConverterTest, method before():
@Before
public void before() throws TableAlreadyExistException, DatabaseNotExistException {
    catalogManager.initSchemaResolver(
            isStreamingMode,
            ExpressionResolverMocks.basicResolver(catalogManager, functionCatalog, parser));
    final ObjectPath path1 = new ObjectPath(catalogManager.getCurrentDatabase(), "t1");
    final ObjectPath path2 = new ObjectPath(catalogManager.getCurrentDatabase(), "t2");
    final TableSchema tableSchema =
            TableSchema.builder()
                    .field("a", DataTypes.BIGINT())
                    .field("b", DataTypes.VARCHAR(Integer.MAX_VALUE))
                    .field("c", DataTypes.INT())
                    .field("d", DataTypes.VARCHAR(Integer.MAX_VALUE))
                    .build();
    Map<String, String> options = new HashMap<>();
    options.put("connector", "COLLECTION");
    final CatalogTable catalogTable = new CatalogTableImpl(tableSchema, options, "");
    catalog.createTable(path1, catalogTable, true);
    catalog.createTable(path2, catalogTable, true);
}
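For context outside the test harness, here is a minimal, self-contained sketch of the same builder pattern (the class name TableSchemaSketch is ours, not part of Flink); it constructs the four-column schema above and inspects it through the TableSchema accessors:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;

public class TableSchemaSketch {
    public static void main(String[] args) {
        // Build the same four-column schema that the test registers for t1 and t2.
        TableSchema schema =
                TableSchema.builder()
                        .field("a", DataTypes.BIGINT())
                        .field("b", DataTypes.VARCHAR(Integer.MAX_VALUE))
                        .field("c", DataTypes.INT())
                        .field("d", DataTypes.VARCHAR(Integer.MAX_VALUE))
                        .build();
        // The declared columns are exposed as parallel arrays of names and types.
        System.out.println(String.join(", ", schema.getFieldNames()));
        // The whole schema can also be viewed as a single ROW data type.
        System.out.println(schema.toRowDataType());
    }
}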
Use of org.apache.flink.table.api.TableSchema in project flink by apache.
From the class SqlToOperationConverterTest, method testCreateTableWithPrimaryKey():
@Test
public void testCreateTableWithPrimaryKey() {
    final String sql =
            "CREATE TABLE tbl1 (\n"
                    + " a bigint,\n"
                    + " b varchar, \n"
                    + " c int, \n"
                    + " d varchar, \n"
                    + " constraint ct1 primary key(a, b) not enforced\n"
                    + ") with (\n"
                    + " 'connector' = 'kafka', \n"
                    + " 'kafka.topic' = 'log.test'\n"
                    + ")\n";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, parser);
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    TableSchema tableSchema = catalogTable.getSchema();
    assertThat(tableSchema.getPrimaryKey().map(UniqueConstraint::asSummaryString).orElse("fakeVal"))
            .isEqualTo("CONSTRAINT ct1 PRIMARY KEY (a, b)");
    assertThat(tableSchema.getFieldNames()).isEqualTo(new String[] {"a", "b", "c", "d"});
    assertThat(tableSchema.getFieldDataTypes())
            .isEqualTo(
                    new DataType[] {
                        DataTypes.BIGINT().notNull(),
                        DataTypes.STRING().notNull(),
                        DataTypes.INT(),
                        DataTypes.STRING()
                    });
}
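The DDL route above is not the only way to obtain a schema with a primary key; the builder can declare one directly. A minimal sketch, assuming the Builder#primaryKey overload that takes a constraint name (primary-key columns must use NOT NULL data types, which is why the test expects notNull() types for a and b):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.constraints.UniqueConstraint;

public class PrimaryKeySketch {
    public static void main(String[] args) {
        // Declare the constraint on the builder instead of going through SQL DDL.
        TableSchema schema =
                TableSchema.builder()
                        .field("a", DataTypes.BIGINT().notNull())
                        .field("b", DataTypes.STRING().notNull())
                        .field("c", DataTypes.INT())
                        .primaryKey("ct1", new String[] {"a", "b"})
                        .build();
        // Prints: CONSTRAINT ct1 PRIMARY KEY (a, b)
        schema.getPrimaryKey()
                .map(UniqueConstraint::asSummaryString)
                .ifPresent(System.out::println);
    }
}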
Use of org.apache.flink.table.api.TableSchema in project flink by apache.
From the class MergeTableLikeUtilTest, method mergeWithIncludeFailsOnDuplicateRegularColumnAndComputeColumn():
@Test
public void mergeWithIncludeFailsOnDuplicateRegularColumnAndComputeColumn() {
    TableSchema sourceSchema =
            TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT())).build();
    List<SqlNode> derivedColumns =
            Arrays.asList(
                    regularColumn("two", DataTypes.INT()),
                    computedColumn("three", plus("two", "3")),
                    regularColumn("three", DataTypes.INT()),
                    regularColumn("four", DataTypes.STRING()));
    thrown.expect(ValidationException.class);
    thrown.expectMessage(
            "A column named 'three' already exists in the table. "
                    + "Duplicate columns exist in the compute column and regular column. ");
    util.mergeTables(
            getDefaultMergingStrategies(),
            sourceSchema,
            derivedColumns,
            Collections.emptyList(),
            null);
}
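The distinction this test relies on is between physical and computed columns, which share a single namespace in a TableSchema. A minimal sketch, assuming a Flink version in which TableColumn.computed(name, type, expression) is available:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;

public class ColumnKindsSketch {
    public static void main(String[] args) {
        // One physical column and one computed column; their names must be
        // unique across both kinds, which is the rule the test above exercises.
        TableSchema schema =
                TableSchema.builder()
                        .add(TableColumn.physical("two", DataTypes.INT()))
                        .add(TableColumn.computed("three", DataTypes.INT(), "`two` + 3"))
                        .build();
        schema.getTableColumns()
                .forEach(c -> System.out.println(c.getName() + " physical=" + c.isPhysical()));
    }
}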
Use of org.apache.flink.table.api.TableSchema in project flink by apache.
From the class MergeTableLikeUtilTest, method mergeIncludingWatermarksFailsOnDuplicate():
@Test
public void mergeIncludingWatermarksFailsOnDuplicate() {
    TableSchema sourceSchema =
            TableSchema.builder()
                    .add(TableColumn.physical("one", DataTypes.INT()))
                    .add(TableColumn.physical("timestamp", DataTypes.TIMESTAMP()))
                    .watermark("timestamp", "timestamp - INTERVAL '5' SECOND", DataTypes.TIMESTAMP())
                    .build();
    List<SqlWatermark> derivedWatermarkSpecs =
            Collections.singletonList(
                    new SqlWatermark(
                            SqlParserPos.ZERO,
                            identifier("timestamp"),
                            boundedStrategy("timestamp", "10")));
    thrown.expect(ValidationException.class);
    thrown.expectMessage(
            "There already exists a watermark spec for column 'timestamp' in the "
                    + "base table. You might want to specify EXCLUDING WATERMARKS or OVERWRITING WATERMARKS.");
    util.mergeTables(
            getDefaultMergingStrategies(),
            sourceSchema,
            Collections.emptyList(),
            derivedWatermarkSpecs,
            null);
}
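The watermark spec being duplicated here is ordinary builder state. A minimal sketch of declaring a single watermark and reading it back, mirroring the types used in the test (the class name WatermarkSketch is ours):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;

public class WatermarkSketch {
    public static void main(String[] args) {
        // Declare one watermark on the rowtime column; the merge utility above
        // rejects a second spec for the same column, as the test expects.
        TableSchema schema =
                TableSchema.builder()
                        .add(TableColumn.physical("timestamp", DataTypes.TIMESTAMP()))
                        .watermark("timestamp", "timestamp - INTERVAL '5' SECOND", DataTypes.TIMESTAMP())
                        .build();
        schema.getWatermarkSpecs()
                .forEach(spec -> System.out.println(
                        spec.getRowtimeAttribute() + " -> " + spec.getWatermarkExpr()));
    }
}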
Use of org.apache.flink.table.api.TableSchema in project flink by apache.
From the class MergeTableLikeUtilTest, method mergeWithIncludeFailsOnDuplicateColumn():
@Test
public void mergeWithIncludeFailsOnDuplicateColumn() {
    TableSchema sourceSchema =
            TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT())).build();
    List<SqlNode> derivedColumns =
            Arrays.asList(
                    regularColumn("one", DataTypes.INT()),
                    regularColumn("four", DataTypes.STRING()));
    thrown.expect(ValidationException.class);
    thrown.expectMessage("A column named 'one' already exists in the base table.");
    util.mergeTables(
            getDefaultMergingStrategies(),
            sourceSchema,
            derivedColumns,
            Collections.emptyList(),
            null);
}
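For illustration, the duplicate check can be reproduced outside the utility with plain collections. A hypothetical helper (assertNoClash is ours; it is not how MergeTableLikeUtil is implemented, only a sketch of the rule it enforces):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;

public class DuplicateColumnSketch {
    // Hypothetical helper: reject any derived column whose name already exists
    // in the base schema (or is repeated among the derived columns themselves).
    static void assertNoClash(TableSchema base, String... derivedNames) {
        Set<String> seen = new HashSet<>(Arrays.asList(base.getFieldNames()));
        for (String name : derivedNames) {
            if (!seen.add(name)) {
                throw new IllegalArgumentException(
                        "A column named '" + name + "' already exists in the base table.");
            }
        }
    }

    public static void main(String[] args) {
        TableSchema base =
                TableSchema.builder().add(TableColumn.physical("one", DataTypes.INT())).build();
        assertNoClash(base, "four"); // passes
        assertNoClash(base, "one"); // throws, matching the error the test expects
    }
}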