Usage of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in the Apache Flink project:
class SqlToOperationConverterTest, method testCreateTableWithMetadataColumn.
@Test
public void testCreateTableWithMetadataColumn() {
    // DDL covering physical columns plus the three metadata column flavors:
    // plain METADATA, METADATA FROM '<alias>', and METADATA VIRTUAL.
    final String ddl =
            "CREATE TABLE tbl1 (\n"
                    + " a INT,\n"
                    + " b STRING,\n"
                    + " c INT METADATA,\n"
                    + " d INT METADATA FROM 'other.key',\n"
                    + " e INT METADATA VIRTUAL\n"
                    + ")\n"
                    + " WITH (\n"
                    + " 'connector' = 'kafka',\n"
                    + " 'kafka.topic' = 'log.test'\n"
                    + ")\n";
    final FlinkPlannerImpl defaultPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final Operation converted =
            parse(ddl, defaultPlanner, getParserBySqlDialect(SqlDialect.DEFAULT));
    assertThat(converted).isInstanceOf(CreateTableOperation.class);

    // The resolved schema must keep column order and metadata attributes intact.
    final TableSchema expectedSchema =
            TableSchema.builder()
                    .add(TableColumn.physical("a", DataTypes.INT()))
                    .add(TableColumn.physical("b", DataTypes.STRING()))
                    .add(TableColumn.metadata("c", DataTypes.INT()))
                    .add(TableColumn.metadata("d", DataTypes.INT(), "other.key"))
                    .add(TableColumn.metadata("e", DataTypes.INT(), true))
                    .build();
    final CreateTableOperation createOp = (CreateTableOperation) converted;
    assertThat(createOp.getCatalogTable().getSchema()).isEqualTo(expectedSchema);
}
Usage of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in the Apache Flink project:
class SqlToOperationConverterTest, method testCreateTableWithMinusInOptionKey.
@Test
public void testCreateTableWithMinusInOptionKey() {
    // Option keys containing '-' and '*' characters must survive parsing verbatim.
    final String sql =
            "create table source_table(\n"
                    + " a int,\n"
                    + " b bigint,\n"
                    + " c varchar\n"
                    + ") with (\n"
                    + " 'a-B-c-d124' = 'Ab',\n"
                    + " 'a.b-c-d.e-f.g' = 'ada',\n"
                    + " 'a.b-c-d.e-f1231.g' = 'ada',\n"
                    + " 'a.b-c-d.*' = 'adad')\n";
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    final SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    final Operation operation =
            SqlToOperationConverter.convert(planner, catalogManager, node).get();
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    final CatalogTable catalogTable = ((CreateTableOperation) operation).getCatalogTable();
    // Wrap the options directly in a TreeMap for a deterministic (sorted) string
    // representation; the intermediate stream/Collectors.toMap copy was redundant.
    final Map<String, String> sortedProperties = new TreeMap<>(catalogTable.getOptions());
    final String expected =
            "{a-B-c-d124=Ab, "
                    + "a.b-c-d.*=adad, "
                    + "a.b-c-d.e-f.g=ada, "
                    + "a.b-c-d.e-f1231.g=ada}";
    assertThat(sortedProperties.toString()).isEqualTo(expected);
}
Usage of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in the Apache Flink project:
class SqlToOperationConverterTest, method testExplainDetailsWithSelect.
@Test
public void testExplainDetailsWithSelect() {
    // EXPLAIN with explicit detail flags on a plain SELECT must be accepted
    // and produce the expected explain details.
    final String explainSql = "explain estimated_cost, changelog_mode select a, b, c, d from t2";
    final FlinkPlannerImpl defaultPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser defaultParser = getParserBySqlDialect(SqlDialect.DEFAULT);
    assertExplainDetails(parse(explainSql, defaultPlanner, defaultParser));
}
Usage of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in the Apache Flink project:
class SqlToOperationConverterTest, helper method checkExplainSql.
/**
 * Parses the given EXPLAIN statement with the default dialect and asserts it
 * converts to an {@link ExplainOperation}.
 *
 * @param sql the EXPLAIN statement to check
 */
private void checkExplainSql(String sql) {
    final FlinkPlannerImpl defaultPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser defaultParser = getParserBySqlDialect(SqlDialect.DEFAULT);
    final SqlNode parsed = defaultParser.parse(sql);
    assertThat(parsed).isInstanceOf(SqlRichExplain.class);
    final Operation converted =
            SqlToOperationConverter.convert(defaultPlanner, catalogManager, parsed).get();
    assertThat(converted).isInstanceOf(ExplainOperation.class);
}
Usage of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in the Apache Flink project:
class SqlToOperationConverterTest, method testCreateTableWithUniqueKey.
@Test
public void testCreateTableWithUniqueKey() {
    // UNIQUE constraints are not supported; conversion must fail with a clear message.
    final String ddl =
            "CREATE TABLE tbl1 (\n"
                    + " a bigint,\n"
                    + " b varchar, \n"
                    + " c int, \n"
                    + " d varchar, \n"
                    + " constraint ct1 unique (a, b) not enforced\n"
                    + ") with (\n"
                    + " 'connector' = 'kafka', \n"
                    + " 'kafka.topic' = 'log.test'\n"
                    + ")\n";
    final FlinkPlannerImpl defaultPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser defaultParser = getParserBySqlDialect(SqlDialect.DEFAULT);
    assertThatThrownBy(() -> parse(ddl, defaultPlanner, defaultParser))
            .isInstanceOf(UnsupportedOperationException.class)
            .hasMessageContaining("UNIQUE constraint is not supported yet");
}
Aggregations