Use of org.apache.flink.table.planner.parse.CalciteParser in project flink by apache.
The class SqlToOperationConverterTest, method testCreateTableWithMinusInOptionKey.
@Test
public void testCreateTableWithMinusInOptionKey() {
    // option keys containing '-', '.' and '*' must survive conversion unchanged
    final String sql =
            "create table source_table(\n"
                    + " a int,\n"
                    + " b bigint,\n"
                    + " c varchar\n"
                    + ") with (\n"
                    + " 'a-B-c-d124' = 'Ab',\n"
                    + " 'a.b-c-d.e-f.g' = 'ada',\n"
                    + " 'a.b-c-d.e-f1231.g' = 'ada',\n"
                    + " 'a.b-c-d.*' = 'adad')\n";
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    Map<String, String> options =
            catalogTable.getOptions().entrySet().stream()
                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    Map<String, String> sortedProperties = new TreeMap<>(options);
    final String expected =
            "{a-B-c-d124=Ab, "
                    + "a.b-c-d.*=adad, "
                    + "a.b-c-d.e-f.g=ada, "
                    + "a.b-c-d.e-f1231.g=ada}";
    assertThat(sortedProperties.toString()).isEqualTo(expected);
}
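Side note (not part of the original test): the intermediate stream copy of the options map is an identity transformation, so the same sorted view can be obtained directly from the TreeMap copy constructor:

// equivalent, shorter form of the last three statements above
Map<String, String> sortedProperties = new TreeMap<>(catalogTable.getOptions());
assertThat(sortedProperties.toString()).isEqualTo(expected);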
Use of org.apache.flink.table.planner.parse.CalciteParser in project flink by apache.
The class SqlToOperationConverterTest, method testExplainDetailsWithSelect.
@Test
public void testExplainDetailsWithSelect() {
    final String sql = "explain estimated_cost, changelog_mode select a, b, c, d from t2";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    assertExplainDetails(parse(sql, planner, parser));
}
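The parse(sql, planner, parser) helper used here is not part of this listing. Based on the explicit steps in testCreateTableWithMinusInOptionKey above, it presumably chains the Calcite parse and the operation conversion; a minimal sketch (the real helper in SqlToOperationConverterTest may differ in details such as error handling):

private Operation parse(String sql, FlinkPlannerImpl planner, CalciteParser parser) {
    // parse the SQL text into a Calcite SqlNode, then convert it to a Flink Operation
    SqlNode node = parser.parse(sql);
    return SqlToOperationConverter.convert(planner, catalogManager, node).get();
}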
Use of org.apache.flink.table.planner.parse.CalciteParser in project flink by apache.
The class SqlToOperationConverterTest, method checkExplainSql.
private void checkExplainSql(String sql) {
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlRichExplain.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    assertThat(operation).isInstanceOf(ExplainOperation.class);
}
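A hypothetical usage of this helper (the method name and SQL text are illustrative; any EXPLAIN statement should satisfy the same assertions):

@Test
public void testExplainWithSelect() {
    // illustrative call, reusing table t2 from the test above
    checkExplainSql("explain plan for select a, b, c, d from t2");
}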
Use of org.apache.flink.table.planner.parse.CalciteParser in project flink by apache.
The class SqlToOperationConverterTest, method testCreateTableWithUniqueKey.
@Test
public void testCreateTableWithUniqueKey() {
    final String sql =
            "CREATE TABLE tbl1 (\n"
                    + " a bigint,\n"
                    + " b varchar, \n"
                    + " c int, \n"
                    + " d varchar, \n"
                    + " constraint ct1 unique (a, b) not enforced\n"
                    + ") with (\n"
                    + " 'connector' = 'kafka', \n"
                    + " 'kafka.topic' = 'log.test'\n"
                    + ")\n";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    assertThatThrownBy(() -> parse(sql, planner, parser))
            .isInstanceOf(UnsupportedOperationException.class)
            .hasMessageContaining("UNIQUE constraint is not supported yet");
}
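For contrast, Flink's DDL does accept PRIMARY KEY ... NOT ENFORCED. A sketch of the positive case under the same fixture (the table name tbl2 and the assertion are illustrative, not taken from the original test class):

final String pkSql =
        "CREATE TABLE tbl2 (\n"
                + " a bigint,\n"
                + " b varchar,\n"
                + " primary key (a) not enforced\n"
                + ") with (\n"
                + " 'connector' = 'kafka',\n"
                + " 'kafka.topic' = 'log.test'\n"
                + ")\n";
// the unique-key restriction does not apply to primary keys, so conversion should succeed
Operation operation = parse(pkSql, planner, parser);
assertThat(operation).isInstanceOf(CreateTableOperation.class);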
Use of org.apache.flink.table.planner.parse.CalciteParser in project flink by apache.
The class SqlToOperationConverterTest, method testSqlRichExplainWithStatementSet.
@Test
public void testSqlRichExplainWithStatementSet() {
    final String sql =
            "explain plan for statement set begin "
                    + "insert into t1 select a, b, c, d from t2 where a > 1;"
                    + "insert into t1 select a, b, c, d from t2 where a > 2;"
                    + "end";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, parser);
    assertThat(operation).isInstanceOf(ExplainOperation.class);
}
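If the test also wanted to verify what is being explained, the wrapped operation could be inspected. A sketch, assuming ExplainOperation exposes its child via getChild() and that the statement set converts to a StatementSetOperation (check both against the Flink version in use):

ExplainOperation explain = (ExplainOperation) operation;
// the EXPLAIN target should be the converted statement set
assertThat(explain.getChild()).isInstanceOf(StatementSetOperation.class);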