Use of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in project flink by apache.
From the class SqlToOperationConverterTest, method testSqlRichExplainWithStatementSet.
@Test
public void testSqlRichExplainWithStatementSet() {
    final String sql =
            "explain plan for statement set begin "
                    + "insert into t1 select a, b, c, d from t2 where a > 1;"
                    + "insert into t1 select a, b, c, d from t2 where a > 2;"
                    + "end";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, parser);
    assertThat(operation).isInstanceOf(ExplainOperation.class);
}
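The snippets on this page go through a small parse helper defined in the test class, which turns the SQL text into a Flink Operation. A minimal sketch of what such a helper can look like, assuming a catalogManager field on the test class and the Calcite SqlNode / SqlToOperationConverter types (the actual helper in SqlToOperationConverterTest may differ in detail):

private Operation parse(String sql, FlinkPlannerImpl planner, CalciteParser parser) {
    // Parse the raw SQL into a Calcite SqlNode, then convert it into a Flink Operation.
    SqlNode node = parser.parse(sql);
    return SqlToOperationConverter.convert(planner, catalogManager, node)
            .orElseThrow(() -> new AssertionError("Could not convert: " + sql));
}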
Use of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in project flink by apache.
From the class SqlToOperationConverterTest, method testDynamicTableWithInvalidOptions.
@Test
public void testDynamicTableWithInvalidOptions() {
    final String sql = "select * from t1 /*+ OPTIONS('opt1', 'opt2') */";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    assertThatThrownBy(() -> parse(sql, planner, parser))
            .isInstanceOf(AssertionError.class)
            .hasMessageContaining("Hint [OPTIONS] only support non empty key value options");
}
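For contrast, an OPTIONS hint with proper non-empty key/value pairs passes hint validation. A hedged sketch of the positive case (the method name and option keys are illustrative, not taken from the original test class):

@Test
public void testDynamicTableWithValidOptions() {
    final String sql = "select * from t1 /*+ OPTIONS('k1'='v1', 'k2'='v2') */";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    // With well-formed key/value options the statement converts into a query Operation
    // instead of failing hint validation.
    Operation operation = parse(sql, planner, parser);
    assertThat(operation).isInstanceOf(QueryOperation.class);
}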
Use of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in project flink by apache.
From the class SqlToOperationConverterTest, method testCreateTableWithPrimaryKeyEnforced.
@Test
public void testCreateTableWithPrimaryKeyEnforced() {
    final String sql =
            "CREATE TABLE tbl1 (\n"
                    + " a bigint,\n"
                    + " b varchar, \n"
                    + " c int, \n"
                    + " d varchar, \n"
                    // Default is enforced.
                    + " constraint ct1 primary key(a, b)\n"
                    + ") with (\n"
                    + " 'connector' = 'kafka', \n"
                    + " 'kafka.topic' = 'log.test'\n"
                    + ")\n";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    assertThatThrownBy(() -> parse(sql, planner, parser))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining(
                    "Flink doesn't support ENFORCED mode for PRIMARY KEY constraint. "
                            + "ENFORCED/NOT ENFORCED controls if the constraint checks are performed "
                            + "on the incoming/outgoing data. Flink does not own the data therefore "
                            + "the only supported mode is the NOT ENFORCED mode");
}
Use of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in project flink by apache.
From the class SqlToOperationConverterTest, method testCreateTableWithPrimaryKey.
@Test
public void testCreateTableWithPrimaryKey() {
    final String sql =
            "CREATE TABLE tbl1 (\n"
                    + " a bigint,\n"
                    + " b varchar, \n"
                    + " c int, \n"
                    + " d varchar, \n"
                    + " constraint ct1 primary key(a, b) not enforced\n"
                    + ") with (\n"
                    + " 'connector' = 'kafka', \n"
                    + " 'kafka.topic' = 'log.test'\n"
                    + ")\n";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, parser);
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    TableSchema tableSchema = catalogTable.getSchema();
    assertThat(tableSchema.getPrimaryKey().map(UniqueConstraint::asSummaryString).orElse("fakeVal"))
            .isEqualTo("CONSTRAINT ct1 PRIMARY KEY (a, b)");
    assertThat(tableSchema.getFieldNames()).isEqualTo(new String[] {"a", "b", "c", "d"});
    assertThat(tableSchema.getFieldDataTypes())
            .isEqualTo(
                    new DataType[] {
                        DataTypes.BIGINT().notNull(),
                        DataTypes.STRING().notNull(),
                        DataTypes.INT(),
                        DataTypes.STRING()
                    });
}
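As a small follow-up (not part of the original test), the WITH clause properties end up in the options map of the resulting CatalogTable and could be verified the same way, assuming a Flink version where CatalogBaseTable#getOptions() is available:

// The WITH clause properties are carried over into the catalog table's options.
assertThat(catalogTable.getOptions())
        .containsEntry("connector", "kafka")
        .containsEntry("kafka.topic", "log.test");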
Use of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in project flink by apache.
From the class SqlToOperationConverterTest, method testExplainDetailsWithStatementSet.
@Test
public void testExplainDetailsWithStatementSet() {
    final String sql =
            "explain estimated_cost, changelog_mode statement set begin "
                    + "insert into t1 select a, b, c, d from t2 where a > 1;"
                    + "insert into t1 select a, b, c, d from t2 where a > 2;"
                    + "end";
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    assertExplainDetails(parse(sql, planner, parser));
}
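The assertExplainDetails helper belongs to the test class and is not shown on this page. A minimal sketch of what such a check could verify, using Operation#asSummaryString(); the real helper may inspect the ExplainOperation's explain details directly:

private static void assertExplainDetails(Operation operation) {
    // The statement should convert into an ExplainOperation whose summary
    // mentions the requested explain details.
    assertThat(operation).isInstanceOf(ExplainOperation.class);
    assertThat(operation.asSummaryString())
            .contains("ESTIMATED_COST")
            .contains("CHANGELOG_MODE");
}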