Use of org.apache.flink.table.api.StatementSet in project zeppelin by apache:
the method startMultipleInsert of class Flink113Shims.
@Override
public void startMultipleInsert(Object tblEnv, InterpreterContext context) throws Exception {
  // Create a fresh StatementSet and register it under the current paragraph id so
  // subsequent INSERT statements for this paragraph can be buffered into it.
  TableEnvironment env = (TableEnvironment) tblEnv;
  statementSetMap.put(context.getParagraphId(), env.createStatementSet());
}
Use of org.apache.flink.table.api.StatementSet in project zeppelin by apache:
the method startMultipleInsert of class Flink114Shims.
@Override
public void startMultipleInsert(Object tblEnv, InterpreterContext context) throws Exception {
  // Begin a multi-insert session for this paragraph: a new StatementSet is
  // created from the given table environment and tracked per paragraph id.
  final StatementSet pendingInserts = ((TableEnvironment) tblEnv).createStatementSet();
  statementSetMap.put(context.getParagraphId(), pendingInserts);
}
Use of org.apache.flink.table.api.StatementSet in project flink by apache:
the method testWhitespacePartValue of class HiveRunnerITCase.
@Test
public void testWhitespacePartValue() throws Exception {
  // Verify that partition values consisting of (or containing) whitespace
  // survive an insert and show up correctly in Hive's partition listing.
  TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
  tableEnv.executeSql("create database db1");
  try {
    tableEnv.executeSql("create table db1.dest (x int) partitioned by (p string)");
    // Buffer both inserts into one StatementSet and run them as a single job.
    StatementSet inserts = tableEnv.createStatementSet();
    inserts.addInsertSql("insert into db1.dest select 1,' '");
    inserts.addInsertSql("insert into db1.dest select 2,'a \t'");
    inserts.execute().await();
    // Hive percent-encodes the tab character as %09 in partition names.
    String partitions = hiveShell.executeQuery("show partitions db1.dest").toString();
    assertEquals("[p= , p=a %09]", partitions);
  } finally {
    // Always clean up the test database, even if the assertions fail.
    tableEnv.executeSql("drop database db1 cascade");
  }
}
Use of org.apache.flink.table.api.StatementSet in project zeppelin by apache:
the method startMultipleInsert of class Flink112Shims.
@Override
public void startMultipleInsert(Object tblEnv, InterpreterContext context) throws Exception {
  // Remember a newly created StatementSet for this paragraph; later inserts
  // from the same paragraph are collected into it before execution.
  statementSetMap.put(
      context.getParagraphId(), ((TableEnvironment) tblEnv).createStatementSet());
}
Use of org.apache.flink.table.api.StatementSet in project flink by apache:
the method testGetJsonPlan of class StatementSetImplTest.
@Test
public void testGetJsonPlan() throws IOException {
  // Compile a simple INSERT plan to JSON and compare it against the checked-in
  // reference plan, after normalizing exec-node ids and the Flink version.
  String srcTableDdl = "CREATE TABLE MyTable (\n" + " a bigint,\n" + " b int,\n" + " c varchar\n" + ") with (\n" + " 'connector' = 'values',\n" + " 'bounded' = 'false')";
  tableEnv.executeSql(srcTableDdl);
  String sinkTableDdl = "CREATE TABLE MySink (\n" + " a bigint,\n" + " b int,\n" + " c varchar\n" + ") with (\n" + " 'connector' = 'values',\n" + " 'table-sink-class' = 'DEFAULT')";
  tableEnv.executeSql(sinkTableDdl);
  StatementSet statements = tableEnv.createStatementSet();
  statements.addInsertSql("INSERT INTO MySink SELECT * FROM MyTable");
  String jsonPlan = statements.compilePlan().asJsonString();
  String expected = TableTestUtil.readFromResource("/jsonplan/testGetJsonPlan.out");
  // Normalize both sides before comparing; the compiled plan additionally has
  // its Flink version placeholder replaced.
  String normalizedPlan =
      TableTestUtil.getFormattedJson(
          TableTestUtil.replaceExecNodeId(
              TableTestUtil.replaceFlinkVersion(TableTestUtil.getFormattedJson(jsonPlan))));
  String normalizedExpected =
      TableTestUtil.getFormattedJson(
          TableTestUtil.replaceExecNodeId(TableTestUtil.getFormattedJson(expected)));
  assertEquals(normalizedPlan, normalizedExpected);
}
Aggregations