Use of org.apache.flink.table.api.CompiledPlan in project flink by apache.
The class TableEnvironmentImpl, method compilePlanAndWrite.
private CompiledPlan compilePlanAndWrite(
        String filePath, boolean ifNotExists, Operation operation) {
    File file = Paths.get(filePath).toFile();
    if (file.exists()) {
        // IF NOT EXISTS semantics: reuse the plan that is already on disk.
        if (ifNotExists) {
            return loadPlan(PlanReference.fromFile(filePath));
        }
        // Refuse to overwrite an existing plan file unless force-recompile is enabled.
        if (!tableConfig.getConfiguration().get(TableConfigOptions.PLAN_FORCE_RECOMPILE)) {
            throw new TableException(
                    String.format(
                            "Cannot overwrite the plan file '%s'. "
                                    + "Either manually remove the file or, "
                                    + "if you're debugging your job, "
                                    + "set the option '%s' to true.",
                            filePath, TableConfigOptions.PLAN_FORCE_RECOMPILE.key()));
        }
    }
    // Compile either a whole statement set or a single INSERT-like operation.
    CompiledPlan compiledPlan;
    if (operation instanceof StatementSetOperation) {
        compiledPlan = compilePlan(((StatementSetOperation) operation).getOperations());
    } else if (operation instanceof ModifyOperation) {
        compiledPlan = compilePlan(Collections.singletonList((ModifyOperation) operation));
    } else {
        throw new TableException(
                "Unsupported operation to compile: "
                        + operation.getClass()
                        + ". This is a bug, please file an issue.");
    }
    compiledPlan.writeToFile(file, false);
    return compiledPlan;
}
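For orientation, here is a minimal sketch of the public round trip that funnels into this method. The table names and the file path are hypothetical; the calls themselves (compilePlanSql, writeToFile, loadPlan, executePlan) are the ones used throughout the snippets on this page, with executePlan exposed by the internal table environment the tests use.

import java.io.File;

import org.apache.flink.table.api.CompiledPlan;
import org.apache.flink.table.api.PlanReference;

// A minimal sketch, assuming tableEnv is already set up with a (hypothetical)
// source table Orders and sink table OrdersSink.
CompiledPlan plan =
        tableEnv.compilePlanSql("INSERT INTO OrdersSink SELECT * FROM Orders");

// Persist the plan as JSON; the second argument is ignoreIfExists.
plan.writeToFile(new File("/tmp/orders-plan.json"), true);

// Later, e.g. after restarting the job, restore the persisted plan and run it.
CompiledPlan restored =
        tableEnv.loadPlan(PlanReference.fromFile("/tmp/orders-plan.json"));
tableEnv.executePlan(restored).await();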
Use of org.apache.flink.table.api.CompiledPlan in project flink by apache.
The class TableCsvFormatITCase, method testReadingMetadata.
@Test
public void testReadingMetadata() throws Exception {
    createTestValuesSourceTable(
            "MyTable",
            JavaScalaConversionUtil.toJava(TestData.smallData3()),
            new String[] {"a int", "b bigint", "m varchar metadata"},
            new HashMap<String, String>() {
                {
                    put("readable-metadata", "m:STRING");
                }
            });
    File sinkPath = createSinkTable("MySink", "a bigint", "m varchar");
    CompiledPlan compiledPlan =
            tableEnv.compilePlanSql("insert into MySink select a, m from MyTable");
    tableEnv.executePlan(compiledPlan).await();
    assertResult(Arrays.asList("1,Hi", "2,Hello", "3,Hello world"), sinkPath);
}
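For readers unfamiliar with the test harness, createTestValuesSourceTable corresponds roughly to DDL against the testing values connector, along the lines of the sketch below; the column list and option spellings follow the test above, and any further options the helper sets are omitted.

// A sketch of the DDL the helper roughly expands to. The column m is a
// metadata column that the values connector serves because of the
// 'readable-metadata' option.
tableEnv.executeSql(
        "CREATE TABLE MyTable (\n"
                + "  a INT,\n"
                + "  b BIGINT,\n"
                + "  m VARCHAR METADATA\n"
                + ") WITH (\n"
                + "  'connector' = 'values',\n"
                + "  'readable-metadata' = 'm:STRING'\n"
                + ")");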
Use of org.apache.flink.table.api.CompiledPlan in project flink by apache.
The class TableCsvFormatITCase, method testPushDowns.
@Test
public void testPushDowns() throws Exception {
    createTestValuesSourceTable(
            "MyTable",
            JavaScalaConversionUtil.toJava(TestData.data3WithTimestamp()),
            new String[] {
                "a int",
                "b bigint",
                "c varchar",
                "ts timestamp(3)",
                "watermark for ts as ts - interval '5' second"
            },
            "b",
            new HashMap<String, String>() {
                {
                    put("readable-metadata", "a:INT");
                    put("filterable-fields", "a");
                    put("enable-watermark-push-down", "true");
                    put("partition-list", "b:1;b:2;b:3;b:4;b:5;b:6");
                }
            });
    File sinkPath = createSinkTable("MySink", "a int", "ts timestamp(3)");
    CompiledPlan compiledPlan =
            tableEnv.compilePlanSql(
                    "insert into MySink select a, ts from MyTable where b = 3 and a > 4");
    tableEnv.executePlan(compiledPlan).await();
    assertResult(
            Arrays.asList("5," + formatSqlTimestamp(5000L), "6," + formatSqlTimestamp(6000L)),
            sinkPath);
}
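With these options, the predicate b = 3 should be consumed by partition pruning (b:3 appears in partition-list) and a > 4 by filter push-down (a is listed in filterable-fields). One way to eyeball this is sketched below: CompiledPlan is Explainable, so the optimized plan can be printed as text. The expectation about remaining filters is an assumption, not something the test asserts.

// A sketch: print the optimized plan to check which predicates were pushed
// into the scan; with the options above, neither b = 3 nor a > 4 should
// survive as a post-scan filter (an assumption, not asserted by the test).
System.out.println(compiledPlan.explain());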
Use of org.apache.flink.table.api.CompiledPlan in project flink by apache.
The class TableCsvFormatITCase, method testPartitionPushDown.
@Test
public void testPartitionPushDown() throws Exception {
    createTestValuesSourceTable(
            "MyTable",
            JavaScalaConversionUtil.toJava(TestData.smallData3()),
            new String[] {"a int", "p bigint", "c varchar"},
            "p",
            new HashMap<String, String>() {
                {
                    put("partition-list", "p:1;p:2");
                }
            });
    File sinkPath = createSinkTable("MySink", "a int", "p bigint", "c varchar");
    CompiledPlan compiledPlan =
            tableEnv.compilePlanSql("insert into MySink select * from MyTable where p = 2");
    tableEnv.executePlan(compiledPlan).await();
    assertResult(Arrays.asList("2,2,Hello", "3,2,Hello world"), sinkPath);
}
Use of org.apache.flink.table.api.CompiledPlan in project flink by apache.
The class JsonPlanTestBase, method compileSqlAndExecutePlan.
protected TableResult compileSqlAndExecutePlan(String sql) {
    CompiledPlan compiledPlan = tableEnv.compilePlanSql(sql);
    checkTransformationUids(compiledPlan);
    return tableEnv.executePlan(compiledPlan);
}
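A typical call site in a subclass would look like the sketch below; the statement and the table names are hypothetical.

// Hypothetical usage from a JSON-plan test: compile the statement, verify that
// every transformation carries a UID, then execute and wait for completion.
compileSqlAndExecutePlan("insert into MySink select a, b from MyTable").await();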