Example usage of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in the Apache Flink project: the testCreateTableWithWatermark method of the SqlToOperationConverterTest class.
/**
 * Verifies that a CREATE TABLE statement with a WATERMARK clause referencing a
 * catalog UDF is converted into a {@link CreateTableOperation} whose table
 * properties carry the expanded (fully qualified) watermark expression.
 */
@Test
public void testCreateTableWithWatermark() throws FunctionAlreadyExistException, DatabaseNotExistException {
    // Register the scalar UDF used inside the watermark expression.
    // ignoreIfExists=true keeps the setup idempotent across test runs.
    CatalogFunction cf = new CatalogFunctionImpl(JavaUserDefinedScalarFunctions.JavaFunc5.class.getName());
    catalog.createFunction(ObjectPath.fromString("default.myfunc"), cf, true);
    final String sql = "create table source_table(\n" + " a int,\n" + " b bigint,\n" + " c timestamp(3),\n" + " watermark for `c` as myfunc(c, 1) - interval '5' second\n" + ") with (\n" + " 'connector.type' = 'kafka')\n";
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    // Fail with a descriptive message instead of a bare NoSuchElementException
    // if the converter yields no operation for the statement.
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node)
            .orElseThrow(() -> new AssertionError("could not convert CREATE TABLE statement to an Operation"));
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    Map<String, String> properties = catalogTable.toProperties();
    Map<String, String> expected = new HashMap<>();
    expected.put("schema.0.name", "a");
    expected.put("schema.0.data-type", "INT");
    expected.put("schema.1.name", "b");
    expected.put("schema.1.data-type", "BIGINT");
    expected.put("schema.2.name", "c");
    expected.put("schema.2.data-type", "TIMESTAMP(3)");
    expected.put("schema.watermark.0.rowtime", "c");
    // The converter is expected to fully qualify the UDF reference.
    expected.put("schema.watermark.0.strategy.expr", "`builtin`.`default`.`myfunc`(`c`, 1) - INTERVAL '5' SECOND");
    expected.put("schema.watermark.0.strategy.data-type", "TIMESTAMP(3)");
    expected.put("connector.type", "kafka");
    assertThat(properties).isEqualTo(expected);
}
Example usage of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in the Apache Flink project: the testSqlExecuteWithInsert method of the SqlToOperationConverterTest class.
/** An EXECUTE-wrapped INSERT statement should convert into a {@link SinkModifyOperation}. */
@Test
public void testSqlExecuteWithInsert() {
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    final String sql = "execute insert into t1 select a, b, c, d from t2 where a > 1";
    final Operation converted = parse(sql, planner, parser);
    assertThat(converted).isInstanceOf(SinkModifyOperation.class);
}
Example usage of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in the Apache Flink project: the parse helper method of the SqlToOperationConverterTest class.
/**
 * Parses the given SQL statement with the parser and planner of the specified
 * dialect and converts the resulting {@link SqlNode} into an {@link Operation}.
 *
 * @param sql the SQL statement to parse
 * @param sqlDialect the dialect used to look up the planner and parser
 * @return the converted operation
 * @throws AssertionError if the converter produces no operation for the statement
 */
private Operation parse(String sql, SqlDialect sqlDialect) {
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(sqlDialect);
    final CalciteParser parser = getParserBySqlDialect(sqlDialect);
    final SqlNode node = parser.parse(sql);
    // orElseThrow with a message pinpoints the failing statement in test output,
    // instead of an anonymous NoSuchElementException from Optional.get().
    return SqlToOperationConverter.convert(planner, catalogManager, node)
            .orElseThrow(() -> new AssertionError("could not convert to an Operation: " + sql));
}
Example usage of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in the Apache Flink project: the testCreateTable method of the SqlToOperationConverterTest class.
/**
 * A plain CREATE TABLE statement should convert into a {@link CreateTableOperation}
 * carrying the declared partition keys, field names, and field data types.
 */
@Test
public void testCreateTable() {
    final String sql = "CREATE TABLE tbl1 (\n" + " a bigint,\n" + " b varchar, \n" + " c int, \n" + " d varchar" + ")\n" + " PARTITIONED BY (a, d)\n" + " with (\n" + " 'connector' = 'kafka', \n" + " 'kafka.topic' = 'log.test'\n" + ")\n";
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    final Operation converted = parse(sql, planner, parser);
    assertThat(converted).isInstanceOf(CreateTableOperation.class);
    final CatalogTable table = ((CreateTableOperation) converted).getCatalogTable();
    assertThat(table.getPartitionKeys()).hasSameElementsAs(Arrays.asList("a", "d"));
    // Expected schema as declared in the DDL above.
    final String[] expectedNames = { "a", "b", "c", "d" };
    final DataType[] expectedTypes = { DataTypes.BIGINT(), DataTypes.VARCHAR(Integer.MAX_VALUE), DataTypes.INT(), DataTypes.VARCHAR(Integer.MAX_VALUE) };
    assertThat(table.getSchema().getFieldNames()).isEqualTo(expectedNames);
    assertThat(table.getSchema().getFieldDataTypes()).isEqualTo(expectedTypes);
}
Example usage of org.apache.flink.table.planner.calcite.FlinkPlannerImpl in the Apache Flink project: the testSqlRichExplainWithSelect method of the SqlToOperationConverterTest class.
/** An EXPLAIN PLAN FOR SELECT statement should convert into an {@link ExplainOperation}. */
@Test
public void testSqlRichExplainWithSelect() {
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    final String sql = "explain plan for select a, b, c, d from t2";
    final Operation converted = parse(sql, planner, parser);
    assertThat(converted).isInstanceOf(ExplainOperation.class);
}
Aggregations