Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project: class SqlToOperationConverterTest, method testSqlInsertWithStaticPartition.
@Test
public void testSqlInsertWithStaticPartition() {
    // An INSERT with a static partition spec must convert to a
    // SinkModifyOperation that carries the declared partition values.
    final String insertSql = "insert into t1 partition(a=1) select b, c, d from t2";
    final FlinkPlannerImpl flinkPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser calciteParser = getParserBySqlDialect(SqlDialect.DEFAULT);
    final Operation converted = parse(insertSql, flinkPlanner, calciteParser);
    assertThat(converted).isInstanceOf(SinkModifyOperation.class);
    final SinkModifyOperation sinkModify = (SinkModifyOperation) converted;
    // partition(a=1) should surface as the single static partition a -> 1
    assertThat(sinkModify.getStaticPartitions())
            .isEqualTo(Collections.singletonMap("a", "1"));
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project: class SqlToOperationConverterTest, method testSqlExecuteWithStatementSet.
@Test
public void testSqlExecuteWithStatementSet() {
    // EXECUTE STATEMENT SET wrapping two INSERTs should be converted into a
    // single StatementSetOperation rather than individual modify operations.
    final String statementSetSql =
            "execute statement set begin "
                    + "insert into t1 select a, b, c, d from t2 where a > 1;"
                    + "insert into t1 select a, b, c, d from t2 where a > 2;"
                    + "end";
    final FlinkPlannerImpl flinkPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser calciteParser = getParserBySqlDialect(SqlDialect.DEFAULT);
    final Operation converted = parse(statementSetSql, flinkPlanner, calciteParser);
    assertThat(converted).isInstanceOf(StatementSetOperation.class);
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project: class SqlToOperationConverterTest, method testAlterTable.
@Test
public void testAlterTable() throws Exception {
    prepareNonManagedTable(false);
    final ObjectIdentifier sourceIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
    final ObjectIdentifier targetIdentifier = ObjectIdentifier.of("cat1", "db1", "tb2");
    // RENAME: fully and partially qualified forms must all resolve to the
    // same (source, target) identifier pair.
    for (String renameSql :
            new String[] {
                "alter table cat1.db1.tb1 rename to tb2",
                "alter table db1.tb1 rename to tb2",
                "alter table tb1 rename to cat1.db1.tb2"
            }) {
        final Operation renameOp = parse(renameSql, SqlDialect.DEFAULT);
        assertThat(renameOp).isInstanceOf(AlterTableRenameOperation.class);
        final AlterTableRenameOperation rename = (AlterTableRenameOperation) renameOp;
        assertThat(rename.getTableIdentifier()).isEqualTo(sourceIdentifier);
        assertThat(rename.getNewTableIdentifier()).isEqualTo(targetIdentifier);
    }
    // SET: new options are merged onto the table's existing options,
    // preserving key case ('K2' stays upper-case).
    final Operation setOp =
            parse("alter table cat1.db1.tb1 set ('k1' = 'v1', 'K2' = 'V2')", SqlDialect.DEFAULT);
    final Map<String, String> mergedOptions = new HashMap<>();
    mergedOptions.put("connector", "dummy");
    mergedOptions.put("k", "v");
    mergedOptions.put("k1", "v1");
    mergedOptions.put("K2", "V2");
    assertAlterTableOptions(setOp, sourceIdentifier, mergedOptions);
    // RESET: removes the given key, but must refuse to drop 'connector'
    // and must reject an empty key list.
    final Operation resetOp = parse("alter table cat1.db1.tb1 reset ('k')", SqlDialect.DEFAULT);
    assertAlterTableOptions(
            resetOp, sourceIdentifier, Collections.singletonMap("connector", "dummy"));
    assertThatThrownBy(
                    () -> parse("alter table cat1.db1.tb1 reset ('connector')", SqlDialect.DEFAULT))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining("ALTER TABLE RESET does not support changing 'connector'");
    assertThatThrownBy(() -> parse("alter table cat1.db1.tb1 reset ()", SqlDialect.DEFAULT))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining("ALTER TABLE RESET does not support empty key");
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project: class SqlToOperationConverterTest, method testSqlRichExplainWithInsert.
@Test
public void testSqlRichExplainWithInsert() {
    // EXPLAIN PLAN FOR <insert> should convert to an ExplainOperation,
    // not to the underlying SinkModifyOperation.
    final String explainSql = "explain plan for insert into t1 select a, b, c, d from t2";
    final FlinkPlannerImpl flinkPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser calciteParser = getParserBySqlDialect(SqlDialect.DEFAULT);
    final Operation converted = parse(explainSql, flinkPlanner, calciteParser);
    assertThat(converted).isInstanceOf(ExplainOperation.class);
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project: class SqlToOperationConverterTest, method testCreateTableWithWatermark.
@Test
public void testCreateTableWithWatermark()
        throws FunctionAlreadyExistException, DatabaseNotExistException {
    // Register a catalog UDF so the watermark expression resolves, then verify
    // the CREATE TABLE converts with the watermark serialized into properties.
    final CatalogFunction watermarkUdf =
            new CatalogFunctionImpl(JavaUserDefinedScalarFunctions.JavaFunc5.class.getName());
    catalog.createFunction(ObjectPath.fromString("default.myfunc"), watermarkUdf, true);
    final String createSql =
            "create table source_table(\n"
                    + "  a int,\n"
                    + "  b bigint,\n"
                    + "  c timestamp(3),\n"
                    + "  watermark for `c` as myfunc(c, 1) - interval '5' second\n"
                    + ") with (\n"
                    + "  'connector.type' = 'kafka')\n";
    final FlinkPlannerImpl flinkPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser calciteParser = getParserBySqlDialect(SqlDialect.DEFAULT);
    final SqlNode parsedNode = calciteParser.parse(createSql);
    assertThat(parsedNode).isInstanceOf(SqlCreateTable.class);
    final Operation converted =
            SqlToOperationConverter.convert(flinkPlanner, catalogManager, parsedNode).get();
    assertThat(converted).isInstanceOf(CreateTableOperation.class);
    final CreateTableOperation createOp = (CreateTableOperation) converted;
    final Map<String, String> actualProperties = createOp.getCatalogTable().toProperties();
    // Expected serialized schema, watermark strategy, and connector options.
    final Map<String, String> expectedProperties = new HashMap<>();
    for (String[] entry :
            new String[][] {
                {"schema.0.name", "a"},
                {"schema.0.data-type", "INT"},
                {"schema.1.name", "b"},
                {"schema.1.data-type", "BIGINT"},
                {"schema.2.name", "c"},
                {"schema.2.data-type", "TIMESTAMP(3)"},
                {"schema.watermark.0.rowtime", "c"},
                {
                    "schema.watermark.0.strategy.expr",
                    "`builtin`.`default`.`myfunc`(`c`, 1) - INTERVAL '5' SECOND"
                },
                {"schema.watermark.0.strategy.data-type", "TIMESTAMP(3)"},
                {"connector.type", "kafka"}
            }) {
        expectedProperties.put(entry[0], entry[1]);
    }
    assertThat(actualProperties).isEqualTo(expectedProperties);
}
Aggregations