Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project:
method testUseCatalog of class SqlToOperationConverterTest.
@Test
public void testUseCatalog() {
    // USE CATALOG must convert to a UseCatalogOperation carrying the catalog name.
    Operation operation = parse("USE CATALOG cat1", SqlDialect.DEFAULT);

    assertThat(operation).isInstanceOf(UseCatalogOperation.class);
    UseCatalogOperation useCatalogOperation = (UseCatalogOperation) operation;
    assertThat(useCatalogOperation.getCatalogName()).isEqualTo("cat1");
    assertThat(useCatalogOperation.asSummaryString()).isEqualTo("USE CATALOG cat1");
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project:
method testCreateTableLikeWithFullPath of class SqlToOperationConverterTest.
@Test
public void testCreateTableLikeWithFullPath() {
    // Register a source table with connector/format options under a fully qualified path.
    Map<String, String> sourceProperties = new HashMap<>();
    sourceProperties.put("connector.type", "kafka");
    sourceProperties.put("format.type", "json");
    Schema sourceSchema =
            Schema.newBuilder()
                    .column("f0", DataTypes.INT().notNull())
                    .column("f1", DataTypes.TIMESTAMP(3))
                    .build();
    CatalogTable catalogTable =
            CatalogTable.of(sourceSchema, null, Collections.emptyList(), sourceProperties);
    ObjectIdentifier sourceIdentifier =
            ObjectIdentifier.of("builtin", "default", "sourceTable");
    catalogManager.createTable(catalogTable, sourceIdentifier, false);

    // CREATE TABLE ... LIKE must inherit both the schema and the options of the source.
    Operation operation =
            parseAndConvert("create table mytable like `builtin`.`default`.sourceTable");

    Schema expectedSchema =
            Schema.newBuilder()
                    .column("f0", DataTypes.INT().notNull())
                    .column("f1", DataTypes.TIMESTAMP(3))
                    .build();
    assertThat(operation)
            .is(
                    new HamcrestCondition<>(
                            isCreateTableOperation(
                                    withSchema(expectedSchema),
                                    withOptions(
                                            entry("connector.type", "kafka"),
                                            entry("format.type", "json")))));
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project:
method testCreateTableWithPrimaryKey of class SqlToOperationConverterTest.
@Test
public void testCreateTableWithPrimaryKey() {
    // A NOT ENFORCED primary key constraint should be reflected in the derived schema,
    // and the key columns (a, b) should become non-nullable.
    final String sql =
            "CREATE TABLE tbl1 (\n"
                    + " a bigint,\n"
                    + " b varchar, \n"
                    + " c int, \n"
                    + " d varchar, \n"
                    + " constraint ct1 primary key(a, b) not enforced\n"
                    + ") with (\n"
                    + " 'connector' = 'kafka', \n"
                    + " 'kafka.topic' = 'log.test'\n"
                    + ")\n";
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);

    Operation operation = parse(sql, planner, parser);
    assertThat(operation).isInstanceOf(CreateTableOperation.class);

    CatalogTable catalogTable = ((CreateTableOperation) operation).getCatalogTable();
    TableSchema schema = catalogTable.getSchema();
    // orElse provides a sentinel so a missing constraint fails the equality check loudly.
    String constraintSummary =
            schema.getPrimaryKey().map(UniqueConstraint::asSummaryString).orElse("fakeVal");
    assertThat(constraintSummary).isEqualTo("CONSTRAINT ct1 PRIMARY KEY (a, b)");
    assertThat(schema.getFieldNames()).isEqualTo(new String[] {"a", "b", "c", "d"});
    assertThat(schema.getFieldDataTypes())
            .isEqualTo(
                    new DataType[] {
                        DataTypes.BIGINT().notNull(),
                        DataTypes.STRING().notNull(),
                        DataTypes.INT(),
                        DataTypes.STRING()
                    });
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project:
method testUseMultipleModules of class SqlToOperationConverterTest.
@Test
public void testUseMultipleModules() {
    // USE MODULES with several names must preserve the declared order (x, y, z).
    Operation operation = parse("USE MODULES x, y, z", SqlDialect.DEFAULT);

    assertThat(operation).isInstanceOf(UseModulesOperation.class);
    UseModulesOperation useModulesOperation = (UseModulesOperation) operation;
    assertThat(useModulesOperation.getModuleNames()).isEqualTo(Arrays.asList("x", "y", "z"));
    assertThat(useModulesOperation.asSummaryString()).isEqualTo("USE MODULES: [x, y, z]");
}
Example usage of org.apache.flink.table.operations.Operation in the Apache Flink project:
method testShowJars of class SqlToOperationConverterTest.
@Test
public void testShowJars() {
    // SHOW JARS must convert to a ShowJarsOperation with a matching summary string.
    final String sql = "SHOW JARS";
    Operation operation = parse(sql, SqlDialect.DEFAULT);
    assertThat(operation).isInstanceOf(ShowJarsOperation.class);
    // Renamed from the copy-pasted "showModulesOperation": the variable holds a
    // ShowJarsOperation, and the misleading name suggested the wrong operation type.
    final ShowJarsOperation showJarsOperation = (ShowJarsOperation) operation;
    assertThat(showJarsOperation.asSummaryString()).isEqualTo("SHOW JARS");
}
Aggregations