Usage example of org.apache.flink.table.operations.Operation in the Apache Flink project: the convertCreateView method of the SqlToOperationConverter class.
/**
 * Converts a CREATE VIEW statement into a {@link CreateViewOperation}.
 *
 * @param sqlCreateView the parsed CREATE VIEW node
 * @return the catalog operation that registers the view
 */
private Operation convertCreateView(SqlCreateView sqlCreateView) {
    final SqlNode viewQuery = sqlCreateView.getQuery();
    final SqlNodeList viewFields = sqlCreateView.getFieldList();
    // Qualify the (possibly partial) view path against the current catalog/database.
    ObjectIdentifier viewIdentifier =
            catalogManager.qualifyIdentifier(
                    UnresolvedIdentifier.of(sqlCreateView.fullViewName()));
    // The comment clause is optional; absence is represented as null on the catalog view.
    String viewComment =
            sqlCreateView.getComment().map(c -> c.getNlsString().getValue()).orElse(null);
    Map<String, String> viewOptions =
            OperationConverterUtils.extractProperties(sqlCreateView.getProperties().orElse(null));
    CatalogView catalogView =
            convertViewQuery(viewQuery, viewFields.getList(), viewOptions, viewComment);
    return new CreateViewOperation(
            viewIdentifier,
            catalogView,
            sqlCreateView.isIfNotExists(),
            sqlCreateView.isTemporary());
}
Usage example of org.apache.flink.table.operations.Operation in the Apache Flink project: the convertRichExplain method of the SqlToOperationConverter class.
/**
 * Converts a RICH EXPLAIN statement into an {@link ExplainOperation}.
 *
 * @param sqlExplain the parsed EXPLAIN node
 * @return an explain operation wrapping the converted target statement
 * @throws ValidationException if the explained statement kind is not supported
 */
private Operation convertRichExplain(SqlRichExplain sqlExplain) {
    final SqlNode target = sqlExplain.getStatement();
    final Operation explained;
    // Dispatch on the statement being explained: INSERT, STATEMENT SET, or a plain query.
    if (target instanceof RichSqlInsert) {
        explained = convertSqlInsert((RichSqlInsert) target);
    } else if (target instanceof SqlStatementSet) {
        explained = convertSqlStatementSet((SqlStatementSet) target);
    } else if (target.getKind().belongsTo(SqlKind.QUERY)) {
        explained = convertSqlQuery(target);
    } else {
        throw new ValidationException(
                String.format("EXPLAIN statement doesn't support %s", target.getKind()));
    }
    return new ExplainOperation(explained, sqlExplain.getExplainDetails());
}
Usage example of org.apache.flink.table.operations.Operation in the Apache Flink project: the testCreateTableWithMetadataColumn method of the SqlToOperationConverterTest class.
@Test
public void testCreateTableWithMetadataColumn() {
    // DDL covering all three metadata-column flavors: plain, aliased key, and VIRTUAL.
    final String ddl =
            "CREATE TABLE tbl1 (\n"
                    + " a INT,\n"
                    + " b STRING,\n"
                    + " c INT METADATA,\n"
                    + " d INT METADATA FROM 'other.key',\n"
                    + " e INT METADATA VIRTUAL\n"
                    + ")\n"
                    + " WITH (\n"
                    + " 'connector' = 'kafka',\n"
                    + " 'kafka.topic' = 'log.test'\n"
                    + ")\n";
    final FlinkPlannerImpl defaultPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final Operation converted = parse(ddl, defaultPlanner, getParserBySqlDialect(SqlDialect.DEFAULT));
    assertThat(converted).isInstanceOf(CreateTableOperation.class);
    final CreateTableOperation createTableOp = (CreateTableOperation) converted;
    // Verify physical and metadata columns round-trip into the catalog table schema.
    final TableSchema expectedSchema =
            TableSchema.builder()
                    .add(TableColumn.physical("a", DataTypes.INT()))
                    .add(TableColumn.physical("b", DataTypes.STRING()))
                    .add(TableColumn.metadata("c", DataTypes.INT()))
                    .add(TableColumn.metadata("d", DataTypes.INT(), "other.key"))
                    .add(TableColumn.metadata("e", DataTypes.INT(), true))
                    .build();
    assertThat(createTableOp.getCatalogTable().getSchema()).isEqualTo(expectedSchema);
}
Usage example of org.apache.flink.table.operations.Operation in the Apache Flink project: the testCreateTableWithMinusInOptionKey method of the SqlToOperationConverterTest class.
@Test
public void testCreateTableWithMinusInOptionKey() {
    // Option keys containing '-' and '*' must survive parsing and conversion unchanged.
    final String sql =
            "create table source_table(\n"
                    + " a int,\n"
                    + " b bigint,\n"
                    + " c varchar\n"
                    + ") with (\n"
                    + " 'a-B-c-d124' = 'Ab',\n"
                    + " 'a.b-c-d.e-f.g' = 'ada',\n"
                    + " 'a.b-c-d.e-f1231.g' = 'ada',\n"
                    + " 'a.b-c-d.*' = 'adad')\n";
    final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode node = parser.parse(sql);
    assertThat(node).isInstanceOf(SqlCreateTable.class);
    Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    // TreeMap sorts keys lexicographically for a deterministic toString(); the
    // previous identity stream/collect copy of getOptions() was redundant.
    Map<String, String> sortedProperties = new TreeMap<>(catalogTable.getOptions());
    final String expected =
            "{a-B-c-d124=Ab, "
                    + "a.b-c-d.*=adad, "
                    + "a.b-c-d.e-f.g=ada, "
                    + "a.b-c-d.e-f1231.g=ada}";
    assertThat(sortedProperties.toString()).isEqualTo(expected);
}
Usage example of org.apache.flink.table.operations.Operation in the Apache Flink project: the testUseOneModule method of the SqlToOperationConverterTest class.
@Test
public void testUseOneModule() {
    // A single-module USE MODULES statement should produce a UseModulesOperation
    // listing exactly that module.
    final String statement = "USE MODULES dummy";
    final Operation converted = parse(statement, SqlDialect.DEFAULT);
    assertThat(converted).isInstanceOf(UseModulesOperation.class);
    final UseModulesOperation useModulesOp = (UseModulesOperation) converted;
    assertThat(useModulesOp.getModuleNames()).isEqualTo(Collections.singletonList("dummy"));
    assertThat(useModulesOp.asSummaryString()).isEqualTo("USE MODULES: [dummy]");
}
End of aggregated usage examples for org.apache.flink.table.operations.Operation.