Example 26 with Operation

Use of org.apache.flink.table.operations.Operation in the Apache Flink project.

From the class SqlToOperationConverterTest, the method testCreateTableWithComputedColumn:

@Test
public void testCreateTableWithComputedColumn() {
    final String sql =
            "CREATE TABLE tbl1 (\n"
                    + "  a int,\n"
                    + "  b varchar, \n"
                    + "  c as a - 1, \n"
                    + "  d as b || '$$', \n"
                    + "  e as my_udf1(a),"
                    + "  f as `default`.my_udf2(a) + 1,"
                    + "  g as builtin.`default`.my_udf3(a) || '##'\n"
                    + ")\n"
                    + "  with (\n"
                    + "    'connector' = 'kafka', \n"
                    + "    'kafka.topic' = 'log.test'\n"
                    + ")\n";
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf1"), Func0$.MODULE$);
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf2"), Func1$.MODULE$);
    functionCatalog.registerTempCatalogScalarFunction(ObjectIdentifier.of("builtin", "default", "my_udf3"), Func8$.MODULE$);
    FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT));
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    CreateTableOperation op = (CreateTableOperation) operation;
    CatalogTable catalogTable = op.getCatalogTable();
    assertThat(catalogTable.getSchema().getFieldNames()).isEqualTo(new String[] { "a", "b", "c", "d", "e", "f", "g" });
    assertThat(catalogTable.getSchema().getFieldDataTypes()).isEqualTo(new DataType[] { DataTypes.INT(), DataTypes.STRING(), DataTypes.INT(), DataTypes.STRING(), DataTypes.INT().notNull(), DataTypes.INT(), DataTypes.STRING() });
    String[] columnExpressions = catalogTable.getSchema().getTableColumns().stream().filter(ComputedColumn.class::isInstance).map(ComputedColumn.class::cast).map(ComputedColumn::getExpression).toArray(String[]::new);
    String[] expected = new String[] { "`a` - 1", "`b` || '$$'", "`builtin`.`default`.`my_udf1`(`a`)", "`builtin`.`default`.`my_udf2`(`a`) + 1", "`builtin`.`default`.`my_udf3`(`a`) || '##'" };
    assertThat(columnExpressions).isEqualTo(expected);
}
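
The same computed-column DDL can also be exercised through the public TableEnvironment API instead of the planner-internal converter. The following is a minimal sketch, not the test's own setup: MyUdf is a hypothetical scalar function standing in for the Func0/Func1/Func8 helpers, and the 'datagen' connector replaces Kafka so the snippet has no external dependencies.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;

public class ComputedColumnSketch {

    // Hypothetical UDF standing in for the test's Scala helper functions.
    public static class MyUdf extends ScalarFunction {
        public Integer eval(Integer a) {
            return a == null ? null : a + 1;
        }
    }

    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
        // Register the function under the name used by the computed-column expression.
        tEnv.createTemporarySystemFunction("my_udf1", MyUdf.class);
        // The DDL only registers the table in the catalog; no connector is instantiated yet.
        tEnv.executeSql(
                "CREATE TABLE tbl1 (\n"
                        + "  a INT,\n"
                        + "  b STRING,\n"
                        + "  c AS a - 1,\n"
                        + "  d AS b || '$$',\n"
                        + "  e AS my_udf1(a)\n"
                        + ") WITH (\n"
                        + "  'connector' = 'datagen'\n"
                        + ")");
        // Printing the schema shows the computed columns together with their inferred types.
        tEnv.from("tbl1").printSchema();
    }
}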

Example 27 with Operation

Use of org.apache.flink.table.operations.Operation in the Apache Flink project.

From the class ParserImpl, the method parse:

/**
 * When parsing a statement, the {@link ExtendedParser} is tried first. If the {@link
 * ExtendedParser} cannot handle the statement, the {@link CalciteParser} is used instead.
 *
 * @param statement the input statement
 * @return the parsed operations
 */
@Override
public List<Operation> parse(String statement) {
    CalciteParser parser = calciteParserSupplier.get();
    FlinkPlannerImpl planner = validatorSupplier.get();
    Optional<Operation> command = EXTENDED_PARSER.parse(statement);
    if (command.isPresent()) {
        return Collections.singletonList(command.get());
    }
    // Parse the SQL statement.
    // parseSqlList is used here because the SQL client needs to support statements ending with ';'.
    SqlNodeList sqlNodeList = parser.parseSqlList(statement);
    List<SqlNode> parsed = sqlNodeList.getList();
    Preconditions.checkArgument(parsed.size() == 1, "only single statement supported");
    return Collections.singletonList(SqlToOperationConverter.convert(planner, catalogManager, parsed.get(0)).orElseThrow(() -> new TableException("Unsupported query: " + statement)));
}
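
A hedged sketch of how a caller could consume this API: obtain an org.apache.flink.table.delegation.Parser (for example the ParserImpl above, already wired up by the planner), parse a single statement, and branch on the concrete Operation subtype. Only the dispatch is shown; the parser setup is assumed.

import java.util.List;

import org.apache.flink.table.delegation.Parser;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.operations.ddl.CreateTableOperation;

public final class ParserDispatchSketch {

    // 'parser' is assumed to be an already configured Parser instance (e.g. the ParserImpl above).
    static String describe(Parser parser, String statement) {
        List<Operation> operations = parser.parse(statement);
        // ParserImpl returns exactly one operation per statement (see the Preconditions check above).
        Operation operation = operations.get(0);
        if (operation instanceof CreateTableOperation) {
            return "create table: " + ((CreateTableOperation) operation).getTableIdentifier();
        } else if (operation instanceof QueryOperation) {
            return "query: " + operation.asSummaryString();
        }
        return "other: " + operation.asSummaryString();
    }

    private ParserDispatchSketch() {}
}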

Example 28 with Operation

Use of org.apache.flink.table.operations.Operation in the Apache Flink project.

From the class HiveParserDDLSemanticAnalyzer, the method convertAlterTable:

private Operation convertAlterTable(HiveParserASTNode input) throws SemanticException {
    Operation operation = null;
    HiveParserASTNode ast = (HiveParserASTNode) input.getChild(1);
    String[] qualified = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) input.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    HashMap<String, String> partSpec = null;
    HiveParserASTNode partSpecNode = (HiveParserASTNode) input.getChild(2);
    if (partSpecNode != null) {
        partSpec = getPartSpec(partSpecNode);
    }
    CatalogBaseTable alteredTable = getAlteredTable(tableName, false);
    switch(ast.getType()) {
        case HiveASTParser.TOK_ALTERTABLE_RENAME:
            operation = convertAlterTableRename(tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDCOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_REPLACECOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_RENAMECOL:
            operation = convertAlterTableChangeCol(alteredTable, qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDPARTS:
            operation = convertAlterTableAddParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPARTS:
            operation = convertAlterTableDropParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_PROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_UPDATESTATS:
            operation = convertAlterTableProps(alteredTable, tableName, partSpec, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_FILEFORMAT:
            operation = convertAlterTableFileFormat(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_LOCATION:
            operation = convertAlterTableLocation(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERIALIZER:
            operation = convertAlterTableSerde(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERDEPROPERTIES:
            operation = convertAlterTableSerdeProps(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_TOUCH:
        case HiveASTParser.TOK_ALTERTABLE_ARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_UNARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_PARTCOLTYPE:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED:
        case HiveASTParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
        case HiveASTParser.TOK_ALTERTABLE_MERGEFILES:
        case HiveASTParser.TOK_ALTERTABLE_RENAMEPART:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED_LOCATION:
        case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
        case HiveASTParser.TOK_ALTERTABLE_CLUSTER_SORT:
        case HiveASTParser.TOK_ALTERTABLE_COMPACT:
        case HiveASTParser.TOK_ALTERTABLE_UPDATECOLSTATS:
        case HiveASTParser.TOK_ALTERTABLE_DROPCONSTRAINT:
        case HiveASTParser.TOK_ALTERTABLE_ADDCONSTRAINT:
            handleUnsupportedOperation(ast);
            break;
        default:
            throw new ValidationException("Unknown AST node for ALTER TABLE: " + ast);
    }
    return operation;
}
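
The handled branches correspond to ordinary Hive ALTER TABLE syntax. The constants below are an illustration only (table, column, and partition names are invented) of statements that would reach the converted cases; the remaining tokens, such as TOK_ALTERTABLE_TOUCH, end up in handleUnsupportedOperation instead.

// Hypothetical statements showing which convertAlterTable branch each would hit.
final class AlterTableSamples {
    static final String RENAME       = "ALTER TABLE t1 RENAME TO t2";                      // TOK_ALTERTABLE_RENAME
    static final String ADD_COLS     = "ALTER TABLE t1 ADD COLUMNS (c INT)";               // TOK_ALTERTABLE_ADDCOLS
    static final String REPLACE_COLS = "ALTER TABLE t1 REPLACE COLUMNS (c INT, d STRING)"; // TOK_ALTERTABLE_REPLACECOLS
    static final String CHANGE_COL   = "ALTER TABLE t1 CHANGE COLUMN c c2 BIGINT";         // TOK_ALTERTABLE_RENAMECOL
    static final String ADD_PARTS    = "ALTER TABLE t1 ADD PARTITION (dt='2022-01-01')";   // TOK_ALTERTABLE_ADDPARTS
    static final String DROP_PARTS   = "ALTER TABLE t1 DROP PARTITION (dt='2022-01-01')";  // TOK_ALTERTABLE_DROPPARTS
    static final String SET_PROPS    = "ALTER TABLE t1 SET TBLPROPERTIES ('k'='v')";       // TOK_ALTERTABLE_PROPERTIES
    static final String UNSET_PROPS  = "ALTER TABLE t1 UNSET TBLPROPERTIES ('k')";         // TOK_ALTERTABLE_DROPPROPERTIES
    static final String FILE_FORMAT  = "ALTER TABLE t1 SET FILEFORMAT ORC";                // TOK_ALTERTABLE_FILEFORMAT
    static final String LOCATION     = "ALTER TABLE t1 SET LOCATION '/new/path'";          // TOK_ALTERTABLE_LOCATION
    static final String SERDE        = "ALTER TABLE t1 SET SERDE 'org.example.MySerDe'";   // TOK_ALTERTABLE_SERIALIZER
    static final String SERDE_PROPS  = "ALTER TABLE t1 SET SERDEPROPERTIES ('k'='v')";     // TOK_ALTERTABLE_SERDEPROPERTIES
}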

Example 29 with Operation

Use of org.apache.flink.table.operations.Operation in the Apache Flink project.

From the class HiveParserDDLSemanticAnalyzer, the method convertAlterView:

private Operation convertAlterView(HiveParserASTNode ast) throws SemanticException {
    Operation operation = null;
    String[] qualified = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    CatalogBaseTable alteredTable = getAlteredTable(tableName, true);
    if (ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        // alter view as
        operation = convertCreateView(ast);
    } else {
        ast = (HiveParserASTNode) ast.getChild(1);
        switch(ast.getType()) {
            case HiveASTParser.TOK_ALTERVIEW_PROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, false);
                break;
            case HiveASTParser.TOK_ALTERVIEW_DROPPROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_RENAME:
                operation = convertAlterTableRename(tableName, ast, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_ADDPARTS:
            case HiveASTParser.TOK_ALTERVIEW_DROPPARTS:
                handleUnsupportedOperation("ADD/DROP PARTITION for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for ALTER VIEW: " + ast);
        }
    }
    return operation;
}
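
As above, a hedged illustration (view names invented) of Hive ALTER VIEW statements and the branch each would take:

// Hypothetical statements showing which convertAlterView branch each would hit.
final class AlterViewSamples {
    static final String ALTER_AS    = "ALTER VIEW v1 AS SELECT * FROM t1";         // TOK_QUERY child -> convertCreateView
    static final String SET_PROPS   = "ALTER VIEW v1 SET TBLPROPERTIES ('k'='v')"; // TOK_ALTERVIEW_PROPERTIES
    static final String UNSET_PROPS = "ALTER VIEW v1 UNSET TBLPROPERTIES ('k')";   // TOK_ALTERVIEW_DROPPROPERTIES
    static final String RENAME      = "ALTER VIEW v1 RENAME TO v2";                // TOK_ALTERVIEW_RENAME
}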

Example 30 with Operation

Use of org.apache.flink.table.operations.Operation in the Apache Zeppelin project.

From the class Flink113Shims, the method parseBySqlParser:

private SqlCommandCall parseBySqlParser(Parser sqlParser, String stmt) throws Exception {
    List<Operation> operations;
    try {
        operations = sqlParser.parse(stmt);
    } catch (Throwable e) {
        throw new Exception("Invalid SQL statement.", e);
    }
    if (operations.size() != 1) {
        throw new Exception("Only single statement is supported now.");
    }
    final SqlCommand cmd;
    String[] operands = new String[] { stmt };
    Operation operation = operations.get(0);
    if (operation instanceof CatalogSinkModifyOperation) {
        boolean overwrite = ((CatalogSinkModifyOperation) operation).isOverwrite();
        cmd = overwrite ? SqlCommand.INSERT_OVERWRITE : SqlCommand.INSERT_INTO;
    } else if (operation instanceof CreateTableOperation) {
        cmd = SqlCommand.CREATE_TABLE;
    } else if (operation instanceof DropTableOperation) {
        cmd = SqlCommand.DROP_TABLE;
    } else if (operation instanceof AlterTableOperation) {
        cmd = SqlCommand.ALTER_TABLE;
    } else if (operation instanceof CreateViewOperation) {
        cmd = SqlCommand.CREATE_VIEW;
    } else if (operation instanceof DropViewOperation) {
        cmd = SqlCommand.DROP_VIEW;
    } else if (operation instanceof CreateDatabaseOperation) {
        cmd = SqlCommand.CREATE_DATABASE;
    } else if (operation instanceof DropDatabaseOperation) {
        cmd = SqlCommand.DROP_DATABASE;
    } else if (operation instanceof AlterDatabaseOperation) {
        cmd = SqlCommand.ALTER_DATABASE;
    } else if (operation instanceof CreateCatalogOperation) {
        cmd = SqlCommand.CREATE_CATALOG;
    } else if (operation instanceof DropCatalogOperation) {
        cmd = SqlCommand.DROP_CATALOG;
    } else if (operation instanceof UseCatalogOperation) {
        cmd = SqlCommand.USE_CATALOG;
        operands = new String[] { ((UseCatalogOperation) operation).getCatalogName() };
    } else if (operation instanceof UseDatabaseOperation) {
        cmd = SqlCommand.USE;
        operands = new String[] { ((UseDatabaseOperation) operation).getDatabaseName() };
    } else if (operation instanceof ShowCatalogsOperation) {
        cmd = SqlCommand.SHOW_CATALOGS;
        operands = new String[0];
    } else if (operation instanceof ShowDatabasesOperation) {
        cmd = SqlCommand.SHOW_DATABASES;
        operands = new String[0];
    } else if (operation instanceof ShowTablesOperation) {
        cmd = SqlCommand.SHOW_TABLES;
        operands = new String[0];
    } else if (operation instanceof ShowFunctionsOperation) {
        cmd = SqlCommand.SHOW_FUNCTIONS;
        operands = new String[0];
    } else if (operation instanceof CreateCatalogFunctionOperation || operation instanceof CreateTempSystemFunctionOperation) {
        cmd = SqlCommand.CREATE_FUNCTION;
    } else if (operation instanceof DropCatalogFunctionOperation || operation instanceof DropTempSystemFunctionOperation) {
        cmd = SqlCommand.DROP_FUNCTION;
    } else if (operation instanceof AlterCatalogFunctionOperation) {
        cmd = SqlCommand.ALTER_FUNCTION;
    } else if (operation instanceof ExplainOperation) {
        cmd = SqlCommand.EXPLAIN;
    } else if (operation instanceof DescribeTableOperation) {
        cmd = SqlCommand.DESCRIBE;
        operands = new String[] { ((DescribeTableOperation) operation).getSqlIdentifier().asSerializableString() };
    } else if (operation instanceof QueryOperation) {
        cmd = SqlCommand.SELECT;
    } else {
        throw new Exception("Unknown operation: " + operation.asSummaryString());
    }
    return new SqlCommandCall(cmd, operands, stmt);
}
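
A hedged usage sketch that could sit next to parseBySqlParser inside Flink113Shims: the table names src and sink are invented, and sqlParser is assumed to be a configured org.apache.flink.table.delegation.Parser. It only illustrates which SqlCommand the instanceof chain above produces for a few inputs.

private void sqlCommandMappingExamples(Parser sqlParser) throws Exception {
    // INSERT INTO -> CatalogSinkModifyOperation with isOverwrite() == false -> SqlCommand.INSERT_INTO
    SqlCommandCall insertInto = parseBySqlParser(sqlParser, "INSERT INTO sink SELECT * FROM src");

    // INSERT OVERWRITE -> CatalogSinkModifyOperation with isOverwrite() == true -> SqlCommand.INSERT_OVERWRITE
    SqlCommandCall insertOverwrite = parseBySqlParser(sqlParser, "INSERT OVERWRITE sink SELECT * FROM src");

    // A plain query -> QueryOperation -> SqlCommand.SELECT
    SqlCommandCall select = parseBySqlParser(sqlParser, "SELECT * FROM src");
}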

Aggregations

Operation (org.apache.flink.table.operations.Operation): 58 usages
QueryOperation (org.apache.flink.table.operations.QueryOperation): 54 usages
ExplainOperation (org.apache.flink.table.operations.ExplainOperation): 51 usages
ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation): 51 usages
UseDatabaseOperation (org.apache.flink.table.operations.UseDatabaseOperation): 51 usages
AlterDatabaseOperation (org.apache.flink.table.operations.ddl.AlterDatabaseOperation): 51 usages
CreateDatabaseOperation (org.apache.flink.table.operations.ddl.CreateDatabaseOperation): 51 usages
CreateViewOperation (org.apache.flink.table.operations.ddl.CreateViewOperation): 51 usages
DropDatabaseOperation (org.apache.flink.table.operations.ddl.DropDatabaseOperation): 51 usages
UseCatalogOperation (org.apache.flink.table.operations.UseCatalogOperation): 49 usages
AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation): 48 usages
AlterTableRenameOperation (org.apache.flink.table.operations.ddl.AlterTableRenameOperation): 48 usages
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation): 48 usages
LoadModuleOperation (org.apache.flink.table.operations.LoadModuleOperation): 47 usages
SinkModifyOperation (org.apache.flink.table.operations.SinkModifyOperation): 47 usages
StatementSetOperation (org.apache.flink.table.operations.StatementSetOperation): 47 usages
UnloadModuleOperation (org.apache.flink.table.operations.UnloadModuleOperation): 47 usages
ShowModulesOperation (org.apache.flink.table.operations.ShowModulesOperation): 46 usages
SourceQueryOperation (org.apache.flink.table.operations.SourceQueryOperation): 46 usages
UseModulesOperation (org.apache.flink.table.operations.UseModulesOperation): 46 usages