
Example 1 with AlterTableSchemaOperation

Use of org.apache.flink.table.operations.ddl.AlterTableSchemaOperation in the apache/flink project.

From class HiveParserDDLSemanticAnalyzer, method convertAlterTableModifyCols:

private Operation convertAlterTableModifyCols(CatalogBaseTable alteredTable, String tblName, HiveParserASTNode ast, boolean replace) throws SemanticException {
    List<FieldSchema> newCols = HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) ast.getChild(0));
    boolean isCascade = false;
    if (null != ast.getFirstChildWithType(HiveASTParser.TOK_CASCADE)) {
        isCascade = true;
    }
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    // prepare properties
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    TableSchema oldSchema = oldTable.getSchema();
    final int numPartCol = oldTable.getPartitionKeys().size();
    TableSchema.Builder builder = TableSchema.builder();
    // add existing non-part col if we're not replacing
    if (!replace) {
        List<TableColumn> nonPartCols = oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
        for (TableColumn column : nonPartCols) {
            builder.add(column);
        }
        setWatermarkAndPK(builder, oldSchema);
    }
    // add new cols
    for (FieldSchema col : newCols) {
        builder.add(TableColumn.physical(col.getName(), HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(col.getType()))));
    }
    // add part cols
    List<TableColumn> partCols = oldSchema.getTableColumns().subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
    for (TableColumn column : partCols) {
        builder.add(column);
    }
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(builder.build(), oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
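
For context, a minimal, hedged sketch of the Hive-dialect DDL that reaches convertAlterTableModifyCols. The catalog name, database, conf directory, table, and column below are illustrative placeholders, not taken from the Flink sources above:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

public class AddColumnsSketch {
    public static void main(String[] args) {
        TableEnvironment tableEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inBatchMode().build());
        // Assumes a reachable Hive Metastore; the names and conf dir are placeholders.
        HiveCatalog hiveCatalog = new HiveCatalog("myhive", "default", "/opt/hive-conf");
        tableEnv.registerCatalog("myhive", hiveCatalog);
        tableEnv.useCatalog("myhive");
        // Hive-dialect ALTER TABLE ... ADD COLUMNS is analyzed by
        // HiveParserDDLSemanticAnalyzer, which builds the AlterTableSchemaOperation above;
        // CASCADE sets the ALTER_COL_CASCADE property.
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        tableEnv.executeSql("ALTER TABLE users ADD COLUMNS (age INT) CASCADE");
    }
}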

Example 2 with AlterTableSchemaOperation

Use of org.apache.flink.table.operations.ddl.AlterTableSchemaOperation in the apache/flink project.

From class TableEnvironmentImpl, method executeInternal:

@Override
public TableResultInternal executeInternal(Operation operation) {
    if (operation instanceof ModifyOperation) {
        return executeInternal(Collections.singletonList((ModifyOperation) operation));
    } else if (operation instanceof StatementSetOperation) {
        return executeInternal(((StatementSetOperation) operation).getOperations());
    } else if (operation instanceof CreateTableOperation) {
        CreateTableOperation createTableOperation = (CreateTableOperation) operation;
        if (createTableOperation.isTemporary()) {
            catalogManager.createTemporaryTable(createTableOperation.getCatalogTable(), createTableOperation.getTableIdentifier(), createTableOperation.isIgnoreIfExists());
        } else {
            catalogManager.createTable(createTableOperation.getCatalogTable(), createTableOperation.getTableIdentifier(), createTableOperation.isIgnoreIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof DropTableOperation) {
        DropTableOperation dropTableOperation = (DropTableOperation) operation;
        if (dropTableOperation.isTemporary()) {
            catalogManager.dropTemporaryTable(dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists());
        } else {
            catalogManager.dropTable(dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof AlterTableOperation) {
        AlterTableOperation alterTableOperation = (AlterTableOperation) operation;
        Catalog catalog = getCatalogOrThrowException(alterTableOperation.getTableIdentifier().getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterTableOperation.asSummaryString());
        try {
            if (alterTableOperation instanceof AlterTableRenameOperation) {
                AlterTableRenameOperation alterTableRenameOp = (AlterTableRenameOperation) operation;
                catalog.renameTable(alterTableRenameOp.getTableIdentifier().toObjectPath(), alterTableRenameOp.getNewTableIdentifier().getObjectName(), false);
            } else if (alterTableOperation instanceof AlterTableOptionsOperation) {
                AlterTableOptionsOperation alterTablePropertiesOp = (AlterTableOptionsOperation) operation;
                catalogManager.alterTable(alterTablePropertiesOp.getCatalogTable(), alterTablePropertiesOp.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterTableAddConstraintOperation) {
                AlterTableAddConstraintOperation addConstraintOP = (AlterTableAddConstraintOperation) operation;
                CatalogTable oriTable = catalogManager.getTable(addConstraintOP.getTableIdentifier()).get().getTable();
                TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriTable.getSchema());
                if (addConstraintOP.getConstraintName().isPresent()) {
                    builder.primaryKey(addConstraintOP.getConstraintName().get(), addConstraintOP.getColumnNames());
                } else {
                    builder.primaryKey(addConstraintOP.getColumnNames());
                }
                CatalogTable newTable = new CatalogTableImpl(builder.build(), oriTable.getPartitionKeys(), oriTable.getOptions(), oriTable.getComment());
                catalogManager.alterTable(newTable, addConstraintOP.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterTableDropConstraintOperation) {
                AlterTableDropConstraintOperation dropConstraintOperation = (AlterTableDropConstraintOperation) operation;
                CatalogTable oriTable = catalogManager.getTable(dropConstraintOperation.getTableIdentifier()).get().getTable();
                CatalogTable newTable = new CatalogTableImpl(TableSchemaUtils.dropConstraint(oriTable.getSchema(), dropConstraintOperation.getConstraintName()), oriTable.getPartitionKeys(), oriTable.getOptions(), oriTable.getComment());
                catalogManager.alterTable(newTable, dropConstraintOperation.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterPartitionPropertiesOperation) {
                AlterPartitionPropertiesOperation alterPartPropsOp = (AlterPartitionPropertiesOperation) operation;
                catalog.alterPartition(alterPartPropsOp.getTableIdentifier().toObjectPath(), alterPartPropsOp.getPartitionSpec(), alterPartPropsOp.getCatalogPartition(), false);
            } else if (alterTableOperation instanceof AlterTableSchemaOperation) {
                AlterTableSchemaOperation alterTableSchemaOperation = (AlterTableSchemaOperation) alterTableOperation;
                catalogManager.alterTable(alterTableSchemaOperation.getCatalogTable(), alterTableSchemaOperation.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AddPartitionsOperation) {
                AddPartitionsOperation addPartitionsOperation = (AddPartitionsOperation) alterTableOperation;
                List<CatalogPartitionSpec> specs = addPartitionsOperation.getPartitionSpecs();
                List<CatalogPartition> partitions = addPartitionsOperation.getCatalogPartitions();
                boolean ifNotExists = addPartitionsOperation.ifNotExists();
                ObjectPath tablePath = addPartitionsOperation.getTableIdentifier().toObjectPath();
                for (int i = 0; i < specs.size(); i++) {
                    catalog.createPartition(tablePath, specs.get(i), partitions.get(i), ifNotExists);
                }
            } else if (alterTableOperation instanceof DropPartitionsOperation) {
                DropPartitionsOperation dropPartitionsOperation = (DropPartitionsOperation) alterTableOperation;
                ObjectPath tablePath = dropPartitionsOperation.getTableIdentifier().toObjectPath();
                boolean ifExists = dropPartitionsOperation.ifExists();
                for (CatalogPartitionSpec spec : dropPartitionsOperation.getPartitionSpecs()) {
                    catalog.dropPartition(tablePath, spec, ifExists);
                }
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (TableAlreadyExistException | TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateViewOperation) {
        CreateViewOperation createViewOperation = (CreateViewOperation) operation;
        if (createViewOperation.isTemporary()) {
            catalogManager.createTemporaryTable(createViewOperation.getCatalogView(), createViewOperation.getViewIdentifier(), createViewOperation.isIgnoreIfExists());
        } else {
            catalogManager.createTable(createViewOperation.getCatalogView(), createViewOperation.getViewIdentifier(), createViewOperation.isIgnoreIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof DropViewOperation) {
        DropViewOperation dropViewOperation = (DropViewOperation) operation;
        if (dropViewOperation.isTemporary()) {
            catalogManager.dropTemporaryView(dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists());
        } else {
            catalogManager.dropView(dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof AlterViewOperation) {
        AlterViewOperation alterViewOperation = (AlterViewOperation) operation;
        Catalog catalog = getCatalogOrThrowException(alterViewOperation.getViewIdentifier().getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterViewOperation.asSummaryString());
        try {
            if (alterViewOperation instanceof AlterViewRenameOperation) {
                AlterViewRenameOperation alterTableRenameOp = (AlterViewRenameOperation) operation;
                catalog.renameTable(alterTableRenameOp.getViewIdentifier().toObjectPath(), alterTableRenameOp.getNewViewIdentifier().getObjectName(), false);
            } else if (alterViewOperation instanceof AlterViewPropertiesOperation) {
                AlterViewPropertiesOperation alterTablePropertiesOp = (AlterViewPropertiesOperation) operation;
                catalogManager.alterTable(alterTablePropertiesOp.getCatalogView(), alterTablePropertiesOp.getViewIdentifier(), false);
            } else if (alterViewOperation instanceof AlterViewAsOperation) {
                AlterViewAsOperation alterViewAsOperation = (AlterViewAsOperation) alterViewOperation;
                catalogManager.alterTable(alterViewAsOperation.getNewView(), alterViewAsOperation.getViewIdentifier(), false);
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (TableAlreadyExistException | TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateDatabaseOperation) {
        CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(createDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(createDatabaseOperation.asSummaryString());
        try {
            catalog.createDatabase(createDatabaseOperation.getDatabaseName(), createDatabaseOperation.getCatalogDatabase(), createDatabaseOperation.isIgnoreIfExists());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseAlreadyExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof DropDatabaseOperation) {
        DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(dropDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(dropDatabaseOperation.asSummaryString());
        try {
            catalog.dropDatabase(dropDatabaseOperation.getDatabaseName(), dropDatabaseOperation.isIfExists(), dropDatabaseOperation.isCascade());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseNotExistException | DatabaseNotEmptyException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof AlterDatabaseOperation) {
        AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(alterDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterDatabaseOperation.asSummaryString());
        try {
            catalog.alterDatabase(alterDatabaseOperation.getDatabaseName(), alterDatabaseOperation.getCatalogDatabase(), false);
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateCatalogFunctionOperation) {
        return createCatalogFunction((CreateCatalogFunctionOperation) operation);
    } else if (operation instanceof CreateTempSystemFunctionOperation) {
        return createSystemFunction((CreateTempSystemFunctionOperation) operation);
    } else if (operation instanceof DropCatalogFunctionOperation) {
        return dropCatalogFunction((DropCatalogFunctionOperation) operation);
    } else if (operation instanceof DropTempSystemFunctionOperation) {
        return dropSystemFunction((DropTempSystemFunctionOperation) operation);
    } else if (operation instanceof AlterCatalogFunctionOperation) {
        return alterCatalogFunction((AlterCatalogFunctionOperation) operation);
    } else if (operation instanceof CreateCatalogOperation) {
        return createCatalog((CreateCatalogOperation) operation);
    } else if (operation instanceof DropCatalogOperation) {
        DropCatalogOperation dropCatalogOperation = (DropCatalogOperation) operation;
        String exMsg = getDDLOpExecuteErrorMsg(dropCatalogOperation.asSummaryString());
        try {
            catalogManager.unregisterCatalog(dropCatalogOperation.getCatalogName(), dropCatalogOperation.isIfExists());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (CatalogException e) {
            throw new ValidationException(exMsg, e);
        }
    } else if (operation instanceof LoadModuleOperation) {
        return loadModule((LoadModuleOperation) operation);
    } else if (operation instanceof UnloadModuleOperation) {
        return unloadModule((UnloadModuleOperation) operation);
    } else if (operation instanceof UseModulesOperation) {
        return useModules((UseModulesOperation) operation);
    } else if (operation instanceof UseCatalogOperation) {
        UseCatalogOperation useCatalogOperation = (UseCatalogOperation) operation;
        catalogManager.setCurrentCatalog(useCatalogOperation.getCatalogName());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof UseDatabaseOperation) {
        UseDatabaseOperation useDatabaseOperation = (UseDatabaseOperation) operation;
        catalogManager.setCurrentCatalog(useDatabaseOperation.getCatalogName());
        catalogManager.setCurrentDatabase(useDatabaseOperation.getDatabaseName());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof ShowCatalogsOperation) {
        return buildShowResult("catalog name", listCatalogs());
    } else if (operation instanceof ShowCreateTableOperation) {
        ShowCreateTableOperation showCreateTableOperation = (ShowCreateTableOperation) operation;
        ContextResolvedTable table = catalogManager.getTable(showCreateTableOperation.getTableIdentifier()).orElseThrow(() -> new ValidationException(String.format("Could not execute SHOW CREATE TABLE. Table with identifier %s does not exist.", showCreateTableOperation.getTableIdentifier().asSerializableString())));
        return TableResultImpl.builder().resultKind(ResultKind.SUCCESS_WITH_CONTENT).schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING()))).data(Collections.singletonList(Row.of(ShowCreateUtil.buildShowCreateTableRow(table.getResolvedTable(), showCreateTableOperation.getTableIdentifier(), table.isTemporary())))).build();
    } else if (operation instanceof ShowCreateViewOperation) {
        ShowCreateViewOperation showCreateViewOperation = (ShowCreateViewOperation) operation;
        final ContextResolvedTable table = catalogManager.getTable(showCreateViewOperation.getViewIdentifier()).orElseThrow(() -> new ValidationException(String.format("Could not execute SHOW CREATE VIEW. View with identifier %s does not exist.", showCreateViewOperation.getViewIdentifier().asSerializableString())));
        return TableResultImpl.builder().resultKind(ResultKind.SUCCESS_WITH_CONTENT).schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING()))).data(Collections.singletonList(Row.of(ShowCreateUtil.buildShowCreateViewRow(table.getResolvedTable(), showCreateViewOperation.getViewIdentifier(), table.isTemporary())))).build();
    } else if (operation instanceof ShowCurrentCatalogOperation) {
        return buildShowResult("current catalog name", new String[] { catalogManager.getCurrentCatalog() });
    } else if (operation instanceof ShowDatabasesOperation) {
        return buildShowResult("database name", listDatabases());
    } else if (operation instanceof ShowCurrentDatabaseOperation) {
        return buildShowResult("current database name", new String[] { catalogManager.getCurrentDatabase() });
    } else if (operation instanceof ShowModulesOperation) {
        ShowModulesOperation showModulesOperation = (ShowModulesOperation) operation;
        if (showModulesOperation.requireFull()) {
            return buildShowFullModulesResult(listFullModules());
        } else {
            return buildShowResult("module name", listModules());
        }
    } else if (operation instanceof ShowTablesOperation) {
        return buildShowResult("table name", listTables());
    } else if (operation instanceof ShowFunctionsOperation) {
        ShowFunctionsOperation showFunctionsOperation = (ShowFunctionsOperation) operation;
        String[] functionNames = null;
        ShowFunctionsOperation.FunctionScope functionScope = showFunctionsOperation.getFunctionScope();
        switch(functionScope) {
            case USER:
                functionNames = listUserDefinedFunctions();
                break;
            case ALL:
                functionNames = listFunctions();
                break;
            default:
                throw new UnsupportedOperationException(String.format("SHOW FUNCTIONS with %s scope is not supported.", functionScope));
        }
        return buildShowResult("function name", functionNames);
    } else if (operation instanceof ShowViewsOperation) {
        return buildShowResult("view name", listViews());
    } else if (operation instanceof ShowColumnsOperation) {
        ShowColumnsOperation showColumnsOperation = (ShowColumnsOperation) operation;
        Optional<ContextResolvedTable> result = catalogManager.getTable(showColumnsOperation.getTableIdentifier());
        if (result.isPresent()) {
            return buildShowColumnsResult(result.get().getResolvedSchema(), showColumnsOperation);
        } else {
            throw new ValidationException(String.format("Tables or views with the identifier '%s' doesn't exist.", showColumnsOperation.getTableIdentifier().asSummaryString()));
        }
    } else if (operation instanceof ShowPartitionsOperation) {
        String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
        try {
            ShowPartitionsOperation showPartitionsOperation = (ShowPartitionsOperation) operation;
            Catalog catalog = getCatalogOrThrowException(showPartitionsOperation.getTableIdentifier().getCatalogName());
            ObjectPath tablePath = showPartitionsOperation.getTableIdentifier().toObjectPath();
            CatalogPartitionSpec partitionSpec = showPartitionsOperation.getPartitionSpec();
            List<CatalogPartitionSpec> partitionSpecs = partitionSpec == null ? catalog.listPartitions(tablePath) : catalog.listPartitions(tablePath, partitionSpec);
            List<String> partitionNames = new ArrayList<>(partitionSpecs.size());
            for (CatalogPartitionSpec spec : partitionSpecs) {
                List<String> partitionKVs = new ArrayList<>(spec.getPartitionSpec().size());
                for (Map.Entry<String, String> partitionKV : spec.getPartitionSpec().entrySet()) {
                    partitionKVs.add(partitionKV.getKey() + "=" + partitionKV.getValue());
                }
                partitionNames.add(String.join("/", partitionKVs));
            }
            return buildShowResult("partition name", partitionNames.toArray(new String[0]));
        } catch (TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof ExplainOperation) {
        ExplainOperation explainOperation = (ExplainOperation) operation;
        ExplainDetail[] explainDetails = explainOperation.getExplainDetails().stream().map(ExplainDetail::valueOf).toArray(ExplainDetail[]::new);
        Operation child = ((ExplainOperation) operation).getChild();
        List<Operation> operations;
        if (child instanceof StatementSetOperation) {
            operations = new ArrayList<>(((StatementSetOperation) child).getOperations());
        } else {
            operations = Collections.singletonList(child);
        }
        String explanation = explainInternal(operations, explainDetails);
        return TableResultImpl.builder().resultKind(ResultKind.SUCCESS_WITH_CONTENT).schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING()))).data(Collections.singletonList(Row.of(explanation))).build();
    } else if (operation instanceof DescribeTableOperation) {
        DescribeTableOperation describeTableOperation = (DescribeTableOperation) operation;
        Optional<ContextResolvedTable> result = catalogManager.getTable(describeTableOperation.getSqlIdentifier());
        if (result.isPresent()) {
            return buildDescribeResult(result.get().getResolvedSchema());
        } else {
            throw new ValidationException(String.format("Tables or views with the identifier '%s' doesn't exist", describeTableOperation.getSqlIdentifier().asSummaryString()));
        }
    } else if (operation instanceof QueryOperation) {
        return executeQueryOperation((QueryOperation) operation);
    } else if (operation instanceof CreateTableASOperation) {
        CreateTableASOperation createTableASOperation = (CreateTableASOperation) operation;
        executeInternal(createTableASOperation.getCreateTableOperation());
        return executeInternal(createTableASOperation.toSinkModifyOperation(catalogManager));
    } else if (operation instanceof ExecutePlanOperation) {
        ExecutePlanOperation executePlanOperation = (ExecutePlanOperation) operation;
        return (TableResultInternal) executePlan(PlanReference.fromFile(executePlanOperation.getFilePath()));
    } else if (operation instanceof CompilePlanOperation) {
        CompilePlanOperation compilePlanOperation = (CompilePlanOperation) operation;
        compilePlanAndWrite(compilePlanOperation.getFilePath(), compilePlanOperation.isIfNotExists(), compilePlanOperation.getOperation());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof CompileAndExecutePlanOperation) {
        CompileAndExecutePlanOperation compileAndExecutePlanOperation = (CompileAndExecutePlanOperation) operation;
        CompiledPlan compiledPlan = compilePlanAndWrite(compileAndExecutePlanOperation.getFilePath(), true, compileAndExecutePlanOperation.getOperation());
        return (TableResultInternal) executePlan(compiledPlan);
    } else if (operation instanceof NopOperation) {
        return TableResultImpl.TABLE_RESULT_OK;
    } else {
        throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
    }
}
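
To see this dispatch in action, a minimal sketch: TableEnvironment.executeSql parses each statement into a single Operation and hands it to executeInternal, which routes it to one of the branches above. The table name and connector options are illustrative assumptions:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class ExecuteInternalSketch {
    public static void main(String[] args) {
        TableEnvironment tableEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
        // Parsed into a CreateTableOperation: takes the CreateTableOperation branch above.
        tableEnv.executeSql(
                "CREATE TABLE orders (id BIGINT, amount DOUBLE) WITH ('connector' = 'datagen')");
        // Parsed into an AlterTableOptionsOperation inside the AlterTableOperation branch.
        tableEnv.executeSql("ALTER TABLE orders SET ('rows-per-second' = '10')");
        // Parsed into a ShowTablesOperation: returns a result built by buildShowResult.
        tableEnv.executeSql("SHOW TABLES").print();
    }
}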

Example 3 with AlterTableSchemaOperation

Use of org.apache.flink.table.operations.ddl.AlterTableSchemaOperation in the apache/flink project.

From class OperationConverterUtils, method convertChangeColumn:

public static Operation convertChangeColumn(ObjectIdentifier tableIdentifier, SqlChangeColumn changeColumn, CatalogTable catalogTable, SqlValidator sqlValidator) {
    String oldName = changeColumn.getOldName().getSimple();
    if (catalogTable.getPartitionKeys().indexOf(oldName) >= 0) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = catalogTable.getSchema();
    boolean first = changeColumn.isFirst();
    String after = changeColumn.getAfter() == null ? null : changeColumn.getAfter().getSimple();
    TableColumn newTableColumn = toTableColumn(changeColumn.getNewColumn(), sqlValidator);
    TableSchema newSchema = changeColumn(oldSchema, oldName, newTableColumn, first, after);
    Map<String, String> newProperties = new HashMap<>(catalogTable.getOptions());
    newProperties.putAll(extractProperties(changeColumn.getProperties()));
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(newSchema, catalogTable.getPartitionKeys(), newProperties, catalogTable.getComment()));
// TODO: handle watermark and constraints
}
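
A hedged sketch of the statement shape this converter handles; the table and column names are placeholders, and the TableEnvironment is assumed to point at a Hive catalog as in the sketch under Example 1:

import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class ChangeColumnSketch {
    // Assumes tableEnv is configured against a Hive catalog, as in the first sketch.
    static void renameAndMove(TableEnvironment tableEnv) {
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        // Renames column `a` to `a1`, changes its type, and moves it after `b`;
        // FIRST/AFTER map to the first/after arguments of changeColumn above.
        tableEnv.executeSql("ALTER TABLE users CHANGE COLUMN a a1 BIGINT COMMENT 'new id' AFTER b");
    }
}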

Example 4 with AlterTableSchemaOperation

Use of org.apache.flink.table.operations.ddl.AlterTableSchemaOperation in the apache/flink project.

From class HiveParserDDLSemanticAnalyzer, method convertAlterTableChangeCol:

private Operation convertAlterTableChangeCol(CatalogBaseTable alteredTable, String[] qualified, HiveParserASTNode ast) throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name]
    // [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType = HiveParserBaseSemanticAnalyzer.getTypeStringFromAST((HiveParserASTNode) ast.getChild(2));
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveASTParser.StringLiteral:
                newComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getText());
                break;
            case HiveASTParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveASTParser.KW_FIRST:
                first = true;
                break;
            case HiveASTParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveASTParser.TOK_RESTRICT:
                break;
            default:
                throw new ValidationException("Unsupported token: " + child.getToken() + " for alter table");
        }
    }
    // Validate the operation of renaming a column name.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new ValidationException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    String oldName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(oldColName);
    String newName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(newColName);
    if (oldTable.getPartitionKeys().contains(oldName)) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = oldTable.getSchema();
    TableColumn newTableColumn = TableColumn.physical(newName, HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(newType)));
    TableSchema newSchema = OperationConverterUtils.changeColumn(oldSchema, oldName, newTableColumn, first, flagCol);
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(newSchema, oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
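
For illustration, Hive-dialect forms that exercise the token branches above (StringLiteral, KW_FIRST, TOK_CASCADE); the names are placeholders and tableEnv is assumed to be configured for the Hive dialect as in the first sketch:

import org.apache.flink.table.api.TableEnvironment;

public class HiveChangeColumnSketch {
    static void changeColumns(TableEnvironment tableEnv) {
        // KW_FIRST sets first = true, so the changed column moves to the front.
        tableEnv.executeSql("ALTER TABLE users CHANGE COLUMN name full_name STRING FIRST");
        // StringLiteral carries the comment; TOK_CASCADE sets the ALTER_COL_CASCADE property.
        tableEnv.executeSql("ALTER TABLE users CHANGE COLUMN age age INT COMMENT 'age in years' CASCADE");
    }
}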

Example 5 with AlterTableSchemaOperation

Use of org.apache.flink.table.operations.ddl.AlterTableSchemaOperation in the apache/flink project.

From class OperationConverterUtils, method convertAddReplaceColumns:

public static Operation convertAddReplaceColumns(ObjectIdentifier tableIdentifier, SqlAddReplaceColumns addReplaceColumns, CatalogTable catalogTable, SqlValidator sqlValidator) {
    // This is only used by the Hive dialect at the moment. In Hive, only
    // non-partition columns can be added/replaced, and users will only define
    // non-partition columns in the new column list. Therefore, we require that
    // partition columns appear last in the schema (which is in line with Hive).
    // Otherwise, we won't be able to determine the column positions after the
    // non-partition columns are replaced.
    TableSchema oldSchema = catalogTable.getSchema();
    int numPartCol = catalogTable.getPartitionKeys().size();
    Set<String> lastCols = oldSchema.getTableColumns().subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount()).stream().map(TableColumn::getName).collect(Collectors.toSet());
    if (!lastCols.equals(new HashSet<>(catalogTable.getPartitionKeys()))) {
        throw new ValidationException("ADD/REPLACE COLUMNS on partitioned tables requires partition columns to appear last");
    }
    // set non-partition columns
    TableSchema.Builder builder = TableSchema.builder();
    if (!addReplaceColumns.isReplace()) {
        List<TableColumn> nonPartCols = oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
        for (TableColumn column : nonPartCols) {
            builder.add(column);
        }
        setWatermarkAndPK(builder, catalogTable.getSchema());
    }
    for (SqlNode sqlNode : addReplaceColumns.getNewColumns()) {
        builder.add(toTableColumn((SqlTableColumn) sqlNode, sqlValidator));
    }
    // set partition columns
    List<TableColumn> partCols = oldSchema.getTableColumns().subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
    for (TableColumn column : partCols) {
        builder.add(column);
    }
    // set properties
    Map<String, String> newProperties = new HashMap<>(catalogTable.getOptions());
    newProperties.putAll(extractProperties(addReplaceColumns.getProperties()));
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(builder.build(), catalogTable.getPartitionKeys(), newProperties, catalogTable.getComment()));
}
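
Finally, a hedged sketch of ADD/REPLACE COLUMNS statements that reach this converter; as the comment in the source notes, any partition columns must appear last in the schema. The names are placeholders and tableEnv is assumed to be Hive-dialect as above:

import org.apache.flink.table.api.TableEnvironment;

public class AddReplaceColumnsSketch {
    static void addAndReplace(TableEnvironment tableEnv) {
        // isReplace() == false: existing non-partition columns are kept, new ones appended.
        tableEnv.executeSql("ALTER TABLE users ADD COLUMNS (email STRING)");
        // isReplace() == true: the non-partition columns are replaced by the new list.
        tableEnv.executeSql("ALTER TABLE users REPLACE COLUMNS (id BIGINT, email STRING)");
    }
}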
