Example 1 with AlterDatabaseOperation

Use of org.apache.flink.table.operations.ddl.AlterDatabaseOperation in the Apache Flink project.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseOwner:

private Operation convertAlterDatabaseOwner(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    PrincipalDesc principalDesc = HiveParserAuthorizationParseUtils.getPrincipalDesc((HiveParserASTNode) ast.getChild(1));
    // The syntax should not allow these fields to be null, but let's verify
    String nullCmdMsg = "can't be null in alter database set owner command";
    if (principalDesc.getName() == null) {
        throw new ValidationException("Owner name " + nullCmdMsg);
    }
    if (principalDesc.getType() == null) {
        throw new ValidationException("Owner type " + nullCmdMsg);
    }
    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_OWNER.name());
    props.put(DATABASE_OWNER_NAME, principalDesc.getName());
    props.put(DATABASE_OWNER_TYPE, principalDesc.getType().name().toLowerCase());
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) PrincipalDesc(org.apache.hadoop.hive.ql.plan.PrincipalDesc) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)
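
A minimal usage sketch of the path above, assuming the Hive connector is on the classpath; the catalog name, hive-conf-dir, database name, and owner are all illustrative. Switching the table environment to the Hive dialect routes the statement through HiveParserDDLSemanticAnalyzer, which builds the AlterDatabaseOperation shown here.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class AlterDatabaseOwnerSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Register a HiveCatalog; the catalog name and hive-conf-dir are placeholders.
        tEnv.executeSql("CREATE CATALOG myhive WITH ('type' = 'hive', 'hive-conf-dir' = '/opt/hive-conf')");
        tEnv.useCatalog("myhive");
        // Route parsing through the Hive parser so the Hive-specific OWNER clause is accepted.
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        // Converted into an AlterDatabaseOperation whose properties carry the owner name and type.
        tEnv.executeSql("ALTER DATABASE mydb SET OWNER USER alice");
    }
}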

Example 2 with AlterDatabaseOperation

Use of org.apache.flink.table.operations.ddl.AlterDatabaseOperation in the Apache Flink project.

From the class TableEnvironmentImpl, method executeInternal:

@Override
public TableResultInternal executeInternal(Operation operation) {
    if (operation instanceof ModifyOperation) {
        return executeInternal(Collections.singletonList((ModifyOperation) operation));
    } else if (operation instanceof StatementSetOperation) {
        return executeInternal(((StatementSetOperation) operation).getOperations());
    } else if (operation instanceof CreateTableOperation) {
        CreateTableOperation createTableOperation = (CreateTableOperation) operation;
        if (createTableOperation.isTemporary()) {
            catalogManager.createTemporaryTable(createTableOperation.getCatalogTable(), createTableOperation.getTableIdentifier(), createTableOperation.isIgnoreIfExists());
        } else {
            catalogManager.createTable(createTableOperation.getCatalogTable(), createTableOperation.getTableIdentifier(), createTableOperation.isIgnoreIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof DropTableOperation) {
        DropTableOperation dropTableOperation = (DropTableOperation) operation;
        if (dropTableOperation.isTemporary()) {
            catalogManager.dropTemporaryTable(dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists());
        } else {
            catalogManager.dropTable(dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof AlterTableOperation) {
        AlterTableOperation alterTableOperation = (AlterTableOperation) operation;
        Catalog catalog = getCatalogOrThrowException(alterTableOperation.getTableIdentifier().getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterTableOperation.asSummaryString());
        try {
            if (alterTableOperation instanceof AlterTableRenameOperation) {
                AlterTableRenameOperation alterTableRenameOp = (AlterTableRenameOperation) operation;
                catalog.renameTable(alterTableRenameOp.getTableIdentifier().toObjectPath(), alterTableRenameOp.getNewTableIdentifier().getObjectName(), false);
            } else if (alterTableOperation instanceof AlterTableOptionsOperation) {
                AlterTableOptionsOperation alterTablePropertiesOp = (AlterTableOptionsOperation) operation;
                catalogManager.alterTable(alterTablePropertiesOp.getCatalogTable(), alterTablePropertiesOp.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterTableAddConstraintOperation) {
                AlterTableAddConstraintOperation addConstraintOP = (AlterTableAddConstraintOperation) operation;
                CatalogTable oriTable = catalogManager.getTable(addConstraintOP.getTableIdentifier()).get().getTable();
                TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriTable.getSchema());
                if (addConstraintOP.getConstraintName().isPresent()) {
                    builder.primaryKey(addConstraintOP.getConstraintName().get(), addConstraintOP.getColumnNames());
                } else {
                    builder.primaryKey(addConstraintOP.getColumnNames());
                }
                CatalogTable newTable = new CatalogTableImpl(builder.build(), oriTable.getPartitionKeys(), oriTable.getOptions(), oriTable.getComment());
                catalogManager.alterTable(newTable, addConstraintOP.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterTableDropConstraintOperation) {
                AlterTableDropConstraintOperation dropConstraintOperation = (AlterTableDropConstraintOperation) operation;
                CatalogTable oriTable = catalogManager.getTable(dropConstraintOperation.getTableIdentifier()).get().getTable();
                CatalogTable newTable = new CatalogTableImpl(TableSchemaUtils.dropConstraint(oriTable.getSchema(), dropConstraintOperation.getConstraintName()), oriTable.getPartitionKeys(), oriTable.getOptions(), oriTable.getComment());
                catalogManager.alterTable(newTable, dropConstraintOperation.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterPartitionPropertiesOperation) {
                AlterPartitionPropertiesOperation alterPartPropsOp = (AlterPartitionPropertiesOperation) operation;
                catalog.alterPartition(alterPartPropsOp.getTableIdentifier().toObjectPath(), alterPartPropsOp.getPartitionSpec(), alterPartPropsOp.getCatalogPartition(), false);
            } else if (alterTableOperation instanceof AlterTableSchemaOperation) {
                AlterTableSchemaOperation alterTableSchemaOperation = (AlterTableSchemaOperation) alterTableOperation;
                catalogManager.alterTable(alterTableSchemaOperation.getCatalogTable(), alterTableSchemaOperation.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AddPartitionsOperation) {
                AddPartitionsOperation addPartitionsOperation = (AddPartitionsOperation) alterTableOperation;
                List<CatalogPartitionSpec> specs = addPartitionsOperation.getPartitionSpecs();
                List<CatalogPartition> partitions = addPartitionsOperation.getCatalogPartitions();
                boolean ifNotExists = addPartitionsOperation.ifNotExists();
                ObjectPath tablePath = addPartitionsOperation.getTableIdentifier().toObjectPath();
                for (int i = 0; i < specs.size(); i++) {
                    catalog.createPartition(tablePath, specs.get(i), partitions.get(i), ifNotExists);
                }
            } else if (alterTableOperation instanceof DropPartitionsOperation) {
                DropPartitionsOperation dropPartitionsOperation = (DropPartitionsOperation) alterTableOperation;
                ObjectPath tablePath = dropPartitionsOperation.getTableIdentifier().toObjectPath();
                boolean ifExists = dropPartitionsOperation.ifExists();
                for (CatalogPartitionSpec spec : dropPartitionsOperation.getPartitionSpecs()) {
                    catalog.dropPartition(tablePath, spec, ifExists);
                }
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (TableAlreadyExistException | TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateViewOperation) {
        CreateViewOperation createViewOperation = (CreateViewOperation) operation;
        if (createViewOperation.isTemporary()) {
            catalogManager.createTemporaryTable(createViewOperation.getCatalogView(), createViewOperation.getViewIdentifier(), createViewOperation.isIgnoreIfExists());
        } else {
            catalogManager.createTable(createViewOperation.getCatalogView(), createViewOperation.getViewIdentifier(), createViewOperation.isIgnoreIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof DropViewOperation) {
        DropViewOperation dropViewOperation = (DropViewOperation) operation;
        if (dropViewOperation.isTemporary()) {
            catalogManager.dropTemporaryView(dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists());
        } else {
            catalogManager.dropView(dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof AlterViewOperation) {
        AlterViewOperation alterViewOperation = (AlterViewOperation) operation;
        Catalog catalog = getCatalogOrThrowException(alterViewOperation.getViewIdentifier().getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterViewOperation.asSummaryString());
        try {
            if (alterViewOperation instanceof AlterViewRenameOperation) {
                AlterViewRenameOperation alterTableRenameOp = (AlterViewRenameOperation) operation;
                catalog.renameTable(alterTableRenameOp.getViewIdentifier().toObjectPath(), alterTableRenameOp.getNewViewIdentifier().getObjectName(), false);
            } else if (alterViewOperation instanceof AlterViewPropertiesOperation) {
                AlterViewPropertiesOperation alterTablePropertiesOp = (AlterViewPropertiesOperation) operation;
                catalogManager.alterTable(alterTablePropertiesOp.getCatalogView(), alterTablePropertiesOp.getViewIdentifier(), false);
            } else if (alterViewOperation instanceof AlterViewAsOperation) {
                AlterViewAsOperation alterViewAsOperation = (AlterViewAsOperation) alterViewOperation;
                catalogManager.alterTable(alterViewAsOperation.getNewView(), alterViewAsOperation.getViewIdentifier(), false);
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (TableAlreadyExistException | TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateDatabaseOperation) {
        CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(createDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(createDatabaseOperation.asSummaryString());
        try {
            catalog.createDatabase(createDatabaseOperation.getDatabaseName(), createDatabaseOperation.getCatalogDatabase(), createDatabaseOperation.isIgnoreIfExists());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseAlreadyExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof DropDatabaseOperation) {
        DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(dropDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(dropDatabaseOperation.asSummaryString());
        try {
            catalog.dropDatabase(dropDatabaseOperation.getDatabaseName(), dropDatabaseOperation.isIfExists(), dropDatabaseOperation.isCascade());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseNotExistException | DatabaseNotEmptyException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof AlterDatabaseOperation) {
        AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(alterDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterDatabaseOperation.asSummaryString());
        try {
            catalog.alterDatabase(alterDatabaseOperation.getDatabaseName(), alterDatabaseOperation.getCatalogDatabase(), false);
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateCatalogFunctionOperation) {
        return createCatalogFunction((CreateCatalogFunctionOperation) operation);
    } else if (operation instanceof CreateTempSystemFunctionOperation) {
        return createSystemFunction((CreateTempSystemFunctionOperation) operation);
    } else if (operation instanceof DropCatalogFunctionOperation) {
        return dropCatalogFunction((DropCatalogFunctionOperation) operation);
    } else if (operation instanceof DropTempSystemFunctionOperation) {
        return dropSystemFunction((DropTempSystemFunctionOperation) operation);
    } else if (operation instanceof AlterCatalogFunctionOperation) {
        return alterCatalogFunction((AlterCatalogFunctionOperation) operation);
    } else if (operation instanceof CreateCatalogOperation) {
        return createCatalog((CreateCatalogOperation) operation);
    } else if (operation instanceof DropCatalogOperation) {
        DropCatalogOperation dropCatalogOperation = (DropCatalogOperation) operation;
        String exMsg = getDDLOpExecuteErrorMsg(dropCatalogOperation.asSummaryString());
        try {
            catalogManager.unregisterCatalog(dropCatalogOperation.getCatalogName(), dropCatalogOperation.isIfExists());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (CatalogException e) {
            throw new ValidationException(exMsg, e);
        }
    } else if (operation instanceof LoadModuleOperation) {
        return loadModule((LoadModuleOperation) operation);
    } else if (operation instanceof UnloadModuleOperation) {
        return unloadModule((UnloadModuleOperation) operation);
    } else if (operation instanceof UseModulesOperation) {
        return useModules((UseModulesOperation) operation);
    } else if (operation instanceof UseCatalogOperation) {
        UseCatalogOperation useCatalogOperation = (UseCatalogOperation) operation;
        catalogManager.setCurrentCatalog(useCatalogOperation.getCatalogName());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof UseDatabaseOperation) {
        UseDatabaseOperation useDatabaseOperation = (UseDatabaseOperation) operation;
        catalogManager.setCurrentCatalog(useDatabaseOperation.getCatalogName());
        catalogManager.setCurrentDatabase(useDatabaseOperation.getDatabaseName());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof ShowCatalogsOperation) {
        return buildShowResult("catalog name", listCatalogs());
    } else if (operation instanceof ShowCreateTableOperation) {
        ShowCreateTableOperation showCreateTableOperation = (ShowCreateTableOperation) operation;
        ContextResolvedTable table = catalogManager.getTable(showCreateTableOperation.getTableIdentifier()).orElseThrow(() -> new ValidationException(String.format("Could not execute SHOW CREATE TABLE. Table with identifier %s does not exist.", showCreateTableOperation.getTableIdentifier().asSerializableString())));
        return TableResultImpl.builder().resultKind(ResultKind.SUCCESS_WITH_CONTENT).schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING()))).data(Collections.singletonList(Row.of(ShowCreateUtil.buildShowCreateTableRow(table.getResolvedTable(), showCreateTableOperation.getTableIdentifier(), table.isTemporary())))).build();
    } else if (operation instanceof ShowCreateViewOperation) {
        ShowCreateViewOperation showCreateViewOperation = (ShowCreateViewOperation) operation;
        final ContextResolvedTable table = catalogManager.getTable(showCreateViewOperation.getViewIdentifier()).orElseThrow(() -> new ValidationException(String.format("Could not execute SHOW CREATE VIEW. View with identifier %s does not exist.", showCreateViewOperation.getViewIdentifier().asSerializableString())));
        return TableResultImpl.builder().resultKind(ResultKind.SUCCESS_WITH_CONTENT).schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING()))).data(Collections.singletonList(Row.of(ShowCreateUtil.buildShowCreateViewRow(table.getResolvedTable(), showCreateViewOperation.getViewIdentifier(), table.isTemporary())))).build();
    } else if (operation instanceof ShowCurrentCatalogOperation) {
        return buildShowResult("current catalog name", new String[] { catalogManager.getCurrentCatalog() });
    } else if (operation instanceof ShowDatabasesOperation) {
        return buildShowResult("database name", listDatabases());
    } else if (operation instanceof ShowCurrentDatabaseOperation) {
        return buildShowResult("current database name", new String[] { catalogManager.getCurrentDatabase() });
    } else if (operation instanceof ShowModulesOperation) {
        ShowModulesOperation showModulesOperation = (ShowModulesOperation) operation;
        if (showModulesOperation.requireFull()) {
            return buildShowFullModulesResult(listFullModules());
        } else {
            return buildShowResult("module name", listModules());
        }
    } else if (operation instanceof ShowTablesOperation) {
        return buildShowResult("table name", listTables());
    } else if (operation instanceof ShowFunctionsOperation) {
        ShowFunctionsOperation showFunctionsOperation = (ShowFunctionsOperation) operation;
        String[] functionNames = null;
        ShowFunctionsOperation.FunctionScope functionScope = showFunctionsOperation.getFunctionScope();
        switch(functionScope) {
            case USER:
                functionNames = listUserDefinedFunctions();
                break;
            case ALL:
                functionNames = listFunctions();
                break;
            default:
                throw new UnsupportedOperationException(String.format("SHOW FUNCTIONS with %s scope is not supported.", functionScope));
        }
        return buildShowResult("function name", functionNames);
    } else if (operation instanceof ShowViewsOperation) {
        return buildShowResult("view name", listViews());
    } else if (operation instanceof ShowColumnsOperation) {
        ShowColumnsOperation showColumnsOperation = (ShowColumnsOperation) operation;
        Optional<ContextResolvedTable> result = catalogManager.getTable(showColumnsOperation.getTableIdentifier());
        if (result.isPresent()) {
            return buildShowColumnsResult(result.get().getResolvedSchema(), showColumnsOperation);
        } else {
            throw new ValidationException(String.format("Tables or views with the identifier '%s' doesn't exist.", showColumnsOperation.getTableIdentifier().asSummaryString()));
        }
    } else if (operation instanceof ShowPartitionsOperation) {
        String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
        try {
            ShowPartitionsOperation showPartitionsOperation = (ShowPartitionsOperation) operation;
            Catalog catalog = getCatalogOrThrowException(showPartitionsOperation.getTableIdentifier().getCatalogName());
            ObjectPath tablePath = showPartitionsOperation.getTableIdentifier().toObjectPath();
            CatalogPartitionSpec partitionSpec = showPartitionsOperation.getPartitionSpec();
            List<CatalogPartitionSpec> partitionSpecs = partitionSpec == null ? catalog.listPartitions(tablePath) : catalog.listPartitions(tablePath, partitionSpec);
            List<String> partitionNames = new ArrayList<>(partitionSpecs.size());
            for (CatalogPartitionSpec spec : partitionSpecs) {
                List<String> partitionKVs = new ArrayList<>(spec.getPartitionSpec().size());
                for (Map.Entry<String, String> partitionKV : spec.getPartitionSpec().entrySet()) {
                    partitionKVs.add(partitionKV.getKey() + "=" + partitionKV.getValue());
                }
                partitionNames.add(String.join("/", partitionKVs));
            }
            return buildShowResult("partition name", partitionNames.toArray(new String[0]));
        } catch (TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof ExplainOperation) {
        ExplainOperation explainOperation = (ExplainOperation) operation;
        ExplainDetail[] explainDetails = explainOperation.getExplainDetails().stream().map(ExplainDetail::valueOf).toArray(ExplainDetail[]::new);
        Operation child = ((ExplainOperation) operation).getChild();
        List<Operation> operations;
        if (child instanceof StatementSetOperation) {
            operations = new ArrayList<>(((StatementSetOperation) child).getOperations());
        } else {
            operations = Collections.singletonList(child);
        }
        String explanation = explainInternal(operations, explainDetails);
        return TableResultImpl.builder().resultKind(ResultKind.SUCCESS_WITH_CONTENT).schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING()))).data(Collections.singletonList(Row.of(explanation))).build();
    } else if (operation instanceof DescribeTableOperation) {
        DescribeTableOperation describeTableOperation = (DescribeTableOperation) operation;
        Optional<ContextResolvedTable> result = catalogManager.getTable(describeTableOperation.getSqlIdentifier());
        if (result.isPresent()) {
            return buildDescribeResult(result.get().getResolvedSchema());
        } else {
            throw new ValidationException(String.format("Tables or views with the identifier '%s' doesn't exist", describeTableOperation.getSqlIdentifier().asSummaryString()));
        }
    } else if (operation instanceof QueryOperation) {
        return executeQueryOperation((QueryOperation) operation);
    } else if (operation instanceof CreateTableASOperation) {
        CreateTableASOperation createTableASOperation = (CreateTableASOperation) operation;
        executeInternal(createTableASOperation.getCreateTableOperation());
        return executeInternal(createTableASOperation.toSinkModifyOperation(catalogManager));
    } else if (operation instanceof ExecutePlanOperation) {
        ExecutePlanOperation executePlanOperation = (ExecutePlanOperation) operation;
        return (TableResultInternal) executePlan(PlanReference.fromFile(executePlanOperation.getFilePath()));
    } else if (operation instanceof CompilePlanOperation) {
        CompilePlanOperation compilePlanOperation = (CompilePlanOperation) operation;
        compilePlanAndWrite(compilePlanOperation.getFilePath(), compilePlanOperation.isIfNotExists(), compilePlanOperation.getOperation());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof CompileAndExecutePlanOperation) {
        CompileAndExecutePlanOperation compileAndExecutePlanOperation = (CompileAndExecutePlanOperation) operation;
        CompiledPlan compiledPlan = compilePlanAndWrite(compileAndExecutePlanOperation.getFilePath(), true, compileAndExecutePlanOperation.getOperation());
        return (TableResultInternal) executePlan(compiledPlan);
    } else if (operation instanceof NopOperation) {
        return TableResultImpl.TABLE_RESULT_OK;
    } else {
        throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
    }
}
Also used : NopOperation(org.apache.flink.table.operations.NopOperation) AlterViewOperation(org.apache.flink.table.operations.ddl.AlterViewOperation) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) ShowCurrentCatalogOperation(org.apache.flink.table.operations.ShowCurrentCatalogOperation) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) ArrayList(java.util.ArrayList) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) ShowPartitionsOperation(org.apache.flink.table.operations.ShowPartitionsOperation) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) ExecutePlanOperation(org.apache.flink.table.operations.command.ExecutePlanOperation) CompileAndExecutePlanOperation(org.apache.flink.table.operations.CompileAndExecutePlanOperation) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) ShowCreateViewOperation(org.apache.flink.table.operations.ShowCreateViewOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) CompileAndExecutePlanOperation(org.apache.flink.table.operations.CompileAndExecutePlanOperation) AlterPartitionPropertiesOperation(org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation) ArrayList(java.util.ArrayList) List(java.util.List) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) CreateTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation) AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) CompiledPlan(org.apache.flink.table.api.CompiledPlan) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) Optional(java.util.Optional) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) ShowViewsOperation(org.apache.flink.table.operations.ShowViewsOperation) Catalog(org.apache.flink.table.catalog.Catalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) ExplainDetail(org.apache.flink.table.api.ExplainDetail) ShowCurrentDatabaseOperation(org.apache.flink.table.operations.ShowCurrentDatabaseOperation) AlterTableOperation(org.apache.flink.table.operations.ddl.AlterTableOperation) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) CatalogPartition(org.apache.flink.table.catalog.CatalogPartition) ModifyOperation(org.apache.flink.table.operations.ModifyOperation) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) CollectModifyOperation(org.apache.flink.table.operations.CollectModifyOperation) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) ShowDatabasesOperation(org.apache.flink.table.operations.ShowDatabasesOperation) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) Map(java.util.Map) HashMap(java.util.HashMap) DatabaseAlreadyExistException(org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException) DropTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) ObjectPath(org.apache.flink.table.catalog.ObjectPath) 
AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) CreateCatalogOperation(org.apache.flink.table.operations.ddl.CreateCatalogOperation) DropCatalogFunctionOperation(org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) ShowCreateTableOperation(org.apache.flink.table.operations.ShowCreateTableOperation) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) ModifyOperation(org.apache.flink.table.operations.ModifyOperation) ShowCurrentDatabaseOperation(org.apache.flink.table.operations.ShowCurrentDatabaseOperation) ExecutePlanOperation(org.apache.flink.table.operations.command.ExecutePlanOperation) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) ShowColumnsOperation(org.apache.flink.table.operations.ShowColumnsOperation) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) CompilePlanOperation(org.apache.flink.table.operations.ddl.CompilePlanOperation) CreateCatalogOperation(org.apache.flink.table.operations.ddl.CreateCatalogOperation) ShowCreateViewOperation(org.apache.flink.table.operations.ShowCreateViewOperation) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) ShowCatalogsOperation(org.apache.flink.table.operations.ShowCatalogsOperation) CollectModifyOperation(org.apache.flink.table.operations.CollectModifyOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) CompileAndExecutePlanOperation(org.apache.flink.table.operations.CompileAndExecutePlanOperation) UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) DropCatalogFunctionOperation(org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) ShowCurrentCatalogOperation(org.apache.flink.table.operations.ShowCurrentCatalogOperation) AlterTableOperation(org.apache.flink.table.operations.ddl.AlterTableOperation) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) NopOperation(org.apache.flink.table.operations.NopOperation) AlterPartitionPropertiesOperation(org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation) ShowPartitionsOperation(org.apache.flink.table.operations.ShowPartitionsOperation) AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) AlterViewOperation(org.apache.flink.table.operations.ddl.AlterViewOperation) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) Operation(org.apache.flink.table.operations.Operation) 
AlterCatalogFunctionOperation(org.apache.flink.table.operations.ddl.AlterCatalogFunctionOperation) DropTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation) TableSourceQueryOperation(org.apache.flink.table.operations.TableSourceQueryOperation) ShowViewsOperation(org.apache.flink.table.operations.ShowViewsOperation) ShowDatabasesOperation(org.apache.flink.table.operations.ShowDatabasesOperation) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) CreateTableASOperation(org.apache.flink.table.operations.ddl.CreateTableASOperation) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) DropCatalogOperation(org.apache.flink.table.operations.ddl.DropCatalogOperation) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) ShowCreateTableOperation(org.apache.flink.table.operations.ShowCreateTableOperation) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) CreateCatalogFunctionOperation(org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation) CreateTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) ShowCreateViewOperation(org.apache.flink.table.operations.ShowCreateViewOperation) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) CreateCatalogFunctionOperation(org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation) AlterCatalogFunctionOperation(org.apache.flink.table.operations.ddl.AlterCatalogFunctionOperation) CreateTableASOperation(org.apache.flink.table.operations.ddl.CreateTableASOperation) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) ShowColumnsOperation(org.apache.flink.table.operations.ShowColumnsOperation) CompilePlanOperation(org.apache.flink.table.operations.ddl.CompilePlanOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) TableSourceQueryOperation(org.apache.flink.table.operations.TableSourceQueryOperation) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) TableException(org.apache.flink.table.api.TableException) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) ShowCatalogsOperation(org.apache.flink.table.operations.ShowCatalogsOperation) UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) 
DropCatalogOperation(org.apache.flink.table.operations.ddl.DropCatalogOperation) ShowCreateTableOperation(org.apache.flink.table.operations.ShowCreateTableOperation) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) TableException(org.apache.flink.table.api.TableException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) FunctionNotExistException(org.apache.flink.table.catalog.exceptions.FunctionNotExistException) DatabaseNotEmptyException(org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException) DatabaseAlreadyExistException(org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException) SqlParserException(org.apache.flink.table.api.SqlParserException) ValidationException(org.apache.flink.table.api.ValidationException) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation)
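
For context, here is a small end-to-end sketch of the AlterDatabaseOperation branch above (database name and property keys are illustrative): executeSql parses each statement into an Operation, and executeInternal dispatches the ALTER DATABASE case to Catalog#alterDatabase on the named catalog.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterDatabaseDispatchSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Backed by the default GenericInMemoryCatalog, so no external catalog is needed.
        tEnv.executeSql("CREATE DATABASE IF NOT EXISTS db1 WITH ('k1' = 'v1')");
        // Parsed into an AlterDatabaseOperation; the branch above forwards it to catalog.alterDatabase(...).
        tEnv.executeSql("ALTER DATABASE db1 SET ('k1' = 'a', 'k2' = 'b')");
        tEnv.executeSql("SHOW DATABASES").print();
    }
}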

Example 3 with AlterDatabaseOperation

Use of org.apache.flink.table.operations.ddl.AlterDatabaseOperation in the Apache Flink project.

From the class SqlToOperationConverter, method convertAlterDatabase:

/**
 * Convert ALTER DATABASE statement.
 */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
    String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("alter database identifier format error");
    }
    String catalogName = (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
    String databaseName = (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    final Map<String, String> properties;
    CatalogDatabase originCatalogDatabase;
    Optional<Catalog> catalog = catalogManager.getCatalog(catalogName);
    if (catalog.isPresent()) {
        try {
            originCatalogDatabase = catalog.get().getDatabase(databaseName);
            properties = new HashMap<>(originCatalogDatabase.getProperties());
        } catch (DatabaseNotExistException e) {
            throw new ValidationException(String.format("Database %s not exists", databaseName), e);
        }
    } else {
        throw new ValidationException(String.format("Catalog %s not exists", catalogName));
    }
    // set with properties
    sqlAlterDatabase.getPropertyList().getList().forEach(p -> properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
    return new AlterDatabaseOperation(catalogName, databaseName, catalogDatabase);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) SqlTableOption(org.apache.flink.sql.parser.ddl.SqlTableOption) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) ValidationException(org.apache.flink.table.api.ValidationException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) SqlShowCurrentCatalog(org.apache.flink.sql.parser.dql.SqlShowCurrentCatalog) Catalog(org.apache.flink.table.catalog.Catalog) SqlUseCatalog(org.apache.flink.sql.parser.ddl.SqlUseCatalog) SqlDropCatalog(org.apache.flink.sql.parser.ddl.SqlDropCatalog) SqlCreateCatalog(org.apache.flink.sql.parser.ddl.SqlCreateCatalog) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)
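
A test-style sketch for inspecting the converted operation without executing it. It reaches the planner's Parser through the internal TableEnvironmentInternal interface, so treat it as a debugging aid rather than stable API; the database name and property keys are illustrative.

import java.util.List;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.internal.TableEnvironmentInternal;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.ddl.AlterDatabaseOperation;

public class AlterDatabaseConversionSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        tEnv.executeSql("CREATE DATABASE IF NOT EXISTS db1 WITH ('k1' = 'v1')");
        // Parse only (internal API): the statement is converted by SqlToOperationConverter.
        List<Operation> parsed = ((TableEnvironmentInternal) tEnv)
                .getParser()
                .parse("ALTER DATABASE db1 SET ('k2' = 'v2')");
        AlterDatabaseOperation alterOp = (AlterDatabaseOperation) parsed.get(0);
        // Catalog and database names come from the (optionally qualified) identifier; the new
        // CatalogDatabase carries the original properties merged with the SET list.
        System.out.println(alterOp.getCatalogName() + "." + alterOp.getDatabaseName());
        System.out.println(alterOp.getCatalogDatabase().getProperties());
    }
}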

Example 4 with AlterDatabaseOperation

Use of org.apache.flink.table.operations.ddl.AlterDatabaseOperation in the Apache Flink project.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseLocation:

private Operation convertAlterDatabaseLocation(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    String newLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_LOCATION.name());
    props.put(DATABASE_LOCATION_URI, newLocation);
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)
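
With the same Hive-dialect setup as in the sketch under Example 1, this location variant is triggered by a statement like the following (the path is a placeholder); the converter records it under the DATABASE_LOCATION_URI property.

// Assumes tEnv is configured for the Hive dialect as in the Example 1 sketch; the path is illustrative.
tEnv.executeSql("ALTER DATABASE mydb SET LOCATION 'hdfs://namenode:8020/warehouse/mydb.db'");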

Example 5 with AlterDatabaseOperation

Use of org.apache.flink.table.operations.ddl.AlterDatabaseOperation in the Apache Flink project.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseProperties:

private Operation convertAlterDatabaseProperties(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        if (childNode.getToken().getType() == HiveASTParser.TOK_DATABASEPROPERTIES) {
            dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
        } else {
            throw new ValidationException("Unknown AST node for ALTER DATABASE PROPERTIES: " + childNode);
        }
    }
    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_PROPS.name());
    props.putAll(dbProps);
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)
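
Likewise, the properties variant is reached from a Hive-dialect ALTER DATABASE ... SET DBPROPERTIES statement; the converter merges the given key/value pairs into the existing database properties (keys and values are illustrative).

// Assumes tEnv is configured for the Hive dialect as in the Example 1 sketch.
tEnv.executeSql("ALTER DATABASE mydb SET DBPROPERTIES ('createdBy' = 'alice', 'purpose' = 'demo')");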

Aggregations

AlterDatabaseOperation (org.apache.flink.table.operations.ddl.AlterDatabaseOperation): 9 usages
HashMap (java.util.HashMap): 5 usages
CatalogDatabaseImpl (org.apache.flink.table.catalog.CatalogDatabaseImpl): 5 usages
IOException (java.io.IOException): 4 usages
TableException (org.apache.flink.table.api.TableException): 4 usages
ValidationException (org.apache.flink.table.api.ValidationException): 4 usages
CatalogDatabase (org.apache.flink.table.catalog.CatalogDatabase): 4 usages
ExplainOperation (org.apache.flink.table.operations.ExplainOperation): 4 usages
Operation (org.apache.flink.table.operations.Operation): 4 usages
QueryOperation (org.apache.flink.table.operations.QueryOperation): 4 usages
ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation): 4 usages
UseCatalogOperation (org.apache.flink.table.operations.UseCatalogOperation): 4 usages
UseDatabaseOperation (org.apache.flink.table.operations.UseDatabaseOperation): 4 usages
CreateDatabaseOperation (org.apache.flink.table.operations.ddl.CreateDatabaseOperation): 4 usages
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation): 4 usages
CreateViewOperation (org.apache.flink.table.operations.ddl.CreateViewOperation): 4 usages
DropDatabaseOperation (org.apache.flink.table.operations.ddl.DropDatabaseOperation): 4 usages
LinkedHashMap (java.util.LinkedHashMap): 3 usages
CatalogSinkModifyOperation (org.apache.flink.table.operations.CatalogSinkModifyOperation): 3 usages
DescribeTableOperation (org.apache.flink.table.operations.DescribeTableOperation): 3 usages