Search in sources :

Example 6 with CatalogDatabase

use of org.apache.flink.table.catalog.CatalogDatabase in project flink by apache.

In class SqlToOperationConverter, method convertAlterDatabase:

/**
 * Convert ALTER DATABASE statement.
 */
/**
 * Converts an ALTER DATABASE statement into an {@link AlterDatabaseOperation}.
 *
 * <p>The existing database properties are loaded first and the properties from the
 * statement are layered on top, so keys not mentioned in the statement are preserved.
 *
 * @param sqlAlterDatabase the parsed ALTER DATABASE node
 * @return the operation describing the database alteration
 * @throws ValidationException if the identifier has more than two parts, or the
 *     referenced catalog or database does not exist
 */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
    String[] nameParts = sqlAlterDatabase.fullDatabaseName();
    if (nameParts.length > 2) {
        throw new ValidationException("alter database identifier format error");
    }
    // A single-part identifier names a database in the current catalog;
    // a two-part identifier is catalog.database.
    final String catalogName;
    final String databaseName;
    if (nameParts.length == 1) {
        catalogName = catalogManager.getCurrentCatalog();
        databaseName = nameParts[0];
    } else {
        catalogName = nameParts[0];
        databaseName = nameParts[1];
    }
    Optional<Catalog> catalog = catalogManager.getCatalog(catalogName);
    if (!catalog.isPresent()) {
        throw new ValidationException(String.format("Catalog %s not exists", catalogName));
    }
    final CatalogDatabase originCatalogDatabase;
    final Map<String, String> properties;
    try {
        originCatalogDatabase = catalog.get().getDatabase(databaseName);
        // Seed with the current properties so unspecified keys survive the alteration.
        properties = new HashMap<>(originCatalogDatabase.getProperties());
    } catch (DatabaseNotExistException e) {
        throw new ValidationException(String.format("Database %s not exists", databaseName), e);
    }
    // Apply the key/value pairs supplied by the statement's property list.
    for (Object node : sqlAlterDatabase.getPropertyList().getList()) {
        SqlTableOption option = (SqlTableOption) node;
        properties.put(option.getKeyString(), option.getValueString());
    }
    CatalogDatabase newDatabase =
            new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
    return new AlterDatabaseOperation(catalogName, databaseName, newDatabase);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) SqlTableOption(org.apache.flink.sql.parser.ddl.SqlTableOption) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) ValidationException(org.apache.flink.table.api.ValidationException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) SqlShowCurrentCatalog(org.apache.flink.sql.parser.dql.SqlShowCurrentCatalog) Catalog(org.apache.flink.table.catalog.Catalog) SqlUseCatalog(org.apache.flink.sql.parser.ddl.SqlUseCatalog) SqlDropCatalog(org.apache.flink.sql.parser.ddl.SqlDropCatalog) SqlCreateCatalog(org.apache.flink.sql.parser.ddl.SqlCreateCatalog) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Example 7 with CatalogDatabase

use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.

In class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseOwner:

/**
 * Converts an ALTER DATABASE ... SET OWNER statement into an {@link AlterDatabaseOperation}.
 *
 * <p>The new owner is recorded in the database properties together with an
 * {@code ALTER_DATABASE_OP} marker of {@code CHANGE_OWNER}; existing properties
 * and the database comment are carried over unchanged.
 *
 * @param ast the AST node whose first child is the database name and second child the principal
 * @return the operation applying the owner change in the current catalog
 * @throws ValidationException if the principal, its name, or its type is missing
 */
private Operation convertAlterDatabaseOwner(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    PrincipalDesc principalDesc = HiveParserAuthorizationParseUtils.getPrincipalDesc((HiveParserASTNode) ast.getChild(1));
    // The syntax should not allow these fields to be null, but lets verify
    String nullCmdMsg = "can't be null in alter database set owner command";
    if (principalDesc == null) {
        // Guard before dereferencing: getPrincipalDesc can yield null for an
        // unrecognized principal node, which previously caused an NPE below.
        throw new ValidationException("Owner " + nullCmdMsg);
    }
    if (principalDesc.getName() == null) {
        throw new ValidationException("Owner name " + nullCmdMsg);
    }
    if (principalDesc.getType() == null) {
        throw new ValidationException("Owner type " + nullCmdMsg);
    }
    CatalogDatabase originDB = getDatabase(dbName);
    // Copy existing properties so the owner change is additive.
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_OWNER.name());
    props.put(DATABASE_OWNER_NAME, principalDesc.getName());
    props.put(DATABASE_OWNER_TYPE, principalDesc.getType().name().toLowerCase());
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) PrincipalDesc(org.apache.hadoop.hive.ql.plan.PrincipalDesc) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Example 8 with CatalogDatabase

use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.

In class HiveParserDDLSemanticAnalyzer, method convertCreateDatabase:

/**
 * Converts a CREATE DATABASE statement into a {@link CreateDatabaseOperation}.
 *
 * <p>The AST's first child is the database name; any further children carry the
 * optional clauses (IF NOT EXISTS, COMMENT, DBPROPERTIES, LOCATION). A LOCATION
 * clause is stored as the {@code DATABASE_LOCATION_URI} property.
 *
 * @param ast the CREATE DATABASE AST node
 * @return the operation creating the database in the current catalog
 * @throws ValidationException if an unexpected child node is encountered
 */
private Operation convertCreateDatabase(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String comment = null;
    String location = null;
    Map<String, String> explicitProps = null;
    // Walk the optional clauses following the database name.
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        int tokenType = child.getToken().getType();
        if (tokenType == HiveASTParser.TOK_IFNOTEXISTS) {
            ifNotExists = true;
        } else if (tokenType == HiveASTParser.TOK_DATABASECOMMENT) {
            comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
        } else if (tokenType == HiveASTParser.TOK_DATABASEPROPERTIES) {
            explicitProps = getProps((HiveParserASTNode) child.getChild(0));
        } else if (tokenType == HiveASTParser.TOK_DATABASELOCATION) {
            location = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
        } else {
            throw new ValidationException("Unknown AST node for CREATE DATABASE: " + child);
        }
    }
    Map<String, String> props = new HashMap<>();
    if (explicitProps != null) {
        props.putAll(explicitProps);
    }
    if (location != null) {
        props.put(DATABASE_LOCATION_URI, location);
    }
    CatalogDatabase database = new CatalogDatabaseImpl(props, comment);
    return new CreateDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, database, ifNotExists);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Example 9 with CatalogDatabase

use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.

In class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseProperties:

/**
 * Converts an ALTER DATABASE ... SET DBPROPERTIES statement into an
 * {@link AlterDatabaseOperation}.
 *
 * <p>The statement's properties are merged on top of the database's existing
 * properties, with an {@code ALTER_DATABASE_OP} marker of {@code CHANGE_PROPS};
 * the database comment is carried over unchanged.
 *
 * @param ast the AST node whose first child is the database name
 * @return the operation applying the property change in the current catalog
 * @throws ValidationException if an unexpected child node is encountered
 */
private Operation convertAlterDatabaseProperties(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        if (childNode.getToken().getType() == HiveASTParser.TOK_DATABASEPROPERTIES) {
            dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
        } else {
            throw new ValidationException("Unknown AST node for ALTER DATABASE PROPERTIES: " + childNode);
        }
    }
    CatalogDatabase originDB = getDatabase(dbName);
    // Copy existing properties so unspecified keys survive the alteration.
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_PROPS.name());
    // Guard against a malformed AST with no DBPROPERTIES child: previously
    // dbProps could remain null here and putAll would throw an NPE.
    if (dbProps != null) {
        props.putAll(dbProps);
    }
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Example 10 with CatalogDatabase

use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.

In class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseLocation:

/**
 * Converts an ALTER DATABASE ... SET LOCATION statement into an
 * {@link AlterDatabaseOperation}.
 *
 * <p>The new location is stored as the {@code DATABASE_LOCATION_URI} property
 * alongside an {@code ALTER_DATABASE_OP} marker of {@code CHANGE_LOCATION};
 * existing properties and the database comment are carried over unchanged.
 *
 * @param ast the AST node whose first child is the database name and second
 *     child the new location string
 * @return the operation applying the location change in the current catalog
 */
private Operation convertAlterDatabaseLocation(HiveParserASTNode ast) {
    String databaseName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    String newLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
    CatalogDatabase existing = getDatabase(databaseName);
    // Copy existing properties so the location change is additive.
    Map<String, String> updatedProps = new HashMap<>(existing.getProperties());
    updatedProps.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_LOCATION.name());
    updatedProps.put(DATABASE_LOCATION_URI, newLocation);
    CatalogDatabase replacement = new CatalogDatabaseImpl(updatedProps, existing.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), databaseName, replacement);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Aggregations

CatalogDatabase (org.apache.flink.table.catalog.CatalogDatabase)31 CatalogDatabaseImpl (org.apache.flink.table.catalog.CatalogDatabaseImpl)22 HashMap (java.util.HashMap)18 ValidationException (org.apache.flink.table.api.ValidationException)18 LinkedHashMap (java.util.LinkedHashMap)15 AlterDatabaseOperation (org.apache.flink.table.operations.ddl.AlterDatabaseOperation)15 DatabaseNotExistException (org.apache.flink.table.catalog.exceptions.DatabaseNotExistException)12 SqlAlterHiveDatabase (org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase)9 SqlCreateHiveDatabase (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveDatabase)9 Catalog (org.apache.flink.table.catalog.Catalog)9 Database (org.apache.hadoop.hive.metastore.api.Database)9 SqlCreateCatalog (org.apache.flink.sql.parser.ddl.SqlCreateCatalog)6 SqlDropCatalog (org.apache.flink.sql.parser.ddl.SqlDropCatalog)6 SqlTableOption (org.apache.flink.sql.parser.ddl.SqlTableOption)6 SqlUseCatalog (org.apache.flink.sql.parser.ddl.SqlUseCatalog)6 SqlShowCurrentCatalog (org.apache.flink.sql.parser.dql.SqlShowCurrentCatalog)6 CreateDatabaseOperation (org.apache.flink.table.operations.ddl.CreateDatabaseOperation)6 CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException)4 TException (org.apache.thrift.TException)4 ArrayList (java.util.ArrayList)3