Search in sources:

Example 21 with CatalogDatabase

use of org.apache.flink.table.catalog.CatalogDatabase in project flink by apache.

The class HiveCatalog, method alterDatabase.

/**
 * Replaces the metadata of an existing Hive database with {@code newDatabase}.
 *
 * @param databaseName name of the database to alter; must be non-blank
 * @param newDatabase the replacement database definition; must be non-null
 * @param ignoreIfNotExists when true, a missing database is silently ignored
 * @throws DatabaseNotExistException if the database is absent and
 *     {@code ignoreIfNotExists} is false
 * @throws CatalogException if the metastore call fails
 */
@Override
public void alterDatabase(String databaseName, CatalogDatabase newDatabase, boolean ignoreIfNotExists) throws DatabaseNotExistException, CatalogException {
    checkArgument(!isNullOrWhitespaceOnly(databaseName), "databaseName cannot be null or empty");
    checkNotNull(newDatabase, "newDatabase cannot be null");

    // Resolve the current Hive database up front: client.alterDatabase does
    // not report a missing database, so existence must be verified here.
    final Database existingHiveDb;
    try {
        existingHiveDb = getHiveDatabase(databaseName);
    } catch (DatabaseNotExistException e) {
        if (ignoreIfNotExists) {
            return;
        }
        throw new DatabaseNotExistException(getName(), databaseName);
    }

    try {
        client.alterDatabase(databaseName, alterDatabase(existingHiveDb, newDatabase));
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to alter database %s", databaseName), e);
    }
}
Also used : TException(org.apache.thrift.TException) CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) SqlAlterHiveDatabase(org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase) SqlCreateHiveDatabase(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveDatabase) Database(org.apache.hadoop.hive.metastore.api.Database) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException)

Example 22 with CatalogDatabase

use of org.apache.flink.table.catalog.CatalogDatabase in project flink by apache.

The class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseLocation.

/**
 * Translates an ALTER DATABASE ... SET LOCATION AST into an
 * {@link AlterDatabaseOperation} that rewrites only the location URI property.
 */
private Operation convertAlterDatabaseLocation(HiveParserASTNode ast) {
    // Child 0 is the database name; child 1 is the quoted location string.
    String dbName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    String location = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());

    // Start from the existing properties so unrelated entries survive the alter.
    CatalogDatabase existing = getDatabase(dbName);
    Map<String, String> newProps = new HashMap<>(existing.getProperties());
    newProps.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_LOCATION.name());
    newProps.put(DATABASE_LOCATION_URI, location);

    return new AlterDatabaseOperation(
            catalogManager.getCurrentCatalog(),
            dbName,
            new CatalogDatabaseImpl(newProps, existing.getComment()));
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Example 23 with CatalogDatabase

use of org.apache.flink.table.catalog.CatalogDatabase in project flink by apache.

The class HiveParserDDLSemanticAnalyzer, method convertCreateDatabase.

/**
 * Translates a CREATE DATABASE AST into a {@link CreateDatabaseOperation}.
 *
 * <p>Child 0 is the database name; the remaining children are the optional
 * IF NOT EXISTS flag, comment, DBPROPERTIES, and LOCATION clauses, which may
 * appear in any order.
 */
private Operation convertCreateDatabase(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());

    boolean ifNotExists = false;
    String comment = null;
    String location = null;
    Map<String, String> declaredProps = null;

    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        int tokenType = child.getToken().getType();
        if (tokenType == HiveASTParser.TOK_IFNOTEXISTS) {
            ifNotExists = true;
        } else if (tokenType == HiveASTParser.TOK_DATABASECOMMENT) {
            comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
        } else if (tokenType == HiveASTParser.TOK_DATABASEPROPERTIES) {
            declaredProps = getProps((HiveParserASTNode) child.getChild(0));
        } else if (tokenType == HiveASTParser.TOK_DATABASELOCATION) {
            location = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
        } else {
            throw new ValidationException("Unknown AST node for CREATE DATABASE: " + child);
        }
    }

    // The location is stored as an ordinary catalog property rather than a
    // dedicated field, matching how HiveCatalog round-trips it.
    Map<String, String> props = new HashMap<>();
    if (declaredProps != null) {
        props.putAll(declaredProps);
    }
    if (location != null) {
        props.put(DATABASE_LOCATION_URI, location);
    }
    return new CreateDatabaseOperation(
            catalogManager.getCurrentCatalog(),
            dbName,
            new CatalogDatabaseImpl(props, comment),
            ifNotExists);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Example 24 with CatalogDatabase

use of org.apache.flink.table.catalog.CatalogDatabase in project flink by apache.

The class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseProperties.

/**
 * Translates an ALTER DATABASE ... SET DBPROPERTIES AST into an
 * {@link AlterDatabaseOperation} that merges the new properties over the
 * database's existing ones.
 *
 * @param ast the ALTER DATABASE AST; child 0 is the database name, the
 *     remaining children are expected to be TOK_DATABASEPROPERTIES nodes
 * @throws ValidationException if an unexpected AST child is encountered
 */
private Operation convertAlterDatabaseProperties(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        if (childNode.getToken().getType() == HiveASTParser.TOK_DATABASEPROPERTIES) {
            dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
        } else {
            throw new ValidationException("Unknown AST node for ALTER DATABASE PROPERTIES: " + childNode);
        }
    }
    CatalogDatabase originDB = getDatabase(dbName);
    // Preserve existing properties and record the operation kind for HiveCatalog.
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_PROPS.name());
    // Fix: guard against a missing TOK_DATABASEPROPERTIES child — the original
    // called props.putAll(dbProps) unconditionally and would NPE when the loop
    // never assigned dbProps (convertCreateDatabase already null-guards the
    // same variable).
    if (dbProps != null) {
        props.putAll(dbProps);
    }
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Example 25 with CatalogDatabase

use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.

The class HiveCatalog, method getDatabase.

// ------ databases ------
/**
 * Fetches a Hive database and adapts it to a Flink {@link CatalogDatabase}.
 *
 * <p>Hive keeps the location URI as a dedicated field; it is surfaced here as
 * a regular entry in the property map so callers see one flat set of
 * properties.
 *
 * @param databaseName name of the database to look up
 * @throws DatabaseNotExistException if the database does not exist
 * @throws CatalogException on metastore access failure
 */
@Override
public CatalogDatabase getDatabase(String databaseName) throws DatabaseNotExistException, CatalogException {
    final Database hiveDb = getHiveDatabase(databaseName);
    final Map<String, String> props = new HashMap<>(hiveDb.getParameters());
    props.put(SqlCreateHiveDatabase.DATABASE_LOCATION_URI, hiveDb.getLocationUri());
    return new CatalogDatabaseImpl(props, hiveDb.getDescription());
}
Also used : HashMap(java.util.HashMap) CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) SqlAlterHiveDatabase(org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase) SqlCreateHiveDatabase(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveDatabase) Database(org.apache.hadoop.hive.metastore.api.Database) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Aggregations

CatalogDatabase (org.apache.flink.table.catalog.CatalogDatabase)31 CatalogDatabaseImpl (org.apache.flink.table.catalog.CatalogDatabaseImpl)22 HashMap (java.util.HashMap)18 ValidationException (org.apache.flink.table.api.ValidationException)18 LinkedHashMap (java.util.LinkedHashMap)15 AlterDatabaseOperation (org.apache.flink.table.operations.ddl.AlterDatabaseOperation)15 DatabaseNotExistException (org.apache.flink.table.catalog.exceptions.DatabaseNotExistException)12 SqlAlterHiveDatabase (org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveDatabase)9 SqlCreateHiveDatabase (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveDatabase)9 Catalog (org.apache.flink.table.catalog.Catalog)9 Database (org.apache.hadoop.hive.metastore.api.Database)9 SqlCreateCatalog (org.apache.flink.sql.parser.ddl.SqlCreateCatalog)6 SqlDropCatalog (org.apache.flink.sql.parser.ddl.SqlDropCatalog)6 SqlTableOption (org.apache.flink.sql.parser.ddl.SqlTableOption)6 SqlUseCatalog (org.apache.flink.sql.parser.ddl.SqlUseCatalog)6 SqlShowCurrentCatalog (org.apache.flink.sql.parser.dql.SqlShowCurrentCatalog)6 CreateDatabaseOperation (org.apache.flink.table.operations.ddl.CreateDatabaseOperation)6 CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException)4 TException (org.apache.thrift.TException)4 ArrayList (java.util.ArrayList)3