use of org.apache.flink.table.catalog.CatalogDatabase in project flink by apache.
the class SqlToOperationConverter method convertAlterDatabase.
/**
 * Convert ALTER DATABASE statement.
 */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
    String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("alter database identifier format error");
    }
    String catalogName = (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
    String databaseName = (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    final Map<String, String> properties;
    CatalogDatabase originCatalogDatabase;
    Optional<Catalog> catalog = catalogManager.getCatalog(catalogName);
    if (catalog.isPresent()) {
        try {
            originCatalogDatabase = catalog.get().getDatabase(databaseName);
            properties = new HashMap<>(originCatalogDatabase.getProperties());
        } catch (DatabaseNotExistException e) {
            throw new ValidationException(String.format("Database %s not exists", databaseName), e);
        }
    } else {
        throw new ValidationException(String.format("Catalog %s not exists", catalogName));
    }
    // set with properties
    sqlAlterDatabase.getPropertyList().getList().forEach(p ->
            properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
    return new AlterDatabaseOperation(catalogName, databaseName, catalogDatabase);
}
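For context, a minimal sketch of how this converter is typically reached from user code: the SQL below is parsed into SqlAlterDatabase and handed to convertAlterDatabase, which produces the AlterDatabaseOperation returned above. The TableEnvironment setup and the database and property names are assumptions for illustration, not part of the converter.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterDatabaseSqlExample {
    public static void main(String[] args) {
        // Assumed setup: a streaming TableEnvironment with the default in-memory catalog.
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        tEnv.executeSql("CREATE DATABASE IF NOT EXISTS db1 WITH ('k1' = 'v1')");
        // Parsed into SqlAlterDatabase; convertAlterDatabase merges the new options
        // into the existing properties and wraps them in a CatalogDatabaseImpl.
        tEnv.executeSql("ALTER DATABASE db1 SET ('k1' = 'a1', 'k2' = 'v2')");
    }
}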
use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.
the class HiveParserDDLSemanticAnalyzer method convertAlterDatabaseOwner.
private Operation convertAlterDatabaseOwner(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    PrincipalDesc principalDesc = HiveParserAuthorizationParseUtils.getPrincipalDesc((HiveParserASTNode) ast.getChild(1));
    // The syntax should not allow these fields to be null, but lets verify
    String nullCmdMsg = "can't be null in alter database set owner command";
    if (principalDesc.getName() == null) {
        throw new ValidationException("Owner name " + nullCmdMsg);
    }
    if (principalDesc.getType() == null) {
        throw new ValidationException("Owner type " + nullCmdMsg);
    }
    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_OWNER.name());
    props.put(DATABASE_OWNER_NAME, principalDesc.getName());
    props.put(DATABASE_OWNER_TYPE, principalDesc.getType().name().toLowerCase());
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
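The statement that reaches this converter comes from the Hive dialect. A rough sketch of issuing it, assuming a HiveCatalog has already been registered and set as the current catalog (that setup is omitted here) and that the database and owner names are placeholders:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class AlterDatabaseOwnerExample {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Assumed: a HiveCatalog is registered and selected before this point.
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        // Parsed by HiveParserDDLSemanticAnalyzer; the new owner ends up in the
        // DATABASE_OWNER_NAME / DATABASE_OWNER_TYPE properties of the CatalogDatabase.
        tEnv.executeSql("ALTER DATABASE db1 SET OWNER USER alice");
    }
}

Note that the owner change is encoded purely as database properties; CatalogDatabase itself has no dedicated owner field.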
use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.
the class HiveParserDDLSemanticAnalyzer method convertCreateDatabase.
private Operation convertCreateDatabase(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        switch (childNode.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_DATABASECOMMENT:
                dbComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(childNode.getChild(0).getText());
                break;
            case HiveASTParser.TOK_DATABASEPROPERTIES:
                dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
                break;
            case HiveASTParser.TOK_DATABASELOCATION:
                dbLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(childNode.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE DATABASE: " + childNode);
        }
    }
    Map<String, String> props = new HashMap<>();
    if (dbProps != null) {
        props.putAll(dbProps);
    }
    if (dbLocation != null) {
        props.put(DATABASE_LOCATION_URI, dbLocation);
    }
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(props, dbComment);
    return new CreateDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, catalogDatabase, ifNotExists);
}
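The CreateDatabaseOperation produced here is ultimately executed against a Catalog implementation. The same CatalogDatabase contract can be exercised directly, as in this minimal sketch against GenericInMemoryCatalog; the catalog name, database name, and properties are made up for illustration.

import java.util.Collections;
import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;

public class CreateDatabaseExample {
    public static void main(String[] args) throws Exception {
        GenericInMemoryCatalog catalog = new GenericInMemoryCatalog("my_catalog");
        // Properties and comment travel together in the CatalogDatabaseImpl.
        CatalogDatabase db = new CatalogDatabaseImpl(
                Collections.singletonMap("owner", "alice"), "example database");
        catalog.createDatabase("db1", db, /* ignoreIfExists */ true);
        // Read the database back and inspect the stored properties.
        System.out.println(catalog.getDatabase("db1").getProperties());
    }
}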
use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.
the class HiveParserDDLSemanticAnalyzer method convertAlterDatabaseProperties.
private Operation convertAlterDatabaseProperties(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        if (childNode.getToken().getType() == HiveASTParser.TOK_DATABASEPROPERTIES) {
            dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
        } else {
            throw new ValidationException("Unknown AST node for ALTER DATABASE PROPERTIES: " + childNode);
        }
    }
    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_PROPS.name());
    props.putAll(dbProps);
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
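The read-merge-write pattern above (fetch the existing CatalogDatabase, copy its properties, overlay the new ones, keep the comment) can also be expressed directly against the Catalog API. A sketch, where the catalog instance, database name, and property key are assumptions:

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;

public class AlterDatabasePropsExample {
    static void mergeProps(Catalog catalog, String dbName) throws Exception {
        // Fetch the current database, copy its properties, and overlay the new entry.
        CatalogDatabase origin = catalog.getDatabase(dbName);
        Map<String, String> props = new HashMap<>(origin.getProperties());
        props.put("retention.days", "30"); // hypothetical property
        // Keep the original comment; only the property map changes.
        catalog.alterDatabase(dbName, new CatalogDatabaseImpl(props, origin.getComment()), false);
    }
}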
use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.
the class HiveParserDDLSemanticAnalyzer method convertAlterDatabaseLocation.
private Operation convertAlterDatabaseLocation(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    String newLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_LOCATION.name());
    props.put(DATABASE_LOCATION_URI, newLocation);
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
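As with the owner change, the new location is carried as an ordinary property (DATABASE_LOCATION_URI) on the resulting CatalogDatabase rather than as a dedicated field. Assuming the same Hive-dialect TableEnvironment setup as in the owner example above, and with a placeholder database name and HDFS path, the triggering statement would look roughly like this:

import org.apache.flink.table.api.TableEnvironment;

public class AlterDatabaseLocationExample {
    static void setLocation(TableEnvironment tEnv) {
        // tEnv is assumed to be configured for the Hive dialect with a HiveCatalog,
        // as in the owner example above; the path below is a placeholder.
        tEnv.executeSql("ALTER DATABASE db1 SET LOCATION 'hdfs://namenode:8020/warehouse/db1.db'");
    }
}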