Use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.
The class HiveParserDDLSemanticAnalyzer, method getDatabase.
private CatalogDatabase getDatabase(String databaseName) {
    Catalog catalog = catalogManager.getCatalog(catalogManager.getCurrentCatalog()).get();
    CatalogDatabase database;
    try {
        database = catalog.getDatabase(databaseName);
    } catch (DatabaseNotExistException e) {
        throw new ValidationException(
                String.format("Database %s not exists", databaseName), e);
    }
    return database;
}
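For context, here is a minimal standalone sketch (not from the Flink sources) of the Catalog#getDatabase call that the method above wraps. It uses a GenericInMemoryCatalog so it runs without a Hive Metastore; the catalog name "demo_catalog" and database name "missing_db" are made up.

import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException;

public class GetDatabaseSketch {
    public static void main(String[] args) {
        GenericInMemoryCatalog catalog = new GenericInMemoryCatalog("demo_catalog");
        try {
            CatalogDatabase db = catalog.getDatabase("missing_db");
            System.out.println("Comment: " + db.getComment());
        } catch (DatabaseNotExistException e) {
            // Same failure that HiveParserDDLSemanticAnalyzer rewraps as a ValidationException.
            System.err.println("Database missing_db does not exist");
        }
    }
}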
Use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.
The class HiveCatalog, method createDatabase.
@Override
public void createDatabase(
        String databaseName, CatalogDatabase database, boolean ignoreIfExists)
        throws DatabaseAlreadyExistException, CatalogException {
    checkArgument(
            !isNullOrWhitespaceOnly(databaseName), "databaseName cannot be null or empty");
    checkNotNull(database, "database cannot be null");

    Map<String, String> properties = database.getProperties();
    String dbLocationUri = properties.remove(SqlCreateHiveDatabase.DATABASE_LOCATION_URI);

    Database hiveDatabase =
            new Database(databaseName, database.getComment(), dbLocationUri, properties);

    try {
        client.createDatabase(hiveDatabase);
    } catch (AlreadyExistsException e) {
        if (!ignoreIfExists) {
            throw new DatabaseAlreadyExistException(getName(), hiveDatabase.getName());
        }
    } catch (TException e) {
        throw new CatalogException(
                String.format("Failed to create database %s", hiveDatabase.getName()), e);
    }
}
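A runnable sketch of calling Catalog#createDatabase with a CatalogDatabaseImpl, the same pair of arguments HiveCatalog receives. GenericInMemoryCatalog stands in for HiveCatalog so no Metastore is needed; the database name "analytics" and the "owner" property are illustrative only.

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;

public class CreateDatabaseSketch {
    public static void main(String[] args) throws Exception {
        Catalog catalog = new GenericInMemoryCatalog("demo_catalog");

        Map<String, String> properties = new HashMap<>();
        properties.put("owner", "flink"); // arbitrary example property

        // ignoreIfExists = true: a second call with the same name is a no-op
        // instead of throwing DatabaseAlreadyExistException.
        catalog.createDatabase(
                "analytics", new CatalogDatabaseImpl(properties, "example database"), true);
    }
}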
Use of org.apache.flink.table.catalog.CatalogDatabase in project flink-mirror by flink-ci.
The class SqlToOperationConverter, method convertCreateDatabase.
/** Convert CREATE DATABASE statement. */
private Operation convertCreateDatabase(SqlCreateDatabase sqlCreateDatabase) {
    String[] fullDatabaseName = sqlCreateDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("create database identifier format error");
    }
    String catalogName =
            (fullDatabaseName.length == 1)
                    ? catalogManager.getCurrentCatalog()
                    : fullDatabaseName[0];
    String databaseName =
            (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    boolean ignoreIfExists = sqlCreateDatabase.isIfNotExists();
    String databaseComment =
            sqlCreateDatabase
                    .getComment()
                    .map(comment -> comment.getNlsString().getValue())
                    .orElse(null);
    // set with properties
    Map<String, String> properties = new HashMap<>();
    sqlCreateDatabase
            .getPropertyList()
            .getList()
            .forEach(
                    p ->
                            properties.put(
                                    ((SqlTableOption) p).getKeyString(),
                                    ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, databaseComment);
    return new CreateDatabaseOperation(
            catalogName, databaseName, catalogDatabase, ignoreIfExists);
}
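The SQL that reaches this converter looks like the statement in the sketch below; the database name, comment, and WITH property are illustrative only.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CreateDatabaseSqlSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inBatchMode().build());
        // Parsed into a SqlCreateDatabase node and converted into a CreateDatabaseOperation
        // whose CatalogDatabaseImpl carries the comment and the WITH properties.
        tEnv.executeSql(
                "CREATE DATABASE IF NOT EXISTS analytics COMMENT 'example db' WITH ('k1' = 'v1')");
    }
}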
Use of org.apache.flink.table.catalog.CatalogDatabase in project flink by splunk.
The class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseOwner.
private Operation convertAlterDatabaseOwner(HiveParserASTNode ast) {
    String dbName =
            HiveParserBaseSemanticAnalyzer.getUnescapedName(
                    (HiveParserASTNode) ast.getChild(0));
    PrincipalDesc principalDesc =
            HiveParserAuthorizationParseUtils.getPrincipalDesc(
                    (HiveParserASTNode) ast.getChild(1));

    // The syntax should not allow these fields to be null, but let's verify
    String nullCmdMsg = "can't be null in alter database set owner command";
    if (principalDesc.getName() == null) {
        throw new ValidationException("Owner name " + nullCmdMsg);
    }
    if (principalDesc.getType() == null) {
        throw new ValidationException("Owner type " + nullCmdMsg);
    }

    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_OWNER.name());
    props.put(DATABASE_OWNER_NAME, principalDesc.getName());
    props.put(DATABASE_OWNER_TYPE, principalDesc.getType().name().toLowerCase());
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
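The core pattern here, copying the existing properties, adding new entries, and handing a fresh CatalogDatabaseImpl back to the catalog, can be exercised standalone. The sketch below assumes a GenericInMemoryCatalog, and the owner property keys are written out as illustrative literals because the real constants live in the Hive connector.

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;

public class AlterDatabaseOwnerSketch {
    public static void main(String[] args) throws Exception {
        Catalog catalog = new GenericInMemoryCatalog("demo_catalog");
        catalog.createDatabase(
                "analytics", new CatalogDatabaseImpl(new HashMap<>(), "demo"), false);

        // Copy the existing properties, add the new ones, and write the result back,
        // the same pattern convertAlterDatabaseOwner follows via AlterDatabaseOperation.
        CatalogDatabase originDB = catalog.getDatabase("analytics");
        Map<String, String> props = new HashMap<>(originDB.getProperties());
        props.put("database.owner.name", "alice"); // illustrative keys and values
        props.put("database.owner.type", "user");
        catalog.alterDatabase(
                "analytics", new CatalogDatabaseImpl(props, originDB.getComment()), false);
    }
}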
Use of org.apache.flink.table.catalog.CatalogDatabase in project flink by splunk.
The class HiveParserDDLSemanticAnalyzer, method convertAlterDatabaseLocation.
private Operation convertAlterDatabaseLocation(HiveParserASTNode ast) {
    String dbName =
            HiveParserBaseSemanticAnalyzer.getUnescapedName(
                    (HiveParserASTNode) ast.getChild(0));
    String newLocation =
            HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());

    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(
            ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_LOCATION.name());
    props.put(DATABASE_LOCATION_URI, newLocation);
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
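A hedged usage sketch of the kind of Hive-dialect statement this converter handles; it assumes a HiveCatalog is already registered and set as the current catalog, and the database name and HDFS path are made up.

import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class AlterDatabaseLocationSketch {
    static void alterLocation(TableEnvironment tEnv) {
        // Switch to the Hive dialect so the statement is routed through the Hive parser.
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        tEnv.executeSql(
                "ALTER DATABASE analytics SET LOCATION 'hdfs://namenode:8020/warehouse/analytics.db'");
    }
}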