Use of org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc in project hive by apache.
The class CreateDatabaseHook, method authorizeDDLWork.
@Override
protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work) throws HiveException {
  DDLDesc ddlDesc = work.getDDLDesc();
  if (ddlDesc instanceof CreateDatabaseDesc) {
    CreateDatabaseDesc createDb = (CreateDatabaseDesc) ddlDesc;
    // Rebuild a metastore Database object from the desc and check the CREATE privilege on it.
    Database db = new Database(createDb.getName(), createDb.getComment(),
        createDb.getLocationUri(), createDb.getDatabaseProperties());
    authorize(db, Privilege.CREATE);
  }
}
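In HCatalog this hook is not usually registered on its own; HCatSemanticAnalyzer dispatches CREATE DATABASE statements to it. A minimal wiring sketch, assuming the HCatalog jars are on the classpath (verify the fully qualified class name against your Hive release):

import org.apache.hadoop.hive.conf.HiveConf;

public class HookWiring {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // hive.semantic.analyzer.hook is Hive's standard entry point for
    // semantic-analyzer hooks; the authorizeDDLWork override above runs
    // as part of the hook's authorization pass.
    conf.set("hive.semantic.analyzer.hook",
        "org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer");
  }
}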
Use of org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc in project hive by apache.
The class LoadDatabase, method createDbTask.
private Task<?> createDbTask(Database dbObj) throws MetaException {
  // Note that we do not set the location - for repl load, we want it auto-created.
  CreateDatabaseDesc createDbDesc = new CreateDatabaseDesc(dbObj.getName(), dbObj.getDescription(),
      getDbLocation(dbObj), getDbManagedLocation(dbObj), false,
      updateDbProps(dbObj, context.dumpDirectory));
  // If the database already exists, we want this to be an error condition:
  // REPL LOAD is not intended to replace an existing db.
  // TODO: we might revisit this in create-drop-recreate cases; needs some thinking.
  DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), createDbDesc, true,
      (new Path(context.dumpDirectory)).getParent().toString(), this.metricCollector);
  return TaskFactory.get(work, context.hiveConf);
}
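For comparison outside the replication flow, the same desc/work/task plumbing can be driven by hand. A minimal sketch, assuming the three-argument DDLWork constructor used for non-replication DDL; the database name and comment are hypothetical:

import java.util.HashSet;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;

public class CreateDbTaskSketch {
  public static Task<?> createDbTask(HiveConf conf) {
    // Same constructor as above, but with ifNotExists = true: an existing
    // database is tolerated instead of being an error as in repl load.
    CreateDatabaseDesc desc = new CreateDatabaseDesc(
        "sales_raw",        // hypothetical database name
        "example database", // comment
        null,               // location: let the metastore choose
        null,               // managed location: likewise
        true,               // ifNotExists
        null);              // no extra database properties
    DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), desc);
    return TaskFactory.get(work, conf);
  }
}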
Use of org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc in project hive by apache.
The class CreateDatabaseHandler, method handle.
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  MetaData metaData;
  try {
    // Read the dumped metadata describing the source database.
    FileSystem fs = FileSystem.get(new Path(context.location).toUri(), context.hiveConf);
    metaData = EximUtil.readMetaData(fs, new Path(context.location, EximUtil.METADATA_NAME));
  } catch (IOException e) {
    throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
  }
  Database db = metaData.getDatabase();
  String destinationDBName = context.dbName == null ? db.getName() : context.dbName;
  CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(destinationDBName,
      db.getDescription(), null, null, true, db.getParameters());
  Task<DDLWork> createDBTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(),
      createDatabaseDesc, true, context.getDumpDirectory(), context.getMetricCollector()),
      context.hiveConf);
  // Chain ALTER DATABASE tasks after the CREATE so that the source database's
  // properties and owner are carried over to the destination.
  if (!db.getParameters().isEmpty()) {
    AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(
        destinationDBName, db.getParameters(), context.eventOnlyReplicationSpec());
    Task<DDLWork> alterDbProperties = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(),
        alterDbDesc, true, context.getDumpDirectory(), context.getMetricCollector()),
        context.hiveConf);
    createDBTask.addDependentTask(alterDbProperties);
  }
  if (StringUtils.isNotEmpty(db.getOwnerName())) {
    AlterDatabaseSetOwnerDesc alterDbOwner = new AlterDatabaseSetOwnerDesc(destinationDBName,
        new PrincipalDesc(db.getOwnerName(), db.getOwnerType()), context.eventOnlyReplicationSpec());
    Task<DDLWork> alterDbTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(),
        alterDbOwner, true, context.getDumpDirectory(), context.getMetricCollector()),
        context.hiveConf);
    createDBTask.addDependentTask(alterDbTask);
  }
  updatedMetadata.set(context.dmd.getEventTo().toString(), destinationDBName, null, null);
  return Collections.singletonList(createDBTask);
}
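Note that handle returns only the root create task; the ALTER tasks hang off it as direct children via addDependentTask. A small hypothetical debugging helper (the class and method names are our own; Task.getDependentTasks() is the standard accessor) for printing that chain:

import org.apache.hadoop.hive.ql.exec.Task;

public class TaskTreePrinter {
  // Prints a task and its dependents, indented by depth. For the handler
  // above this yields the create-database task with the two ALTER DATABASE
  // tasks as its direct children.
  public static void print(Task<?> task, int depth) {
    StringBuilder indent = new StringBuilder();
    for (int i = 0; i < depth; i++) {
      indent.append("  ");
    }
    System.out.println(indent + task.getClass().getSimpleName());
    if (task.getDependentTasks() != null) {
      for (Task<?> child : task.getDependentTasks()) {
        print(child, depth + 1);
      }
    }
  }
}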