Use of org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc in project hive by apache.
Class AlterDatabaseHandler, method handle:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  AlterDatabaseMessage msg = deserializer.getAlterDatabaseMessage(context.dmd.getPayload());
  String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
  try {
    Database oldDb = msg.getDbObjBefore();
    Database newDb = msg.getDbObjAfter();
    AbstractAlterDatabaseDesc alterDbDesc;
    if ((oldDb.getOwnerType() == newDb.getOwnerType())
        && oldDb.getOwnerName().equalsIgnoreCase(newDb.getOwnerName())) {
      // If owner information is unchanged, then DB properties would've changed
      Map<String, String> newDbProps = new HashMap<>();
      Map<String, String> dbProps = newDb.getParameters();
      for (Map.Entry<String, String> entry : dbProps.entrySet()) {
        String key = entry.getKey();
        // Ignore the keys which are local to source warehouse
        if (key.startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX)
            || key.equals(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString())
            || key.equals(ReplicationSpec.KEY.CURR_STATE_ID_TARGET.toString())
            || key.equals(ReplUtils.REPL_CHECKPOINT_KEY)
            || key.equals(ReplChangeManager.SOURCE_OF_REPLICATION)
            || key.equals(ReplUtils.REPL_FIRST_INC_PENDING_FLAG)
            || key.equals(ReplConst.REPL_FAILOVER_ENDPOINT)) {
          continue;
        }
        newDbProps.put(key, entry.getValue());
      }
      alterDbDesc = new AlterDatabaseSetPropertiesDesc(actualDbName, newDbProps,
          context.eventOnlyReplicationSpec());
    } else {
      alterDbDesc = new AlterDatabaseSetOwnerDesc(actualDbName,
          new PrincipalDesc(newDb.getOwnerName(), newDb.getOwnerType()),
          context.eventOnlyReplicationSpec());
    }
    Task<DDLWork> alterDbTask = TaskFactory.get(
        new DDLWork(readEntitySet, writeEntitySet, alterDbDesc, true,
            context.getDumpDirectory(), context.getMetricCollector()),
        context.hiveConf);
    context.log.debug("Added alter database task : {}:{}", alterDbTask.getId(), actualDbName);
    // Only database object is updated
    updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
    return Collections.singletonList(alterDbTask);
  } catch (Exception e) {
    throw (e instanceof SemanticException)
        ? (SemanticException) e
        : new SemanticException("Error reading message members", e);
  }
}
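The condition at the top of the try block is what steers the event between the two replay paths: an identical owner name and owner type means only the database properties can have changed, while anything else is replayed as a SET OWNER alter. A minimal sketch of that check pulled out into a hypothetical helper (OwnerCheck and ownerUnchanged are illustrative names, not part of the Hive source; Database is the metastore API class):

import org.apache.hadoop.hive.metastore.api.Database;

// Hypothetical helper (not in the Hive source): true when owner name and owner
// type are identical before and after the event, i.e. only the database
// properties can have changed.
final class OwnerCheck {
  static boolean ownerUnchanged(Database before, Database after) {
    return before.getOwnerType() == after.getOwnerType()
        && before.getOwnerName().equalsIgnoreCase(after.getOwnerName());
  }
}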
Use of org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc in project hive by apache.
Class CreateDatabaseHandler, method handle:
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  MetaData metaData;
  try {
    // Read the database metadata written out by the dump.
    FileSystem fs = FileSystem.get(new Path(context.location).toUri(), context.hiveConf);
    metaData = EximUtil.readMetaData(fs, new Path(context.location, EximUtil.METADATA_NAME));
  } catch (IOException e) {
    throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
  }
  Database db = metaData.getDatabase();
  String destinationDBName = context.dbName == null ? db.getName() : context.dbName;
  CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(destinationDBName,
      db.getDescription(), null, null, true, db.getParameters());
  Task<DDLWork> createDBTask = TaskFactory.get(
      new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc, true,
          context.getDumpDirectory(), context.getMetricCollector()),
      context.hiveConf);
  if (!db.getParameters().isEmpty()) {
    // Carry over the source database properties once the database exists.
    AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(
        destinationDBName, db.getParameters(), context.eventOnlyReplicationSpec());
    Task<DDLWork> alterDbProperties = TaskFactory.get(
        new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true,
            context.getDumpDirectory(), context.getMetricCollector()),
        context.hiveConf);
    createDBTask.addDependentTask(alterDbProperties);
  }
  if (StringUtils.isNotEmpty(db.getOwnerName())) {
    // Restore the owner recorded in the dump.
    AlterDatabaseSetOwnerDesc alterDbOwner = new AlterDatabaseSetOwnerDesc(destinationDBName,
        new PrincipalDesc(db.getOwnerName(), db.getOwnerType()),
        context.eventOnlyReplicationSpec());
    Task<DDLWork> alterDbTask = TaskFactory.get(
        new DDLWork(new HashSet<>(), new HashSet<>(), alterDbOwner, true,
            context.getDumpDirectory(), context.getMetricCollector()),
        context.hiveConf);
    createDBTask.addDependentTask(alterDbTask);
  }
  updatedMetadata.set(context.dmd.getEventTo().toString(), destinationDBName, null, null);
  return Collections.singletonList(createDBTask);
}
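Note the ordering: the property and owner alters are not returned alongside the create task but registered through addDependentTask, so they run only after the CREATE DATABASE task has succeeded, and only the root task is handed back. A condensed sketch of that wiring, assuming three already-built tasks (DbTaskChain and its parameter names are hypothetical, not part of the Hive source):

import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.Task;

// Hypothetical helper (not in the Hive source): wires the optional property and
// owner alters behind the CREATE DATABASE task and returns only the root task,
// mirroring the chain built in CreateDatabaseHandler above.
final class DbTaskChain {
  static List<Task<?>> chain(Task<DDLWork> create, Task<DDLWork> props, Task<DDLWork> owner) {
    if (props != null) {
      create.addDependentTask(props);
    }
    if (owner != null) {
      create.addDependentTask(owner);
    }
    return Collections.singletonList(create);
  }
}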
Use of org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc in project hive by apache.
Class LoadDatabase, method setOwnerInfoTask:
private Task<?> setOwnerInfoTask(Database dbObj) {
  AlterDatabaseSetOwnerDesc alterDbDesc = new AlterDatabaseSetOwnerDesc(dbObj.getName(),
      new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType()), null);
  DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true,
      (new Path(context.dumpDirectory)).getParent().toString(), this.metricCollector);
  return TaskFactory.get(work, context.hiveConf);
}
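All three call sites follow the same shape: build a PrincipalDesc from the owner recorded on the Database object, wrap it in an AlterDatabaseSetOwnerDesc, put that descriptor into a DDLWork, and hand it to TaskFactory. A self-contained sketch of that pattern outside a replication flow; SetOwnerExample and buildSetOwnerTask are hypothetical names, and the plain three-argument DDLWork constructor (without the replication-specific arguments used above) is an assumption here:

import java.util.HashSet;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc;
import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;

// Sketch only: builds a "set owner" task for an existing database, mirroring
// the pattern shared by the three snippets above. The replication spec is left
// null because this is not part of a replication load.
final class SetOwnerExample {
  static Task<DDLWork> buildSetOwnerTask(Database db, HiveConf conf) {
    AlterDatabaseSetOwnerDesc desc = new AlterDatabaseSetOwnerDesc(
        db.getName(),
        new PrincipalDesc(db.getOwnerName(), db.getOwnerType()),
        null);
    DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), desc);
    return TaskFactory.get(work, conf);
  }
}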