Search in sources :

Example 1 with AlterDatabaseSetOwnerDesc

use of org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc in project hive by apache.

Source: the class AlterDatabaseHandler, method handle.

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    // Decode the ALTER_DATABASE event payload and resolve the DB name on the target.
    AlterDatabaseMessage alterMsg = deserializer.getAlterDatabaseMessage(context.dmd.getPayload());
    String targetDbName = context.isDbNameEmpty() ? alterMsg.getDB() : context.dbName;
    try {
        Database before = alterMsg.getDbObjBefore();
        Database after = alterMsg.getDbObjAfter();
        boolean ownerUnchanged = (before.getOwnerType() == after.getOwnerType())
                && before.getOwnerName().equalsIgnoreCase(after.getOwnerName());
        AbstractAlterDatabaseDesc alterDbDesc;
        if (ownerUnchanged) {
            // If owner information is unchanged, then DB properties would've changed.
            Map<String, String> replicableProps = new HashMap<>();
            for (Map.Entry<String, String> prop : after.getParameters().entrySet()) {
                String key = prop.getKey();
                // Keys that are local to the source warehouse must not be replicated.
                boolean sourceLocal = key.startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX)
                        || key.equals(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString())
                        || key.equals(ReplicationSpec.KEY.CURR_STATE_ID_TARGET.toString())
                        || key.equals(ReplUtils.REPL_CHECKPOINT_KEY)
                        || key.equals(ReplChangeManager.SOURCE_OF_REPLICATION)
                        || key.equals(ReplUtils.REPL_FIRST_INC_PENDING_FLAG)
                        || key.equals(ReplConst.REPL_FAILOVER_ENDPOINT);
                if (!sourceLocal) {
                    replicableProps.put(key, prop.getValue());
                }
            }
            alterDbDesc = new AlterDatabaseSetPropertiesDesc(targetDbName, replicableProps,
                    context.eventOnlyReplicationSpec());
        } else {
            // Owner changed: replay it as an ALTER DATABASE ... SET OWNER on the target.
            alterDbDesc = new AlterDatabaseSetOwnerDesc(targetDbName,
                    new PrincipalDesc(after.getOwnerName(), after.getOwnerType()),
                    context.eventOnlyReplicationSpec());
        }
        Task<DDLWork> alterDbTask = TaskFactory.get(
                new DDLWork(readEntitySet, writeEntitySet, alterDbDesc, true,
                        context.getDumpDirectory(), context.getMetricCollector()),
                context.hiveConf);
        context.log.debug("Added alter database task : {}:{}", alterDbTask.getId(), targetDbName);
        // Only database object is updated
        updatedMetadata.set(context.dmd.getEventTo().toString(), targetDbName, null, null);
        return Collections.singletonList(alterDbTask);
    } catch (Exception e) {
        // Surface SemanticException as-is; wrap anything else with its cause preserved.
        throw (e instanceof SemanticException) ? (SemanticException) e
                : new SemanticException("Error reading message members", e);
    }
}
Also used : AbstractAlterDatabaseDesc(org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseDesc) HashMap(java.util.HashMap) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) AlterDatabaseMessage(org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) Database(org.apache.hadoop.hive.metastore.api.Database) AlterDatabaseSetPropertiesDesc(org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc) AlterDatabaseSetOwnerDesc(org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc) HashMap(java.util.HashMap) Map(java.util.Map) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 2 with AlterDatabaseSetOwnerDesc

use of org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc in project hive by apache.

Source: the class CreateDatabaseHandler, method handle.

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    // Read the database metadata file that the dump wrote under the event location.
    MetaData metaData;
    try {
        Path metadataFile = new Path(context.location, EximUtil.METADATA_NAME);
        FileSystem fs = FileSystem.get(new Path(context.location).toUri(), context.hiveConf);
        metaData = EximUtil.readMetaData(fs, metadataFile);
    } catch (IOException e) {
        throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
    }
    Database db = metaData.getDatabase();
    String targetDbName = (context.dbName == null) ? db.getName() : context.dbName;
    // CREATE DATABASE IF NOT EXISTS with the dumped description and parameters.
    CreateDatabaseDesc createDesc = new CreateDatabaseDesc(targetDbName, db.getDescription(),
            null, null, true, db.getParameters());
    Task<DDLWork> createDbTask = TaskFactory.get(
            new DDLWork(new HashSet<>(), new HashSet<>(), createDesc, true,
                    context.getDumpDirectory(), context.getMetricCollector()),
            context.hiveConf);
    if (!db.getParameters().isEmpty()) {
        // Re-apply the DB properties under the event's replication spec, after creation.
        AlterDatabaseSetPropertiesDesc propsDesc = new AlterDatabaseSetPropertiesDesc(
                targetDbName, db.getParameters(), context.eventOnlyReplicationSpec());
        Task<DDLWork> setPropsTask = TaskFactory.get(
                new DDLWork(new HashSet<>(), new HashSet<>(), propsDesc, true,
                        context.getDumpDirectory(), context.getMetricCollector()),
                context.hiveConf);
        createDbTask.addDependentTask(setPropsTask);
    }
    if (StringUtils.isNotEmpty(db.getOwnerName())) {
        // Carry over the owner from the source DB, again after creation.
        AlterDatabaseSetOwnerDesc ownerDesc = new AlterDatabaseSetOwnerDesc(targetDbName,
                new PrincipalDesc(db.getOwnerName(), db.getOwnerType()),
                context.eventOnlyReplicationSpec());
        Task<DDLWork> setOwnerTask = TaskFactory.get(
                new DDLWork(new HashSet<>(), new HashSet<>(), ownerDesc, true,
                        context.getDumpDirectory(), context.getMetricCollector()),
                context.hiveConf);
        createDbTask.addDependentTask(setOwnerTask);
    }
    updatedMetadata.set(context.dmd.getEventTo().toString(), targetDbName, null, null);
    return Collections.singletonList(createDbTask);
}
Also used : Path(org.apache.hadoop.fs.Path) IOException(java.io.IOException) PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) CreateDatabaseDesc(org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc) MetaData(org.apache.hadoop.hive.ql.parse.repl.load.MetaData) FileSystem(org.apache.hadoop.fs.FileSystem) Database(org.apache.hadoop.hive.metastore.api.Database) AlterDatabaseSetPropertiesDesc(org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc) AlterDatabaseSetOwnerDesc(org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HashSet(java.util.HashSet)

Example 3 with AlterDatabaseSetOwnerDesc

use of org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc in project hive by apache.

Source: the class LoadDatabase, method setOwnerInfoTask.

private Task<?> setOwnerInfoTask(Database dbObj) {
    // Build an ALTER DATABASE ... SET OWNER task from the dumped database object.
    PrincipalDesc owner = new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType());
    AlterDatabaseSetOwnerDesc ownerDesc = new AlterDatabaseSetOwnerDesc(dbObj.getName(), owner, null);
    String dumpRoot = new Path(context.dumpDirectory).getParent().toString();
    DDLWork ddlWork = new DDLWork(new HashSet<>(), new HashSet<>(), ownerDesc, true,
            dumpRoot, this.metricCollector);
    return TaskFactory.get(ddlWork, context.hiveConf);
}
Also used : Path(org.apache.hadoop.fs.Path) PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterDatabaseSetOwnerDesc(org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc)

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)3 AlterDatabaseSetOwnerDesc (org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc)3 PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc)3 Path (org.apache.hadoop.fs.Path)2 Database (org.apache.hadoop.hive.metastore.api.Database)2 AlterDatabaseSetPropertiesDesc (org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc)2 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)2 IOException (java.io.IOException)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 Map (java.util.Map)1 FileSystem (org.apache.hadoop.fs.FileSystem)1 AlterDatabaseMessage (org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage)1 AbstractAlterDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseDesc)1 CreateDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc)1 MetaData (org.apache.hadoop.hive.ql.parse.repl.load.MetaData)1