Search in sources :

Example 1 with PrincipalDesc

use of org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc in project hive by apache.

From the class AlterTableSetOwnerAnalyzer, method analyzeCommand.

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpecFromFramework, ASTNode command) throws SemanticException {
    // The first child of the ALTER TABLE ... SET OWNER node carries the new owner principal.
    ASTNode principalNode = (ASTNode) command.getChild(0);
    PrincipalDesc owner = AuthorizationParseUtils.getPrincipalDesc(principalNode);
    // Both the owner's type and name are mandatory for this command.
    if (owner.getType() == null) {
        throw new SemanticException("Owner type can't be null in alter table set owner command");
    }
    if (owner.getName() == null) {
        throw new SemanticException("Owner name can't be null in alter table set owner command");
    }
    AlterTableSetOwnerDesc ownerDesc = new AlterTableSetOwnerDesc(tableName, owner);
    // Register read/write entities, then schedule the DDL task for execution.
    addInputsOutputsAlterTable(tableName, null, ownerDesc, ownerDesc.getType(), false);
    DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), ownerDesc);
    rootTasks.add(TaskFactory.get(ddlWork, conf));
    setAcidDdlDesc(getTable(tableName), ownerDesc);
}
Also used : PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 2 with PrincipalDesc

use of org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc in project hive by apache.

From the class AlterDatabaseSetOwnerAnalyzer, method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // Child 0: target database name; child 1: the new owner principal.
    String databaseName = getUnescapedName((ASTNode) root.getChild(0));
    ASTNode principalNode = (ASTNode) root.getChild(1);
    PrincipalDesc owner = AuthorizationParseUtils.getPrincipalDesc(principalNode);
    // Reject a principal missing either its name or its type.
    if (owner.getName() == null) {
        throw new SemanticException("Owner name can't be null in alter database set owner command");
    }
    if (owner.getType() == null) {
        throw new SemanticException("Owner type can't be null in alter database set owner command");
    }
    AlterDatabaseSetOwnerDesc ownerDesc = new AlterDatabaseSetOwnerDesc(databaseName, owner, null);
    addAlterDatabaseDesc(ownerDesc);
}
Also used : PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 3 with PrincipalDesc

use of org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc in project hive by apache.

From the class AlterDataConnectorSetOwnerAnalyzer, method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // Child 0: connector name; child 1: the new owner principal.
    String connectorName = getUnescapedName((ASTNode) root.getChild(0));
    ASTNode principalNode = (ASTNode) root.getChild(1);
    PrincipalDesc owner = AuthorizationParseUtils.getPrincipalDesc(principalNode);
    // Both the owner name and type must be present.
    if (owner.getName() == null) {
        throw new SemanticException("Owner name can't be null in alter connector set owner command");
    }
    if (owner.getType() == null) {
        throw new SemanticException("Owner type can't be null in alter connector set owner command");
    }
    AlterDataConnectorSetOwnerDesc ownerDesc = new AlterDataConnectorSetOwnerDesc(connectorName, owner);
    addAlterDataConnectorDesc(ownerDesc);
}
Also used : PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 4 with PrincipalDesc

use of org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc in project hive by apache.

From the class AlterDatabaseHandler, method handle.

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    // Replication event handler: translates an ALTER DATABASE event payload into
    // the equivalent DDL task to replay on the replica.
    AlterDatabaseMessage msg = deserializer.getAlterDatabaseMessage(context.dmd.getPayload());
    // Prefer the db name supplied by the replication context; fall back to the one in the event.
    String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    try {
        Database oldDb = msg.getDbObjBefore();
        Database newDb = msg.getDbObjAfter();
        AbstractAlterDatabaseDesc alterDbDesc;
        // NOTE(review): assumes getOwnerName() is non-null on both snapshots — an
        // owner-less database would NPE here; confirm upstream guarantees.
        if ((oldDb.getOwnerType() == newDb.getOwnerType()) && oldDb.getOwnerName().equalsIgnoreCase(newDb.getOwnerName())) {
            // If owner information is unchanged, then DB properties would've changed
            Map<String, String> newDbProps = new HashMap<>();
            Map<String, String> dbProps = newDb.getParameters();
            for (Map.Entry<String, String> entry : dbProps.entrySet()) {
                String key = entry.getKey();
                // Ignore the keys which are local to source warehouse
                if (key.startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX) || key.equals(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString()) || key.equals(ReplicationSpec.KEY.CURR_STATE_ID_TARGET.toString()) || key.equals(ReplUtils.REPL_CHECKPOINT_KEY) || key.equals(ReplChangeManager.SOURCE_OF_REPLICATION) || key.equals(ReplUtils.REPL_FIRST_INC_PENDING_FLAG) || key.equals(ReplConst.REPL_FAILOVER_ENDPOINT)) {
                    continue;
                }
                newDbProps.put(key, entry.getValue());
            }
            alterDbDesc = new AlterDatabaseSetPropertiesDesc(actualDbName, newDbProps, context.eventOnlyReplicationSpec());
        } else {
            // Owner changed: replay the ownership change on the replica.
            alterDbDesc = new AlterDatabaseSetOwnerDesc(actualDbName, new PrincipalDesc(newDb.getOwnerName(), newDb.getOwnerType()), context.eventOnlyReplicationSpec());
        }
        Task<DDLWork> alterDbTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, alterDbDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
        context.log.debug("Added alter database task : {}:{}", alterDbTask.getId(), actualDbName);
        // Only database object is updated
        updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
        return Collections.singletonList(alterDbTask);
    } catch (Exception e) {
        // Re-throw SemanticExceptions untouched; wrap anything else, preserving the cause.
        throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
    }
}
Also used : AbstractAlterDatabaseDesc(org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseDesc) HashMap(java.util.HashMap) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) AlterDatabaseMessage(org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) Database(org.apache.hadoop.hive.metastore.api.Database) AlterDatabaseSetPropertiesDesc(org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc) AlterDatabaseSetOwnerDesc(org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc) HashMap(java.util.HashMap) Map(java.util.Map) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 5 with PrincipalDesc

use of org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc in project hive by apache.

From the class TestHiveAuthorizationTaskFactory, method testRevokeRoleRole.

/**
 * REVOKE ROLE ... FROM ROLE ...
 *
 * Verifies that analyzing a REVOKE ROLE statement produces a RevokeRoleDesc
 * with the expected grantor, roles, and role-typed principals.
 */
@Test
public void testRevokeRoleRole() throws Exception {
    DDLWork work = analyze("REVOKE ROLE " + ROLE + " FROM ROLE " + ROLE);
    // Fixed copy-paste from the grant test: the desc is a revoke, so name and
    // assertion message now say "Revoke" instead of "Grant".
    RevokeRoleDesc revokeDesc = (RevokeRoleDesc) work.getDDLDesc();
    Assert.assertNotNull("Revoke should not be null", revokeDesc);
    Assert.assertFalse("With admin option is not specified", revokeDesc.isGrantOption());
    Assert.assertEquals(currentUser, revokeDesc.getGrantor());
    // Exactly one role is revoked, and it is the expected one.
    for (String role : ListSizeMatcher.inList(revokeDesc.getRoles()).ofSize(1)) {
        Assert.assertEquals(ROLE, role);
    }
    // Exactly one principal, of ROLE type, with the expected name.
    for (PrincipalDesc principal : ListSizeMatcher.inList(revokeDesc.getPrincipals()).ofSize(1)) {
        Assert.assertEquals(PrincipalType.ROLE, principal.getType());
        Assert.assertEquals(ROLE, principal.getName());
    }
}
Also used : PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) RevokeRoleDesc(org.apache.hadoop.hive.ql.ddl.privilege.role.revoke.RevokeRoleDesc) Test(org.junit.Test)

Aggregations

PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc)24 DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)21 Test (org.junit.Test)12 PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc)9 GrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.grant.GrantDesc)5 ShowGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.grant.ShowGrantDesc)5 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)5 RevokeDesc (org.apache.hadoop.hive.ql.ddl.privilege.revoke.RevokeDesc)4 GrantRoleDesc (org.apache.hadoop.hive.ql.ddl.privilege.role.grant.GrantRoleDesc)4 RevokeRoleDesc (org.apache.hadoop.hive.ql.ddl.privilege.role.revoke.RevokeRoleDesc)4 ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc)4 ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)4 AlterDatabaseSetOwnerDesc (org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc)3 PrivilegeObjectDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc)3 Path (org.apache.hadoop.fs.Path)2 Database (org.apache.hadoop.hive.metastore.api.Database)2 AlterDatabaseSetPropertiesDesc (org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc)2 IOException (java.io.IOException)1 ArrayList (java.util.ArrayList)1 HashMap (java.util.HashMap)1