Example 11 with IMetaStoreClient

use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache.

the class SQLStdHiveAccessController method showPrivileges.

@Override
public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj) throws HiveAuthzPluginException {
    try {
        // First authorize the call
        if (principal == null) {
            // only the admin is allowed to list privileges for any user
            if (!isUserAdmin()) {
                throw new HiveAccessControlException("User : " + currentUserName + " has to specify" + " a user name or role in the show grant. " + ADMIN_ONLY_MSG);
            }
        } else {
            //principal is specified, authorize on it
            if (!isUserAdmin()) {
                ensureShowGrantAllowed(principal);
            }
        }
        IMetaStoreClient mClient = metastoreClientFactory.getHiveMetastoreClient();
        List<HivePrivilegeInfo> resPrivInfos = new ArrayList<HivePrivilegeInfo>();
        String principalName = principal == null ? null : principal.getName();
        PrincipalType principalType = principal == null ? null : AuthorizationUtils.getThriftPrincipalType(principal.getType());
        // get metastore/thrift privilege object using metastore api
        List<HiveObjectPrivilege> msObjPrivs = mClient.list_privileges(principalName, principalType, SQLAuthorizationUtils.getThriftHiveObjectRef(privObj));
        // convert the metastore thrift objects to result objects
        for (HiveObjectPrivilege msObjPriv : msObjPrivs) {
            // result principal
            HivePrincipal resPrincipal = new HivePrincipal(msObjPriv.getPrincipalName(), AuthorizationUtils.getHivePrincipalType(msObjPriv.getPrincipalType()));
            // result privilege
            PrivilegeGrantInfo msGrantInfo = msObjPriv.getGrantInfo();
            HivePrivilege resPrivilege = new HivePrivilege(msGrantInfo.getPrivilege(), null);
            // result object
            HiveObjectRef msObjRef = msObjPriv.getHiveObject();
            if (!isSupportedObjectType(msObjRef.getObjectType())) {
                // skip object types this authorization plugin does not support
                continue;
            }
            HivePrivilegeObject resPrivObj = new HivePrivilegeObject(getPluginPrivilegeObjType(msObjRef.getObjectType()), msObjRef.getDbName(), msObjRef.getObjectName(), msObjRef.getPartValues(), msObjRef.getColumnName());
            // result grantor principal
            HivePrincipal grantorPrincipal = new HivePrincipal(msGrantInfo.getGrantor(), AuthorizationUtils.getHivePrincipalType(msGrantInfo.getGrantorType()));
            HivePrivilegeInfo resPrivInfo = new HivePrivilegeInfo(resPrincipal, resPrivilege, resPrivObj, grantorPrincipal, msGrantInfo.isGrantOption(), msGrantInfo.getCreateTime());
            resPrivInfos.add(resPrivInfo);
        }
        return resPrivInfos;
    } catch (Exception e) {
        throw SQLAuthorizationUtils.getPluginException("Error showing privileges", e);
    }
}
Also used : HivePrivilegeInfo(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo) PrivilegeGrantInfo(org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo) HivePrivilege(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege) HiveObjectRef(org.apache.hadoop.hive.metastore.api.HiveObjectRef) ArrayList(java.util.ArrayList) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) HiveAccessControlException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException) HiveAuthzPluginException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException) TException(org.apache.thrift.TException) HiveAccessControlException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException) HiveObjectPrivilege(org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege) HivePrincipal(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType)
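
The method above goes through the SQL-standard authorization plugin, but the underlying metastore call is IMetaStoreClient.list_privileges. Below is a minimal standalone sketch of that call, assuming a hive-site.xml on the classpath that points at a reachable metastore; the principal, database, and table names are placeholders.

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;

public class ListPrivilegesSketch {
    public static void main(String[] args) throws Exception {
        // Assumes hive-site.xml on the classpath points at a running metastore.
        IMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        try {
            // "default", "my_table" and "some_user" are placeholder names.
            HiveObjectRef tableRef = new HiveObjectRef(
                    HiveObjectType.TABLE, "default", "my_table", null, null);
            List<HiveObjectPrivilege> privs =
                    client.list_privileges("some_user", PrincipalType.USER, tableRef);
            for (HiveObjectPrivilege p : privs) {
                System.out.println(p.getPrincipalName() + " -> "
                        + p.getGrantInfo().getPrivilege());
            }
        } finally {
            client.close();
        }
    }
}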

Example 12 with IMetaStoreClient

use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache.

the class SQLStdHiveAccessController method getRoleGrants.

private List<RolePrincipalGrant> getRoleGrants(String principalName, PrincipalType principalType) throws MetaException, TException, HiveAuthzPluginException {
    GetRoleGrantsForPrincipalRequest req = new GetRoleGrantsForPrincipalRequest(principalName, principalType);
    IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();
    GetRoleGrantsForPrincipalResponse resp = metastoreClient.get_role_grants_for_principal(req);
    return resp.getPrincipalGrants();
}
Also used : GetRoleGrantsForPrincipalResponse(org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse) GetRoleGrantsForPrincipalRequest(org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient)
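
For reference, the same request/response pair can be exercised directly against the metastore. A minimal sketch, again assuming a metastore reachable via hive-site.xml and a placeholder principal name:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest;
import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;

public class RoleGrantsSketch {
    public static void main(String[] args) throws Exception {
        // Assumes hive-site.xml on the classpath; "some_user" is a placeholder.
        IMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        try {
            GetRoleGrantsForPrincipalRequest req =
                    new GetRoleGrantsForPrincipalRequest("some_user", PrincipalType.USER);
            GetRoleGrantsForPrincipalResponse resp =
                    client.get_role_grants_for_principal(req);
            for (RolePrincipalGrant grant : resp.getPrincipalGrants()) {
                System.out.println(grant.getRoleName()
                        + " (grantOption=" + grant.isGrantOption() + ")");
            }
        } finally {
            client.close();
        }
    }
}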

Example 13 with IMetaStoreClient

use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache.

the class SQLStdHiveAccessController method grantPrivileges.

@Override
public void grantPrivileges(List<HivePrincipal> hivePrincipals, List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException, HiveAccessControlException {
    hivePrivileges = expandAndValidatePrivileges(hivePrivileges);
    IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();
    // authorize the grant
    GrantPrivAuthUtils.authorize(hivePrincipals, hivePrivileges, hivePrivObject, grantOption, metastoreClient, authenticator.getUserName(), getCurrentRoleNames(), isUserAdmin());
    // grant
    PrivilegeBag privBag = SQLAuthorizationUtils.getThriftPrivilegesBag(hivePrincipals, hivePrivileges, hivePrivObject, grantorPrincipal, grantOption);
    try {
        metastoreClient.grant_privileges(privBag);
    } catch (Exception e) {
        throw SQLAuthorizationUtils.getPluginException("Error granting privileges", e);
    }
}
Also used : PrivilegeBag(org.apache.hadoop.hive.metastore.api.PrivilegeBag) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) HiveAccessControlException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException) HiveAuthzPluginException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException) TException(org.apache.thrift.TException)
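
Note that grantPrivileges first authorizes the request and only then hands a Thrift PrivilegeBag to IMetaStoreClient.grant_privileges. The sketch below builds such a bag by hand and skips the plugin's authorization step, so it is only an illustration of the Thrift objects involved; all principal, database, and table names are placeholders.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

public class GrantPrivilegesSketch {
    public static void main(String[] args) throws Exception {
        IMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        try {
            // Grant SELECT on default.my_table to some_user (placeholder names).
            HiveObjectRef tableRef = new HiveObjectRef(
                    HiveObjectType.TABLE, "default", "my_table", null, null);
            PrivilegeGrantInfo grantInfo = new PrivilegeGrantInfo(
                    "SELECT", (int) (System.currentTimeMillis() / 1000),
                    "admin_user", PrincipalType.USER, false);
            HiveObjectPrivilege objPriv = new HiveObjectPrivilege();
            objPriv.setHiveObject(tableRef);
            objPriv.setPrincipalName("some_user");
            objPriv.setPrincipalType(PrincipalType.USER);
            objPriv.setGrantInfo(grantInfo);
            PrivilegeBag bag = new PrivilegeBag();
            bag.addToPrivileges(objPriv);
            client.grant_privileges(bag);
        } finally {
            client.close();
        }
    }
}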

Example 14 with IMetaStoreClient

use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache.

the class FileOutputFormatContainer method checkOutputSpecs.

@Override
public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
    IMetaStoreClient client = null;
    try {
        HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
        client = HCatUtil.getHiveMetastoreClient(hiveConf);
        handleDuplicatePublish(context, jobInfo, client, new Table(jobInfo.getTableInfo().getTable()));
    } catch (MetaException e) {
        throw new IOException(e);
    } catch (TException e) {
        throw new IOException(e);
    } finally {
        HCatUtil.closeHiveClientQuietly(client);
    }
    if (!jobInfo.isDynamicPartitioningUsed()) {
        JobConf jobConf = new JobConf(context.getConfiguration());
        getBaseOutputFormat().checkOutputSpecs(null, jobConf);
        // checkOutputSpecs might have set some properties; copy them back so the context reflects that
        HCatUtil.copyConf(jobConf, context.getConfiguration());
    }
}
Also used : TException(org.apache.thrift.TException) Table(org.apache.hadoop.hive.ql.metadata.Table) HiveConf(org.apache.hadoop.hive.conf.HiveConf) IOException(java.io.IOException) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) JobConf(org.apache.hadoop.mapred.JobConf) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
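
The HCatalog containers consistently acquire the client through HCatUtil and release it in a finally block. A minimal sketch of that acquire/use/release pattern, here doing a simple table-existence check; the database and table names are placeholders, and the HCatUtil import path is assumed to be org.apache.hive.hcatalog.common.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hive.hcatalog.common.HCatUtil;

public class MetastoreLookupSketch {
    // Acquire the client, use it, and always release it quietly.
    static boolean targetTableExists(Configuration conf) throws IOException {
        IMetaStoreClient client = null;
        try {
            HiveConf hiveConf = HCatUtil.getHiveConf(conf);
            client = HCatUtil.getHiveMetastoreClient(hiveConf);
            return client.tableExists("default", "my_table");
        } catch (Exception e) {
            // Mirror the example above: surface metastore failures as IOException.
            throw new IOException(e);
        } finally {
            HCatUtil.closeHiveClientQuietly(client);
        }
    }
}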

Example 15 with IMetaStoreClient

use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache.

the class DefaultOutputCommitterContainer method cleanupJob.

@Override
public void cleanupJob(JobContext context) throws IOException {
    getBaseOutputCommitter().cleanupJob(HCatMapRedUtil.createJobContext(context));
    //Cancel HCat and JobTracker tokens
    IMetaStoreClient client = null;
    try {
        HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
        client = HCatUtil.getHiveMetastoreClient(hiveConf);
        String tokenStrForm = client.getTokenStrForm();
        if (tokenStrForm != null && context.getConfiguration().get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
            client.cancelDelegationToken(tokenStrForm);
        }
    } catch (Exception e) {
        LOG.warn("Failed to cancel delegation token", e);
    } finally {
        HCatUtil.closeHiveClientQuietly(client);
    }
}
Also used : HiveConf(org.apache.hadoop.hive.conf.HiveConf) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) IOException(java.io.IOException)
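
cleanupJob only cancels a token that was obtained earlier during job setup. A minimal sketch of the matching acquire/cancel pair, meaningful only against a secure (Kerberized) metastore; on an insecure deployment getDelegationToken will typically fail.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.security.UserGroupInformation;

public class DelegationTokenSketch {
    public static void main(String[] args) throws Exception {
        IMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        try {
            // Request a delegation token for the current user, acting as its own renewer.
            String user = UserGroupInformation.getCurrentUser().getUserName();
            String token = client.getDelegationToken(user, user);
            // ... hand the token string to tasks that cannot authenticate directly ...
            client.cancelDelegationToken(token);
        } finally {
            client.close();
        }
    }
}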

Aggregations

IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient): 41
TException (org.apache.thrift.TException): 12
IOException (java.io.IOException): 11
Path (org.apache.hadoop.fs.Path): 11
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 11
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 10
HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient): 10
Table (org.apache.hadoop.hive.metastore.api.Table): 10
Test (org.junit.Test): 10
FileStatus (org.apache.hadoop.fs.FileStatus): 9
FileSystem (org.apache.hadoop.fs.FileSystem): 9
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 8
CompactionRequest (org.apache.hadoop.hive.metastore.api.CompactionRequest): 8
TxnStore (org.apache.hadoop.hive.metastore.txn.TxnStore): 8
HiveEndPoint (org.apache.hive.hcatalog.streaming.HiveEndPoint): 8
HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 7
ArrayList (java.util.ArrayList): 6
DelimitedInputWriter (org.apache.hive.hcatalog.streaming.DelimitedInputWriter): 6
StreamingConnection (org.apache.hive.hcatalog.streaming.StreamingConnection): 6
Table (org.apache.hadoop.hive.ql.metadata.Table): 5