Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache: class SQLStdHiveAccessController, method showPrivileges.
@Override
public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
    throws HiveAuthzPluginException {
  try {
    // First authorize the call
    if (principal == null) {
      // only the admin is allowed to list privileges for any user
      if (!isUserAdmin()) {
        throw new HiveAccessControlException("User : " + currentUserName + " has to specify"
            + " a user name or role in the show grant. " + ADMIN_ONLY_MSG);
      }
    } else {
      // principal is specified, authorize on it
      if (!isUserAdmin()) {
        ensureShowGrantAllowed(principal);
      }
    }
    IMetaStoreClient mClient = metastoreClientFactory.getHiveMetastoreClient();
    List<HivePrivilegeInfo> resPrivInfos = new ArrayList<HivePrivilegeInfo>();
    String principalName = principal == null ? null : principal.getName();
    PrincipalType principalType = principal == null ? null
        : AuthorizationUtils.getThriftPrincipalType(principal.getType());
    // get metastore/thrift privilege objects using the metastore api
    List<HiveObjectPrivilege> msObjPrivs = mClient.list_privileges(principalName, principalType,
        SQLAuthorizationUtils.getThriftHiveObjectRef(privObj));
    // convert the metastore thrift objects to result objects
    for (HiveObjectPrivilege msObjPriv : msObjPrivs) {
      // result principal
      HivePrincipal resPrincipal = new HivePrincipal(msObjPriv.getPrincipalName(),
          AuthorizationUtils.getHivePrincipalType(msObjPriv.getPrincipalType()));
      // result privilege
      PrivilegeGrantInfo msGrantInfo = msObjPriv.getGrantInfo();
      HivePrivilege resPrivilege = new HivePrivilege(msGrantInfo.getPrivilege(), null);
      // result object
      HiveObjectRef msObjRef = msObjPriv.getHiveObject();
      if (!isSupportedObjectType(msObjRef.getObjectType())) {
        // ignore unsupported object types
        continue;
      }
      HivePrivilegeObject resPrivObj = new HivePrivilegeObject(
          getPluginPrivilegeObjType(msObjRef.getObjectType()), msObjRef.getDbName(),
          msObjRef.getObjectName(), msObjRef.getPartValues(), msObjRef.getColumnName());
      // result grantor principal
      HivePrincipal grantorPrincipal = new HivePrincipal(msGrantInfo.getGrantor(),
          AuthorizationUtils.getHivePrincipalType(msGrantInfo.getGrantorType()));
      HivePrivilegeInfo resPrivInfo = new HivePrivilegeInfo(resPrincipal, resPrivilege, resPrivObj,
          grantorPrincipal, msGrantInfo.isGrantOption(), msGrantInfo.getCreateTime());
      resPrivInfos.add(resPrivInfo);
    }
    return resPrivInfos;
  } catch (Exception e) {
    throw SQLAuthorizationUtils.getPluginException("Error showing privileges", e);
  }
}
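For context, a minimal caller-side sketch of showPrivileges follows. It assumes an already-initialized SQLStdHiveAccessController named controller; the principal and table names are illustrative, not taken from the Hive source.

// List privileges on default.sales held by user "alice" (illustrative names).
HivePrincipal user = new HivePrincipal("alice", HivePrincipal.HivePrincipalType.USER);
HivePrivilegeObject table = new HivePrivilegeObject(
    HivePrivilegeObject.HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "sales");
for (HivePrivilegeInfo info : controller.showPrivileges(user, table)) {
  System.out.println(info.getPrincipal().getName() + " -> " + info.getPrivilege().getName());
}

Note that when the first argument is null the method above only succeeds for admins, matching the SHOW GRANT semantics in the authorization check.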
Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache: class SQLStdHiveAccessController, method getRoleGrants.
private List<RolePrincipalGrant> getRoleGrants(String principalName, PrincipalType principalType)
    throws MetaException, TException, HiveAuthzPluginException {
  GetRoleGrantsForPrincipalRequest req =
      new GetRoleGrantsForPrincipalRequest(principalName, principalType);
  IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();
  GetRoleGrantsForPrincipalResponse resp = metastoreClient.get_role_grants_for_principal(req);
  return resp.getPrincipalGrants();
}
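The same request/response pair can be exercised directly against a metastore client. A hedged sketch, assuming client is an open IMetaStoreClient and "etl_user" is a made-up principal name:

GetRoleGrantsForPrincipalRequest req =
    new GetRoleGrantsForPrincipalRequest("etl_user", PrincipalType.USER);
GetRoleGrantsForPrincipalResponse resp = client.get_role_grants_for_principal(req);
for (RolePrincipalGrant grant : resp.getPrincipalGrants()) {
  // each RolePrincipalGrant carries the role name plus grantor and grant-option metadata
  System.out.println(grant.getRoleName() + " granted to " + grant.getPrincipalName());
}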
Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache: class SQLStdHiveAccessController, method grantPrivileges.
@Override
public void grantPrivileges(List<HivePrincipal> hivePrincipals, List<HivePrivilege> hivePrivileges,
    HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption)
    throws HiveAuthzPluginException, HiveAccessControlException {
  hivePrivileges = expandAndValidatePrivileges(hivePrivileges);
  IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();
  // authorize the grant
  GrantPrivAuthUtils.authorize(hivePrincipals, hivePrivileges, hivePrivObject, grantOption,
      metastoreClient, authenticator.getUserName(), getCurrentRoleNames(), isUserAdmin());
  // grant
  PrivilegeBag privBag = SQLAuthorizationUtils.getThriftPrivilegesBag(hivePrincipals,
      hivePrivileges, hivePrivObject, grantorPrincipal, grantOption);
  try {
    metastoreClient.grant_privileges(privBag);
  } catch (Exception e) {
    throw SQLAuthorizationUtils.getPluginException("Error granting privileges", e);
  }
}
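A caller-side sketch of grantPrivileges, mirroring GRANT SELECT ON TABLE default.sales TO USER bob. It assumes controller and adminPrincipal are set up elsewhere and uses java.util.Arrays; all identifiers are illustrative.

List<HivePrincipal> grantees = Arrays.asList(
    new HivePrincipal("bob", HivePrincipal.HivePrincipalType.USER));
List<HivePrivilege> privileges = Arrays.asList(new HivePrivilege("SELECT", null));
HivePrivilegeObject table = new HivePrivilegeObject(
    HivePrivilegeObject.HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "sales");
// false: the grantee may not pass the privilege on (no WITH GRANT OPTION)
controller.grantPrivileges(grantees, privileges, table, adminPrincipal, false);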
Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache: class FileOutputFormatContainer, method checkOutputSpecs.
@Override
public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
  OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
  IMetaStoreClient client = null;
  try {
    HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
    client = HCatUtil.getHiveMetastoreClient(hiveConf);
    handleDuplicatePublish(context, jobInfo, client, new Table(jobInfo.getTableInfo().getTable()));
  } catch (MetaException e) {
    throw new IOException(e);
  } catch (TException e) {
    throw new IOException(e);
  } finally {
    HCatUtil.closeHiveClientQuietly(client);
  }
  if (!jobInfo.isDynamicPartitioningUsed()) {
    JobConf jobConf = new JobConf(context.getConfiguration());
    getBaseOutputFormat().checkOutputSpecs(null, jobConf);
    // checkOutputSpecs might have set properties; make the context reflect that
    HCatUtil.copyConf(jobConf, context.getConfiguration());
  }
}
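The acquire-use-close pattern around HCatUtil is worth seeing in isolation, since checkOutputSpecs depends on it for correctness: the client must be closed even when the metastore lookup fails. A sketch, assuming conf is a Hadoop Configuration and the table name is illustrative:

IMetaStoreClient client = null;
try {
  HiveConf hiveConf = HCatUtil.getHiveConf(conf);
  client = HCatUtil.getHiveMetastoreClient(hiveConf);
  if (!client.tableExists("default", "sales")) {
    throw new IOException("Output table does not exist");
  }
} catch (TException e) {
  // MetaException is a TException subtype, so one catch covers both
  throw new IOException(e);
} finally {
  // safe even if client is still null
  HCatUtil.closeHiveClientQuietly(client);
}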
Use of org.apache.hadoop.hive.metastore.IMetaStoreClient in project hive by apache: class DefaultOutputCommitterContainer, method cleanupJob.
@Override
public void cleanupJob(JobContext context) throws IOException {
  getBaseOutputCommitter().cleanupJob(HCatMapRedUtil.createJobContext(context));
  // Cancel HCat and JobTracker tokens
  IMetaStoreClient client = null;
  try {
    HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
    client = HCatUtil.getHiveMetastoreClient(hiveConf);
    String tokenStrForm = client.getTokenStrForm();
    if (tokenStrForm != null
        && context.getConfiguration().get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
      client.cancelDelegationToken(tokenStrForm);
    }
  } catch (Exception e) {
    LOG.warn("Failed to cancel delegation token", e);
  } finally {
    HCatUtil.closeHiveClientQuietly(client);
  }
}
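cancelDelegationToken is the counterpart of the token acquisition performed at job submission. A hedged sketch of the pair on IMetaStoreClient, assuming client is an open connection and using made-up Kerberos principal names:

// At submission time: obtain a delegation token the tasks can ship with the job
String tokenStrForm = client.getDelegationToken("hcat@EXAMPLE.COM", "hcat@EXAMPLE.COM");
// ... job runs ...
// At cleanup time: invalidate the token so it cannot be reused
client.cancelDelegationToken(tokenStrForm);

Cancellation failures are only logged in cleanupJob above, a deliberate choice: a stale token eventually expires on its own, so it should not fail an otherwise successful job.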