use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.
In the class RevokePrivAuthUtils, the method authorizeAndGetRevokePrivileges:
/**
 * Authorizes a revoke request and returns the metastore privilege objects to be removed.
 * <p>
 * For each principal, the privileges granted directly on the object (grants obtained via
 * roles are not considered) are fetched from the metastore and filtered down to those
 * granted by {@code userName} acting as a USER. Every requested privilege must have a
 * matching direct grant; otherwise the revoke is rejected.
 *
 * @param principals     principals the privileges are being revoked from
 * @param hivePrivileges privileges being revoked
 * @param hivePrivObject object the privileges apply to
 * @param grantOption    whether the grant option is being revoked (currently unused here;
 *                       kept for interface compatibility with callers)
 * @param mClient        metastore client used to look up existing grants
 * @param userName       current user; must be the grantor of every revoked privilege
 * @return the metastore privilege objects matching the requested revokes
 * @throws HiveAuthzPluginException   on metastore/thrift communication errors
 * @throws HiveAccessControlException if any requested privilege has no matching grant
 *                                    by {@code userName}
 */
public static List<HiveObjectPrivilege> authorizeAndGetRevokePrivileges(List<HivePrincipal> principals, List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject, boolean grantOption, IMetaStoreClient mClient, String userName) throws HiveAuthzPluginException, HiveAccessControlException {
    List<HiveObjectPrivilege> matchingPrivs = new ArrayList<HiveObjectPrivilege>();
    StringBuilder errMsg = new StringBuilder();
    for (HivePrincipal principal : principals) {
        // Get metastore/thrift privilege objects for this principal and object; this lists
        // only direct grants, not privileges obtained indirectly via roles.
        List<HiveObjectPrivilege> msObjPrivs;
        try {
            msObjPrivs = mClient.list_privileges(principal.getName(), AuthorizationUtils.getThriftPrincipalType(principal.getType()), SQLAuthorizationUtils.getThriftHiveObjectRef(hivePrivObject));
        } catch (TException e) {
            // MetaException is a subclass of TException, so a single catch covers both;
            // either way the failure is surfaced as a plugin exception with its cause.
            throw new HiveAuthzPluginException(e);
        }
        // The resulting privileges need to be filtered on privilege type and username.
        // Map privilege-type name -> privilege object for grants made by the current user.
        Map<String, HiveObjectPrivilege> priv2privObj = new HashMap<String, HiveObjectPrivilege>();
        for (HiveObjectPrivilege msObjPriv : msObjPrivs) {
            PrivilegeGrantInfo grantInfo = msObjPriv.getGrantInfo();
            // Keep only grants whose grantor is the current user (as a USER principal).
            if (grantInfo.getGrantor() != null && grantInfo.getGrantor().equals(userName) && grantInfo.getGrantorType() == PrincipalType.USER) {
                priv2privObj.put(grantInfo.getPrivilege(), msObjPriv);
            }
            // else skip this one
        }
        // Find the privileges that we are looking for; collect any misses into errMsg so
        // that all missing grants are reported at once rather than one at a time.
        for (HivePrivilege hivePrivilege : hivePrivileges) {
            HiveObjectPrivilege matchedPriv = priv2privObj.get(hivePrivilege.getName());
            if (matchedPriv != null) {
                matchingPrivs.add(matchedPriv);
            } else {
                errMsg.append("Cannot find privilege ").append(hivePrivilege).append(" for ").append(principal).append(" on ").append(hivePrivObject).append(" granted by ").append(userName).append(System.lineSeparator());
            }
        }
    }
    if (errMsg.length() != 0) {
        throw new HiveAccessControlException(errMsg.toString());
    }
    return matchingPrivs;
}
use of org.apache.hadoop.hive.metastore.api.MetaException in project metacat by Netflix.
In the class HiveConnectorFactory, the method createThriftClient:
/**
 * Creates a Thrift-backed hive client from the connector configuration.
 * <p>
 * The socket timeout is read from {@code HIVE_METASTORE_TIMEOUT} (default "20s") and the
 * metastore endpoint from {@code THRIFT_URI}.
 *
 * @return a {@link MetacatHiveClient} connected to the configured metastore URI
 * @throws MetaException            declared for interface compatibility with callers
 * @throws IllegalArgumentException if the configured thrift URI is missing or malformed
 */
private IMetacatHiveClient createThriftClient() throws MetaException {
    final HiveMetastoreClientFactory factory = new HiveMetastoreClientFactory(null, (int) HiveConnectorUtil.toTime(configuration.getOrDefault(HiveConfigConstants.HIVE_METASTORE_TIMEOUT, "20s"), TimeUnit.SECONDS, TimeUnit.MILLISECONDS));
    final String metastoreUri = configuration.get(HiveConfigConstants.THRIFT_URI);
    URI uri = null;
    try {
        uri = new URI(metastoreUri);
    } catch (Exception e) {
        // Broad catch is intentional: a null metastoreUri raises NPE inside new URI(...)
        // and must also be reported as an invalid-configuration error.
        final String message = String.format("Invalid thrift uri %s", metastoreUri);
        // This is a fatal configuration problem; log it at error level with its cause.
        log.error(message, e);
        throw new IllegalArgumentException(message, e);
    }
    return new MetacatHiveClient(uri, factory);
}
use of org.apache.hadoop.hive.metastore.api.MetaException in project metacat by Netflix.
In the class HiveConnectorTableService, the method list:
/**
 * {@inheritDoc}.
 *
 * Lists the tables of the database identified by {@code name}, optionally filtered by
 * {@code prefix}, sorted by table name when {@code sort} is given, and paginated.
 *
 * @throws DatabaseNotFoundException if the database does not exist
 * @throws ConnectorException        on any other metastore failure
 */
@Override
public List<TableInfo> list(@Nonnull @NonNull final ConnectorContext requestContext, @Nonnull @NonNull final QualifiedName name, @Nullable final QualifiedName prefix, @Nullable final Sort sort, @Nullable final Pageable pageable) {
    try {
        final List<TableInfo> tableInfos = Lists.newArrayList();
        // prefix is @Nullable: compute the filter string once (it is loop-invariant) and
        // skip filtering entirely when no prefix was supplied. The original code called
        // prefix.toString() unconditionally and would NPE on a null prefix.
        final String prefixString = (prefix == null) ? null : prefix.toString();
        for (String tableName : metacatHiveClient.getAllTables(name.getDatabaseName())) {
            final QualifiedName qualifiedName = QualifiedName.ofDatabase(name.getCatalogName(), tableName);
            if (prefixString != null && !qualifiedName.toString().startsWith(prefixString)) {
                continue;
            }
            final Table table = metacatHiveClient.getTableByName(name.getDatabaseName(), tableName);
            tableInfos.add(hiveMetacatConverters.toTableInfo(name, table));
        }
        //supporting sort by name only
        if (sort != null) {
            ConnectorUtils.sort(tableInfos, sort, Comparator.comparing(p -> p.getName().getTableName()));
        }
        return ConnectorUtils.paginate(tableInfos, pageable);
    } catch (MetaException exception) {
        throw new DatabaseNotFoundException(name, exception);
    } catch (TException exception) {
        throw new ConnectorException(String.format("Failed list hive table %s", name), exception);
    }
}
use of org.apache.hadoop.hive.metastore.api.MetaException in project metacat by Netflix.
In the class HiveConnectorDatabaseService, the method listNames:
/**
 * {@inheritDoc}.
 *
 * Returns the qualified names of all databases in the catalog, keeping only those whose
 * database name starts with the prefix's database name (when a prefix is supplied),
 * optionally sorted by qualified name and paginated.
 */
@Override
public List<QualifiedName> listNames(@Nonnull @NonNull final ConnectorContext requestContext, @Nonnull @NonNull final QualifiedName name, @Nullable final QualifiedName prefix, @Nullable final Sort sort, @Nullable final Pageable pageable) {
    try {
        // When no prefix is given, every database is included.
        final String filter = (prefix != null) ? prefix.getDatabaseName() : null;
        final List<QualifiedName> result = Lists.newArrayList();
        for (String dbName : metacatHiveClient.getAllDatabases()) {
            if (filter == null || dbName.startsWith(filter)) {
                result.add(QualifiedName.ofDatabase(name.getCatalogName(), dbName));
            }
        }
        // Only sorting by the full qualified name is supported.
        if (sort != null) {
            ConnectorUtils.sort(result, sort, Comparator.comparing(QualifiedName::toString));
        }
        return ConnectorUtils.paginate(result, pageable);
    } catch (MetaException exception) {
        throw new InvalidMetaException(name, exception);
    } catch (TException exception) {
        throw new ConnectorException(String.format("Failed listName hive database %s", name), exception);
    }
}
use of org.apache.hadoop.hive.metastore.api.MetaException in project metacat by Netflix.
In the class HiveConnectorTableService, the method create:
/**
 * Create a table.
 *
 * Converts the given metadata to a hive {@code Table}, applies connector-specific
 * updates, and creates it in the metastore, translating metastore failures into
 * connector exception types.
 *
 * @param requestContext The request context
 * @param tableInfo The resource metadata
 * @throws TableAlreadyExistsException if a table with this name already exists
 * @throws InvalidMetaException        if the metastore rejects the table metadata
 * @throws DatabaseNotFoundException   if the target database does not exist
 * @throws ConnectorException          on any other metastore failure
 */
@Override
public void create(@Nonnull @NonNull final ConnectorContext requestContext, @Nonnull @NonNull final TableInfo tableInfo) {
    final QualifiedName qualifiedName = tableInfo.getName();
    try {
        // Build the thrift table, let the service apply its adjustments, then persist.
        final Table hiveTable = hiveMetacatConverters.fromTableInfo(tableInfo);
        updateTable(requestContext, hiveTable, tableInfo);
        metacatHiveClient.createTable(hiveTable);
    } catch (AlreadyExistsException e) {
        throw new TableAlreadyExistsException(qualifiedName, e);
    } catch (MetaException e) {
        throw new InvalidMetaException(qualifiedName, e);
    } catch (NoSuchObjectException | InvalidObjectException e) {
        // Both indicate the enclosing database is missing or unusable.
        throw new DatabaseNotFoundException(QualifiedName.ofDatabase(qualifiedName.getCatalogName(), qualifiedName.getDatabaseName()), e);
    } catch (TException e) {
        throw new ConnectorException(String.format("Failed create hive table %s", qualifiedName), e);
    }
}
Aggregations