Example usage of org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthRequest in the Apache Hive project:
the getPartitions method of the Hive class.
/**
 * Retrieves all partitions of the given table from the metastore, attaching
 * authorization info for the current user and group names. An unpartitioned
 * table is represented as a single pseudo-partition wrapping the table itself.
 *
 * @param tbl table whose partitions are requested
 * @return list of partition objects for {@code tbl}
 * @throws HiveException if the metastore request fails
 */
public List<Partition> getPartitions(Table tbl) throws HiveException {
  PerfLogger perfLogger = SessionState.getPerfLogger();
  perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_PARTITIONS);
  try {
    if (!tbl.isPartitioned()) {
      // Unpartitioned tables are modeled as exactly one partition.
      return Collections.singletonList(new Partition(tbl));
    }
    List<org.apache.hadoop.hive.metastore.api.Partition> metastoreParts;
    try {
      GetPartitionsPsWithAuthRequest request = new GetPartitionsPsWithAuthRequest();
      request.setDbName(tbl.getDbName());
      request.setTblName(tbl.getTableName());
      request.setUserName(getUserName());
      request.setGroupNames(getGroupNames());
      // -1 means no limit: fetch every partition of the table.
      request.setMaxParts((short) -1);
      if (AcidUtils.isTransactionalTable(tbl)) {
        // Transactional tables must be read against a snapshot of valid write ids.
        ValidWriteIdList writeIds = getValidWriteIdList(tbl.getDbName(), tbl.getTableName());
        request.setValidWriteIdList(writeIds == null ? null : writeIds.toString());
        request.setId(tbl.getTTable().getId());
      }
      GetPartitionsPsWithAuthResponse response = getMSC().listPartitionsWithAuthInfoRequest(request);
      metastoreParts = response.getPartitions();
    } catch (Exception e) {
      LOG.error("Failed getPartitions", e);
      throw new HiveException(e);
    }
    List<Partition> result = new ArrayList<>(metastoreParts.size());
    for (org.apache.hadoop.hive.metastore.api.Partition metastorePart : metastoreParts) {
      result.add(new Partition(tbl, metastorePart));
    }
    return result;
  } finally {
    perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_PARTITIONS, "HS2-cache");
  }
}
Aggregations