
Example 1 with GetPartitionsPsWithAuthResponse

Use of org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse in project hive by apache.

In the class TestHiveMetaStoreClient, the method listPartitionsWithAuthInfoRequest:

// Test stub: asserts that the request carries a table id and a valid write ID list,
// then returns an empty response.
public GetPartitionsPsWithAuthResponse listPartitionsWithAuthInfoRequest(GetPartitionsPsWithAuthRequest req) throws MetaException, TException, NoSuchObjectException {
    assertNotNull(req.getId());
    assertNotNull(req.getValidWriteIdList());
    GetPartitionsPsWithAuthResponse res = new GetPartitionsPsWithAuthResponse();
    return res;
}
Also used : GetPartitionsPsWithAuthResponse(org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse)
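
For orientation, a request that satisfies the stub's two assertions can be built with the same setters used in Example 4 below. This is a minimal sketch rather than Hive source; the class name ListPartitionsRequestSketch and all argument values are placeholders.

import org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthRequest;

public class ListPartitionsRequestSketch {
    // Builds a request that satisfies the stub's assertions on getId() and getValidWriteIdList().
    static GetPartitionsPsWithAuthRequest buildRequest(String dbName, String tblName,
                                                       long tableId, String validWriteIdList) {
        GetPartitionsPsWithAuthRequest req = new GetPartitionsPsWithAuthRequest();
        req.setDbName(dbName);
        req.setTblName(tblName);
        req.setMaxParts((short) -1);               // -1 asks for all partitions, mirroring Example 4
        req.setId(tableId);                        // asserted non-null by the stub
        req.setValidWriteIdList(validWriteIdList); // asserted non-null by the stub
        return req;
    }
}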

Example 2 with GetPartitionsPsWithAuthResponse

Use of org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse in project hive by apache.

In the class SessionHiveMetaStoreClient, the method listPartitionsWithAuthInfoRequest:

@Override
public GetPartitionsPsWithAuthResponse listPartitionsWithAuthInfoRequest(GetPartitionsPsWithAuthRequest req) throws MetaException, TException, NoSuchObjectException {
    // Temp tables are handled locally; anything else is delegated to the remote metastore.
    org.apache.hadoop.hive.metastore.api.Table table = getTempTable(req.getDbName(), req.getTblName());
    if (table == null) {
        return super.listPartitionsWithAuthInfoRequest(req);
    }
    TempTable tt = getPartitionedTempTable(table);
    List<Partition> partitions = tt.listPartitionsWithAuthInfo(req.getUserName(), req.getGroupNames());
    GetPartitionsPsWithAuthResponse response = new GetPartitionsPsWithAuthResponse();
    // Respect the request's maxParts limit before returning.
    response.setPartitions(getPartitionsForMaxParts(req.getTblName(), partitions, req.getMaxParts()));
    return response;
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) GetPartitionsPsWithAuthResponse(org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse)
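
getPartitionsForMaxParts is not shown in this snippet. As a rough idea of what a maxParts limit usually means in the metastore API, here is a hypothetical sketch; the real SessionHiveMetaStoreClient helper may differ, for instance in how it treats a negative limit.

import java.util.List;
import org.apache.hadoop.hive.metastore.api.Partition;

final class MaxPartsSketch {
    // Hypothetical truncation: a negative maxParts is treated as "return everything".
    static List<Partition> truncateToMaxParts(List<Partition> partitions, short maxParts) {
        if (maxParts < 0 || partitions.size() <= maxParts) {
            return partitions;
        }
        return partitions.subList(0, maxParts);
    }
}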

Example 3 with GetPartitionsPsWithAuthResponse

Use of org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse in project hive by apache.

In the class SessionHiveMetaStoreClient, the method listPartitionsWithAuthInfoRequestInternal:

@Override
protected GetPartitionsPsWithAuthResponse listPartitionsWithAuthInfoRequestInternal(GetPartitionsPsWithAuthRequest req) throws TException {
    Map<Object, Object> queryCache = getQueryCache();
    if (queryCache != null) {
        // Retrieve or populate cache
        CacheKey cacheKey = new CacheKey(KeyType.LIST_PARTITIONS_AUTH_INFO_REQ, req);
        GetPartitionsPsWithAuthResponse v = (GetPartitionsPsWithAuthResponse) queryCache.get(cacheKey);
        if (v == null) {
            v = super.listPartitionsWithAuthInfoRequestInternal(req);
            queryCache.put(cacheKey, v);
        } else {
            LOG.debug("Query level HMS cache: method=listPartitionsWithAuthInfoRequestInternal, dbName={}, tblName={}, partVals={}", req.getDbName(), req.getTblName(), req.getPartVals());
        }
        return v;
    }
    return super.listPartitionsWithAuthInfoRequestInternal(req);
}
Also used : GetPartitionsPsWithAuthResponse(org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse)
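
The branch above is a plain check-compute-store memoization. The sketch below restates that pattern generically; CacheKey, KeyType and the query-level cache are Hive-internal, so it deliberately uses only java.util types and is not the actual implementation.

import java.util.Map;
import java.util.function.Supplier;

final class QueryCacheSketch {
    // Returns the cached value for key, computing and caching it on a miss.
    @SuppressWarnings("unchecked")
    static <V> V getOrCompute(Map<Object, Object> cache, Object key, Supplier<V> loader) {
        V value = (V) cache.get(key);
        if (value == null) {
            value = loader.get();   // fall through to the underlying metastore call
            cache.put(key, value);  // populate the query-level cache for later lookups
        }
        return value;
    }
}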

Example 4 with GetPartitionsPsWithAuthResponse

Use of org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse in project hive by apache.

In the class Hive, the method getPartitions:

/**
 * Get all the partitions that the table has.
 *
 * @param tbl
 *          table object for which the partitions are needed
 * @return list of partition objects
 */
public List<Partition> getPartitions(Table tbl) throws HiveException {
    PerfLogger perfLogger = SessionState.getPerfLogger();
    perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_PARTITIONS);
    try {
        if (tbl.isPartitioned()) {
            List<org.apache.hadoop.hive.metastore.api.Partition> tParts;
            try {
                GetPartitionsPsWithAuthRequest req = new GetPartitionsPsWithAuthRequest();
                req.setTblName(tbl.getTableName());
                req.setDbName(tbl.getDbName());
                req.setUserName(getUserName());
                req.setMaxParts((short) -1);
                req.setGroupNames(getGroupNames());
                if (AcidUtils.isTransactionalTable(tbl)) {
                    ValidWriteIdList validWriteIdList = getValidWriteIdList(tbl.getDbName(), tbl.getTableName());
                    req.setValidWriteIdList(validWriteIdList != null ? validWriteIdList.toString() : null);
                    req.setId(tbl.getTTable().getId());
                }
                GetPartitionsPsWithAuthResponse res = getMSC().listPartitionsWithAuthInfoRequest(req);
                tParts = res.getPartitions();
            } catch (Exception e) {
                LOG.error("Failed getPartitions", e);
                throw new HiveException(e);
            }
            List<Partition> parts = new ArrayList<>(tParts.size());
            for (org.apache.hadoop.hive.metastore.api.Partition tpart : tParts) {
                parts.add(new Partition(tbl, tpart));
            }
            return parts;
        } else {
            return Collections.singletonList(new Partition(tbl));
        }
    } finally {
        perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_PARTITIONS, "HS2-cache");
    }
}
Also used :
PerfLogger (org.apache.hadoop.hive.ql.log.PerfLogger)
ArrayList (java.util.ArrayList)
GetPartitionsPsWithAuthRequest (org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthRequest)
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException)
LockException (org.apache.hadoop.hive.ql.lockmgr.LockException)
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)
IOException (java.io.IOException)
UnknownHostException (java.net.UnknownHostException)
ExecutionException (java.util.concurrent.ExecutionException)
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
MetaException (org.apache.hadoop.hive.metastore.api.MetaException)
HiveMetaException (org.apache.hadoop.hive.metastore.HiveMetaException)
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
TApplicationException (org.apache.thrift.TApplicationException)
TException (org.apache.thrift.TException)
SerDeException (org.apache.hadoop.hive.serde2.SerDeException)
FileNotFoundException (java.io.FileNotFoundException)
JDODataStoreException (javax.jdo.JDODataStoreException)
ValidWriteIdList (org.apache.hadoop.hive.common.ValidWriteIdList)
GetPartitionsPsWithAuthResponse (org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse)
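
From the caller's side, Example 4 is reached simply through Hive.getPartitions. A minimal usage sketch, assuming an already configured HiveConf and a hypothetical partitioned table default.orders:

import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

public class GetPartitionsUsageSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();               // assumes metastore settings are on the classpath
        Hive db = Hive.get(conf);
        Table tbl = db.getTable("default", "orders"); // hypothetical partitioned table
        List<Partition> parts = db.getPartitions(tbl); // issues the request built in Example 4
        parts.forEach(p -> System.out.println(p.getName()));
    }
}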

Aggregations

GetPartitionsPsWithAuthResponse (org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse) 4
FileNotFoundException (java.io.FileNotFoundException) 1
IOException (java.io.IOException) 1
UnknownHostException (java.net.UnknownHostException) 1
ArrayList (java.util.ArrayList) 1
ExecutionException (java.util.concurrent.ExecutionException) 1
JDODataStoreException (javax.jdo.JDODataStoreException) 1
ValidWriteIdList (org.apache.hadoop.hive.common.ValidWriteIdList) 1
HiveMetaException (org.apache.hadoop.hive.metastore.HiveMetaException) 1
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException) 1
GetPartitionsPsWithAuthRequest (org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthRequest) 1
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException) 1
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 1
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException) 1
Partition (org.apache.hadoop.hive.metastore.api.Partition) 1
LockException (org.apache.hadoop.hive.ql.lockmgr.LockException) 1
PerfLogger (org.apache.hadoop.hive.ql.log.PerfLogger) 1
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException) 1
SerDeException (org.apache.hadoop.hive.serde2.SerDeException) 1
TApplicationException (org.apache.thrift.TApplicationException) 1