Search in sources:

Example 36 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

Source: the getBlocks method of class NamenodeProtocolServerSideTranslatorPB.

/**
 * Server-side translation for the getBlocks RPC: decodes the protobuf
 * request into native types, delegates to the wrapped NamenodeProtocol
 * implementation, and encodes the result as a protobuf response.
 *
 * @param unused  RpcController, not consulted
 * @param request protobuf request carrying the target datanode and size
 * @return protobuf response wrapping the blocks with locations
 * @throws ServiceException wrapping any IOException from the implementation
 */
@Override
public GetBlocksResponseProto getBlocks(RpcController unused, GetBlocksRequestProto request) throws ServiceException {
    final DatanodeInfo datanode = new DatanodeInfoBuilder()
        .setNodeID(PBHelperClient.convert(request.getDatanode()))
        .build();
    final BlocksWithLocations located;
    try {
        located = impl.getBlocks(datanode, request.getSize());
    } catch (IOException e) {
        // RPC boundary convention: surface I/O failures as ServiceException.
        throw new ServiceException(e);
    }
    return GetBlocksResponseProto.newBuilder()
        .setBlocks(PBHelper.convert(located))
        .build();
}
Also used : DatanodeInfo(org.apache.hadoop.hdfs.protocol.DatanodeInfo) BlocksWithLocations(org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations) DatanodeInfoBuilder(org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder) ServiceException(com.google.protobuf.ServiceException) IOException(java.io.IOException)

Example 37 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

Source: the getBlockKeys method of class NamenodeProtocolServerSideTranslatorPB.

/**
 * Server-side translation for the getBlockKeys RPC: fetches the exported
 * block keys from the wrapped implementation and builds the protobuf
 * response, leaving the keys field unset when none are available.
 *
 * @param unused  RpcController, not consulted
 * @param request protobuf request (carries no payload used here)
 * @return protobuf response, with keys set only when the impl returned some
 * @throws ServiceException wrapping any IOException from the implementation
 */
@Override
public GetBlockKeysResponseProto getBlockKeys(RpcController unused, GetBlockKeysRequestProto request) throws ServiceException {
    final ExportedBlockKeys exportedKeys;
    try {
        exportedKeys = impl.getBlockKeys();
    } catch (IOException e) {
        // RPC boundary convention: surface I/O failures as ServiceException.
        throw new ServiceException(e);
    }
    GetBlockKeysResponseProto.Builder response = GetBlockKeysResponseProto.newBuilder();
    if (exportedKeys != null) {
        // Optional proto field: only set when the implementation has keys.
        response.setKeys(PBHelper.convert(exportedKeys));
    }
    return response.build();
}
Also used : ServiceException(com.google.protobuf.ServiceException) GetBlockKeysResponseProto(org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.GetBlockKeysResponseProto) IOException(java.io.IOException) ExportedBlockKeys(org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys)

Example 38 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

Source: the submitDiskBalancerPlan method of class ClientDatanodeProtocolServerSideTranslatorPB.

/**
 * Server-side translation for the submitDiskBalancerPlan RPC: unpacks the
 * optional request fields (applying their defaults), submits the plan to
 * the wrapped implementation, and returns an empty acknowledgement.
 *
 * @param controller RpcController, not consulted
 * @param request    protobuf request carrying the plan and its metadata
 * @return empty protobuf acknowledgement response
 * @throws ServiceException wrapping any failure from the implementation
 */
@Override
public SubmitDiskBalancerPlanResponseProto submitDiskBalancerPlan(RpcController controller, SubmitDiskBalancerPlanRequestProto request) throws ServiceException {
    try {
        // Optional fields with their documented defaults.
        final long planVersion = request.hasPlanVersion() ? request.getPlanVersion() : 1;
        final String planFile = request.hasPlanFile() ? request.getPlanFile() : "";
        final boolean skipDateCheck = request.hasIgnoreDateCheck() && request.getIgnoreDateCheck();
        impl.submitDiskBalancerPlan(request.getPlanID(), planVersion, planFile,
            request.getPlan(), skipDateCheck);
        return SubmitDiskBalancerPlanResponseProto.newBuilder().build();
    } catch (Exception e) {
        // Deliberately broad: every failure crosses the RPC boundary as a
        // ServiceException, matching the original contract.
        throw new ServiceException(e);
    }
}
Also used : ServiceException(com.google.protobuf.ServiceException) SubmitDiskBalancerPlanResponseProto(org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.SubmitDiskBalancerPlanResponseProto) ServiceException(com.google.protobuf.ServiceException) IOException(java.io.IOException)

Example 39 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

Source: the canRollBack method of class QJournalProtocolServerSideTranslatorPB.

/**
 * Server-side translation for the canRollBack RPC: decodes the current and
 * previous storage descriptors as journal-node storage, asks the wrapped
 * implementation whether a rollback is possible, and encodes the answer.
 *
 * @param controller RpcController, not consulted
 * @param request    protobuf request carrying journal id, storage states and
 *                   target layout version
 * @return protobuf response with the canRollBack verdict
 * @throws ServiceException wrapping any IOException from the implementation
 */
@Override
public CanRollBackResponseProto canRollBack(RpcController controller, CanRollBackRequestProto request) throws ServiceException {
    try {
        StorageInfo storage = PBHelper.convert(request.getStorage(), NodeType.JOURNAL_NODE);
        StorageInfo prevStorage = PBHelper.convert(request.getPrevStorage(), NodeType.JOURNAL_NODE);
        // Primitive boolean instead of the original boxed Boolean local:
        // avoids accidental autoboxing; setCanRollBack() unboxed it anyway,
        // so the observable behavior (including an NPE on a null return
        // from the impl) is unchanged.
        boolean canRollBack = impl.canRollBack(convert(request.getJid()), storage,
            prevStorage, request.getTargetLayoutVersion());
        return CanRollBackResponseProto.newBuilder().setCanRollBack(canRollBack).build();
    } catch (IOException e) {
        // RPC boundary convention: surface I/O failures as ServiceException.
        throw new ServiceException(e);
    }
}
Also used : ServiceException(com.google.protobuf.ServiceException) StorageInfo(org.apache.hadoop.hdfs.server.common.StorageInfo) IOException(java.io.IOException)

Example 40 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

Source: the getDelegationToken method of class ClientNamenodeProtocolServerSideTranslatorPB.

/**
 * Server-side translation for the getDelegationToken RPC: asks the
 * underlying server for a delegation token for the requested renewer and
 * builds the protobuf response, leaving the token field unset when the
 * server returned none.
 *
 * @param controller RpcController, not consulted
 * @param req        protobuf request naming the token renewer
 * @return protobuf response, with token set only when one was issued
 * @throws ServiceException wrapping any IOException from the server
 */
@Override
public GetDelegationTokenResponseProto getDelegationToken(RpcController controller, GetDelegationTokenRequestProto req) throws ServiceException {
    try {
        GetDelegationTokenResponseProto.Builder response =
            GetDelegationTokenResponseProto.newBuilder();
        Token<DelegationTokenIdentifier> token =
            server.getDelegationToken(new Text(req.getRenewer()));
        if (token != null) {
            // Optional proto field: only set when a token was issued.
            response.setToken(PBHelperClient.convert(token));
        }
        return response.build();
    } catch (IOException e) {
        // RPC boundary convention: surface I/O failures as ServiceException.
        throw new ServiceException(e);
    }
}
Also used : ServiceException(com.google.protobuf.ServiceException) DelegationTokenIdentifier(org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier) GetDelegationTokenResponseProto(org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException)

Aggregations

ServiceException (com.google.protobuf.ServiceException)139 IOException (java.io.IOException)66 Test (org.junit.Test)22 ArrayList (java.util.ArrayList)12 Configuration (org.apache.hadoop.conf.Configuration)11 FsPermission (org.apache.hadoop.fs.permission.FsPermission)5 Table (org.apache.hadoop.hbase.client.Table)5 CoprocessorRpcChannel (org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel)5 InetSocketAddress (java.net.InetSocketAddress)4 DatanodeCommand (org.apache.hadoop.hdfs.server.protocol.DatanodeCommand)4 ByteString (com.google.protobuf.ByteString)3 InterruptedIOException (java.io.InterruptedIOException)3 ConnectException (java.net.ConnectException)3 SocketTimeoutException (java.net.SocketTimeoutException)3 Callable (java.util.concurrent.Callable)3 ExecutionException (java.util.concurrent.ExecutionException)3 ExecutorService (java.util.concurrent.ExecutorService)3 EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone)3 LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock)3 Server (org.apache.hadoop.ipc.Server)3