Search in sources :

Example 41 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

The class ClientDatanodeProtocolTranslatorPB, method cancelDiskBalancePlan.

/**
 * Cancels an executing disk balancer plan.
 *
 * @param planID - A SHA-1 hash of the plan string.
 * @throws IOException on error
 */
@Override
public void cancelDiskBalancePlan(String planID) throws IOException {
    CancelPlanRequestProto cancelRequest =
        CancelPlanRequestProto.newBuilder()
            .setPlanID(planID)
            .build();
    try {
        rpcProxy.cancelDiskBalancerPlan(NULL_CONTROLLER, cancelRequest);
    } catch (ServiceException se) {
        // Unwrap the remote IOException wrapped inside the ServiceException.
        throw ProtobufHelper.getRemoteException(se);
    }
}
Also used : CancelPlanRequestProto(org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.CancelPlanRequestProto) ServiceException(com.google.protobuf.ServiceException)

Example 42 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

The class ClientDatanodeProtocolTranslatorPB, method getDiskBalancerSetting.

/**
 * Queries the datanode for the value of a disk balancer setting.
 *
 * @param key setting name to look up
 * @return the setting's value, or null if the response carries no value
 * @throws IOException on error
 */
@Override
public String getDiskBalancerSetting(String key) throws IOException {
    DiskBalancerSettingRequestProto query =
        DiskBalancerSettingRequestProto.newBuilder().setKey(key).build();
    try {
        DiskBalancerSettingResponseProto reply =
            rpcProxy.getDiskBalancerSetting(NULL_CONTROLLER, query);
        if (reply.hasValue()) {
            return reply.getValue();
        }
        return null;
    } catch (ServiceException se) {
        throw ProtobufHelper.getRemoteException(se);
    }
}
Also used : DiskBalancerSettingRequestProto(org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.DiskBalancerSettingRequestProto) DiskBalancerSettingResponseProto(org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.DiskBalancerSettingResponseProto) ServiceException(com.google.protobuf.ServiceException)

Example 43 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

The class ClientNamenodeProtocolTranslatorPB, method setPermission.

/**
 * Sets the permission of the given path on the namenode.
 *
 * @param src path whose permission is to be set
 * @param permission the new permission
 * @throws IOException if the remote call fails (unwrapped from the
 *         transport-level ServiceException)
 */
@Override
public void setPermission(String src, FsPermission permission) throws IOException {
    SetPermissionRequestProto req = SetPermissionRequestProto.newBuilder().setSrc(src).setPermission(PBHelperClient.convert(permission)).build();
    try {
        // The RPC invocation is identical in both modes; the original
        // duplicated it in both branches of the if/else. Asynchronous mode
        // only additionally captures the deferred return value.
        rpcProxy.setPermission(null, req);
        if (Client.isAsynchronousMode()) {
            setAsyncReturnValue();
        }
    } catch (ServiceException e) {
        throw ProtobufHelper.getRemoteException(e);
    }
}
Also used : ServiceException(com.google.protobuf.ServiceException) SetPermissionRequestProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetPermissionRequestProto)

Example 44 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

The class ClientNamenodeProtocolTranslatorPB, method getAclStatus.

/**
 * Gets the ACL status of the given path.
 *
 * In synchronous mode the RPC response is converted and returned directly.
 * In asynchronous mode ({@code Client.isAsynchronousMode()}) this method
 * returns {@code null} immediately and publishes an {@code AsyncGet} handle
 * via {@code AsyncCallHandler}; the caller obtains the converted
 * {@code AclStatus} from that handle later.
 *
 * @param src path whose ACL status is queried
 * @return the ACL status, or {@code null} when running in asynchronous mode
 * @throws IOException if the remote call fails
 */
@Override
public AclStatus getAclStatus(String src) throws IOException {
    GetAclStatusRequestProto req = GetAclStatusRequestProto.newBuilder().setSrc(src).build();
    try {
        if (Client.isAsynchronousMode()) {
            rpcProxy.getAclStatus(null, req);
            // NOTE(review): getAsyncReturnMessage() appears to capture the
            // pending response of the rpc call made just above, so these two
            // calls look order-dependent — confirm before reordering.
            final AsyncGet<Message, Exception> asyncReturnMessage = ProtobufRpcEngine.getAsyncReturnMessage();
            final AsyncGet<AclStatus, Exception> asyncGet = new AsyncGet<AclStatus, Exception>() {

                @Override
                public AclStatus get(long timeout, TimeUnit unit) throws Exception {
                    // Convert the raw protobuf response lazily, only when the
                    // caller asks for the result.
                    return PBHelperClient.convert((GetAclStatusResponseProto) asyncReturnMessage.get(timeout, unit));
                }

                @Override
                public boolean isDone() {
                    return asyncReturnMessage.isDone();
                }
            };
            AsyncCallHandler.setLowerLayerAsyncReturn(asyncGet);
            // Asynchronous callers receive the real value via asyncGet above.
            return null;
        } else {
            return PBHelperClient.convert(rpcProxy.getAclStatus(null, req));
        }
    } catch (ServiceException e) {
        throw ProtobufHelper.getRemoteException(e);
    }
}
Also used : Message(com.google.protobuf.Message) AclStatus(org.apache.hadoop.fs.permission.AclStatus) ServiceException(com.google.protobuf.ServiceException) GetAclStatusRequestProto(org.apache.hadoop.hdfs.protocol.proto.AclProtos.GetAclStatusRequestProto) AsyncGet(org.apache.hadoop.util.concurrent.AsyncGet) TimeUnit(java.util.concurrent.TimeUnit) IOException(java.io.IOException) ServiceException(com.google.protobuf.ServiceException)

Example 45 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

The class ClientNamenodeProtocolTranslatorPB, method append.

/**
 * Appends to an existing file on the namenode.
 *
 * @param src the file to append to
 * @param clientName name of the current client
 * @param flag create flags for the append
 * @return the file's last block and its status; either field may be null
 *         when absent from the RPC response
 * @throws IOException if the remote call fails
 */
@Override
public LastBlockWithStatus append(String src, String clientName, EnumSetWritable<CreateFlag> flag) throws IOException {
    AppendRequestProto request =
        AppendRequestProto.newBuilder()
            .setSrc(src)
            .setClientName(clientName)
            .setFlag(PBHelperClient.convertCreateFlag(flag))
            .build();
    try {
        AppendResponseProto response = rpcProxy.append(null, request);
        LocatedBlock block = null;
        if (response.hasBlock()) {
            block = PBHelperClient.convertLocatedBlockProto(response.getBlock());
        }
        HdfsFileStatus status = response.hasStat()
            ? PBHelperClient.convert(response.getStat())
            : null;
        return new LastBlockWithStatus(block, status);
    } catch (ServiceException e) {
        throw ProtobufHelper.getRemoteException(e);
    }
}
Also used : ServiceException(com.google.protobuf.ServiceException) LastBlockWithStatus(org.apache.hadoop.hdfs.protocol.LastBlockWithStatus) AppendResponseProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendResponseProto) HdfsFileStatus(org.apache.hadoop.hdfs.protocol.HdfsFileStatus) LocatedBlock(org.apache.hadoop.hdfs.protocol.LocatedBlock) AppendRequestProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.AppendRequestProto)

Aggregations

ServiceException (com.google.protobuf.ServiceException)139 IOException (java.io.IOException)66 Test (org.junit.Test)22 ArrayList (java.util.ArrayList)12 Configuration (org.apache.hadoop.conf.Configuration)11 FsPermission (org.apache.hadoop.fs.permission.FsPermission)5 Table (org.apache.hadoop.hbase.client.Table)5 CoprocessorRpcChannel (org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel)5 InetSocketAddress (java.net.InetSocketAddress)4 DatanodeCommand (org.apache.hadoop.hdfs.server.protocol.DatanodeCommand)4 ByteString (com.google.protobuf.ByteString)3 InterruptedIOException (java.io.InterruptedIOException)3 ConnectException (java.net.ConnectException)3 SocketTimeoutException (java.net.SocketTimeoutException)3 Callable (java.util.concurrent.Callable)3 ExecutionException (java.util.concurrent.ExecutionException)3 ExecutorService (java.util.concurrent.ExecutorService)3 EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone)3 LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock)3 Server (org.apache.hadoop.ipc.Server)3