Search in sources :

Example 31 with ServiceException

Use of com.google.protobuf.ServiceException in the Apache Hadoop project.

Class ClientNamenodeProtocolTranslatorPB, method setErasureCodingPolicy.

/**
 * Sets the erasure coding policy named {@code ecPolicyName} on the path {@code src}
 * by issuing a SetErasureCodingPolicy RPC to the NameNode.
 *
 * @param src path on which to set the policy
 * @param ecPolicyName name of the erasure coding policy to apply
 * @throws IOException if the remote call fails; the cause is unwrapped from the
 *         transport-level ServiceException
 */
@Override
public void setErasureCodingPolicy(String src, String ecPolicyName) throws IOException {
    // Assemble the request in a single fluent chain.
    final SetErasureCodingPolicyRequestProto request =
        SetErasureCodingPolicyRequestProto.newBuilder()
            .setSrc(src)
            .setEcPolicyName(ecPolicyName)
            .build();
    try {
        rpcProxy.setErasureCodingPolicy(null, request);
    } catch (ServiceException e) {
        // Recover the original remote IOException wrapped by the RPC layer.
        throw ProtobufHelper.getRemoteException(e);
    }
}
Also used : ServiceException(com.google.protobuf.ServiceException) SetErasureCodingPolicyRequestProto(org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto)

Example 32 with ServiceException

Use of com.google.protobuf.ServiceException in the Apache Hadoop project.

Class ClientNamenodeProtocolTranslatorPB, method create.

/**
 * Creates a new file at {@code src} via the NameNode's Create RPC.
 *
 * @param src path of the file to create
 * @param masked permission with the umask already applied
 * @param clientName name identifying the calling client
 * @param flag create-mode flags (overwrite, append, etc.)
 * @param createParent whether missing parent directories should be created
 * @param replication target replication factor for the file
 * @param blockSize block size for the file
 * @param supportedVersions crypto protocol versions the client supports
 * @return the status of the created file, or null when the response carries none
 * @throws IOException if the remote call fails; unwrapped from ServiceException
 */
@Override
public HdfsFileStatus create(String src, FsPermission masked, String clientName, EnumSetWritable<CreateFlag> flag, boolean createParent, short replication, long blockSize, CryptoProtocolVersion[] supportedVersions) throws IOException {
    final CreateRequestProto.Builder reqBuilder = CreateRequestProto.newBuilder();
    reqBuilder.setSrc(src);
    reqBuilder.setMasked(PBHelperClient.convert(masked));
    reqBuilder.setClientName(clientName);
    reqBuilder.setCreateFlag(PBHelperClient.convertCreateFlag(flag));
    reqBuilder.setCreateParent(createParent);
    reqBuilder.setReplication(replication);
    reqBuilder.setBlockSize(blockSize);
    // The unmasked permission is optional in the proto; only set it when present.
    final FsPermission unmasked = masked.getUnmasked();
    if (unmasked != null) {
        reqBuilder.setUnmasked(PBHelperClient.convert(unmasked));
    }
    reqBuilder.addAllCryptoProtocolVersion(PBHelperClient.convert(supportedVersions));
    try {
        final CreateResponseProto response = rpcProxy.create(null, reqBuilder.build());
        // An absent file status in the response is surfaced to callers as null.
        return response.hasFs() ? PBHelperClient.convert(response.getFs()) : null;
    } catch (ServiceException e) {
        throw ProtobufHelper.getRemoteException(e);
    }
}
Also used : ServiceException(com.google.protobuf.ServiceException) CreateResponseProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateResponseProto) FsPermission(org.apache.hadoop.fs.permission.FsPermission) CreateRequestProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateRequestProto)

Example 33 with ServiceException

Use of com.google.protobuf.ServiceException in the Apache Hadoop project.

Class ClientNamenodeProtocolTranslatorPB, method listXAttrs.

/**
 * Lists the extended attributes visible on the path {@code src} by issuing a
 * ListXAttrs RPC to the NameNode.
 *
 * @param src path whose extended attributes are listed
 * @return the extended attributes converted from the RPC response
 * @throws IOException if the remote call fails; unwrapped from ServiceException
 */
@Override
public List<XAttr> listXAttrs(String src) throws IOException {
    // Build the request in one fluent expression.
    final ListXAttrsRequestProto request =
        ListXAttrsRequestProto.newBuilder().setSrc(src).build();
    try {
        return PBHelperClient.convert(rpcProxy.listXAttrs(null, request));
    } catch (ServiceException e) {
        throw ProtobufHelper.getRemoteException(e);
    }
}
Also used : ServiceException(com.google.protobuf.ServiceException) ListXAttrsRequestProto(org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto)

Example 34 with ServiceException

Use of com.google.protobuf.ServiceException in the Apache Hadoop project.

Class InterDatanodeProtocolServerSideTranslatorPB, method initReplicaRecovery.

/**
 * Server-side handler for the InitReplicaRecovery RPC: delegates to the local
 * implementation and translates its result into the protobuf response.
 *
 * @param unused RPC controller, ignored by this translator
 * @param request the recovery request carrying the block to recover
 * @return a response with replicaFound=false when no replica exists locally,
 *         otherwise replicaFound=true plus the replica's block and state
 * @throws ServiceException wrapping any IOException from the implementation so
 *         the RPC layer can transport it back to the caller
 */
@Override
public InitReplicaRecoveryResponseProto initReplicaRecovery(RpcController unused, InitReplicaRecoveryRequestProto request) throws ServiceException {
    final RecoveringBlock recoveringBlock = PBHelper.convert(request.getBlock());
    final ReplicaRecoveryInfo recoveryInfo;
    try {
        recoveryInfo = impl.initReplicaRecovery(recoveringBlock);
    } catch (IOException e) {
        // Server-side translators wrap IOExceptions for transport over RPC.
        throw new ServiceException(e);
    }
    if (recoveryInfo == null) {
        // No matching replica on this datanode.
        return InitReplicaRecoveryResponseProto.newBuilder().setReplicaFound(false).build();
    }
    return InitReplicaRecoveryResponseProto.newBuilder()
        .setReplicaFound(true)
        .setBlock(PBHelperClient.convert(recoveryInfo))
        .setState(PBHelper.convert(recoveryInfo.getOriginalReplicaState()))
        .build();
}
Also used : ServiceException(com.google.protobuf.ServiceException) ReplicaRecoveryInfo(org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo) RecoveringBlock(org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock) IOException(java.io.IOException)

Example 35 with ServiceException

Use of com.google.protobuf.ServiceException in the Apache Hadoop project.

Class InterDatanodeProtocolTranslatorPB, method initReplicaRecovery.

/**
 * Client-side stub for the InitReplicaRecovery RPC: asks the remote datanode to
 * initialize recovery for {@code rBlock} and converts the response.
 *
 * @param rBlock the block undergoing recovery
 * @return the replica's recovery info, or null when the remote node has no replica
 * @throws IOException if the remote call fails (unwrapped from ServiceException),
 *         or when the response claims a replica was found but omits its block/state
 */
@Override
public ReplicaRecoveryInfo initReplicaRecovery(RecoveringBlock rBlock) throws IOException {
    final InitReplicaRecoveryRequestProto req =
        InitReplicaRecoveryRequestProto.newBuilder().setBlock(PBHelper.convert(rBlock)).build();
    final InitReplicaRecoveryResponseProto resp;
    try {
        resp = rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req);
    } catch (ServiceException e) {
        throw ProtobufHelper.getRemoteException(e);
    }
    // No replica found on the remote node.
    if (!resp.getReplicaFound()) {
        return null;
    }
    // replicaFound=true implies both block and state must be populated.
    if (!resp.hasBlock() || !resp.hasState()) {
        throw new IOException("Replica was found but missing fields. " + "Req: " + req + "\n" + "Resp: " + resp);
    }
    final BlockProto block = resp.getBlock();
    return new ReplicaRecoveryInfo(block.getBlockId(), block.getNumBytes(), block.getGenStamp(), PBHelper.convert(resp.getState()));
}
Also used : InitReplicaRecoveryRequestProto(org.apache.hadoop.hdfs.protocol.proto.InterDatanodeProtocolProtos.InitReplicaRecoveryRequestProto) ServiceException(com.google.protobuf.ServiceException) ReplicaRecoveryInfo(org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo) InitReplicaRecoveryResponseProto(org.apache.hadoop.hdfs.protocol.proto.InterDatanodeProtocolProtos.InitReplicaRecoveryResponseProto) BlockProto(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto) IOException(java.io.IOException)

Aggregations

ServiceException (com.google.protobuf.ServiceException)139 IOException (java.io.IOException)66 Test (org.junit.Test)22 ArrayList (java.util.ArrayList)12 Configuration (org.apache.hadoop.conf.Configuration)11 FsPermission (org.apache.hadoop.fs.permission.FsPermission)5 Table (org.apache.hadoop.hbase.client.Table)5 CoprocessorRpcChannel (org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel)5 InetSocketAddress (java.net.InetSocketAddress)4 DatanodeCommand (org.apache.hadoop.hdfs.server.protocol.DatanodeCommand)4 ByteString (com.google.protobuf.ByteString)3 InterruptedIOException (java.io.InterruptedIOException)3 ConnectException (java.net.ConnectException)3 SocketTimeoutException (java.net.SocketTimeoutException)3 Callable (java.util.concurrent.Callable)3 ExecutionException (java.util.concurrent.ExecutionException)3 ExecutorService (java.util.concurrent.ExecutorService)3 EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone)3 LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock)3 Server (org.apache.hadoop.ipc.Server)3