Search in sources :

Example 16 with ServiceException

Use of com.google.protobuf.ServiceException in the Apache Hadoop project.

From the class TestProtoBufRPCCompatibility, the method testProtocolVersionMismatch:

@Test
public void testProtocolVersionMismatch() throws IOException, ServiceException {
    // Verify that a call through a proxy whose protocol version differs from
    // the server's fails with a "version mismatch" error, while a newer
    // protocol that merely omits an optional field still interoperates.
    conf = new Configuration();
    conf.setInt(CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH, 1024);
    // Set RPC engine to protobuf RPC engine
    RPC.setProtocolEngine(conf, NewRpcService.class, ProtobufRpcEngine.class);
    // Create server side implementation
    NewServerImpl serverImpl = new NewServerImpl();
    BlockingService service = NewProtobufRpcProto.newReflectiveBlockingService(serverImpl);
    // Get RPC server for server side implementation
    server = new RPC.Builder(conf).setProtocol(NewRpcService.class).setInstance(service).setBindAddress(ADDRESS).setPort(PORT).build();
    addr = NetUtils.getConnectAddress(server);
    server.start();
    RPC.setProtocolEngine(conf, OldRpcService.class, ProtobufRpcEngine.class);
    OldRpcService proxy = RPC.getProxy(OldRpcService.class, 0, addr, conf);
    // Verify that exception is thrown if protocolVersion is mismatch between
    // client and server.
    EmptyRequestProto emptyRequest = EmptyRequestProto.newBuilder().build();
    try {
        proxy.ping(null, emptyRequest);
        fail("Expected an exception to occur as version mismatch.");
    } catch (Exception e) {
        // getMessage() may legitimately be null (e.g. a bare wrapped cause);
        // guard against NPE so the original failure is the one surfaced.
        String message = e.getMessage();
        if (message == null || !message.contains("version mismatch")) {
            // Exception type is not what we expected, re-throw it.
            throw new IOException(e);
        }
    }
    // Verify that missing of optional field is still compatible in RPC call.
    RPC.setProtocolEngine(conf, NewerRpcService.class, ProtobufRpcEngine.class);
    NewerRpcService newProxy = RPC.getProxy(NewerRpcService.class, 0, addr, conf);
    newProxy.echo(null, emptyRequest);
}
Also used : EmptyRequestProto(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) Configuration(org.apache.hadoop.conf.Configuration) BlockingService(com.google.protobuf.BlockingService) IOException(java.io.IOException) ServiceException(com.google.protobuf.ServiceException) IOException(java.io.IOException) Test(org.junit.Test)

Example 17 with ServiceException

Use of com.google.protobuf.ServiceException in the Apache Hadoop project.

From the class InterDatanodeProtocolServerSideTranslatorPB, the method initReplicaRecovery:

@Override
public InitReplicaRecoveryResponseProto initReplicaRecovery(RpcController unused, InitReplicaRecoveryRequestProto request) throws ServiceException {
    // Translate the protobuf request into the internal recovering-block form.
    final RecoveringBlock recoveringBlock = PBHelper.convert(request.getBlock());
    final ReplicaRecoveryInfo recoveryInfo;
    try {
        recoveryInfo = impl.initReplicaRecovery(recoveringBlock);
    } catch (IOException e) {
        // The protobuf service contract requires wrapping I/O failures.
        throw new ServiceException(e);
    }
    // A null result from the implementation means no replica was found here.
    if (recoveryInfo == null) {
        return InitReplicaRecoveryResponseProto.newBuilder().setReplicaFound(false).build();
    }
    return InitReplicaRecoveryResponseProto.newBuilder()
        .setReplicaFound(true)
        .setBlock(PBHelperClient.convert(recoveryInfo))
        .setState(PBHelper.convert(recoveryInfo.getOriginalReplicaState()))
        .build();
}
Also used : ServiceException(com.google.protobuf.ServiceException) ReplicaRecoveryInfo(org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo) RecoveringBlock(org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock) IOException(java.io.IOException)

Example 18 with ServiceException

Use of com.google.protobuf.ServiceException in the Apache Hadoop project.

From the class InterDatanodeProtocolTranslatorPB, the method initReplicaRecovery:

@Override
public ReplicaRecoveryInfo initReplicaRecovery(RecoveringBlock rBlock) throws IOException {
    // Build the protobuf request from the block being recovered.
    InitReplicaRecoveryRequestProto req = InitReplicaRecoveryRequestProto.newBuilder()
        .setBlock(PBHelper.convert(rBlock))
        .build();
    InitReplicaRecoveryResponseProto resp;
    try {
        resp = rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req);
    } catch (ServiceException e) {
        // Unwrap the remote-side exception for the caller.
        throw ProtobufHelper.getRemoteException(e);
    }
    // No replica found on the remote node.
    if (!resp.getReplicaFound()) {
        return null;
    }
    // A found replica must carry both its block and its recovery state.
    if (!resp.hasBlock() || !resp.hasState()) {
        throw new IOException("Replica was found but missing fields. " + "Req: " + req + "\n" + "Resp: " + resp);
    }
    BlockProto block = resp.getBlock();
    return new ReplicaRecoveryInfo(block.getBlockId(), block.getNumBytes(), block.getGenStamp(), PBHelper.convert(resp.getState()));
}
Also used : InitReplicaRecoveryRequestProto(org.apache.hadoop.hdfs.protocol.proto.InterDatanodeProtocolProtos.InitReplicaRecoveryRequestProto) ServiceException(com.google.protobuf.ServiceException) ReplicaRecoveryInfo(org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo) InitReplicaRecoveryResponseProto(org.apache.hadoop.hdfs.protocol.proto.InterDatanodeProtocolProtos.InitReplicaRecoveryResponseProto) BlockProto(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto) IOException(java.io.IOException)

Example 19 with ServiceException

Use of com.google.protobuf.ServiceException in the Apache Hadoop project.

From the class NamenodeProtocolServerSideTranslatorPB, the method getBlocks:

@Override
public GetBlocksResponseProto getBlocks(RpcController unused, GetBlocksRequestProto request) throws ServiceException {
    // Reconstruct the target datanode identity from the request.
    DatanodeInfo datanode = new DatanodeInfoBuilder()
        .setNodeID(PBHelperClient.convert(request.getDatanode()))
        .build();
    BlocksWithLocations result;
    try {
        result = impl.getBlocks(datanode, request.getSize());
    } catch (IOException e) {
        // Wrap I/O failures per the protobuf service contract.
        throw new ServiceException(e);
    }
    return GetBlocksResponseProto.newBuilder().setBlocks(PBHelper.convert(result)).build();
}
Also used : DatanodeInfo(org.apache.hadoop.hdfs.protocol.DatanodeInfo) BlocksWithLocations(org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations) DatanodeInfoBuilder(org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder) ServiceException(com.google.protobuf.ServiceException) IOException(java.io.IOException)

Example 20 with ServiceException

Use of com.google.protobuf.ServiceException in the Apache Hadoop project.

From the class NamenodeProtocolServerSideTranslatorPB, the method getBlockKeys:

@Override
public GetBlockKeysResponseProto getBlockKeys(RpcController unused, GetBlockKeysRequestProto request) throws ServiceException {
    ExportedBlockKeys blockKeys;
    try {
        blockKeys = impl.getBlockKeys();
    } catch (IOException e) {
        // Wrap I/O failures per the protobuf service contract.
        throw new ServiceException(e);
    }
    GetBlockKeysResponseProto.Builder response = GetBlockKeysResponseProto.newBuilder();
    // Keys are optional in the response; only set them when present.
    if (blockKeys != null) {
        response.setKeys(PBHelper.convert(blockKeys));
    }
    return response.build();
}
Also used : ServiceException(com.google.protobuf.ServiceException) GetBlockKeysResponseProto(org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.GetBlockKeysResponseProto) IOException(java.io.IOException) ExportedBlockKeys(org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys)

Aggregations

ServiceException (com.google.protobuf.ServiceException)139 IOException (java.io.IOException)66 Test (org.junit.Test)22 ArrayList (java.util.ArrayList)12 Configuration (org.apache.hadoop.conf.Configuration)11 FsPermission (org.apache.hadoop.fs.permission.FsPermission)5 Table (org.apache.hadoop.hbase.client.Table)5 CoprocessorRpcChannel (org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel)5 InetSocketAddress (java.net.InetSocketAddress)4 DatanodeCommand (org.apache.hadoop.hdfs.server.protocol.DatanodeCommand)4 ByteString (com.google.protobuf.ByteString)3 InterruptedIOException (java.io.InterruptedIOException)3 ConnectException (java.net.ConnectException)3 SocketTimeoutException (java.net.SocketTimeoutException)3 Callable (java.util.concurrent.Callable)3 ExecutionException (java.util.concurrent.ExecutionException)3 ExecutorService (java.util.concurrent.ExecutorService)3 EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone)3 LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock)3 Server (org.apache.hadoop.ipc.Server)3