Example 56 with ServiceException

Use of com.google.protobuf.ServiceException in project hadoop by apache.

The class DatanodeProtocolServerSideTranslatorPB, method cacheReport:

@Override
public CacheReportResponseProto cacheReport(RpcController controller, CacheReportRequestProto request) throws ServiceException {
    DatanodeCommand cmd = null;
    try {
        cmd = impl.cacheReport(PBHelper.convert(request.getRegistration()), request.getBlockPoolId(), request.getBlocksList());
    } catch (IOException e) {
        throw new ServiceException(e);
    }
    CacheReportResponseProto.Builder builder = CacheReportResponseProto.newBuilder();
    if (cmd != null) {
        builder.setCmd(PBHelper.convert(cmd));
    }
    return builder.build();
}
Also used : DatanodeCommand(org.apache.hadoop.hdfs.server.protocol.DatanodeCommand) ServiceException(com.google.protobuf.ServiceException) IOException(java.io.IOException) CacheReportResponseProto(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.CacheReportResponseProto)
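
For context, the client side of the same RPC inverts this translation: it packs the typed arguments into the protobuf request and unwraps the ServiceException back into an IOException. The sketch below follows the Hadoop PB-translator conventions seen above; the exact builder setters and the rpcProxy field are assumptions for illustration, not verbatim Hadoop source.

// Sketch of the matching client-side translator method (assumed shape, not verbatim source).
@Override
public DatanodeCommand cacheReport(DatanodeRegistration registration, String blockPoolId,
        List<Long> blockIds) throws IOException {
    CacheReportRequestProto req = CacheReportRequestProto.newBuilder()
            .setRegistration(PBHelper.convert(registration))
            .setBlockPoolId(blockPoolId)
            .addAllBlocks(blockIds)
            .build();
    try {
        CacheReportResponseProto resp = rpcProxy.cacheReport(null, req);
        // Only convert the command if the server actually set the optional field.
        return resp.hasCmd() ? PBHelper.convert(resp.getCmd()) : null;
    } catch (ServiceException se) {
        // Unwrap the remote IOException that the server-side translator wrapped above.
        throw ProtobufHelper.getRemoteException(se);
    }
}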

Example 57 with ServiceException

Use of com.google.protobuf.ServiceException in project hadoop by apache.

The class TestProtoBufRPCCompatibility, method testProtocolVersionMismatch:

@Test
public void testProtocolVersionMismatch() throws IOException, ServiceException {
    conf = new Configuration();
    conf.setInt(CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH, 1024);
    // Set RPC engine to protobuf RPC engine
    RPC.setProtocolEngine(conf, NewRpcService.class, ProtobufRpcEngine.class);
    // Create server side implementation
    NewServerImpl serverImpl = new NewServerImpl();
    BlockingService service = NewProtobufRpcProto.newReflectiveBlockingService(serverImpl);
    // Get RPC server for server side implementation
    server = new RPC.Builder(conf).setProtocol(NewRpcService.class).setInstance(service).setBindAddress(ADDRESS).setPort(PORT).build();
    addr = NetUtils.getConnectAddress(server);
    server.start();
    RPC.setProtocolEngine(conf, OldRpcService.class, ProtobufRpcEngine.class);
    OldRpcService proxy = RPC.getProxy(OldRpcService.class, 0, addr, conf);
    // Verify that an exception is thrown if the protocolVersion is mismatched
    // between client and server.
    EmptyRequestProto emptyRequest = EmptyRequestProto.newBuilder().build();
    try {
        proxy.ping(null, emptyRequest);
        fail("Expected an exception to occur as version mismatch.");
    } catch (Exception e) {
        if (!(e.getMessage().contains("version mismatch"))) {
            // Exception type is not what we expected, re-throw it.
            throw new IOException(e);
        }
    }
    // Verify that a missing optional field is still compatible in an RPC call.
    RPC.setProtocolEngine(conf, NewerRpcService.class, ProtobufRpcEngine.class);
    NewerRpcService newProxy = RPC.getProxy(NewerRpcService.class, 0, addr, conf);
    newProxy.echo(null, emptyRequest);
}
Also used : EmptyRequestProto(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) Configuration(org.apache.hadoop.conf.Configuration) BlockingService(com.google.protobuf.BlockingService) ServiceException(com.google.protobuf.ServiceException) IOException(java.io.IOException) Test(org.junit.Test)
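
The mismatch asserted above comes from the version declared on the client and server protocol interfaces. A minimal illustration of that setup is sketched below; the protocol name and the generated BlockingInterface types are placeholders chosen for the example, not the actual test declarations.

// Hypothetical protocol declarations: same protocol name, different versions.
// Calling a server registered with one interface through a proxy for the other
// fails with the "version mismatch" error the test checks for.
@ProtocolInfo(protocolName = "testProtobufCompatibility", protocolVersion = 1)
public interface OldRpcService extends OldProtobufRpcProto.BlockingInterface {
}

@ProtocolInfo(protocolName = "testProtobufCompatibility", protocolVersion = 2)
public interface NewRpcService extends NewProtobufRpcProto.BlockingInterface {
}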

Example 58 with ServiceException

Use of com.google.protobuf.ServiceException in project phoenix by apache.

The class StatisticsWriter, method commitStats:

public void commitStats(List<Mutation> mutations, StatisticsCollector statsCollector) throws IOException {
    commitLastStatsUpdatedTime(statsCollector);
    if (mutations.size() > 0) {
        byte[] row = mutations.get(0).getRow();
        MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
        for (Mutation m : mutations) {
            mrmBuilder.addMutationRequest(ProtobufUtil.toMutation(getMutationType(m), m));
        }
        MutateRowsRequest mrm = mrmBuilder.build();
        CoprocessorRpcChannel channel = statsWriterTable.coprocessorService(row);
        MultiRowMutationService.BlockingInterface service = MultiRowMutationService.newBlockingStub(channel);
        try {
            service.mutateRows(null, mrm);
        } catch (ServiceException ex) {
            // toIOException returns the converted exception; it must be thrown,
            // otherwise the RPC failure would be silently swallowed.
            throw ProtobufUtil.toIOException(ex);
        }
    }
}
Also used : MutateRowsRequest(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) ServiceException(com.google.protobuf.ServiceException) CoprocessorRpcChannel(org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) MultiRowMutationService(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) Mutation(org.apache.hadoop.hbase.client.Mutation)
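
The getMutationType helper referenced above maps the HBase client mutation onto the protobuf MutationType enum expected by ProtobufUtil.toMutation. A plausible minimal version is sketched below as an assumption for illustration, not the verbatim Phoenix helper.

// Assumed shape of the helper used above: statistics mutations are either Puts or Deletes.
private static MutationType getMutationType(Mutation m) {
    return (m instanceof Put) ? MutationType.PUT : MutationType.DELETE;
}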

Example 59 with ServiceException

Use of com.google.protobuf.ServiceException in project SSM by Intel-bigdata.

The class ClientSmartProtocolServerSideTranslatorPB, method listActionsSupported:

@Override
public ListActionsSupportedResponseProto listActionsSupported(RpcController controller, ListActionsSupportedRequestProto req) throws ServiceException {
    try {
        List<ActionDescriptor> adList = server.listActionsSupported();
        List<ActionDescriptorProto> prolist = new ArrayList<>();
        for (ActionDescriptor a : adList) {
            prolist.add(PBHelper.convert(a));
        }
        return ListActionsSupportedResponseProto.newBuilder().addAllActDesList(prolist).build();
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}
Also used : ServiceException(com.google.protobuf.ServiceException) ActionDescriptor(org.smartdata.common.actions.ActionDescriptor) ArrayList(java.util.ArrayList) ActionDescriptorProto(org.smartdata.common.protocol.AdminServerProto.ActionDescriptorProto) IOException(java.io.IOException)
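
The admin-side (client) translator performs the reverse conversion for this call, unwrapping the ServiceException in the same way Example 60 does. A minimal sketch follows, assuming the generated repeated-field accessor is getActDesListList(); the names are not copied from the SSM source.

// Sketch of the matching client-side translator method (accessor names assumed).
@Override
public List<ActionDescriptor> listActionsSupported() throws IOException {
    ListActionsSupportedRequestProto req =
            ListActionsSupportedRequestProto.newBuilder().build();
    try {
        ListActionsSupportedResponseProto resp = rpcProxy.listActionsSupported(null, req);
        List<ActionDescriptor> actions = new ArrayList<>();
        for (ActionDescriptorProto proto : resp.getActDesListList()) {
            actions.add(PBHelper.convert(proto));
        }
        return actions;
    } catch (ServiceException e) {
        // Recover the original IOException raised on the server side.
        throw PBHelper.getRemoteException(e);
    }
}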

Example 60 with ServiceException

Use of com.google.protobuf.ServiceException in project SSM by Intel-bigdata.

The class SmartAdminProtocolAdminSideTranslatorPB, method getRuleInfo:

@Override
public RuleInfo getRuleInfo(long id) throws IOException {
    try {
        GetRuleInfoRequestProto req = GetRuleInfoRequestProto.newBuilder().setRuleId(id).build();
        GetRuleInfoResponseProto r = rpcProxy.getRuleInfo(null, req);
        return PBHelper.convert(r.getResult());
    } catch (ServiceException e) {
        throw PBHelper.getRemoteException(e);
    }
}
Also used : ServiceException(com.google.protobuf.ServiceException) GetRuleInfoRequestProto(org.smartdata.common.protocol.AdminServerProto.GetRuleInfoRequestProto) GetRuleInfoResponseProto(org.smartdata.common.protocol.AdminServerProto.GetRuleInfoResponseProto)
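
The server-side half of this call follows the wrapping pattern from Example 59: the implementation's IOException is wrapped into a ServiceException, which the client translator above unwraps again. A minimal sketch, with the server field and converter names assumed:

// Sketch of the matching server-side translator method (names assumed for illustration).
@Override
public GetRuleInfoResponseProto getRuleInfo(RpcController controller,
        GetRuleInfoRequestProto req) throws ServiceException {
    try {
        RuleInfo info = server.getRuleInfo(req.getRuleId());
        return GetRuleInfoResponseProto.newBuilder()
                .setResult(PBHelper.convert(info))
                .build();
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}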

Aggregations

ServiceException (com.google.protobuf.ServiceException) 139
IOException (java.io.IOException) 66
Test (org.junit.Test) 22
ArrayList (java.util.ArrayList) 12
Configuration (org.apache.hadoop.conf.Configuration) 11
FsPermission (org.apache.hadoop.fs.permission.FsPermission) 5
Table (org.apache.hadoop.hbase.client.Table) 5
CoprocessorRpcChannel (org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) 5
InetSocketAddress (java.net.InetSocketAddress) 4
DatanodeCommand (org.apache.hadoop.hdfs.server.protocol.DatanodeCommand) 4
ByteString (com.google.protobuf.ByteString) 3
InterruptedIOException (java.io.InterruptedIOException) 3
ConnectException (java.net.ConnectException) 3
SocketTimeoutException (java.net.SocketTimeoutException) 3
Callable (java.util.concurrent.Callable) 3
ExecutionException (java.util.concurrent.ExecutionException) 3
ExecutorService (java.util.concurrent.ExecutorService) 3
EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone) 3
LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock) 3
Server (org.apache.hadoop.ipc.Server) 3