
Example 11 with BlockingServiceAndInterface

Use of org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface in project hbase by apache.

From class AbstractTestIPC, method testRemoteError.

@Test
public void testRemoteError() throws IOException, ServiceException {
    RpcServer rpcServer = RpcServerFactory.createRpcServer(null, "testRpcServer",
        Lists.newArrayList(new BlockingServiceAndInterface(SERVICE, null)),
        new InetSocketAddress("localhost", 0), CONF, new FifoRpcScheduler(CONF, 1));
    try (AbstractRpcClient<?> client = createRpcClient(CONF)) {
        rpcServer.start();
        BlockingInterface stub = newBlockingStub(client, rpcServer.getListenerAddress());
        stub.error(null, EmptyRequestProto.getDefaultInstance());
    } catch (ServiceException e) {
        LOG.info("Caught expected exception: " + e);
        IOException ioe = ProtobufUtil.handleRemoteException(e);
        assertTrue(ioe instanceof DoNotRetryIOException);
        assertTrue(ioe.getMessage().contains("server error!"));
    } finally {
        rpcServer.stop();
    }
}
Also used: BlockingInterface (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface), BlockingServiceAndInterface (org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface), ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), InetSocketAddress (java.net.InetSocketAddress), IOException (java.io.IOException), Test (org.junit.Test)
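
The assertions in this test rely on ProtobufUtil.handleRemoteException turning the ServiceException thrown by the blocking stub back into the IOException type raised on the server. A minimal sketch of that unwrap step in caller code, assuming the stub from the setup above:

    try {
        stub.error(null, EmptyRequestProto.getDefaultInstance());
    } catch (ServiceException e) {
        // Recover the server-side exception type from the wrapped remote failure.
        IOException ioe = ProtobufUtil.handleRemoteException(e);
        if (ioe instanceof DoNotRetryIOException) {
            // The server marked this failure as non-retriable ("server error!" in this test),
            // so a caller should surface it rather than retry.
        }
    }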

Example 12 with BlockingServiceAndInterface

Use of org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface in project hbase by apache.

From class AbstractTestIPC, method testTimeout.

@Test
public void testTimeout() throws IOException {
    RpcServer rpcServer = RpcServerFactory.createRpcServer(null, "testRpcServer",
        Lists.newArrayList(new BlockingServiceAndInterface(SERVICE, null)),
        new InetSocketAddress("localhost", 0), CONF, new FifoRpcScheduler(CONF, 1));
    try (AbstractRpcClient<?> client = createRpcClient(CONF)) {
        rpcServer.start();
        BlockingInterface stub = newBlockingStub(client, rpcServer.getListenerAddress());
        HBaseRpcController pcrc = new HBaseRpcControllerImpl();
        int ms = 1000;
        int timeout = 100;
        for (int i = 0; i < 10; i++) {
            pcrc.reset();
            pcrc.setCallTimeout(timeout);
            long startTime = System.nanoTime();
            try {
                stub.pause(pcrc, PauseRequestProto.newBuilder().setMs(ms).build());
            } catch (ServiceException e) {
                long waitTime = (System.nanoTime() - startTime) / 1000000;
                // expected
                LOG.info("Caught expected exception: " + e);
                IOException ioe = ProtobufUtil.handleRemoteException(e);
                assertTrue(ioe.getCause() instanceof CallTimeoutException);
                // confirm that the timeout fired before the requested pause would have elapsed.
                assertTrue(waitTime < ms);
            }
        }
    } finally {
        rpcServer.stop();
    }
}
Also used: BlockingServiceAndInterface (org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface), InetSocketAddress (java.net.InetSocketAddress), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), IOException (java.io.IOException), BlockingInterface (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface), ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException), Test (org.junit.Test)
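
Reduced to its essentials, the loop above is the per-call timeout pattern: set a timeout on the controller, make the blocking call, and expect a CallTimeoutException wrapped in the ServiceException when the server-side pause outlasts the timeout. A minimal sketch, assuming the stub from the setup above:

    HBaseRpcController controller = new HBaseRpcControllerImpl();
    // Ask the server to pause for 1000 ms but allow the call only 100 ms to complete.
    controller.setCallTimeout(100);
    try {
        stub.pause(controller, PauseRequestProto.newBuilder().setMs(1000).build());
    } catch (ServiceException e) {
        // The timeout surfaces as a CallTimeoutException nested inside the ServiceException.
        IOException ioe = ProtobufUtil.handleRemoteException(e);
        assertTrue(ioe.getCause() instanceof CallTimeoutException);
    }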

Example 13 with BlockingServiceAndInterface

Use of org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface in project hbase by apache.

From class AbstractTestIPC, method testRpcScheduler.

/**
   * Tests that the rpc scheduler is called when requests arrive.
   */
@Test
public void testRpcScheduler() throws IOException, ServiceException, InterruptedException {
    RpcScheduler scheduler = spy(new FifoRpcScheduler(CONF, 1));
    RpcServer rpcServer = RpcServerFactory.createRpcServer(null, "testRpcServer",
        Lists.newArrayList(new BlockingServiceAndInterface(SERVICE, null)),
        new InetSocketAddress("localhost", 0), CONF, scheduler);
    verify(scheduler).init((RpcScheduler.Context) anyObject());
    try (AbstractRpcClient<?> client = createRpcClient(CONF)) {
        rpcServer.start();
        verify(scheduler).start();
        BlockingInterface stub = newBlockingStub(client, rpcServer.getListenerAddress());
        EchoRequestProto param = EchoRequestProto.newBuilder().setMessage("hello").build();
        for (int i = 0; i < 10; i++) {
            stub.echo(null, param);
        }
        verify(scheduler, times(10)).dispatch((CallRunner) anyObject());
    } finally {
        rpcServer.stop();
        verify(scheduler).stop();
    }
}
Also used: BlockingInterface (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface), BlockingServiceAndInterface (org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface), EchoRequestProto (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto), InetSocketAddress (java.net.InetSocketAddress), Test (org.junit.Test)
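
Note that spy (rather than mock) is what makes this verification work: the real FifoRpcScheduler still dispatches the ten echo requests, so the calls succeed, while Mockito records the interactions for the later assertions. A minimal sketch of that pattern, using the same calls as the test above:

    RpcScheduler scheduler = spy(new FifoRpcScheduler(CONF, 1));
    // ... hand the spy to RpcServerFactory.createRpcServer(...) and issue requests as above ...
    verify(scheduler).init((RpcScheduler.Context) anyObject());
    verify(scheduler).start();
    verify(scheduler, times(10)).dispatch((CallRunner) anyObject());
    verify(scheduler).stop();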

Example 14 with BlockingServiceAndInterface

Use of org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface in project hbase by apache.

From class AbstractTestIPC, method testCompressCellBlock.

/**
   * It is hard to verify that compression is actually happening under the wraps. The hope is that,
   * if it is unsupported, an exception will surface at some point; in the meantime it has to be
   * traced manually to confirm that compression is happening down in the client and server.
   */
@Test
public void testCompressCellBlock() throws IOException, ServiceException {
    Configuration conf = new Configuration(HBaseConfiguration.create());
    conf.set("hbase.client.rpc.compressor", GzipCodec.class.getCanonicalName());
    List<Cell> cells = new ArrayList<>();
    int count = 3;
    for (int i = 0; i < count; i++) {
        cells.add(CELL);
    }
    RpcServer rpcServer = RpcServerFactory.createRpcServer(null, "testRpcServer",
        Lists.newArrayList(new BlockingServiceAndInterface(SERVICE, null)),
        new InetSocketAddress("localhost", 0), CONF, new FifoRpcScheduler(CONF, 1));
    try (AbstractRpcClient<?> client = createRpcClient(conf)) {
        rpcServer.start();
        BlockingInterface stub = newBlockingStub(client, rpcServer.getListenerAddress());
        HBaseRpcController pcrc = new HBaseRpcControllerImpl(CellUtil.createCellScanner(cells));
        String message = "hello";
        assertEquals(message, stub.echo(pcrc, EchoRequestProto.newBuilder().setMessage(message).build()).getMessage());
        int index = 0;
        CellScanner cellScanner = pcrc.cellScanner();
        assertNotNull(cellScanner);
        while (cellScanner.advance()) {
            assertEquals(CELL, cellScanner.current());
            index++;
        }
        assertEquals(count, index);
    } finally {
        rpcServer.stop();
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), BlockingServiceAndInterface (org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface), InetSocketAddress (java.net.InetSocketAddress), GzipCodec (org.apache.hadoop.io.compress.GzipCodec), ArrayList (java.util.ArrayList), CellScanner (org.apache.hadoop.hbase.CellScanner), BlockingInterface (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)
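
The only knob this test turns is the client-side compressor property. A minimal sketch of enabling gzip compression of cell blocks in one's own client configuration, as done above:

    Configuration conf = new Configuration(HBaseConfiguration.create());
    // Compress cell blocks exchanged over RPC with gzip; the codec has to be usable on
    // both the client and the server for the calls to succeed.
    conf.set("hbase.client.rpc.compressor", GzipCodec.class.getCanonicalName());
    // Any RPC client built from this conf (for example via createRpcClient(conf) above)
    // will apply the configured compressor to its cell blocks.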

Example 15 with BlockingServiceAndInterface

Use of org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface in project hbase by apache.

From class AbstractTestIPC, method testAsyncRemoteError.

@Test
public void testAsyncRemoteError() throws IOException {
    AbstractRpcClient<?> client = createRpcClient(CONF);
    RpcServer rpcServer = RpcServerFactory.createRpcServer(null, "testRpcServer",
        Lists.newArrayList(new BlockingServiceAndInterface(SERVICE, null)),
        new InetSocketAddress("localhost", 0), CONF, new FifoRpcScheduler(CONF, 1));
    try {
        rpcServer.start();
        Interface stub = newStub(client, rpcServer.getListenerAddress());
        BlockingRpcCallback<EmptyResponseProto> callback = new BlockingRpcCallback<>();
        HBaseRpcController pcrc = new HBaseRpcControllerImpl();
        stub.error(pcrc, EmptyRequestProto.getDefaultInstance(), callback);
        assertNull(callback.get());
        assertTrue(pcrc.failed());
        LOG.info("Caught expected exception: " + pcrc.getFailed());
        IOException ioe = ProtobufUtil.handleRemoteException(pcrc.getFailed());
        assertTrue(ioe instanceof DoNotRetryIOException);
        assertTrue(ioe.getMessage().contains("server error!"));
    } finally {
        client.close();
        rpcServer.stop();
    }
}
Also used: BlockingServiceAndInterface (org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), InetSocketAddress (java.net.InetSocketAddress), EmptyResponseProto (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto), IOException (java.io.IOException), Interface (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.Interface), BlockingInterface (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface), Test (org.junit.Test)
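
The same unwrap pattern works for the non-blocking stub: the failure is reported through the controller rather than a thrown ServiceException. A minimal sketch, assuming the async stub and callback types used above:

    BlockingRpcCallback<EmptyResponseProto> callback = new BlockingRpcCallback<>();
    HBaseRpcController controller = new HBaseRpcControllerImpl();
    stub.error(controller, EmptyRequestProto.getDefaultInstance(), callback);
    // callback.get() waits for the call to complete and returns null when it failed.
    EmptyResponseProto response = callback.get();
    if (controller.failed()) {
        // Recover the server-side exception (a DoNotRetryIOException in this test).
        IOException ioe = ProtobufUtil.handleRemoteException(controller.getFailed());
    }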

Aggregations

BlockingServiceAndInterface (org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface): 15 uses
InetSocketAddress (java.net.InetSocketAddress): 12 uses
BlockingInterface (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface): 12 uses
Test (org.junit.Test): 12 uses
IOException (java.io.IOException): 5 uses
ArrayList (java.util.ArrayList): 5 uses
Configuration (org.apache.hadoop.conf.Configuration): 5 uses
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 5 uses
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 5 uses
ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException): 4 uses
Interface (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.Interface): 3 uses
EchoRequestProto (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto): 2 uses
EmptyResponseProto (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto): 2 uses
Abortable (org.apache.hadoop.hbase.Abortable): 1 use
Cell (org.apache.hadoop.hbase.Cell): 1 use
CellScanner (org.apache.hadoop.hbase.CellScanner): 1 use
ServiceDescriptor (org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor): 1 use
EchoResponseProto (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto): 1 use
GzipCodec (org.apache.hadoop.io.compress.GzipCodec): 1 use
Ignore (org.junit.Ignore): 1 use