Use of org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException in project hbase by apache.
The class AbstractTestIPC, method testConnectionCloseWithOutstandingRPCs.
/** Tests that the connection closing is handled by the client with outstanding RPC calls */
@Test
public void testConnectionCloseWithOutstandingRPCs() throws InterruptedException, IOException {
  Configuration conf = new Configuration(CONF);
  RpcServer rpcServer = new TestFailingRpcServer(conf);
  try (AbstractRpcClient<?> client = createRpcClient(conf)) {
    rpcServer.start();
    BlockingInterface stub = newBlockingStub(client, rpcServer.getListenerAddress());
    EchoRequestProto param = EchoRequestProto.newBuilder().setMessage("hello").build();
    stub.echo(null, param);
    fail("RPC should have failed because connection closed");
  } catch (ServiceException e) {
    LOG.info("Caught expected exception: " + e.toString());
  } finally {
    rpcServer.stop();
  }
}
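The blocking stub surfaces the dropped connection as a ServiceException whose cause is typically the underlying I/O failure. Below is a minimal sketch of recovering that cause on the caller side; ServiceExceptionUtil and unwrapToIOException are hypothetical names, not part of the HBase API, and only the shaded protobuf ServiceException class is assumed.
import java.io.IOException;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

// Hypothetical helper: recover the IOException that the RPC layer wrapped
// into a ServiceException, falling back to wrapping whatever cause is present.
public final class ServiceExceptionUtil {
  private ServiceExceptionUtil() {
  }

  public static IOException unwrapToIOException(ServiceException e) {
    Throwable cause = e.getCause();
    if (cause instanceof IOException) {
      return (IOException) cause;
    }
    return new IOException(cause != null ? cause : e);
  }
}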
Use of org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException in project hbase by apache.
The class TestProtobufRpcServiceImpl, method echo.
@Override
public EchoResponseProto echo(RpcController controller, EchoRequestProto request)
    throws ServiceException {
  if (controller instanceof HBaseRpcController) {
    HBaseRpcController pcrc = (HBaseRpcController) controller;
    // If cells, scan them to check we are able to iterate what we were given and since this is an
    // echo, just put them back on the controller creating a new block. Tests our block building.
    CellScanner cellScanner = pcrc.cellScanner();
    List<Cell> list = null;
    if (cellScanner != null) {
      list = new ArrayList<>();
      try {
        while (cellScanner.advance()) {
          list.add(cellScanner.current());
        }
      } catch (IOException e) {
        throw new ServiceException(e);
      }
    }
    cellScanner = CellUtil.createCellScanner(list);
    pcrc.setCellScanner(cellScanner);
  }
  return EchoResponseProto.newBuilder().setMessage(request.getMessage()).build();
}
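The try/catch in echo() shows the standard wrap-and-rethrow rule for protobuf blocking services: the generated interface declares only ServiceException, so checked IOExceptions must be wrapped. Here is a standalone sketch of the same rule, assuming nothing beyond the JDK and the shaded ServiceException class; readPayload and the use of Files.readAllBytes are illustrative only.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

public class WrapAndRethrowExample {
  // A service-style method may not declare IOException, so any checked I/O
  // failure is wrapped in a ServiceException before it propagates.
  public byte[] readPayload(String path) throws ServiceException {
    try {
      return Files.readAllBytes(Paths.get(path));
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
}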
Use of org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException in project hbase by apache.
The class AbstractTestIPC, method testRemoteError.
@Test
public void testRemoteError() throws IOException, ServiceException {
  RpcServer rpcServer = RpcServerFactory.createRpcServer(null, "testRpcServer",
    Lists.newArrayList(new BlockingServiceAndInterface(SERVICE, null)),
    new InetSocketAddress("localhost", 0), CONF, new FifoRpcScheduler(CONF, 1));
  try (AbstractRpcClient<?> client = createRpcClient(CONF)) {
    rpcServer.start();
    BlockingInterface stub = newBlockingStub(client, rpcServer.getListenerAddress());
    // error() always fails on the server; the remote DoNotRetryIOException comes back
    // wrapped in a ServiceException.
    stub.error(null, EmptyRequestProto.getDefaultInstance());
  } catch (ServiceException e) {
    LOG.info("Caught expected exception: " + e);
    IOException ioe = ProtobufUtil.handleRemoteException(e);
    assertTrue(ioe instanceof DoNotRetryIOException);
    assertTrue(ioe.getMessage().contains("server error!"));
  } finally {
    rpcServer.stop();
  }
}
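testRemoteError exercises the client-side half of that contract: ProtobufUtil.handleRemoteException converts the wire-level ServiceException back into the IOException subtype thrown on the server. A hedged sketch of reusing that translation to drive a retry decision follows; RemoteErrorTranslator and isRetriable are hypothetical, and the ProtobufUtil import path is assumed to be the shaded-protobuf variant used by the test above.
import java.io.IOException;

import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; // assumed import path

// Hypothetical helper mirroring the assertions in testRemoteError(): restore the
// server-side exception type and use it to decide whether a retry makes sense.
public final class RemoteErrorTranslator {
  private RemoteErrorTranslator() {
  }

  public static boolean isRetriable(ServiceException e) {
    IOException ioe = ProtobufUtil.handleRemoteException(e);
    return !(ioe instanceof DoNotRetryIOException);
  }
}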
Use of org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException in project hbase by apache.
The class AbstractTestIPC, method testTimeout.
@Test
public void testTimeout() throws IOException {
  RpcServer rpcServer = RpcServerFactory.createRpcServer(null, "testRpcServer",
    Lists.newArrayList(new BlockingServiceAndInterface(SERVICE, null)),
    new InetSocketAddress("localhost", 0), CONF, new FifoRpcScheduler(CONF, 1));
  try (AbstractRpcClient<?> client = createRpcClient(CONF)) {
    rpcServer.start();
    BlockingInterface stub = newBlockingStub(client, rpcServer.getListenerAddress());
    HBaseRpcController pcrc = new HBaseRpcControllerImpl();
    // Ask the server to pause for 1000 ms but give the client only a 100 ms call timeout.
    int ms = 1000;
    int timeout = 100;
    for (int i = 0; i < 10; i++) {
      pcrc.reset();
      pcrc.setCallTimeout(timeout);
      long startTime = System.nanoTime();
      try {
        stub.pause(pcrc, PauseRequestProto.newBuilder().setMs(ms).build());
      } catch (ServiceException e) {
        long waitTime = (System.nanoTime() - startTime) / 1000000;
        // expected
        LOG.info("Caught expected exception: " + e);
        IOException ioe = ProtobufUtil.handleRemoteException(e);
        assertTrue(ioe.getCause() instanceof CallTimeoutException);
        // confirm that we got exception before the actual pause.
        assertTrue(waitTime < ms);
      }
    }
  } finally {
    rpcServer.stop();
  }
}
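testTimeout asserts both that the call returned well before the requested pause and that the translated IOException has a CallTimeoutException as its cause. A small, generic sketch of checking a cause chain for a given failure type follows; CauseChains and hasCause are hypothetical, plain-JDK helpers.
// Hypothetical helper: walk an exception and its cause chain and report whether
// any link is an instance of the given type. Against the test above it would be
// used as hasCause(ioe, CallTimeoutException.class).
public final class CauseChains {
  private CauseChains() {
  }

  public static boolean hasCause(Throwable top, Class<? extends Throwable> type) {
    for (Throwable t = top; t != null; t = t.getCause()) {
      if (type.isInstance(t)) {
        return true;
      }
    }
    return false;
  }
}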
Use of org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException in project hbase by apache.
The class MockRegionServer, method scan.
@Override
public ScanResponse scan(RpcController controller, ScanRequest request) throws ServiceException {
  ScanResponse.Builder builder = ScanResponse.newBuilder();
  try {
    if (request.hasScan()) {
      // First request carries the Scan: open a scanner against the named region.
      byte[] regionName = request.getRegion().getValue().toByteArray();
      builder.setScannerId(openScanner(regionName, null));
      builder.setMoreResults(true);
    } else {
      // Follow-up request: fetch the next Result for an already open scanner.
      long scannerId = request.getScannerId();
      Result result = next(scannerId);
      if (result != null) {
        builder.addCellsPerResult(result.size());
        List<CellScannable> results = new ArrayList<>(1);
        results.add(result);
        // Cells travel in the controller's cell block, not in the protobuf response.
        ((HBaseRpcController) controller).setCellScanner(CellUtil.createCellScanner(results));
        builder.setMoreResults(true);
      } else {
        builder.setMoreResults(false);
        close(scannerId);
      }
    }
  } catch (IOException ie) {
    throw new ServiceException(ie);
  }
  return builder.build();
}
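scan() implements the usual open-then-next protocol: a request that carries a Scan opens a scanner and returns its id, and later requests reuse that id until the mock source is exhausted. Below is a minimal, HBase-free sketch of that id-based bookkeeping; ScannerRegistry, open, and next are illustrative names only, with plain strings standing in for HBase Results.
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

// Illustrative only: models the scanner-id bookkeeping that scan() relies on.
public class ScannerRegistry {
  private final AtomicLong ids = new AtomicLong();
  private final Map<Long, Iterator<String>> scanners = new HashMap<>();

  /** Opens a scanner over the given rows and returns its id. */
  public long open(List<String> rows) {
    long id = ids.incrementAndGet();
    scanners.put(id, rows.iterator());
    return id;
  }

  /** Returns the next row for the id, or null and closes the scanner when exhausted. */
  public String next(long id) {
    Iterator<String> it = scanners.get(id);
    if (it != null && it.hasNext()) {
      return it.next();
    }
    scanners.remove(id); // analogous to close(scannerId) in scan() above
    return null;
  }
}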